feat: remove BigQuery Storage v1alpha2 API

Committer: @yirutang
PiperOrigin-RevId: 373207078
pull/650/head
Authored by Google APIs 4 years ago; committed by Copybara-Service
parent 4303a0b6dc
commit 57970fa2ac
8 changed files with 945 deletions:
  1. google/cloud/bigquery/storage/v1alpha2/BUILD.bazel (331 deletions)
  2. google/cloud/bigquery/storage/v1alpha2/bigquerystorage_gapic.yaml (18 deletions)
  3. google/cloud/bigquery/storage/v1alpha2/bigquerystorage_grpc_service_config.json (67 deletions)
  4. google/cloud/bigquery/storage/v1alpha2/bigquerystorage_v1alpha2.yaml (23 deletions)
  5. google/cloud/bigquery/storage/v1alpha2/protobuf.proto (42 deletions)
  6. google/cloud/bigquery/storage/v1alpha2/storage.proto (287 deletions)
  7. google/cloud/bigquery/storage/v1alpha2/stream.proto (77 deletions)
  8. google/cloud/bigquery/storage/v1alpha2/table.proto (100 deletions)

@@ -1,331 +0,0 @@
# This file was automatically generated by BuildFileGenerator
# This is an API workspace; having public visibility by default makes perfect sense.
package(default_visibility = ["//visibility:public"])
##############################################################################
# Common
##############################################################################
load("@rules_proto//proto:defs.bzl", "proto_library")
load("@com_google_googleapis_imports//:imports.bzl", "proto_library_with_info")
proto_library(
name = "storage_proto",
srcs = [
"protobuf.proto",
"storage.proto",
"stream.proto",
"table.proto",
],
deps = [
"//google/api:annotations_proto",
"//google/api:client_proto",
"//google/api:field_behavior_proto",
"//google/api:resource_proto",
"//google/rpc:status_proto",
"@com_google_protobuf//:descriptor_proto",
"@com_google_protobuf//:empty_proto",
"@com_google_protobuf//:timestamp_proto",
"@com_google_protobuf//:wrappers_proto",
],
)
proto_library_with_info(
name = "storage_proto_with_info",
deps = [
":storage_proto",
"//google/cloud:common_resources_proto",
],
)
##############################################################################
# Java
##############################################################################
load(
"@com_google_googleapis_imports//:imports.bzl",
"java_gapic_assembly_gradle_pkg",
"java_gapic_library",
"java_gapic_test",
"java_grpc_library",
"java_proto_library",
)
java_proto_library(
name = "storage_java_proto",
deps = [":storage_proto"],
)
java_grpc_library(
name = "storage_java_grpc",
srcs = [":storage_proto"],
deps = [":storage_java_proto"],
)
java_gapic_library(
name = "storage_java_gapic",
srcs = [":storage_proto_with_info"],
gapic_yaml = "bigquerystorage_gapic.yaml",
grpc_service_config = "bigquerystorage_grpc_service_config.json",
test_deps = [
":storage_java_grpc",
],
deps = [
":storage_java_proto",
],
)
java_gapic_test(
name = "storage_java_gapic_test_suite",
test_classes = [
"com.google.cloud.bigquery.storage.v1alpha2.BigQueryWriteClientTest",
],
runtime_deps = [":storage_java_gapic_test"],
)
# Open Source Packages
java_gapic_assembly_gradle_pkg(
name = "google-cloud-bigquery-storage-v1alpha2-java",
deps = [
":storage_java_gapic",
":storage_java_grpc",
":storage_java_proto",
":storage_proto",
],
)
##############################################################################
# Go
##############################################################################
load(
"@com_google_googleapis_imports//:imports.bzl",
"go_gapic_assembly_pkg",
"go_gapic_library",
"go_proto_library",
"go_test",
)
go_proto_library(
name = "storage_go_proto",
compilers = ["@io_bazel_rules_go//proto:go_grpc"],
importpath = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1alpha2",
protos = [":storage_proto"],
deps = [
"//google/api:annotations_go_proto",
"//google/rpc:status_go_proto",
],
)
go_gapic_library(
name = "storage_go_gapic",
srcs = [":storage_proto_with_info"],
grpc_service_config = "bigquerystorage_grpc_service_config.json",
importpath = "cloud.google.com/go/bigquery/storage/apiv1alpha2;storage",
service_yaml = "bigquerystorage_v1alpha2.yaml",
deps = [
":storage_go_proto",
],
)
go_test(
name = "storage_go_gapic_test",
srcs = [":storage_go_gapic_srcjar_test"],
embed = [":storage_go_gapic"],
importpath = "cloud.google.com/go/bigquery/storage/apiv1alpha2",
)
# Open Source Packages
go_gapic_assembly_pkg(
name = "gapi-cloud-bigquery-storage-v1alpha2-go",
deps = [
":storage_go_gapic",
":storage_go_gapic_srcjar-test.srcjar",
":storage_go_proto",
],
)
##############################################################################
# Python
##############################################################################
load(
"@com_google_googleapis_imports//:imports.bzl",
"py_gapic_assembly_pkg",
"py_gapic_library",
)
py_gapic_library(
name = "storage_py_gapic",
srcs = [":storage_proto"],
grpc_service_config = "bigquerystorage_grpc_service_config.json",
)
py_gapic_assembly_pkg(
name = "bigquery-storage-v1alpha2-py",
deps = [
":storage_py_gapic",
],
)
##############################################################################
# PHP
##############################################################################
load(
"@com_google_googleapis_imports//:imports.bzl",
"php_gapic_assembly_pkg",
"php_gapic_library",
"php_grpc_library",
"php_proto_library",
)
php_proto_library(
name = "storage_php_proto",
deps = [":storage_proto"],
)
php_grpc_library(
name = "storage_php_grpc",
srcs = [":storage_proto"],
deps = [":storage_php_proto"],
)
php_gapic_library(
name = "storage_php_gapic",
src = ":storage_proto_with_info",
gapic_yaml = "bigquerystorage_gapic.yaml",
grpc_service_config = "bigquerystorage_grpc_service_config.json",
package = "google.cloud.bigquery.storage.v1alpha2",
service_yaml = "bigquerystorage_v1alpha2.yaml",
deps = [
":storage_php_grpc",
":storage_php_proto",
],
)
# Open Source Packages
php_gapic_assembly_pkg(
name = "google-cloud-bigquery-storage-v1alpha2-php",
deps = [
":storage_php_gapic",
":storage_php_grpc",
":storage_php_proto",
],
)
##############################################################################
# Node.js
##############################################################################
load(
"@com_google_googleapis_imports//:imports.bzl",
"nodejs_gapic_assembly_pkg",
"nodejs_gapic_library",
)
nodejs_gapic_library(
name = "storage_nodejs_gapic",
src = ":storage_proto_with_info",
extra_protoc_parameters = ["metadata"],
grpc_service_config = "bigquerystorage_grpc_service_config.json",
package = "google.cloud.bigquery.storage.v1alpha2",
service_yaml = "bigquerystorage_v1alpha2.yaml",
deps = [],
)
nodejs_gapic_assembly_pkg(
name = "bigquery-storage-v1alpha2-nodejs",
deps = [
":storage_nodejs_gapic",
":storage_proto",
],
)
##############################################################################
# Ruby
##############################################################################
# Enable once proto2 dependency is eliminated.
#load(
# "@com_google_googleapis_imports//:imports.bzl",
# "ruby_gapic_assembly_pkg",
# "ruby_gapic_library",
# "ruby_grpc_library",
# "ruby_proto_library",
#)
#
#ruby_proto_library(
# name = "storage_ruby_proto",
# deps = [":storage_proto"],
#)
#
#ruby_grpc_library(
# name = "storage_ruby_grpc",
# srcs = [":storage_proto"],
# deps = [":storage_ruby_proto"],
#)
#
#ruby_gapic_library(
# name = "storage_ruby_gapic",
# src = ":storage_proto_with_info",
# gapic_yaml = "bigquerystorage_gapic.yaml",
# package = "google.cloud.bigquery.storage.v1alpha2",
# service_yaml = "bigquerystorage_v1alpha2.yaml",
# deps = [
# ":storage_ruby_grpc",
# ":storage_ruby_proto",
# ],
#)
#
## Open Source Packages
#ruby_gapic_assembly_pkg(
# name = "google-cloud-bigquery-storage-v1alpha2-ruby",
# deps = [
# ":storage_ruby_gapic",
# ":storage_ruby_grpc",
# ":storage_ruby_proto",
# ],
#)
##############################################################################
# C#
##############################################################################
load(
"@com_google_googleapis_imports//:imports.bzl",
"csharp_gapic_assembly_pkg",
"csharp_gapic_library",
"csharp_grpc_library",
"csharp_proto_library",
)
csharp_proto_library(
name = "storage_csharp_proto",
deps = [":storage_proto"],
)
csharp_grpc_library(
name = "storage_csharp_grpc",
srcs = [":storage_proto"],
deps = [":storage_csharp_proto"],
)
csharp_gapic_library(
name = "storage_csharp_gapic",
srcs = [":storage_proto_with_info"],
common_resources_config = "@gax_dotnet//:Google.Api.Gax/ResourceNames/CommonResourcesConfig.json",
grpc_service_config = "bigquerystorage_grpc_service_config.json",
deps = [
":storage_csharp_grpc",
":storage_csharp_proto",
],
)
# Open Source Packages
csharp_gapic_assembly_pkg(
name = "google-cloud-bigquery-storage-v1alpha2-csharp",
deps = [
":storage_csharp_gapic",
":storage_csharp_grpc",
":storage_csharp_proto",
],
)
##############################################################################
# C++
##############################################################################
# Put your C++ rules here

@@ -1,18 +0,0 @@
type: com.google.api.codegen.ConfigProto
config_schema_version: 2.0.0
# The settings of generated code in a specific language.
language_settings:
  java:
    package_name: com.google.cloud.bigquery.storage.v1alpha2
  python:
    package_name: google.cloud.bigquery.storage_v1alpha2.gapic
  go:
    package_name: cloud.google.com/go/bigquery/storage/apiv1alpha2
  csharp:
    package_name: Google.Cloud.Bigquery.Storage.V1alpha2
  ruby:
    package_name: Google::Cloud::Bigquery::Storage::V1alpha2
  php:
    package_name: Google\Cloud\Bigquery\Storage\V1alpha2
  nodejs:
    package_name: storage.v1alpha2

@@ -1,67 +0,0 @@
{
  "methodConfig": [
    {
      "name": [
        {
          "service": "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite",
          "method": "CreateWriteStream"
        }
      ],
      "timeout": "600s",
      "retryPolicy": {
        "initialBackoff": "0.100s",
        "maxBackoff": "60s",
        "backoffMultiplier": 1.3,
        "retryableStatusCodes": [
          "DEADLINE_EXCEEDED",
          "UNAVAILABLE",
          "RESOURCE_EXHAUSTED"
        ]
      }
    },
    {
      "name": [
        {
          "service": "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite",
          "method": "AppendRows"
        }
      ],
      "timeout": "86400s",
      "retryPolicy": {
        "initialBackoff": "0.100s",
        "maxBackoff": "60s",
        "backoffMultiplier": 1.3,
        "retryableStatusCodes": [
          "UNAVAILABLE",
          "RESOURCE_EXHAUSTED"
        ]
      }
    },
    {
      "name": [
        {
          "service": "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite",
          "method": "BatchCommitWriteStreams"
        },
        {
          "service": "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite",
          "method": "FinalizeWriteStream"
        },
        {
          "service": "google.cloud.bigquery.storage.v1alpha2.BigQueryWrite",
          "method": "GetWriteStream"
        }
      ],
      "timeout": "600s",
      "retryPolicy": {
        "initialBackoff": "0.100s",
        "maxBackoff": "60s",
        "backoffMultiplier": 1.3,
        "retryableStatusCodes": [
          "DEADLINE_EXCEEDED",
          "UNAVAILABLE"
        ]
      }
    }
  ]
}
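For reference, the CreateWriteStream policy above maps onto the same retry semantics the generated clients implement. A minimal Python sketch of the equivalent settings, assuming google-api-core is available (the generated GAPIC clients wire these values in automatically, so this is illustration only):

from google.api_core import exceptions, retry

# Mirrors the CreateWriteStream methodConfig above: exponential backoff
# starting at 100 ms, capped at 60 s, multiplied by 1.3 per attempt,
# retrying the three listed status codes within a 600 s overall deadline.
create_write_stream_retry = retry.Retry(
    predicate=retry.if_exception_type(
        exceptions.DeadlineExceeded,
        exceptions.ServiceUnavailable,
        exceptions.ResourceExhausted,
    ),
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    timeout=600.0,
)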

@@ -1,23 +0,0 @@
type: google.api.Service
config_version: 3
name: bigquerystorage.googleapis.com
title: BigQuery Storage API
apis:
- name: google.cloud.bigquery.storage.v1alpha2.BigQueryWrite
backend:
  rules:
  - selector: 'google.cloud.bigquery.storage.v1alpha2.BigQueryWrite.*'
    deadline: 120.0
  - selector: google.cloud.bigquery.storage.v1alpha2.BigQueryWrite.AppendRows
    deadline: 21600.0
authentication:
  rules:
  - selector: 'google.cloud.bigquery.storage.v1alpha2.BigQueryWrite.*'
    oauth:
      canonical_scopes: |-
        https://www.googleapis.com/auth/bigquery,
        https://www.googleapis.com/auth/bigquery.insertdata,
        https://www.googleapis.com/auth/cloud-platform
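The canonical scopes above are what a caller must request when authenticating by hand. A short sketch using google-auth; the generated clients request appropriate scopes themselves, so this is only illustrative:

import google.auth

# Any one of the canonical scopes from the service config is sufficient;
# cloud-platform is the broadest of the three.
credentials, project_id = google.auth.default(
    scopes=["https://www.googleapis.com/auth/bigquery.insertdata"]
)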

@@ -1,42 +0,0 @@
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.storage.v1alpha2;
import "google/protobuf/descriptor.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1alpha2;storage";
option java_outer_classname = "ProtoBufProto";
option java_package = "com.google.cloud.bigquery.storage.v1alpha2";
// Protobuf schema is an API presentation of the proto buffer schema.
message ProtoSchema {
// Descriptor for the input message. The descriptor must be self-contained,
// including all of its nested types, except for proto buffer well-known types
// (https://developers.google.com/protocol-buffers/docs/reference/google.protobuf)
// and zetasql public protos
// (https://github.com/google/zetasql/tree/master/zetasql/public/proto).
google.protobuf.DescriptorProto proto_descriptor = 1;
}
// Protobuf rows.
message ProtoRows {
// A sequence of rows serialized as a Protocol Buffer.
//
// See https://developers.google.com/protocol-buffers/docs/overview for more
// information on deserializing this field.
repeated bytes serialized_rows = 1;
}
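To show how these two messages were meant to be filled in: the standard protobuf Python API can produce the self-contained descriptor and the serialized rows. A sketch, assuming a compiled row message class MyRow (hypothetical) and a protobuf_pb2 module generated from this file by protoc's usual naming convention:

from google.protobuf import descriptor_pb2

proto_descriptor = descriptor_pb2.DescriptorProto()
# CopyToProto emits the message's descriptor, including types nested
# inside it; types imported from other files must be inlined by hand.
MyRow.DESCRIPTOR.CopyToProto(proto_descriptor)

schema = protobuf_pb2.ProtoSchema(proto_descriptor=proto_descriptor)
rows = protobuf_pb2.ProtoRows(
    serialized_rows=[MyRow().SerializeToString()]
)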

@@ -1,287 +0,0 @@
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.storage.v1alpha2;
import "google/api/annotations.proto";
import "google/api/client.proto";
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/cloud/bigquery/storage/v1alpha2/protobuf.proto";
import "google/cloud/bigquery/storage/v1alpha2/stream.proto";
import "google/cloud/bigquery/storage/v1alpha2/table.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
import "google/rpc/status.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1alpha2;storage";
option java_package = "com.google.cloud.bigquery.storage.v1alpha2";
// Request message for `CreateWriteStream`.
message CreateWriteStreamRequest {
// Required. Reference to the table to which the stream belongs, in the format
// of `projects/{project}/datasets/{dataset}/tables/{table}`.
string parent = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
type: "bigquerystorage.googleapis.com/Table"
}
];
// Required. Stream to be created.
WriteStream write_stream = 2 [(google.api.field_behavior) = REQUIRED];
}
// BigQuery Write API.
//
// The Write API can be used to write data to BigQuery.
service BigQueryWrite {
option deprecated = true;
option (google.api.default_host) = "bigquerystorage.googleapis.com";
option (google.api.oauth_scopes) =
"https://www.googleapis.com/auth/bigquery,"
"https://www.googleapis.com/auth/bigquery.insertdata,"
"https://www.googleapis.com/auth/cloud-platform";
// Creates a write stream to the given table.
// Additionally, every table has a special COMMITTED stream named '_default'
// to which data can be written. This stream doesn't need to be created using
// CreateWriteStream. It is a stream that can be used simultaneously by any
// number of clients. Data written to this stream is considered committed as
// soon as an acknowledgement is received.
rpc CreateWriteStream(CreateWriteStreamRequest) returns (WriteStream) {
option (google.api.http) = {
post: "/v1alpha2/{parent=projects/*/datasets/*/tables/*}"
body: "write_stream"
};
option (google.api.method_signature) = "parent,write_stream";
}
// Appends data to the given stream.
//
// If `offset` is specified, the `offset` is checked against the end of the
// stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
// attempt is made to append to an offset beyond the current end of the
// stream, or `ALREADY_EXISTS` if the user provides an `offset` that has
// already been written to. The user can retry with an adjusted offset within
// the same RPC stream. If `offset` is not specified, the append happens at
// the end of the stream.
//
// The response contains the offset at which the append happened. Responses
// are received in the same order in which requests are sent. There will be
// one response for each successful request. If `offset` is not set in a
// response, the append did not happen due to an error. If one request fails,
// all subsequent requests will also fail until a successful request is made
// again.
//
// If the stream is of `PENDING` type, data will only be available for read
// operations after the stream is committed.
rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) {
option (google.api.http) = {
post: "/v1alpha2/{write_stream=projects/*/datasets/*/tables/*/streams/*}"
body: "*"
};
option (google.api.method_signature) = "write_stream";
}
// Gets a write stream.
rpc GetWriteStream(GetWriteStreamRequest) returns (WriteStream) {
option (google.api.http) = {
post: "/v1alpha2/{name=projects/*/datasets/*/tables/*/streams/*}"
body: "*"
};
option (google.api.method_signature) = "name";
}
// Finalize a write stream so that no new data can be appended to the
// stream. Finalize is not supported on the '_default' stream.
rpc FinalizeWriteStream(FinalizeWriteStreamRequest) returns (FinalizeWriteStreamResponse) {
option (google.api.http) = {
post: "/v1alpha2/{name=projects/*/datasets/*/tables/*/streams/*}"
body: "*"
};
option (google.api.method_signature) = "name";
}
// Atomically commits a group of `PENDING` streams that belong to the same
// `parent` table.
// Streams must be finalized before commit and cannot be committed multiple
// times. Once a stream is committed, data in the stream becomes available
// for read operations.
rpc BatchCommitWriteStreams(BatchCommitWriteStreamsRequest) returns (BatchCommitWriteStreamsResponse) {
option (google.api.http) = {
get: "/v1alpha2/{parent=projects/*/datasets/*/tables/*}"
};
option (google.api.method_signature) = "parent";
}
// Flushes rows to a BUFFERED stream.
// If users are appending rows to a BUFFERED stream, a flush operation is
// required in order for the rows to become available for reading. A flush
// operation flushes everything from any previously flushed offset in a
// BUFFERED stream up to the offset specified in the request.
// Flush is not supported on the _default stream, since it is not BUFFERED.
rpc FlushRows(FlushRowsRequest) returns (FlushRowsResponse) {
option (google.api.http) = {
post: "/v1alpha2/{write_stream=projects/*/datasets/*/tables/*/streams/*}"
body: "*"
};
option (google.api.method_signature) = "write_stream";
}
}
// Request message for `AppendRows`.
message AppendRowsRequest {
message ProtoData {
// Proto schema used to serialize the data.
ProtoSchema writer_schema = 1;
// Serialized row data in protobuf message format.
ProtoRows rows = 2;
}
// Required. The stream that is the target of the append operation. This value must be
// specified for the initial request. If subsequent requests specify the
// stream name, it must equal the value provided in the first request.
// To write to the _default stream, populate this field with a string in the
// format `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
string write_stream = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
type: "bigquerystorage.googleapis.com/WriteStream"
}
];
// Optional. If present, the write is only performed if the next append offset
// is the same as the provided value. If not present, the write is performed at the
// current end of stream. Specifying a value for this field is not allowed
// when calling AppendRows for the '_default' stream.
google.protobuf.Int64Value offset = 2 [(google.api.field_behavior) = OPTIONAL];
// Input rows. The `writer_schema` field must be specified in the initial
// request; it is currently ignored if specified in subsequent requests.
// Subsequent requests must have data in the same format as the
// initial request.
oneof rows {
ProtoData proto_rows = 4;
}
// Only the setting on the initial request is respected. If true, unknown
// input fields are dropped. Otherwise, extra fields cause the append to
// fail. The default value is false.
bool ignore_unknown_fields = 5;
}
// Response message for `AppendRows`.
message AppendRowsResponse {
oneof response {
// The row offset at which the last append occurred.
int64 offset = 1;
// Error in case of append failure. If set, it means the rows were not
// accepted into the system. Users can retry or continue with other requests
// within the same connection.
// ALREADY_EXISTS: happens when an offset is specified; it means the row was
// already appended, so it is safe to ignore this error.
// OUT_OF_RANGE: happens when an offset is specified; it means the specified
// offset is beyond the end of the stream.
// INVALID_ARGUMENT: error caused by a malformed request or data.
// RESOURCE_EXHAUSTED: request rejected due to throttling. Only happens when
// appending without an offset.
// ABORTED: request processing was aborted because of prior failures; the
// request can be retried once the previous failure is fixed.
// INTERNAL: server-side errors that can be retried.
google.rpc.Status error = 2;
}
// If the backend detects a schema update, it is passed to the user so the
// user can input messages of the new type. It will be empty when there are
// no schema updates.
TableSchema updated_schema = 3;
}
// Request message for `GetWriteStream`.
message GetWriteStreamRequest {
// Required. Name of the stream to get, in the form of
// `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
string name = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
type: "bigquerystorage.googleapis.com/WriteStream"
}
];
}
// Request message for `BatchCommitWriteStreams`.
message BatchCommitWriteStreamsRequest {
// Required. Parent table that all the streams should belong to, in the form of
// `projects/{project}/datasets/{dataset}/tables/{table}`.
string parent = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
type: "bigquerystorage.googleapis.com/Table"
}
];
// Required. The group of streams that will be committed atomically.
repeated string write_streams = 2 [(google.api.field_behavior) = REQUIRED];
}
// Response message for `BatchCommitWriteStreams`.
message BatchCommitWriteStreamsResponse {
// The time at which streams were committed, with microsecond granularity.
google.protobuf.Timestamp commit_time = 1;
}
// Request message for invoking `FinalizeWriteStream`.
message FinalizeWriteStreamRequest {
// Required. Name of the stream to finalize, in the form of
// `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
string name = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
type: "bigquerystorage.googleapis.com/WriteStream"
}
];
}
// Response message for `FinalizeWriteStream`.
message FinalizeWriteStreamResponse {
// Number of rows in the finalized stream.
int64 row_count = 1;
}
// Request message for `FlushRows`.
message FlushRowsRequest {
// Required. The stream that is the target of the flush operation.
string write_stream = 1 [
(google.api.field_behavior) = REQUIRED,
(google.api.resource_reference) = {
type: "bigquerystorage.googleapis.com/WriteStream"
}
];
// Ending offset of the flush operation. Rows before this offset (including
// this offset) will be flushed.
int64 offset = 2;
}
// Response message for `FlushRows`.
message FlushRowsResponse {
// The rows before this offset (including this offset) are flushed.
int64 offset = 1;
}
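As a rough illustration of the AppendRows contract described above: the first request on the bidirectional stream names the write stream and carries the writer schema; later requests carry rows only. A hedged sketch against a raw gRPC stub, assuming storage_pb2 and storage_pb2_grpc modules generated from this file (protoc naming convention), plus the schema and rows values from the previous sketch:

import grpc

def append_requests(stream_name, schema, batches):
    # Yield AppendRowsRequests; the writer schema goes on the first
    # request only, as required by the comment on `rows` above.
    first = True
    for batch in batches:
        request = storage_pb2.AppendRowsRequest(write_stream=stream_name)
        request.proto_rows.rows.CopyFrom(batch)
        if first:
            request.proto_rows.writer_schema.CopyFrom(schema)
            first = False
        yield request

channel = grpc.secure_channel(
    "bigquerystorage.googleapis.com:443",
    grpc.ssl_channel_credentials(),  # real calls also need OAuth call credentials
)
stub = storage_pb2_grpc.BigQueryWriteStub(channel)
stream_name = "projects/p/datasets/d/tables/t/_default"  # placeholder path
for response in stub.AppendRows(append_requests(stream_name, schema, [rows])):
    # One response per successful request; `offset` is where the append landed.
    print(response.offset)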

@@ -1,77 +0,0 @@
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.storage.v1alpha2;
import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/cloud/bigquery/storage/v1alpha2/table.proto";
import "google/protobuf/timestamp.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1alpha2;storage";
option java_package = "com.google.cloud.bigquery.storage.v1alpha2";
option (google.api.resource_definition) = {
type: "bigquerystorage.googleapis.com/Table"
pattern: "projects/{project}/datasets/{dataset}/tables/{table}"
};
// Information about a single stream that receives data into the storage system.
message WriteStream {
option (google.api.resource) = {
type: "bigquerystorage.googleapis.com/WriteStream"
pattern: "projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}"
};
enum Type {
// Unknown type.
TYPE_UNSPECIFIED = 0;
// Data will commit automatically and appear as soon as the write is
// acknowledged.
COMMITTED = 1;
// Data is invisible until the stream is committed.
PENDING = 2;
// Data is only visible up to the offset to which it was flushed.
BUFFERED = 3;
}
// Output only. Name of the stream, in the form
// `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`.
string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
// Immutable. Type of the stream.
Type type = 2 [(google.api.field_behavior) = IMMUTABLE];
// Output only. Create time of the stream. For the _default stream, this is the
// creation_time of the table.
google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. Commit time of the stream.
// If a stream is of `COMMITTED` type, its commit_time is the same as
// `create_time`. If the stream is of `PENDING` type, an empty commit_time
// means it is not committed.
google.protobuf.Timestamp commit_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
// Output only. The schema of the destination table. It is only returned in
// the `CreateWriteStream` response. Callers should generate data compatible
// with this schema when sending the initial `AppendRowsRequest`.
// The table schema may go out of date during the lifetime of the stream.
TableSchema table_schema = 5 [(google.api.field_behavior) = OUTPUT_ONLY];
// ID set by the client to annotate its identity.
string external_id = 6;
}
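The stream types above define the write lifecycle: COMMITTED data is visible on acknowledgement, BUFFERED data becomes visible as it is flushed, and a PENDING stream must be finalized and then batch-committed. A sketch of the PENDING lifecycle, reusing the hypothetical stub from the previous sketch plus a stream_pb2 module generated from this file:

# Create an explicit PENDING stream on the table.
write_stream = stub.CreateWriteStream(
    storage_pb2.CreateWriteStreamRequest(
        parent="projects/p/datasets/d/tables/t",  # placeholder table path
        write_stream=stream_pb2.WriteStream(type=stream_pb2.WriteStream.PENDING),
    )
)

# ... append rows to write_stream.name via AppendRows, as sketched above ...

# Finalize, then commit atomically; rows become readable only after commit.
stub.FinalizeWriteStream(
    storage_pb2.FinalizeWriteStreamRequest(name=write_stream.name)
)
commit = stub.BatchCommitWriteStreams(
    storage_pb2.BatchCommitWriteStreamsRequest(
        parent="projects/p/datasets/d/tables/t",
        write_streams=[write_stream.name],
    )
)
print(commit.commit_time)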

@@ -1,100 +0,0 @@
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.storage.v1alpha2;
import "google/api/field_behavior.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1alpha2;storage";
option java_package = "com.google.cloud.bigquery.storage.v1alpha2";
// Schema of a table
message TableSchema {
// Describes the fields in a table.
repeated TableFieldSchema fields = 1;
}
// A field in TableSchema
message TableFieldSchema {
enum Type {
// Illegal value
TYPE_UNSPECIFIED = 0;
// 64K, UTF8
STRING = 1;
// 64-bit signed
INT64 = 2;
// 64-bit IEEE floating point
DOUBLE = 3;
// Aggregate type
STRUCT = 4;
// 64K, Binary
BYTES = 5;
// 2-valued
BOOL = 6;
// 64-bit signed usec since UTC epoch
TIMESTAMP = 7;
// Civil date - Year, Month, Day
DATE = 8;
// Civil time - Hour, Minute, Second, Microseconds
TIME = 9;
// Combination of civil date and civil time
DATETIME = 10;
// Geography object
GEOGRAPHY = 11;
// Numeric value
NUMERIC = 12;
}
enum Mode {
// Illegal value
MODE_UNSPECIFIED = 0;
NULLABLE = 1;
REQUIRED = 2;
REPEATED = 3;
}
// Required. The field name. The name must contain only letters (a-z, A-Z),
// numbers (0-9), or underscores (_), and must start with a letter or
// underscore. The maximum length is 128 characters.
string name = 1 [(google.api.field_behavior) = REQUIRED];
// Required. The field data type.
Type type = 2 [(google.api.field_behavior) = REQUIRED];
// Optional. The field mode. The default value is NULLABLE.
Mode mode = 3 [(google.api.field_behavior) = OPTIONAL];
// Optional. Describes the nested schema fields if the type property is set to STRUCT.
repeated TableFieldSchema fields = 4 [(google.api.field_behavior) = OPTIONAL];
// Optional. The field description. The maximum length is 1,024 characters.
string description = 6 [(google.api.field_behavior) = OPTIONAL];
}
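Constructing the schema messages above is plain protobuf work. A small sketch, again assuming a table_pb2 module generated from this file:

schema = table_pb2.TableSchema(
    fields=[
        table_pb2.TableFieldSchema(
            name="event_id",
            type=table_pb2.TableFieldSchema.STRING,
            mode=table_pb2.TableFieldSchema.REQUIRED,
        ),
        table_pb2.TableFieldSchema(
            name="attempts",
            type=table_pb2.TableFieldSchema.INT64,
            mode=table_pb2.TableFieldSchema.REPEATED,
        ),
    ]
)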