docs: improve documentation for write client

feat: update default timeout/retry information
feat: update parent annotation for BatchCommitWriteStreamsRequest
feat: expose additional StorageError enum values

PiperOrigin-RevId: 431973595
pull/706/head
Authored by Google APIs 3 years ago; committed by Copybara-Service
parent c392eb0604
commit 25d691b074
Changed files:
  google/cloud/bigquery/storage/v1/bigquerystorage_grpc_service_config.json (56 lines)
  google/cloud/bigquery/storage/v1/bigquerystorage_v1.yaml (12 lines)
  google/cloud/bigquery/storage/v1/storage.proto (26 lines)

--- a/google/cloud/bigquery/storage/v1/bigquerystorage_grpc_service_config.json
+++ b/google/cloud/bigquery/storage/v1/bigquerystorage_grpc_service_config.json
@@ -5,7 +5,38 @@
         {
           "service": "google.cloud.bigquery.storage.v1.BigQueryRead",
           "method": "CreateReadSession"
-        },
+        }
+      ],
+      "timeout": "600s",
+      "retryPolicy": {
+        "initialBackoff": "0.100s",
+        "maxBackoff": "60s",
+        "backoffMultiplier": 1.3,
+        "retryableStatusCodes": [
+          "DEADLINE_EXCEEDED",
+          "UNAVAILABLE"
+        ]
+      }
+    },
+    {
+      "name": [
+        {
+          "service": "google.cloud.bigquery.storage.v1.BigQueryRead",
+          "method": "ReadRows"
+        }
+      ],
+      "timeout": "86400s",
+      "retryPolicy": {
+        "initialBackoff": "0.100s",
+        "maxBackoff": "60s",
+        "backoffMultiplier": 1.3,
+        "retryableStatusCodes": [
+          "UNAVAILABLE"
+        ]
+      }
+    },
+    {
+      "name": [
         {
           "service": "google.cloud.bigquery.storage.v1.BigQueryRead",
           "method": "SplitReadStream"
@@ -25,8 +56,8 @@
     {
       "name": [
         {
-          "service": "google.cloud.bigquery.storage.v1.BigQueryRead",
-          "method": "ReadRows"
+          "service": "google.cloud.bigquery.storage.v1.BigQueryWrite",
+          "method": "AppendRows"
         }
       ],
       "timeout": "86400s",
@@ -38,7 +69,8 @@
           "UNAVAILABLE"
         ]
       }
-    }, {
+    },
+    {
       "name": [
         {
           "service": "google.cloud.bigquery.storage.v1.BigQueryWrite",
@@ -71,22 +103,6 @@
           "UNAVAILABLE"
         ]
       }
-    }, {
-      "name": [
-        {
-          "service": "google.cloud.bigquery.storage.v1.BigQueryWrite",
-          "method": "AppendRows"
-        }
-      ],
-      "timeout": "86400s",
-      "retryPolicy": {
-        "initialBackoff": "0.100s",
-        "maxBackoff": "60s",
-        "backoffMultiplier": 1.3,
-        "retryableStatusCodes": [
-          "UNAVAILABLE"
-        ]
-      }
     }
   ]
 }
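How these defaults reach a caller: gRPC applies a service config like the one above on the client side through the "grpc.service_config" channel option, honoring the per-method timeout and retryPolicy blocks. A minimal Python sketch, assuming grpcio is installed; the target address and config filename are illustrative placeholders, not part of this change:

import json

import grpc

# Load the retry/timeout policy (placeholder path).
with open("bigquerystorage_grpc_service_config.json") as f:
    service_config_json = f.read()

# Fail fast if the JSON is malformed.
json.loads(service_config_json)

channel = grpc.insecure_channel(
    "localhost:50051",  # placeholder target
    options=[
        # Apply the per-method timeouts and retry policies client-side.
        ("grpc.service_config", service_config_json),
        # Prefer this local config over any resolver-provided one.
        ("grpc.service_config_disable_resolution", 1),
    ],
)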

--- a/google/cloud/bigquery/storage/v1/bigquerystorage_v1.yaml
+++ b/google/cloud/bigquery/storage/v1/bigquerystorage_v1.yaml
@@ -15,18 +15,10 @@ backend:
     deadline: 21600.0
   - selector: google.cloud.bigquery.storage.v1.BigQueryRead.SplitReadStream
     deadline: 120.0
-  - selector: google.cloud.bigquery.storage.v1.BigQueryWrite.CreateWriteStream
-    deadline: 120.0
-  - selector: google.cloud.bigquery.storage.v1.BigQueryWrite.FinalizeWriteStream
-    deadline: 120.0
-  - selector: google.cloud.bigquery.storage.v1.BigQueryWrite.BatchCommitWriteStreams
-    deadline: 120.0
-  - selector: google.cloud.bigquery.storage.v1.BigQueryWrite.GetWriteStream
-    deadline: 120.0
-  - selector: google.cloud.bigquery.storage.v1.BigQueryWrite.FlushRows
+  - selector: 'google.cloud.bigquery.storage.v1.BigQueryWrite.*'
     deadline: 120.0
   - selector: google.cloud.bigquery.storage.v1.BigQueryWrite.AppendRows
-    deadline: 86400.0
+    deadline: 21600.0
 
 authentication:
   rules:
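The wildcard selector collapses the per-method BigQueryWrite deadlines into one 120s rule, while AppendRows keeps its own entry, now 21600s instead of 86400s. These deadlines are enforced server-side; a caller can pass a matching client-side timeout explicitly. A minimal sketch, assuming google-cloud-bigquery-storage is installed; resource names are placeholders:

from google.cloud import bigquery_storage_v1

client = bigquery_storage_v1.BigQueryWriteClient()

# Placeholder stream name; every table has a `_default` stream.
name = "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default"

# Cap the client-side wait at the same 120s the backend rule above allows.
stream = client.get_write_stream(name=name, timeout=120)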

--- a/google/cloud/bigquery/storage/v1/storage.proto
+++ b/google/cloud/bigquery/storage/v1/storage.proto
@@ -1,4 +1,4 @@
-// Copyright 2021 Google LLC
+// Copyright 2022 Google LLC
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -167,6 +167,13 @@ service BigQueryWrite {
   // * For PENDING streams, data is not made visible until the stream itself is
   //   finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly
   //   committed via the `BatchCommitWriteStreams` rpc.
+  //
+  // Note: For users coding against the gRPC api directly, it may be
+  // necessary to supply the x-goog-request-params system parameter
+  // with `write_stream=<full_write_stream_name>`.
+  //
+  // More information about system parameters:
+  // https://cloud.google.com/apis/docs/system-parameters
   rpc AppendRows(stream AppendRowsRequest) returns (stream AppendRowsResponse) {
     option (google.api.http) = {
       post: "/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}"
@@ -408,10 +415,12 @@ message AppendRowsRequest {
   // request.
   //
   // For explicitly created write streams, the format is:
-  // `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
+  //
+  // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
   //
   // For the special default stream, the format is:
-  // `projects/{project}/datasets/{dataset}/tables/{table}/_default`.
+  //
+  // * `projects/{project}/datasets/{dataset}/tables/{table}/streams/_default`.
   string write_stream = 1 [
     (google.api.field_behavior) = REQUIRED,
     (google.api.resource_reference) = {
@@ -498,7 +507,10 @@ message BatchCommitWriteStreamsRequest {
   // Required. Parent table that all the streams should belong to, in the form of
   // `projects/{project}/datasets/{dataset}/tables/{table}`.
   string parent = 1 [
-    (google.api.field_behavior) = REQUIRED
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      type: "bigquery.googleapis.com/Table"
+    }
   ];
 
   // Required. The group of streams that will be committed atomically.
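With the resource_reference added above, `parent` is formally typed as a BigQuery table. A minimal call sketch, assuming google-cloud-bigquery-storage; project, dataset, table, and stream names are placeholders:

from google.cloud import bigquery_storage_v1

client = bigquery_storage_v1.BigQueryWriteClient()

table = "projects/my-project/datasets/my_dataset/tables/my_table"
response = client.batch_commit_write_streams(
    request={
        "parent": table,  # the Table resource the annotation points at
        "write_streams": [f"{table}/streams/my-stream"],  # finalized PENDING streams
    }
)
# On success, response.commit_time is set; on failure, response.stream_errors
# explains which streams could not be committed.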
@@ -593,6 +605,12 @@ message StorageError {
     // There is a schema mismatch and it is caused by user schema has extra
     // field than bigquery schema.
     SCHEMA_MISMATCH_EXTRA_FIELDS = 7;
+
+    // Offset already exists.
+    OFFSET_ALREADY_EXISTS = 8;
+
+    // Offset out of range.
+    OFFSET_OUT_OF_RANGE = 9;
   }
 
   // BigQuery Storage specific error code.
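The two new codes surface when an offset-based append fails. A sketch of reading them off a failed call, assuming google-cloud-bigquery-storage and grpcio-status are installed; the helper below is illustrative, not an API added by this change:

from google.cloud.bigquery_storage_v1 import types
from grpc_status import rpc_status

_STORAGE_ERROR_TYPE = "google.cloud.bigquery.storage.v1.StorageError"


def storage_errors_from_call(call):
    """Yield StorageError messages packed into a failed call's rich status."""
    status = rpc_status.from_call(call)  # google.rpc.Status, or None
    if status is None:
        return
    for detail in status.details:  # each detail is a google.protobuf.Any
        if detail.type_url.endswith(_STORAGE_ERROR_TYPE):
            yield types.StorageError.deserialize(detail.value)


# Usage sketch, given the call object of a failed AppendRows:
# code = types.StorageError.StorageErrorCode
# for err in storage_errors_from_call(call):
#     if err.code == code.OFFSET_ALREADY_EXISTS:
#         ...  # rows at this offset were already appended; safe to skip
#     elif err.code == code.OFFSET_OUT_OF_RANGE:
#         ...  # requested offset is past the end of the stream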
