feat: add estimated number of rows to CreateReadSession response (#1913)
* feat: add estimated number of rows to CreateReadSession response

PiperOrigin-RevId: 495122850

Source-Link: googleapis/googleapis@83b2baf

Source-Link: googleapis/googleapis-gen@6720765
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjcyMDc2NTQzYzQ0ZjZlNGUxNzZkMDRmZDdmMmQ2OTVjZmI2OWVlMyJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
gcf-owl-bot[bot] committed Dec 15, 2022
1 parent b782a95 commit 4840b26
Showing 16 changed files with 477 additions and 280 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -56,13 +56,13 @@ implementation 'com.google.cloud:google-cloud-bigquerystorage'
If you are using Gradle without BOM, add this to your dependencies:

```Groovy
implementation 'com.google.cloud:google-cloud-bigquerystorage:2.26.0'
implementation 'com.google.cloud:google-cloud-bigquerystorage:2.27.0'
```

If you are using SBT, add this to your dependencies:

```Scala
libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "2.26.0"
libraryDependencies += "com.google.cloud" % "google-cloud-bigquerystorage" % "2.27.0"
```

## Authentication
@@ -93,6 +93,7 @@ public void createReadSessionTest() throws Exception {
.setReadOptions(ReadSession.TableReadOptions.newBuilder().build())
.addAllStreams(new ArrayList<ReadStream>())
.setEstimatedTotalBytesScanned(452788190)
.setEstimatedRowCount(-1745583577)
.setTraceId("traceId-1067401920")
.build();
mockBigQueryRead.addResponse(expectedResponse);
@@ -145,6 +146,7 @@ public void createReadSessionTest2() throws Exception {
.setReadOptions(ReadSession.TableReadOptions.newBuilder().build())
.addAllStreams(new ArrayList<ReadStream>())
.setEstimatedTotalBytesScanned(452788190)
.setEstimatedRowCount(-1745583577)
.setTraceId("traceId-1067401920")
.build();
mockBigQueryRead.addResponse(expectedResponse);
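For context, a minimal client-side sketch of how a caller might read the new field after creating a read session. This is not part of the commit; the project, dataset, and table names are placeholders.

```java
import com.google.cloud.bigquery.storage.v1.BigQueryReadClient;
import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest;
import com.google.cloud.bigquery.storage.v1.DataFormat;
import com.google.cloud.bigquery.storage.v1.ReadSession;

public class EstimatedRowCountSketch {
  public static void main(String[] args) throws Exception {
    try (BigQueryReadClient client = BigQueryReadClient.create()) {
      // Describe the table to read; names here are placeholders.
      ReadSession sessionProto =
          ReadSession.newBuilder()
              .setTable("projects/my-project/datasets/my_dataset/tables/my_table")
              .setDataFormat(DataFormat.AVRO)
              .build();
      CreateReadSessionRequest request =
          CreateReadSessionRequest.newBuilder()
              .setParent("projects/my-project")
              .setReadSession(sessionProto)
              .setMaxStreamCount(1)
              .build();
      ReadSession session = client.createReadSession(request);
      // New in this release: a server-side estimate of the total number of rows
      // the session will return, useful for sizing downstream work.
      System.out.println("Estimated rows: " + session.getEstimatedRowCount());
    }
  }
}
```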
@@ -1437,10 +1437,10 @@ public RowsCase getRowsCase() {
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -1469,10 +1469,10 @@ public java.lang.String getWriteStream() {
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -2506,10 +2506,10 @@ public Builder clearRows() {
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -2537,10 +2537,10 @@ public java.lang.String getWriteStream() {
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -2568,10 +2568,10 @@ public com.google.protobuf.ByteString getWriteStreamBytes() {
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -2598,10 +2598,10 @@ public Builder setWriteStream(java.lang.String value) {
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -2624,10 +2624,10 @@ public Builder clearWriteStream() {
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -27,10 +27,10 @@ public interface AppendRowsRequestOrBuilder
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
@@ -48,10 +48,10 @@ public interface AppendRowsRequestOrBuilder
*
*
* <pre>
* Required. The write_stream identifies the target of the append operation, and only
* needs to be specified as part of the first request on the gRPC connection.
* If provided for subsequent requests, it must match the value of the first
* request.
* Required. The write_stream identifies the target of the append operation,
* and only needs to be specified as part of the first request on the gRPC
* connection. If provided for subsequent requests, it must match the value of
* the first request.
* For explicitly created write streams, the format is:
* * `projects/{project}/datasets/{dataset}/tables/{table}/streams/{id}`
* For the special default stream, the format is:
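As a hedged illustration of the write_stream contract described in the comments above (not taken from this diff; the table path is a placeholder and the row payload is elided):

```java
import com.google.cloud.bigquery.storage.v1.AppendRowsRequest;

public class WriteStreamFieldSketch {
  // The first request on a gRPC connection must name the target stream; here
  // the special default stream of a placeholder table is used.
  static AppendRowsRequest firstRequest() {
    return AppendRowsRequest.newBuilder()
        .setWriteStream(
            "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default")
        // .setProtoRows(...) would carry the serialized rows and writer schema.
        .build();
  }

  // Subsequent requests on the same connection may omit write_stream, or must
  // repeat the exact value sent in the first request if they do set it.
  static AppendRowsRequest followUpRequest() {
    return AppendRowsRequest.newBuilder().build();
  }
}
```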
@@ -75,8 +75,8 @@ public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -101,8 +101,8 @@ public java.lang.String getParent() {
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -571,8 +571,8 @@ public Builder mergeFrom(
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -596,8 +596,8 @@ public java.lang.String getParent() {
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -621,8 +621,8 @@ public com.google.protobuf.ByteString getParentBytes() {
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -645,8 +645,8 @@ public Builder setParent(java.lang.String value) {
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -665,8 +665,8 @@ public Builder clearParent() {
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -27,8 +27,8 @@ public interface BatchCommitWriteStreamsRequestOrBuilder
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
@@ -42,8 +42,8 @@ public interface BatchCommitWriteStreamsRequestOrBuilder
*
*
* <pre>
* Required. Parent table that all the streams should belong to, in the form of
* `projects/{project}/datasets/{dataset}/tables/{table}`.
* Required. Parent table that all the streams should belong to, in the form
* of `projects/{project}/datasets/{dataset}/tables/{table}`.
* </pre>
*
* <code>
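And a small illustrative sketch of the parent field documented above, assuming placeholder project, dataset, and table names; the stream names passed in must all belong to that table:

```java
import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest;

public class BatchCommitParentSketch {
  // parent names the table whose pending write streams are committed together.
  static BatchCommitWriteStreamsRequest build(Iterable<String> streamNames) {
    return BatchCommitWriteStreamsRequest.newBuilder()
        .setParent("projects/my-project/datasets/my_dataset/tables/my_table")
        .addAllWriteStreams(streamNames)
        .build();
  }
}
```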
