diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
deleted file mode 100644
index bf48c82da..000000000
--- a/.github/.OwlBot.lock.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-docker:
-  image: gcr.io/cloud-devrel-public-resources/owlbot-java:latest
-  digest: sha256:0d1bb26a1a99ae0456176bf891b8490e9aab424a5cb4e4d301d9703c4dc43b58
-# created: 2024-01-30T19:46:55.029238294Z
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
deleted file mode 100644
index 5d9a9d8b5..000000000
--- a/.github/.OwlBot.yaml
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-docker:
-  image: "gcr.io/cloud-devrel-public-resources/owlbot-java:latest"
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 6aaf5360d..7892b1d67 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -5,14 +5,10 @@
 # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax

 # The @googleapis/api-bigquery is the default owner for changes in this repo
-* @googleapis/yoshi-java @googleapis/api-bigquery
-
-# for handwritten libraries, keep codeowner_team in .repo-metadata.json as owner
-**/*.java @googleapis/api-bigquery
-
+* @googleapis/cloud-java-team-teamsync @googleapis/api-bigquery

 # The java-samples-reviewers team is the default owner for samples changes
-samples/**/*.java @googleapis/java-samples-reviewers
+samples/**/*.java @googleapis/cloud-java-team-teamsync @googleapis/java-samples-reviewers

 # Generated snippets should not be owned by samples reviewers
-samples/snippets/generated/ @googleapis/yoshi-java
+samples/snippets/generated/ @googleapis/cloud-java-team-teamsync @googleapis/yoshi-java
diff --git a/.github/generated-files-bot.yml b/.github/generated-files-bot.yml
deleted file mode 100644
index c644a24e1..000000000
--- a/.github/generated-files-bot.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-externalManifests:
-- type: json
-  file: 'synth.metadata'
-  jsonpath: '$.generatedFiles[*]'
-- type: json
-  file: '.github/readme/synth.metadata/synth.metadata'
-  jsonpath: '$.generatedFiles[*]'
-ignoreAuthors:
-- 'renovate-bot'
-- 'yoshi-automation'
-- 'release-please[bot]'
-- 'gcf-owl-bot[bot]'
diff --git a/.github/release-please.yml b/.github/release-please.yml
index ab97340a7..f87ac1ecc 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -23,6 +23,22 @@ branches:
     handleGHRelease: true
     releaseType: java-backport
     branch: 2.35.x
+  - bumpMinorPreMajor: true
+    handleGHRelease: true
+    releaseType: java-backport
+    branch: 2.40.x
+  - bumpMinorPreMajor: true
+    handleGHRelease: true
+    releaseType: java-backport
+    branch: 2.48.x
+  - bumpMinorPreMajor: true
+    handleGHRelease: true
+    releaseType: java-backport
+    branch: 2.52.x
+  - bumpMinorPreMajor: true
+    handleGHRelease: true
+    releaseType: java-backport
+    branch: 2.51.x
 bumpMinorPreMajor: true
 handleGHRelease: true
 releaseType: java-yoshi
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index 418e8ab98..8a7fa3e30 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -15,8 +15,8 @@ branchProtectionRules:
     - units (11)
     - 'Kokoro - Test: Integration'
     - cla/google
-    - OwlBot Post Processor
     - javadoc
+    - unmanaged_dependency_check
  - pattern: 1.127.12-sp
    isAdminEnforced: true
    requiredApprovingReviewCount: 1
@@ -61,7 +61,6 @@ branchProtectionRules:
     - units (11)
     - 'Kokoro - Test: Integration'
     - cla/google
-    - OwlBot Post Processor
  - pattern: 2.10.x
    isAdminEnforced: true
    requiredApprovingReviewCount: 1
@@ -76,7 +75,6 @@ branchProtectionRules:
     - units (11)
     - 'Kokoro - Test: Integration'
     - cla/google
-    - OwlBot Post Processor
  - pattern: 2.19.x
    isAdminEnforced: true
    requiredApprovingReviewCount: 1
@@ -91,7 +89,6 @@ branchProtectionRules:
     - units (11)
     - 'Kokoro - Test: Integration'
     - cla/google
-    - OwlBot Post Processor
  - pattern: 2.35.x
    isAdminEnforced: true
    requiredApprovingReviewCount: 1
@@ -105,8 +102,66 @@ branchProtectionRules:
     - units (11)
     - 'Kokoro - Test: Integration'
     - cla/google
-    - OwlBot Post Processor
     - javadoc
+  - pattern: 2.40.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+    - dependencies (17)
+    - lint
+    - clirr
+    - units (8)
+    - units (11)
+    - 'Kokoro - Test: Integration'
+    - cla/google
+    - javadoc
+  - pattern: 2.48.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+    - dependencies (17)
+    - lint
+    - clirr
+    - units (8)
+    - units (11)
+    - 'Kokoro - Test: Integration'
+    - cla/google
+    - javadoc
+    - unmanaged_dependency_check
+  - pattern: 2.52.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+    - dependencies (17)
+    - lint
+    - clirr
+    - units (8)
+    - units (11)
+    - 'Kokoro - Test: Integration'
+    - cla/google
+    - javadoc
+    - unmanaged_dependency_check
+  - pattern: 2.51.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+    - dependencies (17)
+    - lint
+    - clirr
+    - units (8)
+    - units (11)
+    - 'Kokoro - Test: Integration'
+    - cla/google
+    - javadoc
+    - unmanaged_dependency_check
 permissionRules:
  - team: api-bigquery
    permission: admin
diff --git a/.github/trusted-contribution.yml b/.github/trusted-contribution.yml
index a0ba1f7d9..88d3ac9bf 100644
--- a/.github/trusted-contribution.yml
+++ b/.github/trusted-contribution.yml
@@ -1,3 +1,9 @@
 trustedContributors:
 - renovate-bot
 - gcf-owl-bot[bot]
+
+annotations:
+- type: comment
+  text: "/gcbrun"
+- type: label
+  text: "kokoro:force-run"
diff --git a/.github/workflows/approve-readme.yaml b/.github/workflows/approve-readme.yaml
index f5fc7d516..59f00b8eb 100644
--- a/.github/workflows/approve-readme.yaml
+++ b/.github/workflows/approve-readme.yaml
@@ -21,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'googleapis' && github.head_ref == 'autosynth-readme'
     steps:
-    - uses: actions/github-script@v6
+    - uses: actions/github-script@v7
      with:
        github-token: ${{secrets.YOSHI_APPROVER_TOKEN}}
        script: |
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index ae66b1973..b9b8be0c3 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -25,10 +25,10 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        java: [11, 17, 21]
+        java: [11, 17, 21, 25]
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        distribution: temurin
        java-version: ${{matrix.java}}
@@ -41,8 +41,8 @@ jobs:
     name: "units (8)"
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        java-version: 8
        distribution: temurin
@@ -51,7 +51,7 @@ jobs:
       # https://maven.apache.org/surefire/maven-surefire-plugin/test-mojo.html#jvm
      run: echo "SUREFIRE_JVM_OPT=-Djvm=${JAVA_HOME}/bin/java" >> $GITHUB_ENV
      shell: bash
-    - uses: actions/setup-java@v3
+    - uses: actions/setup-java@v4
      with:
        java-version: 17
        distribution: temurin
@@ -63,8 +63,8 @@ jobs:
     steps:
     - name: Support longpaths
      run: git config --system core.longpaths true
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        distribution: temurin
        java-version: 8
@@ -78,8 +78,8 @@ jobs:
     matrix:
       java: [17]
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        distribution: temurin
        java-version: ${{matrix.java}}
@@ -88,8 +88,8 @@ jobs:
   javadoc:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        distribution: temurin
        java-version: 17
@@ -100,11 +100,11 @@ jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        distribution: temurin
-       java-version: 11
+       java-version: 17
    - run: java -version
    - run: .kokoro/build.sh
      env:
        JOB_TYPE: lint
@@ -112,8 +112,8 @@ jobs:
   clirr:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        distribution: temurin
        java-version: 8
diff --git a/.github/workflows/renovate_config_check.yaml b/.github/workflows/renovate_config_check.yaml
index 87d8eb2be..47b9e87c9 100644
--- a/.github/workflows/renovate_config_check.yaml
+++ b/.github/workflows/renovate_config_check.yaml
@@ -7,16 +7,16 @@ on:

 jobs:
   renovate_bot_config_validation:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Node.js
-        uses: actions/setup-node@v3
+        uses: actions/setup-node@v4
        with:
-          node-version: '20'
+          node-version: '22'

      - name: Install Renovate and Config Validator
        run: |
diff --git a/.github/workflows/samples.yaml b/.github/workflows/samples.yaml
index 10d252d77..f833b8022 100644
--- a/.github/workflows/samples.yaml
+++ b/.github/workflows/samples.yaml
@@ -20,11 +20,11 @@ jobs:
   checkstyle:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-java@v3
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
      with:
        distribution: temurin
-        java-version: 8
+        java-version: 11
    - name: Run checkstyle
      run: mvn -P lint --quiet --batch-mode checkstyle:check
      working-directory: samples/snippets
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index e29083aab..1524dc8d2 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -32,12 +32,12 @@ jobs:

     steps:
       - name: "Checkout code"
-        uses: actions/checkout@9bb56186c3b09b4f86b1c65136769dd318469633 # v4.1.2
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          persist-credentials: false

      - name: "Run analysis"
-        uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
+        uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2
        with:
          results_file: results.sarif
          results_format: sarif
@@ -59,7 +59,7 @@ jobs:
      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
-        uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
        with:
          name: SARIF file
          path: results.sarif
@@ -67,6 +67,6 @@ jobs:

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@a82bad71823183e5b120ab52d521460ecb0585fe # v2.24.9
+        uses: github/codeql-action/upload-sarif@b8d3b6e8af63cde30bdc382c0bc28114f4346c88 # v2.28.1
        with:
          sarif_file: results.sarif
diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml
index ebf073c1b..5ba388acc 100644
--- a/.github/workflows/unmanaged_dependency_check.yaml
+++ b/.github/workflows/unmanaged_dependency_check.yaml
@@ -5,7 +5,7 @@ jobs:
   unmanaged_dependency_check:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - uses: actions/setup-java@v3
        with:
          distribution: temurin
@@ -17,7 +17,7 @@ jobs:
      # repository .kokoro/build.sh
      - name: Unmanaged dependency check
-        uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.28.1
+        uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.54.1
        with:
          # java-bigquery does not produce a BOM. Fortunately the root pom.xml
          # defines google-cloud-bigquery in dependencyManagement section. So
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 64f559885..084b07b25 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -52,7 +52,7 @@ test)
   RETURN_CODE=$?
   ;;
 lint)
-  mvn com.coveo:fmt-maven-plugin:check -B -ntp
+  mvn com.spotify.fmt:fmt-maven-plugin:check -B -ntp
   RETURN_CODE=$?
   ;;
 javadoc)
@@ -75,11 +75,6 @@ graalvm)
   mvn -B ${INTEGRATION_TEST_ARGS} -ntp -Pnative test
   RETURN_CODE=$?
   ;;
-graalvm17)
-  # Run Unit and Integration Tests with Native Image
-  mvn -B ${INTEGRATION_TEST_ARGS} -ntp -Pnative test
-  RETURN_CODE=$?
-  ;;
 samples)
   SAMPLES_DIR=samples
   # only run ITs in snapshot/ on presubmit PRs. run ITs in all 3 samples/ subdirectories otherwise.
diff --git a/.kokoro/continuous/graalvm-native.cfg b/.kokoro/continuous/graalvm-native-a.cfg
similarity index 95%
rename from .kokoro/continuous/graalvm-native.cfg
rename to .kokoro/continuous/graalvm-native-a.cfg
index 853a0d20b..0d98de509 100644
--- a/.kokoro/continuous/graalvm-native.cfg
+++ b/.kokoro/continuous/graalvm-native-a.cfg
@@ -3,7 +3,7 @@
 # Configure the docker image for kokoro-trampoline.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.28.1"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.1"
 }

 env_vars: {
@@ -34,5 +34,5 @@ env_vars: {

 env_vars: {
   key: "ENABLE_FLAKYBOT"
-  value: "true"
+  value: "false"
 }
\ No newline at end of file
diff --git a/.kokoro/continuous/graalvm-native-17.cfg b/.kokoro/continuous/graalvm-native-b.cfg
similarity index 92%
rename from .kokoro/continuous/graalvm-native-17.cfg
rename to .kokoro/continuous/graalvm-native-b.cfg
index fba53ee54..c270bff71 100644
--- a/.kokoro/continuous/graalvm-native-17.cfg
+++ b/.kokoro/continuous/graalvm-native-b.cfg
@@ -3,12 +3,12 @@
 # Configure the docker image for kokoro-trampoline.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.28.1"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.1"
 }

 env_vars: {
   key: "JOB_TYPE"
-  value: "graalvm17"
+  value: "graalvm"
 }

 # TODO: remove this after we've migrated all tests and scripts
@@ -34,5 +34,5 @@ env_vars: {

 env_vars: {
   key: "ENABLE_FLAKYBOT"
-  value: "true"
+  value: "false"
 }
\ No newline at end of file
diff --git a/.kokoro/presubmit/graalvm-native.cfg b/.kokoro/continuous/graalvm-native-c.cfg
similarity index 90%
rename from .kokoro/presubmit/graalvm-native.cfg
rename to .kokoro/continuous/graalvm-native-c.cfg
index 94e00cbaa..720f8bcfa 100644
--- a/.kokoro/presubmit/graalvm-native.cfg
+++ b/.kokoro/continuous/graalvm-native-c.cfg
@@ -3,7 +3,7 @@
 # Configure the docker image for kokoro-trampoline.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.28.1"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.1"
 }

 env_vars: {
@@ -31,3 +31,8 @@ env_vars: {
   key: "SECRET_MANAGER_KEYS"
   value: "java-it-service-account"
 }
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
+}
\ No newline at end of file
diff --git a/.kokoro/nightly/graalvm-native-17.cfg b/.kokoro/nightly/graalvm-native-17.cfg
deleted file mode 100644
index 15ed86292..000000000
--- a/.kokoro/nightly/graalvm-native-17.cfg
+++ /dev/null
@@ -1,38 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/graalvm17"
-}
-
-env_vars: {
-  key: "JOB_TYPE"
-  value: "nightly-graalvm17"
-}
-
-# TODO: remove this after we've migrated all tests and scripts
-env_vars: {
-  key: "GCLOUD_PROJECT"
-  value: "gcloud-devel"
-}
-
-env_vars: {
-  key: "GOOGLE_CLOUD_PROJECT"
-  value: "gcloud-devel"
-}
-
-env_vars: {
-  key: "ENABLE_FLAKYBOT"
-  value: "true"
-}
-
-env_vars: {
-  key: "GOOGLE_APPLICATION_CREDENTIALS"
-  value: "secret_manager/java-it-service-account"
-}
-
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "java-it-service-account"
-}
\ No newline at end of file
diff --git a/.kokoro/nightly/graalvm-native.cfg b/.kokoro/nightly/graalvm-native.cfg
deleted file mode 100644
index 57c1e3511..000000000
--- a/.kokoro/nightly/graalvm-native.cfg
+++ /dev/null
@@ -1,38 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/graalvm"
-}
-
-env_vars: {
-  key: "JOB_TYPE"
-  value: "nightly-graalvm"
-}
-
-# TODO: remove this after we've migrated all tests and scripts
-env_vars: {
-  key: "GCLOUD_PROJECT"
-  value: "gcloud-devel"
-}
-
-env_vars: {
-  key: "GOOGLE_CLOUD_PROJECT"
-  value: "gcloud-devel"
-}
-
-env_vars: {
-  key: "ENABLE_FLAKYBOT"
-  value: "true"
-}
-
-env_vars: {
-  key: "GOOGLE_APPLICATION_CREDENTIALS"
-  value: "secret_manager/java-it-service-account"
-}
-
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "java-it-service-account"
-}
diff --git a/.kokoro/presubmit/graalvm-native-17.cfg b/.kokoro/presubmit/graalvm-native-a.cfg
similarity index 87%
rename from .kokoro/presubmit/graalvm-native-17.cfg
rename to .kokoro/presubmit/graalvm-native-a.cfg
index 227409d0c..0d98de509 100644
--- a/.kokoro/presubmit/graalvm-native-17.cfg
+++ b/.kokoro/presubmit/graalvm-native-a.cfg
@@ -3,12 +3,12 @@
 # Configure the docker image for kokoro-trampoline.
 env_vars: {
   key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.28.1"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.54.1"
 }

 env_vars: {
   key: "JOB_TYPE"
-  value: "graalvm17"
+  value: "graalvm"
 }

 # TODO: remove this after we've migrated all tests and scripts
@@ -30,4 +30,9 @@ env_vars: {
 env_vars: {
   key: "SECRET_MANAGER_KEYS"
   value: "java-it-service-account"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
 }
\ No newline at end of file
diff --git a/.kokoro/presubmit/graalvm-native-b.cfg b/.kokoro/presubmit/graalvm-native-b.cfg
new file mode 100644
index 000000000..c270bff71
--- /dev/null
+++ b/.kokoro/presubmit/graalvm-native-b.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.54.1" +} + +env_vars: { + key: "JOB_TYPE" + value: "graalvm" +} + +# TODO: remove this after we've migrated all tests and scripts +env_vars: { + key: "GCLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "GOOGLE_CLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "GOOGLE_APPLICATION_CREDENTIALS" + value: "secret_manager/java-it-service-account" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "java-it-service-account" +} + +env_vars: { + key: "ENABLE_FLAKYBOT" + value: "false" +} \ No newline at end of file diff --git a/.kokoro/presubmit/graalvm-native-c.cfg b/.kokoro/presubmit/graalvm-native-c.cfg new file mode 100644 index 000000000..720f8bcfa --- /dev/null +++ b/.kokoro/presubmit/graalvm-native-c.cfg @@ -0,0 +1,38 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.54.1" +} + +env_vars: { + key: "JOB_TYPE" + value: "graalvm" +} + +# TODO: remove this after we've migrated all tests and scripts +env_vars: { + key: "GCLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "GOOGLE_CLOUD_PROJECT" + value: "gcloud-devel" +} + +env_vars: { + key: "GOOGLE_APPLICATION_CREDENTIALS" + value: "secret_manager/java-it-service-account" +} + +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "java-it-service-account" +} + +env_vars: { + key: "ENABLE_FLAKYBOT" + value: "false" +} \ No newline at end of file diff --git a/.repo-metadata.json b/.repo-metadata.json index d795a9ea6..278b4ea8e 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -13,5 +13,6 @@ "codeowner_team": "@googleapis/api-bigquery", "api_id": "bigquery.googleapis.com", "library_type": "GAPIC_MANUAL", - "requires_billing": true + "requires_billing": true, + "recommended_package": "com.google.cloud.bigquery" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 547ec4ce2..59350c03b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,684 @@ # Changelog +## [2.56.0](https://github.com/googleapis/java-bigquery/compare/v2.55.3...v2.56.0) (2025-11-15) + + +### Features + +* New queryWithTimeout method for customer-side wait ([#3995](https://github.com/googleapis/java-bigquery/issues/3995)) ([9c0df54](https://github.com/googleapis/java-bigquery/commit/9c0df5422c05696f7ce4bedf914a58306150dc21)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20251012-2.0.0 ([#3923](https://github.com/googleapis/java-bigquery/issues/3923)) ([1d8977d](https://github.com/googleapis/java-bigquery/commit/1d8977df3b1451378e5471cce9fd8b067f80fc9a)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.54.1 ([#3994](https://github.com/googleapis/java-bigquery/issues/3994)) ([4e09f6b](https://github.com/googleapis/java-bigquery/commit/4e09f6bc7a25904ad8f61141a0837535d39dbb4e)) + +## [2.55.3](https://github.com/googleapis/java-bigquery/compare/v2.55.2...v2.55.3) (2025-10-21) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.53.0 ([#3980](https://github.com/googleapis/java-bigquery/issues/3980)) ([a961247](https://github.com/googleapis/java-bigquery/commit/a961247e9546a9fce8da1609afd18975142c2379)) + +## [2.55.2](https://github.com/googleapis/java-bigquery/compare/v2.55.1...v2.55.2) (2025-10-08) + + +### Dependencies + +* Fix update dependency 
com.google.cloud:google-cloud-bigquerystorage-bom to v3.17.2 ([b25095d](https://github.com/googleapis/java-bigquery/commit/b25095d23279dab178975c33f4de84612612e175)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.3 ([#3971](https://github.com/googleapis/java-bigquery/issues/3971)) ([f8cf508](https://github.com/googleapis/java-bigquery/commit/f8cf50833772412c4f15922bffcdf5100792948d)) + +## [2.55.1](https://github.com/googleapis/java-bigquery/compare/v2.55.0...v2.55.1) (2025-09-26) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.2 ([#3964](https://github.com/googleapis/java-bigquery/issues/3964)) ([6775fce](https://github.com/googleapis/java-bigquery/commit/6775fce537df9c5f4d0b1488ce28591f6aed195f)) + +## [2.55.0](https://github.com/googleapis/java-bigquery/compare/v2.54.2...v2.55.0) (2025-09-12) + + +### Features + +* **bigquery:** Add custom ExceptionHandler to BigQueryOptions ([#3937](https://github.com/googleapis/java-bigquery/issues/3937)) ([de0914d](https://github.com/googleapis/java-bigquery/commit/de0914ddbccf988294d50faf56a515e58ab3505d)) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.17.0 ([#3954](https://github.com/googleapis/java-bigquery/issues/3954)) ([e73deed](https://github.com/googleapis/java-bigquery/commit/e73deed9c68a45023d02b40144c304329d6b5829)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.1 ([#3952](https://github.com/googleapis/java-bigquery/issues/3952)) ([79b7557](https://github.com/googleapis/java-bigquery/commit/79b7557501d318fd92b90a681036fe6a1aa1bac4)) + +## [2.54.2](https://github.com/googleapis/java-bigquery/compare/v2.54.1...v2.54.2) (2025-08-26) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.0 ([#3939](https://github.com/googleapis/java-bigquery/issues/3939)) ([794bf83](https://github.com/googleapis/java-bigquery/commit/794bf83e84efc0712638bebde5158777b9c89397)) + +## [2.54.1](https://github.com/googleapis/java-bigquery/compare/v2.54.0...v2.54.1) (2025-08-13) + + +### Bug Fixes + +* Adapt graalvm config to arrow update ([#3928](https://github.com/googleapis/java-bigquery/issues/3928)) ([ecfabc4](https://github.com/googleapis/java-bigquery/commit/ecfabc4b70922d0e697699ec5508a7328cadacf8)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.51.0 ([#3924](https://github.com/googleapis/java-bigquery/issues/3924)) ([cb66be5](https://github.com/googleapis/java-bigquery/commit/cb66be596d1bfd0a5aed75f5a0e36d80269c7f6a)) + +## [2.54.0](https://github.com/googleapis/java-bigquery/compare/v2.53.0...v2.54.0) (2025-07-31) + + +### Features + +* **bigquery:** Add OpenTelemetry Samples ([#3899](https://github.com/googleapis/java-bigquery/issues/3899)) ([e3d9ed9](https://github.com/googleapis/java-bigquery/commit/e3d9ed92ca5d9b58b5747960d74f895ed8733ebf)) +* **bigquery:** Add otel metrics to request headers ([#3900](https://github.com/googleapis/java-bigquery/issues/3900)) ([4071e4c](https://github.com/googleapis/java-bigquery/commit/4071e4cb2547b236183fd4fbb92c73f074cf2fa0)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.70.0 ([#3890](https://github.com/googleapis/java-bigquery/issues/3890)) ([84207e2](https://github.com/googleapis/java-bigquery/commit/84207e297eec75bcb4f1cc1b64423d7c2ddd6c30)) +* Update dependency com.google.apis:google-api-services-bigquery to 
v2-rev20250706-2.0.0 ([#3910](https://github.com/googleapis/java-bigquery/issues/3910)) ([ae5c971](https://github.com/googleapis/java-bigquery/commit/ae5c97146c7076e90c000fd98b797ec8e08a9cd8)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.50.2 ([#3901](https://github.com/googleapis/java-bigquery/issues/3901)) ([8205623](https://github.com/googleapis/java-bigquery/commit/82056237f194a6c99ec4fb3a4315023efdedff1b)) +* Update dependency io.opentelemetry:opentelemetry-api to v1.52.0 ([#3902](https://github.com/googleapis/java-bigquery/issues/3902)) ([772407b](https://github.com/googleapis/java-bigquery/commit/772407b12f4da005f79eafc944d4c53f0eec5c27)) +* Update dependency io.opentelemetry:opentelemetry-bom to v1.52.0 ([#3903](https://github.com/googleapis/java-bigquery/issues/3903)) ([509a6fc](https://github.com/googleapis/java-bigquery/commit/509a6fc0bb7e7a101bf0d4334a3ff1adde2cab09)) +* Update dependency io.opentelemetry:opentelemetry-context to v1.52.0 ([#3904](https://github.com/googleapis/java-bigquery/issues/3904)) ([96c1bae](https://github.com/googleapis/java-bigquery/commit/96c1bae0fcdfdfc2dbb25dcae5007c5d02111a8c)) +* Update dependency io.opentelemetry:opentelemetry-exporter-logging to v1.52.0 ([#3905](https://github.com/googleapis/java-bigquery/issues/3905)) ([28ee4c9](https://github.com/googleapis/java-bigquery/commit/28ee4c941b99b1fe3803aefbe7a8ae57100d76cb)) + +## [2.53.0](https://github.com/googleapis/java-bigquery/compare/v2.52.0...v2.53.0) (2025-07-14) + + +### Features + +* **bigquery:** Add OpenTelemetry support to BQ rpcs ([#3860](https://github.com/googleapis/java-bigquery/issues/3860)) ([e2d23c1](https://github.com/googleapis/java-bigquery/commit/e2d23c1b15f2c48a4113f82b920f5c29c4b5dfea)) +* **bigquery:** Add support for custom timezones and timestamps ([#3859](https://github.com/googleapis/java-bigquery/issues/3859)) ([e5467c9](https://github.com/googleapis/java-bigquery/commit/e5467c917c63ac066edcbcd902cc2093a39971a3)) +* Next release from main branch is 2.53.0 ([#3879](https://github.com/googleapis/java-bigquery/issues/3879)) ([c47a062](https://github.com/googleapis/java-bigquery/commit/c47a062136fea4de91190cafb1f11bac6abbbe3a)) + + +### Bug Fixes + +* Load jobs preserve ascii control characters configuration ([#3876](https://github.com/googleapis/java-bigquery/issues/3876)) ([5cfdf85](https://github.com/googleapis/java-bigquery/commit/5cfdf855fa0cf206660fd89743cbaabf3afa75a3)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.69.0 ([#3870](https://github.com/googleapis/java-bigquery/issues/3870)) ([a7f1007](https://github.com/googleapis/java-bigquery/commit/a7f1007b5242da2c0adebbb309a908d7d4db5974)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250615-2.0.0 ([#3872](https://github.com/googleapis/java-bigquery/issues/3872)) ([f081589](https://github.com/googleapis/java-bigquery/commit/f08158955b7fec3c2ced6332b6e4d76cc13f2e90)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.50.1 ([#3878](https://github.com/googleapis/java-bigquery/issues/3878)) ([0e971b8](https://github.com/googleapis/java-bigquery/commit/0e971b8ace013caa31b8a02a21038e94bebae2a5)) + + +### Documentation + +* Update maven format command ([#3877](https://github.com/googleapis/java-bigquery/issues/3877)) ([d2918da](https://github.com/googleapis/java-bigquery/commit/d2918da844cd20ca1602c6fcf9fa1df685f261fc)) + +## 
[2.52.0](https://github.com/googleapis/java-bigquery/compare/v2.51.0...v2.52.0) (2025-06-25) + + +### Features + +* **bigquery:** Integrate Otel in client lib ([#3747](https://github.com/googleapis/java-bigquery/issues/3747)) ([6e3e07a](https://github.com/googleapis/java-bigquery/commit/6e3e07a22b8397e1e9d5b567589e44abc55961f2)) +* **bigquery:** Integrate Otel into retries, jobs, and more ([#3842](https://github.com/googleapis/java-bigquery/issues/3842)) ([4b28c47](https://github.com/googleapis/java-bigquery/commit/4b28c479c1bc22326c8d2501354fb86ec2ce1744)) + + +### Bug Fixes + +* **bigquery:** Add MY_VIEW_DATASET_NAME_TEST_ to resource clean up sample ([#3838](https://github.com/googleapis/java-bigquery/issues/3838)) ([b1962a7](https://github.com/googleapis/java-bigquery/commit/b1962a7f0084ee4c3e248266b50406cf575cd657)) + + +### Dependencies + +* Remove version declaration of open-telemetry-bom ([#3855](https://github.com/googleapis/java-bigquery/issues/3855)) ([6f9f77d](https://github.com/googleapis/java-bigquery/commit/6f9f77d47596b00b7317c8a0d4a10c3d849ad57b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.66.0 ([#3835](https://github.com/googleapis/java-bigquery/issues/3835)) ([69be5e7](https://github.com/googleapis/java-bigquery/commit/69be5e7345fb8ca69d633d9dc99cf6c15fa5227b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.68.0 ([#3858](https://github.com/googleapis/java-bigquery/issues/3858)) ([d4ca353](https://github.com/googleapis/java-bigquery/commit/d4ca3535f54f3282aec133337103bbfa2c9a3653)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.49.2 ([#3853](https://github.com/googleapis/java-bigquery/issues/3853)) ([cf864df](https://github.com/googleapis/java-bigquery/commit/cf864df739bbb820e99999b7c1592a3635fea4ec)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.50.0 ([#3861](https://github.com/googleapis/java-bigquery/issues/3861)) ([eb26dee](https://github.com/googleapis/java-bigquery/commit/eb26deee37119389aee3962eea5ad67d63f26c70)) +* Update dependency io.opentelemetry:opentelemetry-bom to v1.51.0 ([#3840](https://github.com/googleapis/java-bigquery/issues/3840)) ([51321c2](https://github.com/googleapis/java-bigquery/commit/51321c22778fd41134cc0cdfc70bdc47f05883f1)) +* Update ossf/scorecard-action action to v2.4.2 ([#3810](https://github.com/googleapis/java-bigquery/issues/3810)) ([414f61d](https://github.com/googleapis/java-bigquery/commit/414f61d7efcfa568c1446bd41945d7a8e2450649)) + +## [2.51.0](https://github.com/googleapis/java-bigquery/compare/v2.50.1...v2.51.0) (2025-06-06) + + +### Features + +* **bigquery:** Job creation mode GA ([#3804](https://github.com/googleapis/java-bigquery/issues/3804)) ([a21cde8](https://github.com/googleapis/java-bigquery/commit/a21cde8994e93337326cc4a2deb4bafd1596b77f)) +* **bigquery:** Support Fine Grained ACLs for Datasets ([#3803](https://github.com/googleapis/java-bigquery/issues/3803)) ([bebf1c6](https://github.com/googleapis/java-bigquery/commit/bebf1c610e6d050c49fc05f30d3fa0247b7dfdcb)) + + +### Dependencies + +* Rollback netty.version to v4.1.119.Final ([#3827](https://github.com/googleapis/java-bigquery/issues/3827)) ([94c71a0](https://github.com/googleapis/java-bigquery/commit/94c71a090eab745c81dd9530bcdd3c8c1e734788)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.65.0 ([#3787](https://github.com/googleapis/java-bigquery/issues/3787)) 
([0574ecc](https://github.com/googleapis/java-bigquery/commit/0574eccec2975738804be7d0ccb4c973459c82c9)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250511-2.0.0 ([#3794](https://github.com/googleapis/java-bigquery/issues/3794)) ([d3bf724](https://github.com/googleapis/java-bigquery/commit/d3bf724feef91469b44e1e5068738604d2b3cead)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.49.0 ([#3811](https://github.com/googleapis/java-bigquery/issues/3811)) ([2c5ede4](https://github.com/googleapis/java-bigquery/commit/2c5ede4b115cf7cdd078d54d29ce93636c1cedf5)) + +## [2.50.1](https://github.com/googleapis/java-bigquery/compare/v2.50.0...v2.50.1) (2025-05-16) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.48.0 ([#3790](https://github.com/googleapis/java-bigquery/issues/3790)) ([206f06d](https://github.com/googleapis/java-bigquery/commit/206f06de115ead53b26f09a5f4781efd279b5a73)) +* Update netty.version to v4.2.1.final ([#3780](https://github.com/googleapis/java-bigquery/issues/3780)) ([6dcd858](https://github.com/googleapis/java-bigquery/commit/6dcd858eca788a8cb571368e12b4925993e380c4)) + + +### Documentation + +* **bigquery:** Update TableResult.getTotalRows() docstring ([#3785](https://github.com/googleapis/java-bigquery/issues/3785)) ([6483588](https://github.com/googleapis/java-bigquery/commit/6483588a3c5785b95ea841f21aa38f50ecf4226d)) + +## [2.50.0](https://github.com/googleapis/java-bigquery/compare/v2.49.2...v2.50.0) (2025-05-06) + + +### Features + +* Add WRITE_TRUNCATE_DATA as an enum value for write disposition ([#3752](https://github.com/googleapis/java-bigquery/issues/3752)) ([acea61c](https://github.com/googleapis/java-bigquery/commit/acea61c20b69b44c8612ca22745458ad04bc6be4)) +* **bigquery:** Add support for reservation field in jobs. 
([#3768](https://github.com/googleapis/java-bigquery/issues/3768)) ([3e97f7c](https://github.com/googleapis/java-bigquery/commit/3e97f7c0c4676fcdda0862929a69bbabc69926f2)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.63.0 ([#3770](https://github.com/googleapis/java-bigquery/issues/3770)) ([934389e](https://github.com/googleapis/java-bigquery/commit/934389eb114d8fbb10c9c125d21ec26d503dca65)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250404-2.0.0 ([#3754](https://github.com/googleapis/java-bigquery/issues/3754)) ([1381c8f](https://github.com/googleapis/java-bigquery/commit/1381c8fe6c2552eec4519304c71697302733d6c7)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250427-2.0.0 ([#3773](https://github.com/googleapis/java-bigquery/issues/3773)) ([c0795fe](https://github.com/googleapis/java-bigquery/commit/c0795fe948e0ca231dbe8fc47c470603cb48ecc8)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.46.3 ([#3772](https://github.com/googleapis/java-bigquery/issues/3772)) ([ab166b6](https://github.com/googleapis/java-bigquery/commit/ab166b6c33c574b4494368709db0443e055b4863)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.47.0 ([#3779](https://github.com/googleapis/java-bigquery/issues/3779)) ([b27434b](https://github.com/googleapis/java-bigquery/commit/b27434b8a75e74184458e920142f5575fed9ba52)) + +## [2.49.2](https://github.com/googleapis/java-bigquery/compare/v2.49.1...v2.49.2) (2025-04-26) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.46.2 ([#3756](https://github.com/googleapis/java-bigquery/issues/3756)) ([907e39f](https://github.com/googleapis/java-bigquery/commit/907e39fd467f972863deeb86356fc3bfb989a76d)) + +## [2.49.1](https://github.com/googleapis/java-bigquery/compare/v2.49.0...v2.49.1) (2025-04-24) + + +### Bug Fixes + +* Add labels to converter for listTables method ([#3735](https://github.com/googleapis/java-bigquery/issues/3735)) ([#3736](https://github.com/googleapis/java-bigquery/issues/3736)) ([8634822](https://github.com/googleapis/java-bigquery/commit/8634822e1836c5ccc0f8d0263ac57ac561578360)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.46.0 ([#3753](https://github.com/googleapis/java-bigquery/issues/3753)) ([a335927](https://github.com/googleapis/java-bigquery/commit/a335927e16d0907d62e584f08fa8393daae40354)) +* Update netty.version to v4.2.0.final ([#3745](https://github.com/googleapis/java-bigquery/issues/3745)) ([bb811c0](https://github.com/googleapis/java-bigquery/commit/bb811c068b3efabf04fbe67dbb2979d562c604d9)) + +## [2.49.0](https://github.com/googleapis/java-bigquery/compare/v2.48.1...v2.49.0) (2025-03-20) + + +### Features + +* **bigquery:** Implement getArray in BigQueryResultImpl ([#3693](https://github.com/googleapis/java-bigquery/issues/3693)) ([e2a3f2c](https://github.com/googleapis/java-bigquery/commit/e2a3f2c1a1406bf7bc9a035dce3acfde78f0eaa4)) +* Next release from main branch is 2.49.0 ([#3706](https://github.com/googleapis/java-bigquery/issues/3706)) ([b46a6cc](https://github.com/googleapis/java-bigquery/commit/b46a6ccc959f8defb145279ea18ff2e4f1bac58f)) + + +### Bug Fixes + +* Retry ExceptionHandler not retrying on IOException ([#3668](https://github.com/googleapis/java-bigquery/issues/3668)) ([83245b9](https://github.com/googleapis/java-bigquery/commit/83245b961950ca9a993694082e533834ee364417)) + + 
+### Dependencies + +* Exclude io.netty:netty-common from org.apache.arrow:arrow-memor… ([#3715](https://github.com/googleapis/java-bigquery/issues/3715)) ([11b5809](https://github.com/googleapis/java-bigquery/commit/11b580949b910b38732c1c8d64704c54c260214e)) +* Update actions/upload-artifact action to v4.6.2 ([#3724](https://github.com/googleapis/java-bigquery/issues/3724)) ([426a59b](https://github.com/googleapis/java-bigquery/commit/426a59b9b999e836804f84c5cbe11d497128f0a8)) +* Update actions/upload-artifact action to v4.6.2 ([#3724](https://github.com/googleapis/java-bigquery/issues/3724)) ([483f930](https://github.com/googleapis/java-bigquery/commit/483f9305023988b3884329733d0e5fbcb6599eb1)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.61.0 ([#3703](https://github.com/googleapis/java-bigquery/issues/3703)) ([53b07b0](https://github.com/googleapis/java-bigquery/commit/53b07b0e77f6ef57c8518df2b106edace679f79a)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.62.0 ([#3726](https://github.com/googleapis/java-bigquery/issues/3726)) ([38e004b](https://github.com/googleapis/java-bigquery/commit/38e004b58134caf4f7b0d96257456930beb0e599)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250302-2.0.0 ([#3720](https://github.com/googleapis/java-bigquery/issues/3720)) ([c0b3902](https://github.com/googleapis/java-bigquery/commit/c0b39029302c51e65ea31495d837598eefbe94e8)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250313-2.0.0 ([#3723](https://github.com/googleapis/java-bigquery/issues/3723)) ([b8875a8](https://github.com/googleapis/java-bigquery/commit/b8875a895d6d5e267086e24f97d0ed5fec36b9fe)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.65.0 ([#3704](https://github.com/googleapis/java-bigquery/issues/3704)) ([53b68b1](https://github.com/googleapis/java-bigquery/commit/53b68b13a505aa5d38e56032eaeb8c95bf3e9078)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.66.0 ([#3727](https://github.com/googleapis/java-bigquery/issues/3727)) ([7339f94](https://github.com/googleapis/java-bigquery/commit/7339f94cfa53d1c988f8ef051ddd5a2d7668d430)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.45.1 ([#3714](https://github.com/googleapis/java-bigquery/issues/3714)) ([e4512aa](https://github.com/googleapis/java-bigquery/commit/e4512aa5966e7b935fa55a062d940d9db0c834b3)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.39.0 ([#3710](https://github.com/googleapis/java-bigquery/issues/3710)) ([c0c6352](https://github.com/googleapis/java-bigquery/commit/c0c6352b8d02145fe9513e3e23d316e045360d2d)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.39.0 ([#3711](https://github.com/googleapis/java-bigquery/issues/3711)) ([43b86e9](https://github.com/googleapis/java-bigquery/commit/43b86e91a664dd9d3edaea7b31b46ac635fb22b0)) +* Update dependency node to v22 ([#3713](https://github.com/googleapis/java-bigquery/issues/3713)) ([251def5](https://github.com/googleapis/java-bigquery/commit/251def5659d2648dff0833ba967a65435e11b643)) +* Update netty.version to v4.1.119.final ([#3717](https://github.com/googleapis/java-bigquery/issues/3717)) ([08a290a](https://github.com/googleapis/java-bigquery/commit/08a290adcfa7551ee27a58da0eaf5ac00a759b90)) + + +### Documentation + +* Update error handling comment to be more precise in samples 
([#3712](https://github.com/googleapis/java-bigquery/issues/3712)) ([9eb555f](https://github.com/googleapis/java-bigquery/commit/9eb555ff61bef42a3bdfe197da8423b7bf14f493)) + +## [2.48.1](https://github.com/googleapis/java-bigquery/compare/v2.48.0...v2.48.1) (2025-02-26) + + +### Dependencies + +* Update actions/upload-artifact action to v4.6.1 ([#3691](https://github.com/googleapis/java-bigquery/issues/3691)) ([9c0edea](https://github.com/googleapis/java-bigquery/commit/9c0edea7c00b3ffbe6b6a404e4161f768acb34f2)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.60.0 ([#3680](https://github.com/googleapis/java-bigquery/issues/3680)) ([6d9a40d](https://github.com/googleapis/java-bigquery/commit/6d9a40d55a6bbcbff7df39723d33f0af2b24f66e)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250216-2.0.0 ([#3688](https://github.com/googleapis/java-bigquery/issues/3688)) ([e3beb6f](https://github.com/googleapis/java-bigquery/commit/e3beb6ffe433db8ad4087d0f27a8f0d23e7c9322)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.64.0 ([#3681](https://github.com/googleapis/java-bigquery/issues/3681)) ([9e4e261](https://github.com/googleapis/java-bigquery/commit/9e4e26116226d17cc42ae030eed284bd6674b74b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.44.0 ([#3694](https://github.com/googleapis/java-bigquery/issues/3694)) ([f69fbd3](https://github.com/googleapis/java-bigquery/commit/f69fbd371f18da6ddc43d4f32f532e684026fe16)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.38.0 ([#3685](https://github.com/googleapis/java-bigquery/issues/3685)) ([53bd7af](https://github.com/googleapis/java-bigquery/commit/53bd7af47783674a3accbadb1172edbcf628ab2b)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.38.0 ([#3686](https://github.com/googleapis/java-bigquery/issues/3686)) ([d71b2a3](https://github.com/googleapis/java-bigquery/commit/d71b2a34a728fb6ee1c88cdc895b87959e230b7a)) +* Update ossf/scorecard-action action to v2.4.1 ([#3690](https://github.com/googleapis/java-bigquery/issues/3690)) ([cdb61fe](https://github.com/googleapis/java-bigquery/commit/cdb61febcb1a64f6ddd3c0e3c29fa7995f1d3fa5)) + +## [2.48.0](https://github.com/googleapis/java-bigquery/compare/v2.47.0...v2.48.0) (2025-02-13) + + +### Features + +* Implement wasNull for BigQueryResultSet ([#3650](https://github.com/googleapis/java-bigquery/issues/3650)) ([c7ef94b](https://github.com/googleapis/java-bigquery/commit/c7ef94be115cd572df589385f9be801033d72d6d)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.59.0 ([#3660](https://github.com/googleapis/java-bigquery/issues/3660)) ([3a6228b](https://github.com/googleapis/java-bigquery/commit/3a6228b4adc638759d3b2725c612e97e1a3b9cec)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250128-2.0.0 ([#3667](https://github.com/googleapis/java-bigquery/issues/3667)) ([0b92af6](https://github.com/googleapis/java-bigquery/commit/0b92af6eba4a633bb514089c24b7dd19cf286789)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.63.0 ([#3661](https://github.com/googleapis/java-bigquery/issues/3661)) ([9bc8c01](https://github.com/googleapis/java-bigquery/commit/9bc8c0115dc16fb950567cd85cc7dfaa9df50d7d)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.43.0 ([#3669](https://github.com/googleapis/java-bigquery/issues/3669)) 
([4d9e0ff](https://github.com/googleapis/java-bigquery/commit/4d9e0ff30269127f47484910e71fa7a21a735492)) + + +### Documentation + +* Update CONTRIBUTING.md for users without branch permissions ([#3670](https://github.com/googleapis/java-bigquery/issues/3670)) ([009b9a2](https://github.com/googleapis/java-bigquery/commit/009b9a2b3940ab66220e68ddd565710b8552cc45)) + +## [2.47.0](https://github.com/googleapis/java-bigquery/compare/v2.46.0...v2.47.0) (2025-01-29) + + +### Features + +* **bigquery:** Support resource tags for datasets in java client ([#3647](https://github.com/googleapis/java-bigquery/issues/3647)) ([01e0b74](https://github.com/googleapis/java-bigquery/commit/01e0b742b9ffeafaa89b080a39d8a66c12c1fd3b)) + + +### Bug Fixes + +* **bigquery:** Remove ReadAPI bypass in executeSelect() ([#3624](https://github.com/googleapis/java-bigquery/issues/3624)) ([fadd992](https://github.com/googleapis/java-bigquery/commit/fadd992a63fd1bc87c99cc689ed103f05de49a99)) +* Close bq read client ([#3644](https://github.com/googleapis/java-bigquery/issues/3644)) ([8833c97](https://github.com/googleapis/java-bigquery/commit/8833c97d73e3ba8e6a2061bbc55a6254b9e6668e)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250112-2.0.0 ([#3651](https://github.com/googleapis/java-bigquery/issues/3651)) ([fd06100](https://github.com/googleapis/java-bigquery/commit/fd06100c4c18b0416d384ec1f6bdfc796b70ad9f)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.42.0 ([#3653](https://github.com/googleapis/java-bigquery/issues/3653)) ([1a14342](https://github.com/googleapis/java-bigquery/commit/1a143428c7f584db3dd6e827c2ee8fe980afe18c)) +* Update github/codeql-action action to v2.28.1 ([#3637](https://github.com/googleapis/java-bigquery/issues/3637)) ([858e517](https://github.com/googleapis/java-bigquery/commit/858e51792d98276f10fd780ef6edd0bb4a1b4f54)) + +## [2.46.0](https://github.com/googleapis/java-bigquery/compare/v2.45.0...v2.46.0) (2025-01-11) + + +### Features + +* **bigquery:** Support IAM conditions in datasets in Java client. 
([#3602](https://github.com/googleapis/java-bigquery/issues/3602)) ([6696a9c](https://github.com/googleapis/java-bigquery/commit/6696a9c7d42970e3c24bda4da713a855dbe40ce5)) + + +### Bug Fixes + +* NPE when reading BigQueryResultSet from empty tables ([#3627](https://github.com/googleapis/java-bigquery/issues/3627)) ([9a0b05a](https://github.com/googleapis/java-bigquery/commit/9a0b05a3b57797b7cdd8ca9739699fc018dbd868)) +* **test:** Force usage of ReadAPI ([#3625](https://github.com/googleapis/java-bigquery/issues/3625)) ([5ca7d4a](https://github.com/googleapis/java-bigquery/commit/5ca7d4acbbc40d6ef337732464b3bbd130c86430)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.5.0 ([#3620](https://github.com/googleapis/java-bigquery/issues/3620)) ([cc25099](https://github.com/googleapis/java-bigquery/commit/cc25099f81cbf94e9e2ee9db03a7d9ecd913c176)) +* Update actions/upload-artifact action to v4.6.0 ([#3633](https://github.com/googleapis/java-bigquery/issues/3633)) ([ca20aa4](https://github.com/googleapis/java-bigquery/commit/ca20aa47ea7826594975ab6aeb8498e2377f8553)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.57.0 ([#3617](https://github.com/googleapis/java-bigquery/issues/3617)) ([51370a9](https://github.com/googleapis/java-bigquery/commit/51370a92e7ab29dfce91199666f23576d2d1b64a)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.58.0 ([#3631](https://github.com/googleapis/java-bigquery/issues/3631)) ([b0ea0d5](https://github.com/googleapis/java-bigquery/commit/b0ea0d5bc4ac730b0e2eaf47e8a7441dc113686b)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241222-2.0.0 ([#3623](https://github.com/googleapis/java-bigquery/issues/3623)) ([4061922](https://github.com/googleapis/java-bigquery/commit/4061922e46135d673bfa48c00bbf284efa46e065)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.61.0 ([#3618](https://github.com/googleapis/java-bigquery/issues/3618)) ([6cba626](https://github.com/googleapis/java-bigquery/commit/6cba626ff14cebbc04fa4f6058b273de0c5dd96e)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.62.0 ([#3632](https://github.com/googleapis/java-bigquery/issues/3632)) ([e9ff265](https://github.com/googleapis/java-bigquery/commit/e9ff265041f6771a71c8c378ed3ff5fdec6e837b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.41.1 ([#3628](https://github.com/googleapis/java-bigquery/issues/3628)) ([442d217](https://github.com/googleapis/java-bigquery/commit/442d217606b7d93d26887344a7a4a01303b18b8c)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.37.0 ([#3614](https://github.com/googleapis/java-bigquery/issues/3614)) ([f5faa69](https://github.com/googleapis/java-bigquery/commit/f5faa69bc5b6fdae137724df5693f8aecf27d609)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.37.0 ([#3615](https://github.com/googleapis/java-bigquery/issues/3615)) ([a6c7944](https://github.com/googleapis/java-bigquery/commit/a6c79443a5e675a01ecb91e362e261a6f6ecc055)) +* Update github/codeql-action action to v2.27.9 ([#3608](https://github.com/googleapis/java-bigquery/issues/3608)) ([567ce01](https://github.com/googleapis/java-bigquery/commit/567ce01ed77d44760ddcd872a0d61abdd6a09832)) +* Update github/codeql-action action to v2.28.0 ([#3621](https://github.com/googleapis/java-bigquery/issues/3621)) 
([e0e09ec](https://github.com/googleapis/java-bigquery/commit/e0e09ec4954f5b5e2f094e4c67600f38353f453c)) + +## [2.45.0](https://github.com/googleapis/java-bigquery/compare/v2.44.0...v2.45.0) (2024-12-13) + + +### Features + +* Enable Lossless Timestamps in BQ java client lib ([#3589](https://github.com/googleapis/java-bigquery/issues/3589)) ([c0b874a](https://github.com/googleapis/java-bigquery/commit/c0b874aa0150e63908450b13d019864b8cbfbfe3)) +* Introduce `java.time` methods and variables ([#3586](https://github.com/googleapis/java-bigquery/issues/3586)) ([31fb15f](https://github.com/googleapis/java-bigquery/commit/31fb15fb963c18e4c29391e9fe56dfde31577511)) + + +### Bug Fixes + +* **test:** Update schema for broken ConnImplBenchmark test ([#3574](https://github.com/googleapis/java-bigquery/issues/3574)) ([8cf4387](https://github.com/googleapis/java-bigquery/commit/8cf4387fae22c81d40635b470b216fa4c126d681)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.56.0 ([#3582](https://github.com/googleapis/java-bigquery/issues/3582)) ([616ee2a](https://github.com/googleapis/java-bigquery/commit/616ee2aa8ccf3d2975274b256252f2f249775960)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241111-2.0.0 ([#3591](https://github.com/googleapis/java-bigquery/issues/3591)) ([3eef3a9](https://github.com/googleapis/java-bigquery/commit/3eef3a9959bcfdb76c26fdf9069d9acf89f93a7a)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241115-2.0.0 ([#3601](https://github.com/googleapis/java-bigquery/issues/3601)) ([41f9adb](https://github.com/googleapis/java-bigquery/commit/41f9adbe4235329fa2bbfd0930f4113e63f72e05)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.60.0 ([#3583](https://github.com/googleapis/java-bigquery/issues/3583)) ([34dd8bc](https://github.com/googleapis/java-bigquery/commit/34dd8bc22c8188f2b61dc9939b24a8d820548e2b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.41.0 ([#3607](https://github.com/googleapis/java-bigquery/issues/3607)) ([11499d1](https://github.com/googleapis/java-bigquery/commit/11499d16727934fd3dfa5c18226e6f20471a11ac)) +* Update github/codeql-action action to v2.27.5 ([#3588](https://github.com/googleapis/java-bigquery/issues/3588)) ([3f94075](https://github.com/googleapis/java-bigquery/commit/3f9407570fea5317aaf212b058ca1da05985eda9)) +* Update github/codeql-action action to v2.27.6 ([#3597](https://github.com/googleapis/java-bigquery/issues/3597)) ([bc1f3b9](https://github.com/googleapis/java-bigquery/commit/bc1f3b97a0c8ccc6e93a07b2f0ebcf8e05da9b48)) +* Update github/codeql-action action to v2.27.7 ([#3603](https://github.com/googleapis/java-bigquery/issues/3603)) ([528426b](https://github.com/googleapis/java-bigquery/commit/528426bf9b7801b1b9b45758b560f14a4c9bbc57)) + + +### Documentation + +* **bigquery:** Add javadoc description of timestamp() parameter. 
([#3604](https://github.com/googleapis/java-bigquery/issues/3604)) ([6ee0c10](https://github.com/googleapis/java-bigquery/commit/6ee0c103771ef678f66cc7a584bdce27e21f29c4)) + +## [2.44.0](https://github.com/googleapis/java-bigquery/compare/v2.43.3...v2.44.0) (2024-11-17) + + +### Features + +* Enable maxTimeTravelHours in BigQuery java client library ([#3555](https://github.com/googleapis/java-bigquery/issues/3555)) ([bd24fd8](https://github.com/googleapis/java-bigquery/commit/bd24fd8c550bfbd1207b194ed5c863a4a9924d48)) + + +### Bug Fixes + +* Update experimental methods documentation to [@internalapi](https://github.com/internalapi) ([#3552](https://github.com/googleapis/java-bigquery/issues/3552)) ([20826f1](https://github.com/googleapis/java-bigquery/commit/20826f1b08a3cc5bdcce5637b7ea21d467b2bce2)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.55.0 ([#3559](https://github.com/googleapis/java-bigquery/issues/3559)) ([950ad0c](https://github.com/googleapis/java-bigquery/commit/950ad0cce6370e332a568d3b2e9ef3911503d206)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241027-2.0.0 ([#3568](https://github.com/googleapis/java-bigquery/issues/3568)) ([b5ccfcc](https://github.com/googleapis/java-bigquery/commit/b5ccfccb552e731ccb09be923715849a4282d44d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.59.0 ([#3561](https://github.com/googleapis/java-bigquery/issues/3561)) ([1bd24a1](https://github.com/googleapis/java-bigquery/commit/1bd24a1ad28d168587b7cba95ec348cb1308a803)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.40.0 ([#3576](https://github.com/googleapis/java-bigquery/issues/3576)) ([d5fa951](https://github.com/googleapis/java-bigquery/commit/d5fa951b8255ec1bcbcdf9bb3c29f247e38a0c7e)) +* Update github/codeql-action action to v2.27.1 ([#3567](https://github.com/googleapis/java-bigquery/issues/3567)) ([e154ee3](https://github.com/googleapis/java-bigquery/commit/e154ee300485dc9d900343a8b5ceb7f6633bc3ff)) +* Update github/codeql-action action to v2.27.3 ([#3569](https://github.com/googleapis/java-bigquery/issues/3569)) ([3707a40](https://github.com/googleapis/java-bigquery/commit/3707a402039365c49e1976a388593f621231dc02)) +* Update github/codeql-action action to v2.27.4 ([#3572](https://github.com/googleapis/java-bigquery/issues/3572)) ([2c7b4f7](https://github.com/googleapis/java-bigquery/commit/2c7b4f750f4c8bf03c0ba74402d745341382a209)) + + +### Documentation + +* Fix BigQuery documentation formating ([#3565](https://github.com/googleapis/java-bigquery/issues/3565)) ([552f491](https://github.com/googleapis/java-bigquery/commit/552f49132af370f66aa1ccdde86e6280f638da22)) + +## [2.43.3](https://github.com/googleapis/java-bigquery/compare/v2.43.2...v2.43.3) (2024-10-29) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.2 ([19fc184](https://github.com/googleapis/java-bigquery/commit/19fc1843f7db8ab6fb361bf7f8119014033bc1c6)) + +## [2.43.2](https://github.com/googleapis/java-bigquery/compare/v2.43.1...v2.43.2) (2024-10-27) + + +### Dependencies + +* Update actions/checkout action to v4.2.2 ([#3541](https://github.com/googleapis/java-bigquery/issues/3541)) ([c36c123](https://github.com/googleapis/java-bigquery/commit/c36c123f5cd298b1481c9073ac9f5e634b0e1e68)) +* Update actions/upload-artifact action to v4.4.2 ([#3524](https://github.com/googleapis/java-bigquery/issues/3524)) 
([776a554](https://github.com/googleapis/java-bigquery/commit/776a5541cc94e8ffb1f5e5c6969ae06585571b45)) +* Update actions/upload-artifact action to v4.4.3 ([#3530](https://github.com/googleapis/java-bigquery/issues/3530)) ([2f87fd9](https://github.com/googleapis/java-bigquery/commit/2f87fd9d777175cb5a8e5b0dc55f07546351e504)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.54.0 ([#3532](https://github.com/googleapis/java-bigquery/issues/3532)) ([25be311](https://github.com/googleapis/java-bigquery/commit/25be311c1477db0993a5825a2b839a295170790f)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241013-2.0.0 ([#3544](https://github.com/googleapis/java-bigquery/issues/3544)) ([0c42092](https://github.com/googleapis/java-bigquery/commit/0c42092e34912d21a4d13f041577056faadf914a)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.0 ([0bd3c86](https://github.com/googleapis/java-bigquery/commit/0bd3c862636271c5a851fcd229b4cf6878a8c5d4)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.1 ([c03a63a](https://github.com/googleapis/java-bigquery/commit/c03a63a0da4f4915e9761dc1ca7429c46748688c)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.58.0 ([#3533](https://github.com/googleapis/java-bigquery/issues/3533)) ([cad2643](https://github.com/googleapis/java-bigquery/commit/cad26430f21a37eec2b87ea417f0cf67dcf9c97a)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.38.0 ([#3542](https://github.com/googleapis/java-bigquery/issues/3542)) ([16448ee](https://github.com/googleapis/java-bigquery/commit/16448eec7c7f00a113c923a0fcde463c8ac91f9b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.39.0 ([#3548](https://github.com/googleapis/java-bigquery/issues/3548)) ([616b2f6](https://github.com/googleapis/java-bigquery/commit/616b2f611f313994bf0ec2889daea3b569a84baf)) +* Update github/codeql-action action to v2.26.13 ([#3536](https://github.com/googleapis/java-bigquery/issues/3536)) ([844744f](https://github.com/googleapis/java-bigquery/commit/844744f3dea804a31abc806592f557a26cffbab4)) +* Update github/codeql-action action to v2.27.0 ([#3540](https://github.com/googleapis/java-bigquery/issues/3540)) ([1616a0f](https://github.com/googleapis/java-bigquery/commit/1616a0f6057916e21f3b4a6d418d1431d8d1fa16)) + + +### Documentation + +* Reformat javadoc ([#3545](https://github.com/googleapis/java-bigquery/issues/3545)) ([4763f73](https://github.com/googleapis/java-bigquery/commit/4763f73ad854ca4bfdddbbdc0bb43fe639238665)) +* Update SimpleApp to explicitly set project id ([#3534](https://github.com/googleapis/java-bigquery/issues/3534)) ([903a0f7](https://github.com/googleapis/java-bigquery/commit/903a0f7db0926f3d166eebada1710413056fb4a2)) + +## [2.43.1](https://github.com/googleapis/java-bigquery/compare/v2.43.0...v2.43.1) (2024-10-09) + + +### Dependencies + +* Update actions/checkout action to v4.2.1 ([#3520](https://github.com/googleapis/java-bigquery/issues/3520)) ([ad8175a](https://github.com/googleapis/java-bigquery/commit/ad8175af06d5308a9366f8109055d61c115a4852)) +* Update actions/upload-artifact action to v4.4.1 ([#3521](https://github.com/googleapis/java-bigquery/issues/3521)) ([dc21975](https://github.com/googleapis/java-bigquery/commit/dc21975cc6f3597d8f789f12a58feaa5b9b94da0)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240919-2.0.0 
([#3514](https://github.com/googleapis/java-bigquery/issues/3514)) ([9fe3829](https://github.com/googleapis/java-bigquery/commit/9fe382927ff4718252e22ac20c4e012f490e6b0e))
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.37.0 ([bf4d37a](https://github.com/googleapis/java-bigquery/commit/bf4d37a15f13ada3cf0045b2d45355193d2c2f34))
+* Update github/codeql-action action to v2.26.11 ([#3517](https://github.com/googleapis/java-bigquery/issues/3517)) ([ac736bb](https://github.com/googleapis/java-bigquery/commit/ac736bb50bf4b2e629dcbfe7de90b846e07038e4))
+* Update github/codeql-action action to v2.26.12 ([#3522](https://github.com/googleapis/java-bigquery/issues/3522)) ([fdf8dc4](https://github.com/googleapis/java-bigquery/commit/fdf8dc4b7cb4e26939da10002e47c810d71bad6c))
+
+## [2.43.0](https://github.com/googleapis/java-bigquery/compare/v2.42.4...v2.43.0) (2024-10-01)
+
+
+### Features
+
+* Add max staleness to ExternalTableDefinition ([#3499](https://github.com/googleapis/java-bigquery/issues/3499)) ([f1ebd5b](https://github.com/googleapis/java-bigquery/commit/f1ebd5be5877a68f76efafc30e3b5b0763f343c5))
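+
+A best-effort sketch of the max-staleness feature above (the builder method name follows #3499 but may differ; the bucket path and schema are placeholders):
+
+```java
+import com.google.cloud.bigquery.ExternalTableDefinition;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FormatOptions;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+
+final class MaxStalenessSketch {
+  static ExternalTableDefinition define() {
+    Schema schema = Schema.of(Field.of("name", StandardSQLTypeName.STRING));
+    return ExternalTableDefinition.newBuilder(
+            "gs://my-bucket/data/*.csv", schema, FormatOptions.csv())
+        // INTERVAL-style bound: cached metadata older than 8 hours is considered stale.
+        .setMaxStaleness("0-0 0 8:0:0")
+        .build();
+  }
+}
+```
+
+## [2.42.4](https://github.com/googleapis/java-bigquery/compare/v2.42.3...v2.42.4) (2024-09-30)
+
+
+### Dependencies
+
+* Update actions/checkout action to v4.2.0 ([#3495](https://github.com/googleapis/java-bigquery/issues/3495)) ([b57fefb](https://github.com/googleapis/java-bigquery/commit/b57fefbdfee7b8dacdb12502d1df72af21323b51))
+* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.51.0 ([#3480](https://github.com/googleapis/java-bigquery/issues/3480)) ([986b036](https://github.com/googleapis/java-bigquery/commit/986b036a022c8f68db59dd9d5944f3b724777533))
+* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.53.0 ([#3504](https://github.com/googleapis/java-bigquery/issues/3504)) ([57ce901](https://github.com/googleapis/java-bigquery/commit/57ce9018448ebf4f09d3ecf9760054ebd117bc36))
+* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240905-2.0.0 ([#3483](https://github.com/googleapis/java-bigquery/issues/3483)) ([a6508a2](https://github.com/googleapis/java-bigquery/commit/a6508a29f81b6729e41e827096e90f1d1bf07f4d))
+* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.55.0 ([#3481](https://github.com/googleapis/java-bigquery/issues/3481)) ([8908cfd](https://github.com/googleapis/java-bigquery/commit/8908cfd82332d09997a5538113fbe8e382f52c4a))
+* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.57.0 ([#3505](https://github.com/googleapis/java-bigquery/issues/3505)) ([6e78f56](https://github.com/googleapis/java-bigquery/commit/6e78f56d17bb0d30b361220c86b1c66f21e9bd48))
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.36.0 ([#3490](https://github.com/googleapis/java-bigquery/issues/3490)) ([a72c582](https://github.com/googleapis/java-bigquery/commit/a72c5825c93f359d295fb78e0e541752f535876b))
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.36.1 ([#3496](https://github.com/googleapis/java-bigquery/issues/3496)) ([8f2e5c5](https://github.com/googleapis/java-bigquery/commit/8f2e5c542760ecd7c217c36c80cb3b5aebee6a73))
+* Update dependency ubuntu to v24 ([#3498](https://github.com/googleapis/java-bigquery/issues/3498)) ([4f87ade](https://github.com/googleapis/java-bigquery/commit/4f87adec6c010b572675f98b651f88d14323e2e2))
+* Update github/codeql-action action to v2.26.10 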
([#3506](https://github.com/googleapis/java-bigquery/issues/3506)) ([ca71294](https://github.com/googleapis/java-bigquery/commit/ca712948b1adfb26bb1f9ef2250be10fe45d3424))
+* Update github/codeql-action action to v2.26.7 ([#3482](https://github.com/googleapis/java-bigquery/issues/3482)) ([e2c94b6](https://github.com/googleapis/java-bigquery/commit/e2c94b601781ebe236c25cd3f40059e7543ba387))
+* Update github/codeql-action action to v2.26.8 ([#3488](https://github.com/googleapis/java-bigquery/issues/3488)) ([a6d75de](https://github.com/googleapis/java-bigquery/commit/a6d75de60b822dcc5433afab55b5d392e6a6caf5))
+* Update github/codeql-action action to v2.26.9 ([#3494](https://github.com/googleapis/java-bigquery/issues/3494)) ([8154043](https://github.com/googleapis/java-bigquery/commit/815404319a43a8a14d1d8aaa8ab22dd924b48175))
+
+## [2.42.3](https://github.com/googleapis/java-bigquery/compare/v2.42.2...v2.42.3) (2024-09-12)
+
+
+### Dependencies
+
+* Update actions/upload-artifact action to v4.4.0 ([#3467](https://github.com/googleapis/java-bigquery/issues/3467)) ([08b28c5](https://github.com/googleapis/java-bigquery/commit/08b28c510a2280119a03da3caa385ec31e0c944c))
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.35.0 ([#3472](https://github.com/googleapis/java-bigquery/issues/3472)) ([fa9ac5d](https://github.com/googleapis/java-bigquery/commit/fa9ac5d73ec4f21ab7d12949e413b4ee9d11aa6d))
+
+## [2.42.2](https://github.com/googleapis/java-bigquery/compare/v2.42.1...v2.42.2) (2024-08-29)
+
+
+### Bug Fixes
+
+* ExecuteSelect now uses provided credentials instead of GOOGLE_APP… (see the sketch below) ([#3465](https://github.com/googleapis/java-bigquery/issues/3465)) ([cd82235](https://github.com/googleapis/java-bigquery/commit/cd82235475310cacf1f607a412418be97c83559f))
+
+
+### Dependencies
+
+* Update actions/upload-artifact action to v4.3.5 ([#3456](https://github.com/googleapis/java-bigquery/issues/3456)) ([f00977c](https://github.com/googleapis/java-bigquery/commit/f00977ccf60227bf1415795da5b6e0a208f21b2c))
+* Update actions/upload-artifact action to v4.3.5 ([#3462](https://github.com/googleapis/java-bigquery/issues/3462)) ([e1c6e92](https://github.com/googleapis/java-bigquery/commit/e1c6e92813c739fcd861e0622413b74c638cb547))
+* Update actions/upload-artifact action to v4.3.6 ([#3463](https://github.com/googleapis/java-bigquery/issues/3463)) ([ba91227](https://github.com/googleapis/java-bigquery/commit/ba91227b972acb1d0796d5a9470ba790dfb8d5b0))
+* Update github/codeql-action action to v2.26.6 ([#3464](https://github.com/googleapis/java-bigquery/issues/3464)) ([2aeb44d](https://github.com/googleapis/java-bigquery/commit/2aeb44d8b2ff5fa264cb14a8fc31e9494d77cb6b))
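+
+A sketch of the behavior fixed above: constructing the client with explicit credentials so that `Connection#executeSelect` uses them rather than falling back to application-default credentials (the key path is a placeholder):
+
+```java
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.BigQueryResult;
+import com.google.cloud.bigquery.Connection;
+import com.google.cloud.bigquery.ConnectionSettings;
+import java.io.FileInputStream;
+
+public class ExecuteSelectWithCredentials {
+  public static void main(String[] args) throws Exception {
+    // Hypothetical service-account key file; substitute your own path.
+    GoogleCredentials credentials =
+        GoogleCredentials.fromStream(new FileInputStream("/path/to/key.json"));
+    BigQuery bigQuery =
+        BigQueryOptions.newBuilder().setCredentials(credentials).build().getService();
+    Connection connection =
+        bigQuery.createConnection(ConnectionSettings.newBuilder().setUseReadAPI(false).build());
+    BigQueryResult result = connection.executeSelect("SELECT 1");
+    System.out.println("Total rows: " + result.getTotalRows());
+  }
+}
+```
+
+## [2.42.1](https://github.com/googleapis/java-bigquery/compare/v2.42.0...v2.42.1) (2024-08-27)
+
+
+### Bug Fixes
+
+* NPE for executeSelect nonFast path with empty result ([#3445](https://github.com/googleapis/java-bigquery/issues/3445)) ([d0d758a](https://github.com/googleapis/java-bigquery/commit/d0d758a6e5e90502491eefa64e3a7409bdcea6a9))
+
+
+### Dependencies
+
+* Update actions/upload-artifact action to v4.3.5 ([#3420](https://github.com/googleapis/java-bigquery/issues/3420)) ([d5ec87d](https://github.com/googleapis/java-bigquery/commit/d5ec87d16f64c231c8bfd87635952cb1a04f5e25))
+* Update actions/upload-artifact action to v4.3.5 ([#3422](https://github.com/googleapis/java-bigquery/issues/3422)) ([c7d07b3](https://github.com/googleapis/java-bigquery/commit/c7d07b3f1d6fa2c2259fa7315b284bcaf48ee5f2))
+* Update actions/upload-artifact 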
action to v4.3.5 ([#3424](https://github.com/googleapis/java-bigquery/issues/3424)) ([a9d6869](https://github.com/googleapis/java-bigquery/commit/a9d6869251fa3df80d639c6998b62992468d6625)) +* Update actions/upload-artifact action to v4.3.5 ([#3427](https://github.com/googleapis/java-bigquery/issues/3427)) ([022eb57](https://github.com/googleapis/java-bigquery/commit/022eb578ae0b6f02e943662c8d4e453590f7c209)) +* Update actions/upload-artifact action to v4.3.5 ([#3430](https://github.com/googleapis/java-bigquery/issues/3430)) ([c7aacba](https://github.com/googleapis/java-bigquery/commit/c7aacbaeddc4809e283c6dfcdedd9610eac7730f)) +* Update actions/upload-artifact action to v4.3.5 ([#3432](https://github.com/googleapis/java-bigquery/issues/3432)) ([b7e8244](https://github.com/googleapis/java-bigquery/commit/b7e8244cffdef926465e2d2700766b98ad687247)) +* Update actions/upload-artifact action to v4.3.5 ([#3436](https://github.com/googleapis/java-bigquery/issues/3436)) ([ccefd6e](https://github.com/googleapis/java-bigquery/commit/ccefd6e755042b1e4c2aaec10228abb05779ed87)) +* Update actions/upload-artifact action to v4.3.5 ([#3440](https://github.com/googleapis/java-bigquery/issues/3440)) ([916fe9a](https://github.com/googleapis/java-bigquery/commit/916fe9ad67e5162a9f24852a96e40a2051ebffbd)) +* Update actions/upload-artifact action to v4.3.5 ([#3443](https://github.com/googleapis/java-bigquery/issues/3443)) ([187f099](https://github.com/googleapis/java-bigquery/commit/187f099edbf785e3ef50ae28fce6ae194d44dfb3)) +* Update actions/upload-artifact action to v4.3.5 ([#3444](https://github.com/googleapis/java-bigquery/issues/3444)) ([04aea5e](https://github.com/googleapis/java-bigquery/commit/04aea5e1d0eeab02f8ea92ff3467c64507dc05c9)) +* Update actions/upload-artifact action to v4.3.5 ([#3449](https://github.com/googleapis/java-bigquery/issues/3449)) ([c6e93cd](https://github.com/googleapis/java-bigquery/commit/c6e93cd1996f2feca3c79bf5ec4a079bd821c0f6)) +* Update actions/upload-artifact action to v4.3.5 ([#3455](https://github.com/googleapis/java-bigquery/issues/3455)) ([fbfc106](https://github.com/googleapis/java-bigquery/commit/fbfc1064688ba594a0d232c413e6f8b54558590f)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.49.0 ([#3417](https://github.com/googleapis/java-bigquery/issues/3417)) ([66336a8](https://github.com/googleapis/java-bigquery/commit/66336a8989681a7c5c3d901c11c7fc6cef0b9fef)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.50.0 ([#3448](https://github.com/googleapis/java-bigquery/issues/3448)) ([2c12839](https://github.com/googleapis/java-bigquery/commit/2c128398b04c28ccd0844d028e2f8c467f8723f0)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240714-2.0.0 ([#3412](https://github.com/googleapis/java-bigquery/issues/3412)) ([8a48fd1](https://github.com/googleapis/java-bigquery/commit/8a48fd1eb6762e42bbdc49d1aa4ebab36c3e8e26)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240727-2.0.0 ([#3421](https://github.com/googleapis/java-bigquery/issues/3421)) ([91d780b](https://github.com/googleapis/java-bigquery/commit/91d780b0db2b9b05923b60621cf80251293be184)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240727-2.0.0 ([#3423](https://github.com/googleapis/java-bigquery/issues/3423)) ([16f350c](https://github.com/googleapis/java-bigquery/commit/16f350c28ec60dc4011b77cbda6416c9de45d431)) +* Update dependency 
com.google.apis:google-api-services-bigquery to v2-rev20240727-2.0.0 ([#3428](https://github.com/googleapis/java-bigquery/issues/3428)) ([9ae6eca](https://github.com/googleapis/java-bigquery/commit/9ae6ecac3337eb19bced14b9fcd7ce74580d7326))
+* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240803-2.0.0 ([#3435](https://github.com/googleapis/java-bigquery/issues/3435)) ([b4e20db](https://github.com/googleapis/java-bigquery/commit/b4e20db60b30dac9039407d724b8f7c816301e5c))
+* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240815-2.0.0 ([#3454](https://github.com/googleapis/java-bigquery/issues/3454)) ([8796aee](https://github.com/googleapis/java-bigquery/commit/8796aee5f669414169dc8baf88f9121697f4cd04))
+* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.9.0 ([c4afbef](https://github.com/googleapis/java-bigquery/commit/c4afbef9d4df03c798241d56d8988adb5724d008))
+* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.53.0 ([#3418](https://github.com/googleapis/java-bigquery/issues/3418)) ([6cff7f0](https://github.com/googleapis/java-bigquery/commit/6cff7f0c2241223c529321e2b613f15c84ecbdcc))
+* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.54.0 ([#3450](https://github.com/googleapis/java-bigquery/issues/3450)) ([cc9da95](https://github.com/googleapis/java-bigquery/commit/cc9da9576fa276afe069caff075c50e41e412ce1))
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.34.0 ([#3433](https://github.com/googleapis/java-bigquery/issues/3433)) ([801f441](https://github.com/googleapis/java-bigquery/commit/801f44172f7be43e0649a116fb0bb556507fc572))
+* Update github/codeql-action action to v2.26.2 ([#3426](https://github.com/googleapis/java-bigquery/issues/3426)) ([0a6574f](https://github.com/googleapis/java-bigquery/commit/0a6574fa11aa83b5c899f1dcd3b1132aa4f46ebd))
+* Update github/codeql-action action to v2.26.3 ([#3438](https://github.com/googleapis/java-bigquery/issues/3438)) ([390e182](https://github.com/googleapis/java-bigquery/commit/390e1824bffef17e85d0ec142b4fcca6dff80a9c))
+* Update github/codeql-action action to v2.26.5 ([#3446](https://github.com/googleapis/java-bigquery/issues/3446)) ([58aacc5](https://github.com/googleapis/java-bigquery/commit/58aacc5a92e18b790a03c0b9b4a75062928768c2))
+
+
+### Documentation
+
+* Update iam policy sample user to be consistent with other languages ([#3429](https://github.com/googleapis/java-bigquery/issues/3429)) ([2fc15b3](https://github.com/googleapis/java-bigquery/commit/2fc15b3e9f89289f0a047bb0a6ae7fb5bb71d253))
+
+## [2.42.0](https://github.com/googleapis/java-bigquery/compare/v2.41.0...v2.42.0) (2024-07-28)
+
+
+### Features
+
+* Add ability to specify RetryOptions and BigQueryRetryConfig when creating a job and in waitFor (see the sketch after this list) ([#3398](https://github.com/googleapis/java-bigquery/issues/3398)) ([1f91ae7](https://github.com/googleapis/java-bigquery/commit/1f91ae7fa2100a05f969a7429cb619a2b8b42dee))
+* Add additional parameters to CsvOptions and ParquetOptions ([#3370](https://github.com/googleapis/java-bigquery/issues/3370)) ([34f16fb](https://github.com/googleapis/java-bigquery/commit/34f16fbaad236f5a6db26d693efde2025913d540))
+* Add remaining Statement Types ([#3381](https://github.com/googleapis/java-bigquery/issues/3381)) ([5f39b19](https://github.com/googleapis/java-bigquery/commit/5f39b19e8839f06d956addb8d95cf05e4b60a3f1))
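+
+A rough illustration of the retry knobs above. The specific overloads added by #3398 may differ; this sketch uses the long-standing `Job#waitFor(RetryOption...)` surface:
+
+```java
+import com.google.cloud.RetryOption;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+
+public class WaitForWithRetries {
+  public static void main(String[] args) throws InterruptedException {
+    BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService();
+    Job job = bigQuery.create(JobInfo.of(QueryJobConfiguration.of("SELECT 1")));
+    // Cap polling at five attempts with a gentle backoff between them.
+    Job completed =
+        job.waitFor(RetryOption.maxAttempts(5), RetryOption.retryDelayMultiplier(1.5));
+    System.out.println(
+        "Succeeded: " + (completed != null && completed.getStatus().getError() == null));
+  }
+}
+```
+
+
+### Bug Fixes
+
+* Null field mode inconsistency 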
([#2863](https://github.com/googleapis/java-bigquery/issues/2863)) ([b9e96e3](https://github.com/googleapis/java-bigquery/commit/b9e96e3aa738a1813ad452cf6141f792f437e8de)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.3.4 ([#3382](https://github.com/googleapis/java-bigquery/issues/3382)) ([efa1aef](https://github.com/googleapis/java-bigquery/commit/efa1aef0a579baa379adbfbd2ee12f4ee5f3d987)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.48.0 ([#3374](https://github.com/googleapis/java-bigquery/issues/3374)) ([45b7f20](https://github.com/googleapis/java-bigquery/commit/45b7f20e1b324d9b77183c0f8bb5ae14724d6aef)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240616-2.0.0 ([#3368](https://github.com/googleapis/java-bigquery/issues/3368)) ([ceb270c](https://github.com/googleapis/java-bigquery/commit/ceb270c5cc2af4d69948ac89af1d72990fe1a7ee)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240623-2.0.0 ([#3384](https://github.com/googleapis/java-bigquery/issues/3384)) ([e1de34f](https://github.com/googleapis/java-bigquery/commit/e1de34f0c4c67d75bcf15f35fe86c411b61d04ac)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240629-2.0.0 ([#3392](https://github.com/googleapis/java-bigquery/issues/3392)) ([352562d](https://github.com/googleapis/java-bigquery/commit/352562da445e35a8207bcf77442130867f32e52d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.52.0 ([#3375](https://github.com/googleapis/java-bigquery/issues/3375)) ([2115c04](https://github.com/googleapis/java-bigquery/commit/2115c0448b242ddd887f2bac3d68c45847273c3d)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.33.0 ([#3405](https://github.com/googleapis/java-bigquery/issues/3405)) ([a4a9999](https://github.com/googleapis/java-bigquery/commit/a4a9999def9805b8fecbc1820cc9f6f6c1997991)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.10.3 ([#3371](https://github.com/googleapis/java-bigquery/issues/3371)) ([2e804c5](https://github.com/googleapis/java-bigquery/commit/2e804c56eeef5009cc46c7544fe9b04bfdd65d7a)) +* Update github/codeql-action action to v2.25.11 ([#3376](https://github.com/googleapis/java-bigquery/issues/3376)) ([f1e0014](https://github.com/googleapis/java-bigquery/commit/f1e0014dca5ca04522796b44ff313696d2b41176)) +* Update github/codeql-action action to v2.25.12 ([#3387](https://github.com/googleapis/java-bigquery/issues/3387)) ([af60b30](https://github.com/googleapis/java-bigquery/commit/af60b30cd774992c5d82063106471926dc6aaa6e)) +* Update github/codeql-action action to v2.25.13 ([#3395](https://github.com/googleapis/java-bigquery/issues/3395)) ([95c8d6f](https://github.com/googleapis/java-bigquery/commit/95c8d6f65c5c5355fc52a0a2b54002d8f9cdb1ef)) +* Update github/codeql-action action to v2.25.15 ([#3402](https://github.com/googleapis/java-bigquery/issues/3402)) ([a61ce7d](https://github.com/googleapis/java-bigquery/commit/a61ce7d710e2e8b000ee25ec9d295abbc2b63dd1)) +* Update ossf/scorecard-action action to v2.4.0 ([#3408](https://github.com/googleapis/java-bigquery/issues/3408)) ([66777a2](https://github.com/googleapis/java-bigquery/commit/66777a2c3c7b0462330bd1c820e2f04ad4727465)) + + +### Documentation + +* Add short mode query sample ([#3397](https://github.com/googleapis/java-bigquery/issues/3397)) ([6dca6ff](https://github.com/googleapis/java-bigquery/commit/6dca6fffe96937db87713e45f0501d64fd5b544f)) +* Add 
simple query connection read api sample ([#3394](https://github.com/googleapis/java-bigquery/issues/3394)) ([d407baa](https://github.com/googleapis/java-bigquery/commit/d407baa3e95ad894d4028aa46def7ca8efe930c3)) + +## [2.41.0](https://github.com/googleapis/java-bigquery/compare/v2.40.3...v2.41.0) (2024-06-25) + + +### Features + +* Add columnNameCharacterMap to LoadJobConfiguration ([#3356](https://github.com/googleapis/java-bigquery/issues/3356)) ([2f3cbe3](https://github.com/googleapis/java-bigquery/commit/2f3cbe39619bcc93cb7d504417accd84b418dd41)) +* Add MetadataCacheMode to ExternalTableDefinition ([#3351](https://github.com/googleapis/java-bigquery/issues/3351)) ([2814dc4](https://github.com/googleapis/java-bigquery/commit/2814dc49dfdd5671257b6a9933a5dd381d889dd1)) + + +### Bug Fixes + +* Add clustering value to ListTables result ([#3359](https://github.com/googleapis/java-bigquery/issues/3359)) ([5d52bc9](https://github.com/googleapis/java-bigquery/commit/5d52bc9f4ef93f84200335685901c6ac0256b769)) + + +### Dependencies + +* Update actions/checkout action to v4.1.7 ([#3349](https://github.com/googleapis/java-bigquery/issues/3349)) ([0857234](https://github.com/googleapis/java-bigquery/commit/085723491e4aca58d670c313bc18b0c044cfdca8)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240602-2.0.0 ([#3273](https://github.com/googleapis/java-bigquery/issues/3273)) ([7b7e52b](https://github.com/googleapis/java-bigquery/commit/7b7e52b339f57af752c573a222df68196f1808f5)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.32.0 ([#3360](https://github.com/googleapis/java-bigquery/issues/3360)) ([4420996](https://github.com/googleapis/java-bigquery/commit/4420996e89fef49270771bb4f01ffa4e871e7885)) +* Update github/codeql-action action to v2.25.10 ([#3348](https://github.com/googleapis/java-bigquery/issues/3348)) ([8b6feff](https://github.com/googleapis/java-bigquery/commit/8b6feffa0e8add73a7587ce1762989713c2af38b)) + +## [2.40.3](https://github.com/googleapis/java-bigquery/compare/v2.40.2...v2.40.3) (2024-06-12) + + +### Dependencies + +* Update actions/checkout action to v4.1.6 ([#3309](https://github.com/googleapis/java-bigquery/issues/3309)) ([c7d6362](https://github.com/googleapis/java-bigquery/commit/c7d6362d47cb985abf3c08f5c4e89f651480c4c8)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.46.0 ([#3328](https://github.com/googleapis/java-bigquery/issues/3328)) ([a6661ad](https://github.com/googleapis/java-bigquery/commit/a6661ade5e297102ff54d314fa55caac9201ac67)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.47.0 ([#3342](https://github.com/googleapis/java-bigquery/issues/3342)) ([79e34c2](https://github.com/googleapis/java-bigquery/commit/79e34c256ddf99a43d546788535a9e8fa0e97e6d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.50.0 ([#3330](https://github.com/googleapis/java-bigquery/issues/3330)) ([cabb0ab](https://github.com/googleapis/java-bigquery/commit/cabb0ab1bc09ba10c43a2cf109f1390268441693)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.51.0 ([#3343](https://github.com/googleapis/java-bigquery/issues/3343)) ([e3b934f](https://github.com/googleapis/java-bigquery/commit/e3b934fa133679a2d61baeea6f4de15eed287f7f)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.31.0 ([#3335](https://github.com/googleapis/java-bigquery/issues/3335)) 
([0623455](https://github.com/googleapis/java-bigquery/commit/062345501c392c2a186c3cd82dee8d20ceda2a0a)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.36.0 ([#3305](https://github.com/googleapis/java-bigquery/issues/3305)) ([d05e554](https://github.com/googleapis/java-bigquery/commit/d05e5547e97f52ccfdcec1d6fe167e6587dd00c6)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.36.0 ([#3306](https://github.com/googleapis/java-bigquery/issues/3306)) ([0eeed66](https://github.com/googleapis/java-bigquery/commit/0eeed668b5f88f9c59ef6c1b309e7a81f5c1f0e9)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.10.2 ([#3311](https://github.com/googleapis/java-bigquery/issues/3311)) ([3912a92](https://github.com/googleapis/java-bigquery/commit/3912a9232788e09c10fc4e91ef6d65514fc106e4)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.10.2 ([#3312](https://github.com/googleapis/java-bigquery/issues/3312)) ([9737a5d](https://github.com/googleapis/java-bigquery/commit/9737a5d63d545ed197879bbd9dbfd3f1dbc15d93)) +* Update github/codeql-action action to v2.25.6 ([#3307](https://github.com/googleapis/java-bigquery/issues/3307)) ([8999d33](https://github.com/googleapis/java-bigquery/commit/8999d337b92d7030825c5a36686ddd082cadc816)) +* Update github/codeql-action action to v2.25.7 ([#3334](https://github.com/googleapis/java-bigquery/issues/3334)) ([768342d](https://github.com/googleapis/java-bigquery/commit/768342da168921251c34163b51ffc3cddfefc0ce)) +* Update github/codeql-action action to v2.25.8 ([#3338](https://github.com/googleapis/java-bigquery/issues/3338)) ([8673fe5](https://github.com/googleapis/java-bigquery/commit/8673fe55e6d33e50c32a520a848cddc25eb6088e)) + +## [2.40.2](https://github.com/googleapis/java-bigquery/compare/v2.40.1...v2.40.2) (2024-05-26) + + +### Bug Fixes + +* Fixing NPE bug by adding to if clause ([#3290](https://github.com/googleapis/java-bigquery/issues/3290)) ([127cff9](https://github.com/googleapis/java-bigquery/commit/127cff9f964c5d2d912d26276474822fd137a64b)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.45.0 ([#3295](https://github.com/googleapis/java-bigquery/issues/3295)) ([c659523](https://github.com/googleapis/java-bigquery/commit/c659523a7ca25bc12282f0e28fff18ec9221f48e)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.49.0 ([#3296](https://github.com/googleapis/java-bigquery/issues/3296)) ([7d148d5](https://github.com/googleapis/java-bigquery/commit/7d148d5bb1d6e1e6b0a421749fcbb73a6fbe61e0)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.30.1 ([#3310](https://github.com/googleapis/java-bigquery/issues/3310)) ([641f1a8](https://github.com/googleapis/java-bigquery/commit/641f1a8325f0f43aeffd135654480a721f26e4e7)) +* Update github/codeql-action action to v2.25.4 ([#3291](https://github.com/googleapis/java-bigquery/issues/3291)) ([13bb5aa](https://github.com/googleapis/java-bigquery/commit/13bb5aaa6e4bac7144a369c9fbb5ae8922eb36ee)) +* Update ossf/scorecard-action action to v2.3.3 ([#3304](https://github.com/googleapis/java-bigquery/issues/3304)) ([d096082](https://github.com/googleapis/java-bigquery/commit/d09608211aed5dc49e2b5e51affe7942403ed267)) + +## [2.40.1](https://github.com/googleapis/java-bigquery/compare/v2.40.0...v2.40.1) (2024-05-06) + + +### Dependencies + +* Update actions/checkout action 
([#3286](https://github.com/googleapis/java-bigquery/issues/3286)) ([4d8f3fb](https://github.com/googleapis/java-bigquery/commit/4d8f3fb7fd3d8f6e9484c809d6690f8078ef7a30))
+
+## [2.40.0](https://github.com/googleapis/java-bigquery/compare/v2.39.1...v2.40.0) (2024-05-06)
+
+
+### Features
+
+* Add getStringOrDefault method to FieldValue ([#3255](https://github.com/googleapis/java-bigquery/issues/3255)) ([8bac33a](https://github.com/googleapis/java-bigquery/commit/8bac33a32e0239ffa03715ad0c6440527cb2e01e))
+
+
+### Dependencies
+
+* Update dependency com.google.cloud:sdk-platform-java-config to v3.30.0 ([#3279](https://github.com/googleapis/java-bigquery/issues/3279)) ([67f2ea4](https://github.com/googleapis/java-bigquery/commit/67f2ea47f78240b6def27241e21fd298a75920b2))
+
+## [2.39.1](https://github.com/googleapis/java-bigquery/compare/v2.39.0...v2.39.1) (2024-04-29)
+
+
+### Bug Fixes
+
+* @Nullable annotations on builder methods ([#3222](https://github.com/googleapis/java-bigquery/issues/3222)) ([0c5eed1](https://github.com/googleapis/java-bigquery/commit/0c5eed1a18409f120a1243bd5da1db2aa4f9c206))
+
+
+### Dependencies
+
+* Update actions/checkout action ([#3267](https://github.com/googleapis/java-bigquery/issues/3267)) ([c297ed2](https://github.com/googleapis/java-bigquery/commit/c297ed2c77e36257451b5c12e4988f3293cdbb88))
+* Update actions/upload-artifact action to v4.3.3 ([#3258](https://github.com/googleapis/java-bigquery/issues/3258)) ([5215235](https://github.com/googleapis/java-bigquery/commit/52152350a2a6218b51ebf3d7dd6beb2699064a3c))
+* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.44.0 ([#3270](https://github.com/googleapis/java-bigquery/issues/3270)) ([ee09ab6](https://github.com/googleapis/java-bigquery/commit/ee09ab68ea2be824aaf4e3d08b67e3bfbab2977f))
+* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.5.0 ([e7c6201](https://github.com/googleapis/java-bigquery/commit/e7c620119321b673c19b99adb79247cd3c52cd67))
+* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.48.0 ([#3271](https://github.com/googleapis/java-bigquery/issues/3271)) ([3b6e0d5](https://github.com/googleapis/java-bigquery/commit/3b6e0d5e3d26b8e2de412aa926a638d72562d4a0))
+* Update github/codeql-action action to v2.25.2 ([#3260](https://github.com/googleapis/java-bigquery/issues/3260)) ([3302dc4](https://github.com/googleapis/java-bigquery/commit/3302dc46e3e2c6a173798ef7f1642d3d4cb20332))
+* Update github/codeql-action action to v2.25.3 ([#3268](https://github.com/googleapis/java-bigquery/issues/3268)) ([1cf2377](https://github.com/googleapis/java-bigquery/commit/1cf237702e16952029741c306aa57cb3558a663f))
+
+## [2.39.0](https://github.com/googleapis/java-bigquery/compare/v2.38.2...v2.39.0) (2024-04-22)
+
+
+### Features
+
+* Add ExportDataStats to QueryStatistics ([#3244](https://github.com/googleapis/java-bigquery/issues/3244)) ([e91be80](https://github.com/googleapis/java-bigquery/commit/e91be80ebdd39c2448914ff9aa1742f3079d0bb8))
+* Add new fields to copy job statistics ([#3205](https://github.com/googleapis/java-bigquery/issues/3205)) ([64bdda8](https://github.com/googleapis/java-bigquery/commit/64bdda84fe06726042a41f2a89ac5c067f9bc949))
+* Add Range object to allow reading range value ([#3236](https://github.com/googleapis/java-bigquery/issues/3236)) ([2c3399d](https://github.com/googleapis/java-bigquery/commit/2c3399dd10fecc01237158a3cdeee966b38746f2))
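+
+A best-effort sketch of reading a `RANGE` column via the new `Range` object (the accessor names follow #3236 but may differ; `booking_dates` is a made-up column):
+
+```java
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.Range;
+
+final class RangeReader {
+  // Print a RANGE value; unbounded endpoints come back as SQL NULL.
+  static void print(FieldValueList row) {
+    Range range = row.get("booking_dates").getRangeValue();
+    FieldValue start = range.getStart();
+    FieldValue end = range.getEnd();
+    System.out.printf(
+        "[%s, %s)%n",
+        start.isNull() ? "UNBOUNDED" : start.getStringValue(),
+        end.isNull() ? "UNBOUNDED" : end.getStringValue());
+  }
+}
+```
+* Add support for inserting Range values 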
([#3246](https://github.com/googleapis/java-bigquery/issues/3246)) ([ff1ebc6](https://github.com/googleapis/java-bigquery/commit/ff1ebc66e458519deca37275ba91650133188683)) +* Add support for ObjectMetadata ([#3217](https://github.com/googleapis/java-bigquery/issues/3217)) ([975df05](https://github.com/googleapis/java-bigquery/commit/975df05b95b714c5574155d5e09860885c4b58f2)) +* Add totalSlotMs to JobStatistics ([#3250](https://github.com/googleapis/java-bigquery/issues/3250)) ([75ea095](https://github.com/googleapis/java-bigquery/commit/75ea095b0a194d6be4951795bc3a616ace389ff2)) + + +### Bug Fixes + +* Fix BigQuery#listDatasets to include dataset location in the response ([#3238](https://github.com/googleapis/java-bigquery/issues/3238)) ([c50c17b](https://github.com/googleapis/java-bigquery/commit/c50c17bc4eedd0c34f440b697a8b26a5354c9c4f)) +* Remove @InternalApi from TableResult ([#3257](https://github.com/googleapis/java-bigquery/issues/3257)) ([19d92a1](https://github.com/googleapis/java-bigquery/commit/19d92a144cd4d86fee6dd420e574c3a1a928642c)) + + +### Dependencies + +* Update actions/checkout action ([#3256](https://github.com/googleapis/java-bigquery/issues/3256)) ([6df3a32](https://github.com/googleapis/java-bigquery/commit/6df3a325b7f71ed1eb2054dd0c3a27cfd6cda2f2)) +* Update actions/upload-artifact action to v4.3.2 ([#3248](https://github.com/googleapis/java-bigquery/issues/3248)) ([066b51f](https://github.com/googleapis/java-bigquery/commit/066b51fb088fc67c83a45a219897752876889136)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.40.0 ([#3210](https://github.com/googleapis/java-bigquery/issues/3210)) ([bf7e97e](https://github.com/googleapis/java-bigquery/commit/bf7e97e1c936a419a34529a316c4f538872dd20b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.41.0 ([#3219](https://github.com/googleapis/java-bigquery/issues/3219)) ([9d71b8b](https://github.com/googleapis/java-bigquery/commit/9d71b8b9a9231ea5d7cfa93c7bcbb533d6a3a900)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.43.0 ([#3225](https://github.com/googleapis/java-bigquery/issues/3225)) ([a897306](https://github.com/googleapis/java-bigquery/commit/a8973067348fa09acd91c5b01f048c43fac93894)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240323-2.0.0 ([#3239](https://github.com/googleapis/java-bigquery/issues/3239)) ([2c0f48f](https://github.com/googleapis/java-bigquery/commit/2c0f48f86d3c4d5a1a682775c494a9122373858d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.44.0 ([#3211](https://github.com/googleapis/java-bigquery/issues/3211)) ([6993b51](https://github.com/googleapis/java-bigquery/commit/6993b51f8722466b846a7dd3912acbd81e04126c)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.45.0 ([#3220](https://github.com/googleapis/java-bigquery/issues/3220)) ([21ae09c](https://github.com/googleapis/java-bigquery/commit/21ae09ce2c63f790ca77cc5c4c0df16dcb123b59)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.47.0 ([#3226](https://github.com/googleapis/java-bigquery/issues/3226)) ([d45d168](https://github.com/googleapis/java-bigquery/commit/d45d168bf53a8648e2254c8c4305a5d9a390276d)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.28.1 ([#3207](https://github.com/googleapis/java-bigquery/issues/3207)) 
([6204331](https://github.com/googleapis/java-bigquery/commit/6204331953b3922f5ecb1ac0c1868cb6579dd73b))
+* Update dependency org.threeten:threeten-extra to v1.8.0 ([#3242](https://github.com/googleapis/java-bigquery/issues/3242)) ([66d5efd](https://github.com/googleapis/java-bigquery/commit/66d5efded17c42514f98f4af2bc6ba826999a62a))
+* Update github/codeql-action action to v2.24.9 ([#3204](https://github.com/googleapis/java-bigquery/issues/3204)) ([7a24d3e](https://github.com/googleapis/java-bigquery/commit/7a24d3e29f32db58475c1e02ab1c13ee8941c27d))
+* Update github/codeql-action action to v2.25.1 ([#3229](https://github.com/googleapis/java-bigquery/issues/3229)) ([aeedf29](https://github.com/googleapis/java-bigquery/commit/aeedf2960700f1742e38469fd26ea70000967cfa))
+
 ## [2.38.2](https://github.com/googleapis/java-bigquery/compare/v2.38.1...v2.38.2) (2024-03-21)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b65dd279c..5456fad05 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -18,9 +18,13 @@ again.
 ## Code reviews
 
 All submissions, including submissions by project members, require review. We
-use GitHub pull requests for this purpose. Consult
-[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
-information on using pull requests.
+use GitHub pull requests for this purpose. Consult the
+[GitHub Help: about pull requests](https://help.github.com/articles/about-pull-requests/)
+article for more information on using pull requests. If you do not have
+permission to create a branch, then fork the repository and submit a pull
+request from the forked repository. Consult the
+[GitHub Help: about forks](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo#about-forks)
+article for more information.
 
 ## Community Guidelines
 
@@ -84,7 +88,7 @@ Code in this repo is formatted with
 [google-java-format](https://github.com/google/google-java-format).
To run formatting on your project, you can run: ``` -mvn com.coveo:fmt-maven-plugin:format +mvn com.spotify.fmt:fmt-maven-plugin:format ``` [1]: https://cloud.google.com/docs/authentication/getting-started#creating_a_service_account diff --git a/README.md b/README.md index cc0001a9c..d8f6c46d8 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google com.google.cloud libraries-bom - 26.20.0 + 26.62.0 pom import @@ -45,7 +45,7 @@ If you are using Maven without the BOM, add this to your dependencies: com.google.cloud google-cloud-bigquery - 2.38.2 + 2.42.2 ``` @@ -53,20 +53,20 @@ If you are using Maven without the BOM, add this to your dependencies: If you are using Gradle 5.x or later, add this to your dependencies: ```Groovy -implementation platform('com.google.cloud:libraries-bom:26.37.0') +implementation platform('com.google.cloud:libraries-bom:26.45.0') implementation 'com.google.cloud:google-cloud-bigquery' ``` If you are using Gradle without BOM, add this to your dependencies: ```Groovy -implementation 'com.google.cloud:google-cloud-bigquery:2.38.2' +implementation 'com.google.cloud:google-cloud-bigquery:2.42.2' ``` If you are using SBT, add this to your dependencies: ```Scala -libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "2.38.2" +libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "2.42.2" ``` @@ -113,7 +113,6 @@ Samples are in the [`samples/`](https://github.com/googleapis/java-bigquery/tree | Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | -| Native Image Bigquery Sample | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/native-image-sample/src/main/java/com/example/bigquery/NativeImageBigquerySample.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/native-image-sample/src/main/java/com/example/bigquery/NativeImageBigquerySample.java) | | Add Column Load Append | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java) | | Add Empty Column | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AddEmptyColumn.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AddEmptyColumn.java) | | Auth Drive Scope | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AuthDriveScope.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthDriveScope.java) | @@ -215,6 +214,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/java-bigquery/tree | Query Pagination | [source 
code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java) | | Query Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryPartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryPartitionedTable.java) | | Query Script | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryScript.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryScript.java) | +| Query Short Mode | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryShortMode.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryShortMode.java) | | Query Total Rows | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryTotalRows.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryTotalRows.java) | | Query With Array Of Structs Named Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayOfStructsNamedParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayOfStructsNamedParameters.java) | | Query With Array Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayParameters.java) | @@ -234,6 +234,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/java-bigquery/tree | Set User Agent | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SetUserAgent.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SetUserAgent.java) | | Simple App | [source 
code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java) | | Simple Query | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java) | +| Simple Query Connection Read Api | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java) | | Table Exists | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/TableExists.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableExists.java) | | Table Insert Rows | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java) | | Table Insert Rows Without Row Ids | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java) | @@ -351,7 +352,7 @@ Java is a registered trademark of Oracle and/or its affiliates. 
[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java11.html [stability-image]: https://img.shields.io/badge/stability-stable-green [maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-bigquery.svg -[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-bigquery/2.38.2 +[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-bigquery/2.42.2 [authentication]: https://github.com/googleapis/google-cloud-java#authentication [auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes [predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles diff --git a/benchmark/pom.xml b/benchmark/pom.xml index 91018403e..18ee2019a 100644 --- a/benchmark/pom.xml +++ b/benchmark/pom.xml @@ -6,7 +6,7 @@ google-cloud-bigquery-parent com.google.cloud - 2.38.4-mi + 2.56.0-mi @@ -37,12 +37,21 @@ org.apache.maven.plugins maven-compiler-plugin - 3.13.0 + 3.14.0 + + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh.version} + + + org.apache.maven.plugins maven-shade-plugin - 3.5.2 + 3.6.0 package @@ -75,7 +84,7 @@ org.apache.maven.plugins maven-deploy-plugin - 3.1.1 + 3.1.4 true @@ -83,7 +92,7 @@ org.sonatype.plugins nexus-staging-maven-plugin - 1.6.13 + 1.7.0 true diff --git a/benchmark/src/main/java/com.google.cloud.bigquery/ConnImplBenchmark.java b/benchmark/src/main/java/com.google.cloud.bigquery/ConnImplBenchmark.java index f202df666..eb239463f 100644 --- a/benchmark/src/main/java/com.google.cloud.bigquery/ConnImplBenchmark.java +++ b/benchmark/src/main/java/com.google.cloud.bigquery/ConnImplBenchmark.java @@ -20,6 +20,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.concurrent.TimeUnit; +import java.util.function.Function; import java.util.logging.Level; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; @@ -48,31 +49,24 @@ public class ConnImplBenchmark { public int rowLimit; private ConnectionSettings connectionSettingsReadAPIEnabled, connectionSettingsReadAPIDisabled; - private long numBuffRows = 100000L; - private final String DATASET = "new_york_taxi_trips"; private final String QUERY = "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; - public static final long NUM_PAGE_ROW_CNT_RATIO = - 10; // ratio of [records in the current page :: total rows] to be met to use read API - public static final long NUM_MIN_RESULT_SIZE = - 200000; // min number of records to use to ReadAPI with @Setup public void setUp() throws IOException { java.util.logging.Logger.getGlobal().setLevel(Level.ALL); - connectionSettingsReadAPIEnabled = - ConnectionSettings.newBuilder() - .setUseReadAPI(true) // enable read api + connectionSettingsReadAPIEnabled = ConnectionSettings.newBuilder() + .setUseReadAPI(true) + .setMaxResults(500L) + .setJobTimeoutMs(Long.MAX_VALUE) .build(); - connectionSettingsReadAPIDisabled = - ConnectionSettings.newBuilder() - .setUseReadAPI(false) // disable read api + connectionSettingsReadAPIDisabled = ConnectionSettings.newBuilder() + .setUseReadAPI(false) .build(); } @Benchmark - // uses bigquery.query public void iterateRecordsWithBigQuery_Query(Blackhole blackhole) throws InterruptedException { String selectQuery = String.format(QUERY, rowLimit); BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService(); @@ -81,81 +75,35 @@ public void 
iterateRecordsWithBigQuery_Query(Blackhole blackhole) throws Interru TableResult result = bigQuery.query(config); long hash = 0L; int cnt = 0; - System.out.print("\n Running"); - // iterate al the records and compute the hash + long lastTime = System.currentTimeMillis(); + System.out.println("\n Running"); for (FieldValueList row : result.iterateAll()) { - hash += - row.get("vendor_id").getStringValue() == null - ? 0 - : row.get("vendor_id").getStringValue().hashCode(); - hash += - row.get("pickup_datetime").getStringValue() == null - ? 0 - : row.get("pickup_datetime").getStringValue().hashCode(); - hash += - row.get("dropoff_datetime").getStringValue() == null - ? 0 - : row.get("dropoff_datetime").getStringValue().hashCode(); - hash += - row.get("passenger_count").getValue() == null - ? 0 - : row.get("passenger_count").getLongValue(); - hash += - row.get("trip_distance").getValue() == null - ? 0 - : row.get("trip_distance").getDoubleValue(); - hash += - row.get("pickup_longitude").getValue() == null - ? 0 - : row.get("pickup_longitude").getDoubleValue(); - hash += - row.get("pickup_latitude").getValue() == null - ? 0 - : row.get("pickup_latitude").getDoubleValue(); - hash += - row.get("rate_code").getStringValue() == null - ? 0 - : row.get("rate_code").getStringValue().hashCode(); - hash += - row.get("store_and_fwd_flag").getStringValue() == null - ? 0 - : row.get("store_and_fwd_flag").getStringValue().hashCode(); - hash += - row.get("payment_type").getStringValue() == null - ? 0 - : row.get("payment_type").getStringValue().hashCode(); - hash += - row.get("pickup_location_id").getStringValue() == null - ? 0 - : row.get("pickup_location_id").getStringValue().hashCode(); - hash += - row.get("dropoff_location_id").getStringValue() == null - ? 0 - : row.get("dropoff_location_id").getStringValue().hashCode(); - hash += - row.get("dropoff_longitude").getValue() == null - ? 0 - : row.get("dropoff_longitude").getDoubleValue(); - hash += - row.get("dropoff_latitude").getValue() == null - ? 0 - : row.get("dropoff_latitude").getDoubleValue(); - hash += - row.get("fare_amount").getValue() == null ? 0 : row.get("fare_amount").getDoubleValue(); - hash += row.get("extra").getValue() == null ? 0 : row.get("extra").getDoubleValue(); - hash += row.get("mta_tax").getValue() == null ? 0 : row.get("mta_tax").getDoubleValue(); - hash += row.get("tip_amount").getValue() == null ? 0 : row.get("tip_amount").getDoubleValue(); - hash += - row.get("tolls_amount").getValue() == null ? 0 : row.get("tolls_amount").getDoubleValue(); - hash += - row.get("imp_surcharge").getValue() == null - ? 0 - : row.get("imp_surcharge").getDoubleValue(); - hash += - row.get("total_amount").getValue() == null ? 
0 : row.get("total_amount").getDoubleValue(); + hash += computeHash(row.get("vendor_id"), FieldValue::getStringValue); + hash += computeHash(row.get("pickup_datetime"), FieldValue::getStringValue); + hash += computeHash(row.get("dropoff_datetime"), FieldValue::getStringValue); + hash += computeHash(row.get("passenger_count"), FieldValue::getLongValue); + hash += computeHash(row.get("trip_distance"), FieldValue::getDoubleValue); + hash += computeHash(row.get("rate_code"), FieldValue::getStringValue); + hash += computeHash(row.get("store_and_fwd_flag"), FieldValue::getStringValue); + hash += computeHash(row.get("payment_type"), FieldValue::getStringValue); + hash += computeHash(row.get("fare_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("extra"), FieldValue::getDoubleValue); + hash += computeHash(row.get("mta_tax"), FieldValue::getDoubleValue); + hash += computeHash(row.get("tip_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("tolls_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("imp_surcharge"), FieldValue::getDoubleValue); + hash += computeHash(row.get("airport_fee"), FieldValue::getDoubleValue); + hash += computeHash(row.get("total_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("pickup_location_id"), FieldValue::getStringValue); + hash += computeHash(row.get("dropoff_location_id"), FieldValue::getStringValue); + hash += computeHash(row.get("data_file_year"), FieldValue::getLongValue); + hash += computeHash(row.get("data_file_month"), FieldValue::getLongValue); - if (++cnt % 100000 == 0) { // just to indicate the progress while long running benchmarks - System.out.print("."); + if (++cnt % 100_000 == 0) { + long now = System.currentTimeMillis(); + long duration = now - lastTime; + System.out.println("ROW " + cnt + " Time: " + duration + " ms"); + lastTime = now; } } System.out.println(cnt + " records processed using bigquery.query"); @@ -202,54 +150,67 @@ public void iterateRecordsWithoutUsingReadAPI(Blackhole blackhole) blackhole.consume(hash); } - // Hashes all the 20 columns of all the rows private long getResultHash(BigQueryResult bigQueryResultSet) throws SQLException { ResultSet rs = bigQueryResultSet.getResultSet(); long hash = 0L; int cnt = 0; - System.out.print("\n Running"); + long lastTime = System.currentTimeMillis(); + System.out.println("\n Running"); while (rs.next()) { - hash += rs.getString("vendor_id") == null ? 0 : rs.getString("vendor_id").hashCode(); - hash += - rs.getString("pickup_datetime") == null ? 0 : rs.getString("pickup_datetime").hashCode(); - hash += - rs.getString("dropoff_datetime") == null - ? 0 - : rs.getString("dropoff_datetime").hashCode(); - hash += rs.getLong("passenger_count"); - hash += rs.getDouble("trip_distance"); - hash += rs.getDouble("pickup_longitude"); - hash += rs.getDouble("pickup_latitude"); - hash += rs.getString("rate_code") == null ? 0 : rs.getString("rate_code").hashCode(); - hash += - rs.getString("store_and_fwd_flag") == null - ? 0 - : rs.getString("store_and_fwd_flag").hashCode(); - hash += rs.getDouble("dropoff_longitude"); - hash += rs.getDouble("dropoff_latitude"); - hash += rs.getString("payment_type") == null ? 
0 : rs.getString("payment_type").hashCode();
-      hash += rs.getDouble("fare_amount");
-      hash += rs.getDouble("extra");
-      hash += rs.getDouble("mta_tax");
-      hash += rs.getDouble("tip_amount");
-      hash += rs.getDouble("tolls_amount");
-      hash += rs.getDouble("imp_surcharge");
-      hash += rs.getDouble("total_amount");
-      hash +=
-          rs.getString("pickup_location_id") == null
-              ? 0
-              : rs.getString("pickup_location_id").hashCode();
-      hash +=
-          rs.getString("dropoff_location_id") == null
-              ? 0
-              : rs.getString("dropoff_location_id").hashCode();
-      if (++cnt % 100000 == 0) { // just to indicate the progress while long running benchmarks
-        System.out.print(".");
+      hash += computeHash(rs, "vendor_id", ResultSet::getString);
+      hash += computeHash(rs, "pickup_datetime", ResultSet::getLong);
+      hash += computeHash(rs, "dropoff_datetime", ResultSet::getLong);
+      hash += computeHash(rs, "passenger_count", ResultSet::getLong);
+      hash += computeHash(rs, "trip_distance", ResultSet::getDouble);
+      hash += computeHash(rs, "rate_code", ResultSet::getString);
+      hash += computeHash(rs, "store_and_fwd_flag", ResultSet::getString);
+      hash += computeHash(rs, "payment_type", ResultSet::getString);
+      hash += computeHash(rs, "fare_amount", ResultSet::getDouble);
+      hash += computeHash(rs, "extra", ResultSet::getDouble);
+      hash += computeHash(rs, "mta_tax", ResultSet::getDouble);
+      hash += computeHash(rs, "tip_amount", ResultSet::getDouble);
+      hash += computeHash(rs, "tolls_amount", ResultSet::getDouble);
+      hash += computeHash(rs, "imp_surcharge", ResultSet::getDouble);
+      hash += computeHash(rs, "airport_fee", ResultSet::getDouble);
+      hash += computeHash(rs, "total_amount", ResultSet::getDouble);
+      hash += computeHash(rs, "pickup_location_id", ResultSet::getString);
+      hash += computeHash(rs, "dropoff_location_id", ResultSet::getString);
+      hash += computeHash(rs, "data_file_year", ResultSet::getLong);
+      hash += computeHash(rs, "data_file_month", ResultSet::getLong);
+
+      if (++cnt % 100_000 == 0) {
+        long now = System.currentTimeMillis();
+        long duration = now - lastTime;
+        System.out.println("ROW " + cnt + " Time: " + duration + " ms");
+        lastTime = now;
+      }
     }
     return hash;
   }
 
+  // Hashes one ResultSet column; SQL NULL values and failed reads contribute 0.
+  private <T> long computeHash(
+      ResultSet rs, String columnName, SQLFunction<ResultSet, T> extractor) {
+    try {
+      T value = extractor.apply(rs, columnName);
+      return (value == null) ? 0 : value.hashCode();
+    } catch (SQLException e) {
+      return 0;
+    }
+  }
+
+  @FunctionalInterface
+  private interface SQLFunction<T, R> {
+    R apply(T t, String columnName) throws SQLException;
+  }
+
+  // Hashes one FieldValue; absent or SQL NULL fields contribute 0.
+  private <T> long computeHash(FieldValue fieldValue, Function<FieldValue, T> extractor) {
+    if (fieldValue == null || fieldValue.isNull()) {
+      return 0;
+    }
+    T value = extractor.apply(fieldValue);
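+    // A null extracted value also contributes 0 to the running hash.
+    return (value == null) ? 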
0 : value.hashCode(); + } + public static void main(String[] args) throws Exception { Options opt = new OptionsBuilder().include(ConnImplBenchmark.class.getSimpleName()).build(); new Runner(opt).run(); diff --git a/codecov.yaml b/codecov.yaml deleted file mode 100644 index c00182958..000000000 --- a/codecov.yaml +++ /dev/null @@ -1,7 +0,0 @@ ---- -codecov: - ci: - - source.cloud.google.com -coverage: - round: down - range: "50...100" \ No newline at end of file diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml new file mode 100644 index 000000000..e69de29bb diff --git a/google-cloud-bigquery/clirr-ignored-differences.xml b/google-cloud-bigquery/clirr-ignored-differences.xml index abf827b48..cef0f30f7 100644 --- a/google-cloud-bigquery/clirr-ignored-differences.xml +++ b/google-cloud-bigquery/clirr-ignored-differences.xml @@ -3,15 +3,80 @@ - 3005 - com/google/cloud/bigquery/TableResult* - TableResult is an internal API and it should be fine to update + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *TimeZone(*) - 7002 - com/google/cloud/bigquery/TableResult* - *TableResult(*) - TableResult is an internal API and it should be fine to update + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *DateFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *DatetimeFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *TimeFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *TimestampFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *SourceColumnMatch(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *NullMarkers(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *TimeZone(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *DateFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *DatetimeFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *TimeFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *TimestampFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *SourceColumnMatch(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *NullMarkers(*) + + + 7004 + com/google/cloud/bigquery/BigQueryRetryHelper + java.lang.Object runWithRetries(java.util.concurrent.Callable, com.google.api.gax.retrying.RetrySettings, com.google.api.gax.retrying.ResultRetryAlgorithm, com.google.api.core.ApiClock, com.google.cloud.bigquery.BigQueryRetryConfig) + A Tracer object is needed to use Otel and runWithRetries is only called in a few files, so it should be fine to update the signature 7004 @@ -26,17 +91,11 @@ getQueryResultsWithRowLimit is just used by ConnectionImpl at the moment so it should be fine to update the signature instead of writing an overloaded method - 7004 - com/google/cloud/bigquery/TableResult* - *TableResult(*) - It should be fine to update TableResult constructors since it is used to return results to the user and users should not directly construct TableResult objects - - - 7005 - com/google/cloud/bigquery/TableResult* - *TableResult(*) - *TableResult(*) - It should be fine to update TableResult constructors since it is used to return results to the user and users should not directly construct TableResult objects + 7006 + com/google/cloud/bigquery/BigQueryOptions* + *getBigQueryRpcV2(*) + com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc + getBigQueryRpcV2 is protected and is only used within the BigQuery 
package 7013 @@ -59,14 +118,9 @@ *ResourceTags(*) - 7013 - com/google/cloud/bigquery/TableResult* - *getPageNoSchema(*) - - - 7013 - com/google/cloud/bigquery/TableResult* - *toBuilder(*) + 7012 + com/google/cloud/bigquery/BigQuery + java.lang.Object queryWithTimeout(com.google.cloud.bigquery.QueryJobConfiguration, com.google.cloud.bigquery.JobId, java.lang.Long, com.google.cloud.bigquery.BigQuery$JobOption[]) 7012 @@ -164,7 +218,13 @@ *BigLakeConfiguration(*) - 8001 - com/google/cloud/bigquery/EmptyTableResult* + 7013 + com/google/cloud/bigquery/DatasetInfo* + *setMaxTimeTravelHours(*) + + + 7013 + com/google/cloud/bigquery/DatasetInfo* + *setResourceTags(*) \ No newline at end of file diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 1b660687f..1f1f186a0 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 2.38.4-mi + 2.56.0-mi jar BigQuery https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 2.38.4-mi + 2.56.0-mi google-cloud-bigquery @@ -85,10 +85,6 @@ com.google.api gax - - org.threeten - threetenbp - com.google.code.gson gson @@ -120,7 +116,6 @@ org.apache.arrow arrow-memory-netty - runtime @@ -129,7 +124,23 @@ error_prone_annotations + + + io.opentelemetry + opentelemetry-api + + + io.opentelemetry + opentelemetry-context + + + + com.google.api + gax + testlib + test + com.google.cloud google-cloud-datacatalog @@ -175,6 +186,23 @@ proto-google-cloud-datacatalog-v1 test + + + + io.opentelemetry + opentelemetry-sdk + test + + + io.opentelemetry + opentelemetry-sdk-common + test + + + io.opentelemetry + opentelemetry-sdk-trace + test + @@ -214,7 +242,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.5.0 + 3.6.1 add-source @@ -234,6 +262,28 @@ + + arrow-config + + [9,) + + + + + org.apache.maven.plugins + maven-compiler-plugin + + UTF-8 + true + + -J--add-opens=java.base/java.nio=org.apache.arrow.memory.core,ALL-UNNAMED + -J--add-opens=java.base/java.nio=java-base,ALL-UNNAMED + + + + + + java17 diff --git a/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java index caec16676..9d05f6b2b 100644 --- a/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java +++ b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java @@ -23,10 +23,10 @@ import com.google.cloud.bigquery.QueryJobConfiguration; import com.google.cloud.bigquery.TableResult; import java.io.FileInputStream; +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; import java.util.List; -import org.threeten.bp.Clock; -import org.threeten.bp.Duration; -import org.threeten.bp.Instant; public class Benchmark { diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java index e4107cdfd..d52124092 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java @@ -21,6 +21,7 @@ import com.google.api.core.ApiFunction; import com.google.api.services.bigquery.model.Dataset.Access; import com.google.api.services.bigquery.model.DatasetAccessEntry; +import com.google.api.services.bigquery.model.Expr; import com.google.cloud.StringEnumType; import 
com.google.cloud.StringEnumValue; import java.io.Serializable; @@ -41,6 +42,7 @@ public final class Acl implements Serializable { private final Entity entity; private final Role role; + private final Expr condition; /** * Dataset roles supported by BigQuery. @@ -90,7 +92,9 @@ public static Role valueOf(String constant) { return type.valueOf(constant); } - /** @return Return the known values for Role. */ + /** + * @return Return the known values for Role. + */ public static Role[] values() { return type.values(); } @@ -182,7 +186,9 @@ public DatasetAclEntity(DatasetId id, List targetTypes) { this.targetTypes = targetTypes; } - /** @return Returns DatasetAclEntity's identity. */ + /** + * @return Returns DatasetAclEntity's identity. + */ public DatasetId getId() { return id; } @@ -238,7 +244,9 @@ public Domain(String domain) { this.domain = domain; } - /** @return Returns the domain name. */ + /** + * @return Returns the domain name. + */ public String getDomain() { return domain; } @@ -346,22 +354,30 @@ Access toPb() { } } - /** @return Returns a Group entity representing all project's owners. */ + /** + * @return Returns a Group entity representing all project's owners. + */ public static Group ofProjectOwners() { return new Group(PROJECT_OWNERS); } - /** @return Returns a Group entity representing all project's readers. */ + /** + * @return Returns a Group entity representing all project's readers. + */ public static Group ofProjectReaders() { return new Group(PROJECT_READERS); } - /** @return Returns a Group entity representing all project's writers. */ + /** + * @return Returns a Group entity representing all project's writers. + */ public static Group ofProjectWriters() { return new Group(PROJECT_WRITERS); } - /** @return Returns a Group entity representing all BigQuery authenticated users. */ + /** + * @return Returns a Group entity representing all BigQuery authenticated users. + */ public static Group ofAllAuthenticatedUsers() { return new Group(ALL_AUTHENTICATED_USERS); } @@ -383,7 +399,9 @@ public User(String email) { this.email = email; } - /** @return Returns user's email. */ + /** + * @return Returns user's email. + */ public String getEmail() { return email; } @@ -435,7 +453,9 @@ public View(TableId id) { this.id = id; } - /** @return Returns table's identity. */ + /** + * @return Returns table's identity. + */ public TableId getId() { return id; } @@ -487,7 +507,9 @@ public Routine(RoutineId id) { this.id = id; } - /** @return Returns routine's identity. */ + /** + * @return Returns routine's identity. + */ public RoutineId getId() { return id; } @@ -535,7 +557,9 @@ public IamMember(String iamMember) { this.iamMember = iamMember; } - /** @return Returns iamMember. */ + /** + * @return Returns iamMember. + */ public String getIamMember() { return iamMember; } @@ -568,21 +592,173 @@ Access toPb() { } } + /** Expr represents the conditional information related to dataset access policies. */ + public static final class Expr implements Serializable { + // Textual representation of an expression in Common Expression Language syntax. + private final String expression; + + /** + * Optional. Title for the expression, i.e. a short string describing its purpose. This can be + * used e.g. in UIs which allow to enter the expression. + */ + private final String title; + + /** + * Optional. Description of the expression. This is a longer text which describes the + * expression, e.g. when hovered over it in a UI. + */ + private final String description; + + /** + * Optional. 
String indicating the location of the expression for error reporting, e.g. a file + * name and a position in the file. + */ + private final String location; + + private static final long serialVersionUID = 7358264726377291156L; + + static final class Builder { + private String expression; + private String title; + private String description; + private String location; + + Builder() {} + + Builder(Expr expr) { + this.expression = expr.expression; + this.title = expr.title; + this.description = expr.description; + this.location = expr.location; + } + + Builder(com.google.api.services.bigquery.model.Expr bqExpr) { + this.expression = bqExpr.getExpression(); + if (bqExpr.getTitle() != null) { + this.title = bqExpr.getTitle(); + } + if (bqExpr.getDescription() != null) { + this.description = bqExpr.getDescription(); + } + if (bqExpr.getLocation() != null) { + this.location = bqExpr.getLocation(); + } + } + + public Builder setExpression(String expression) { + this.expression = expression; + return this; + } + + public Builder setTitle(String title) { + this.title = title; + return this; + } + + public Builder setDescription(String description) { + this.description = description; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Expr build() { + return new Expr(this); + } + } + + public Expr(Builder builder) { + this.expression = builder.expression; + this.title = builder.title; + this.description = builder.description; + this.location = builder.location; + } + + public Expr(String expression, String title, String description, String location) { + this.expression = expression; + this.title = title; + this.description = description; + this.location = location; + } + + com.google.api.services.bigquery.model.Expr toPb() { + com.google.api.services.bigquery.model.Expr bqExpr = + new com.google.api.services.bigquery.model.Expr(); + bqExpr.setExpression(this.expression); + bqExpr.setTitle(this.title); + bqExpr.setDescription(this.description); + bqExpr.setLocation(this.location); + return bqExpr; + } + + static Expr fromPb(com.google.api.services.bigquery.model.Expr bqExpr) { + return new Builder(bqExpr).build(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public int hashCode() { + return Objects.hash(expression, title, description, location); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + final Expr other = (Expr) obj; + return Objects.equals(this.expression, other.expression) + && Objects.equals(this.title, other.title) + && Objects.equals(this.description, other.description) + && Objects.equals(this.location, other.location); + } + + @Override + public String toString() { + return toPb().toString(); + } + } + private Acl(Entity entity, Role role) { + this(entity, role, null); + } + + private Acl(Entity entity, Role role, Expr condition) { this.entity = checkNotNull(entity); this.role = role; + this.condition = condition; } - /** @return Returns the entity for this ACL. */ + /** + * @return Returns the entity for this ACL. + */ public Entity getEntity() { return entity; } - /** @return Returns the role specified by this ACL. */ + /** + * @return Returns the role specified by this ACL. + */ public Role getRole() { return role; } + /** + * @return Returns the condition specified by this ACL. 
+ */ + public Expr getCondition() { + return condition; + } + /** * @return Returns an Acl object. * @param entity the entity for this ACL object @@ -592,6 +768,10 @@ public static Acl of(Entity entity, Role role) { return new Acl(entity, role); } + public static Acl of(Entity entity, Role role, Expr condition) { + return new Acl(entity, role, condition); + } + /** * @param datasetAclEntity * @return Returns an Acl object for a datasetAclEntity. @@ -618,7 +798,7 @@ public static Acl of(Routine routine) { @Override public int hashCode() { - return Objects.hash(entity, role); + return Objects.hash(entity, role, condition); } @Override @@ -635,7 +815,9 @@ public boolean equals(Object obj) { return false; } final Acl other = (Acl) obj; - return Objects.equals(this.entity, other.entity) && Objects.equals(this.role, other.role); + return Objects.equals(this.entity, other.entity) + && Objects.equals(this.role, other.role) + && Objects.equals(this.condition, other.condition); } Access toPb() { @@ -643,11 +825,16 @@ Access toPb() { if (role != null) { accessPb.setRole(role.name()); } + if (condition != null) { + accessPb.setCondition(condition.toPb()); + } return accessPb; } static Acl fromPb(Access access) { return Acl.of( - Entity.fromPb(access), access.getRole() != null ? Role.valueOf(access.getRole()) : null); + Entity.fromPb(access), + access.getRole() != null ? Role.valueOf(access.getRole()) : null, + access.getCondition() != null ? Expr.fromPb(access.getCondition()) : null); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java index cdfac5468..6bfe5964a 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java @@ -77,6 +77,42 @@ public String getSelector() { } } + enum DatasetView { + DATASET_VIEW_UNSPECIFIED("DATASET_VIEW_UNSPECIFIED"), + FULL("FULL"), + METADATA("METADATA"), + ACL("ACL"); + + private final String view; + + DatasetView(String view) { + this.view = view; + } + + @Override + public String toString() { + return view; + } + } + + enum DatasetUpdateMode { + UPDATE_MODE_UNSPECIFIED("UPDATE_MODE_UNSPECIFIED"), + UPDATE_FULL("UPDATE_FULL"), + UPDATE_METADATA("UPDATE_METADATA"), + UPDATE_ACL("UPDATE_ACL"); + + private final String updateMode; + + DatasetUpdateMode(String updateMode) { + this.updateMode = updateMode; + } + + @Override + public String toString() { + return updateMode; + } + } + /** * Fields of a BigQuery Table resource. * @@ -273,8 +309,9 @@ private DatasetListOption(BigQueryRpc.Option option, Object value) { } /** - * Returns an option to specify a label filter. See - * https://cloud.google.com/bigquery/docs/adding-using-labels#filtering_datasets_using_labels + * Returns an option to specify a label filter. @see Filtering + * using labels * * @param labelFilter In the form "labels.key:value" */ @@ -309,7 +346,7 @@ private DatasetOption(BigQueryRpc.Option option, Object value) { /** * Returns an option to specify the dataset's fields to be returned by the RPC call. If this - * option is not provided all dataset's fields are returned. { code DatasetOption.fields} can be + * option is not provided all dataset's fields are returned. {@code DatasetOption.fields} can be * used to specify only the fields of interest. {@link Dataset#getDatasetId()} is always * returned, even if not specified. 
*/ @@ -317,6 +354,40 @@ public static DatasetOption fields(DatasetField... fields) { return new DatasetOption( BigQueryRpc.Option.FIELDS, Helper.selector(DatasetField.REQUIRED_FIELDS, fields)); } + + /** + * Returns an option to specify the dataset's access policy version for conditional access. If + * this option is not provided the field remains unset and conditional access cannot be used. + * Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests + * for conditional access policy binding in datasets must specify version 3. Datasets with no + * conditional role bindings in access policy may specify any valid value or leave the field + * unset. This field will be mapped to IAM Policy version and will be + * used to fetch the policy from IAM. If unset or if 0 or 1 the value is used for a dataset with + * conditional bindings, access entry with condition will have role string appended by + * 'withcond' string followed by a hash value. Please refer to Troubleshooting + * withcond for more details. + */ + public static DatasetOption accessPolicyVersion(Integer accessPolicyVersion) { + return new DatasetOption(BigQueryRpc.Option.ACCESS_POLICY_VERSION, accessPolicyVersion); + } + + /** + * Returns an option to specify the view that determines which dataset information is returned. + * By default, metadata and ACL information are returned. + */ + public static DatasetOption datasetView(DatasetView datasetView) { + return new DatasetOption(BigQueryRpc.Option.DATASET_VIEW, datasetView); + } + + /** + * Returns an option to specify the fields of dataset that update/patch operation is targeting. + * By default, both metadata and ACL fields are updated. + */ + public static DatasetOption updateMode(DatasetUpdateMode updateMode) { + return new DatasetOption(BigQueryRpc.Option.DATASET_UPDATE_MODE, updateMode); + } } /** Class for specifying dataset delete options. */ @@ -607,7 +678,7 @@ public static JobListOption fields(JobField... fields) { /** Class for specifying table get and create options. */ class JobOption extends Option { - private static final long serialVersionUID = -3111736712316353665L; + private static final long serialVersionUID = -3111736712316353664L; private JobOption(BigQueryRpc.Option option, Object value) { super(option, value); @@ -624,6 +695,16 @@ public static JobOption fields(JobField... fields) { return new JobOption( BigQueryRpc.Option.FIELDS, Helper.selector(JobField.REQUIRED_FIELDS, fields)); } + + /** Returns an option to specify the job's BigQuery retry configuration. */ + public static JobOption bigQueryRetryConfig(BigQueryRetryConfig bigQueryRetryConfig) { + return new JobOption(BigQueryRpc.Option.BIGQUERY_RETRY_CONFIG, bigQueryRetryConfig); + } + + /** Returns an option to specify the job's retry options. */ + public static JobOption retryOptions(RetryOption... options) { + return new JobOption(BigQueryRpc.Option.RETRY_OPTIONS, options); + } } /** Class for specifying query results options. */ @@ -735,20 +816,17 @@ public int hashCode() { * *

Example of creating a dataset. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   Dataset dataset = null;
-   *   DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
-   *   try {
-   *     // the dataset was created
-   *     dataset = bigquery.create(datasetInfo);
-   *   } catch (BigQueryException e) {
-   *     // the dataset was not created
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * Dataset dataset = null;
+   * DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+   * try {
+   *   // the dataset was created
+   *   dataset = bigquery.create(datasetInfo);
+   * } catch (BigQueryException e) {
+   *   // the dataset was not created
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -759,22 +837,19 @@ public int hashCode() { * *
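The Acl hunks earlier in this diff add an optional IAM condition (`Acl.Expr`, a CEL expression plus optional title/description/location) and a three-argument `Acl.of(entity, role, condition)` factory, but no usage example. A minimal sketch of attaching a conditional reader binding to a dataset; the dataset name, email, and CEL expression are illustrative, and `DatasetOption.accessPolicyVersion(3)` comes from the BigQuery.java hunks below (version 3 is required for conditional bindings):

```java
import com.google.cloud.bigquery.Acl;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.DatasetOption;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Dataset;
import java.util.ArrayList;
import java.util.List;

public class ConditionalAclSketch {
  public static void main(String[] args) {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // Conditional role bindings are only surfaced with access policy version 3.
    Dataset dataset =
        bigquery.getDataset("my_dataset_name", DatasetOption.accessPolicyVersion(3));
    // Expr carries the CEL expression; title, description and location are optional.
    Acl.Expr condition =
        new Acl.Expr(
            "request.time < timestamp(\"2030-01-01T00:00:00Z\")",
            "expires_2030",
            "Reader access expires at the start of 2030",
            null);
    Acl conditionalReader =
        Acl.of(new Acl.User("reader@example.com"), Acl.Role.READER, condition);
    List<Acl> acls = new ArrayList<>(dataset.getAcl());
    acls.add(conditionalReader);
    bigquery.update(
        dataset.toBuilder().setAcl(acls).build(), DatasetOption.accessPolicyVersion(3));
  }
}
```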

Example of creating a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String fieldName = "string_field";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Table field definition
-   *   Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
-   *   // Table schema definition
-   *   Schema schema = Schema.of(field);
-   *   TableDefinition tableDefinition = StandardTableDefinition.of(schema);
-   *   TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
-   *   Table table = bigquery.create(tableInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String fieldName = "string_field";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Table field definition
+   * Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
+   * // Table schema definition
+   * Schema schema = Schema.of(field);
+   * TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+   * TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
+   * Table table = bigquery.create(tableInfo);
+   * }
* * @throws BigQueryException upon failure */ @@ -792,46 +867,41 @@ public int hashCode() { * *

Example of loading a newline-delimited-json file with textual fields from GCS to a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Table field definition
-   *   Field[] fields = new Field[] { Field.of("name", LegacySQLTypeName.STRING),
-   *       Field.of("post_abbr", LegacySQLTypeName.STRING) };
-   *   // Table schema definition
-   *   Schema schema = Schema.of(fields);
-   *   LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
-   *       .setFormatOptions(FormatOptions.json()).setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
-   *       .setSchema(schema).build();
-   *   // Load the table
-   *   Job loadJob = bigquery.create(JobInfo.of(configuration));
-   *   loadJob = loadJob.waitFor();
-   *   // Check the table
-   *   System.out.println("State: " + loadJob.getStatus().getState());
-   *   return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Table field definition
+   * Field[] fields = new Field[] { Field.of("name", LegacySQLTypeName.STRING),
+   * Field.of("post_abbr", LegacySQLTypeName.STRING) };
+   * // Table schema definition
+   * Schema schema = Schema.of(fields);
+   * LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
+   *     .setFormatOptions(FormatOptions.json())
+   *     .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
+   *     .setSchema(schema).build();
+   * // Load the table
+   * Job loadJob = bigquery.create(JobInfo.of(configuration));
+   * loadJob = loadJob.waitFor();
+   * // Check the table
+   * System.out.println("State: " + loadJob.getStatus().getState());
+   * return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
+   * }
* *

Example of creating a query job. * - *

-   * {
-   *   @code
-   *   String query = "SELECT field FROM my_dataset_name.my_table_name";
-   *   Job job = null;
-   *   JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
-   *   JobInfo jobInfo = JobInfo.of(jobConfiguration);
-   *   try {
-   *     job = bigquery.create(jobInfo);
-   *   } catch (BigQueryException e) {
-   *     // the job was not created
-   *   }
+   * 
{@code
+   * String query = "SELECT field FROM my_dataset_name.my_table_name";
+   * Job job = null;
+   * JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
+   * JobInfo jobInfo = JobInfo.of(jobConfiguration);
+   * try {
+   *   job = bigquery.create(jobInfo);
+   * } catch (BigQueryException e) {
+   *   // the job was not created
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -844,18 +914,15 @@ public int hashCode() { * *
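`JobOption.bigQueryRetryConfig(...)` and `JobOption.retryOptions(...)` are also new in this diff and arrive without an example. A hedged sketch of passing both when creating a query job; the error-message trigger and attempt cap are illustrative, and `BigQueryRetryConfig.newBuilder().retryOnMessage(...)` / `RetryOption.maxAttempts(...)` are assumed to match the shipped builders:

```java
import com.google.cloud.RetryOption;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.JobOption;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.BigQueryRetryConfig;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;

public class JobRetrySketch {
  public static void main(String[] args) throws InterruptedException {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    QueryJobConfiguration configuration = QueryJobConfiguration.of("SELECT 1");
    // Retry jobs whose error message reports a rate-limit problem (illustrative trigger).
    BigQueryRetryConfig retryConfig =
        BigQueryRetryConfig.newBuilder().retryOnMessage("rateLimitExceeded").build();
    Job job =
        bigquery.create(
            JobInfo.of(configuration),
            JobOption.bigQueryRetryConfig(retryConfig),
            // Bound the retry loop instead of relying on the default budget.
            JobOption.retryOptions(RetryOption.maxAttempts(5)));
    job.waitFor();
  }
}
```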

Example of creating a query connection. * - *

-   * {
-   *   @code
-   *       ConnectionSettings connectionSettings =
-   *         ConnectionSettings.newBuilder()
-   *             .setRequestTimeout(10L)
-   *             .setMaxResults(100L)
-   *             .setUseQueryCache(true)
-   *             .build();
-   *       Connection connection = bigquery.createConnection(connectionSettings);
-   * }
-   * 
+ *
{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setRequestTimeout(10L)
+   *         .setMaxResults(100L)
+   *         .setUseQueryCache(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * }
* * @throws BigQueryException upon failure * @param connectionSettings @@ -872,12 +939,9 @@ public int hashCode() { * *

Example of creating a query connection. * - *

-   * {
-   *   @code
-   *       Connection connection = bigquery.createConnection();
-   * }
-   * 
+ *
{@code
+   * Connection connection = bigquery.createConnection();
+   * }
* * @throws BigQueryException upon failure */ @@ -889,13 +953,10 @@ public int hashCode() { * *

Example of getting a dataset. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset";
-   *   Dataset dataset = bigquery.getDataset(datasetName);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset";
+   * Dataset dataset = bigquery.getDataset(datasetName);
+   * }
* * @throws BigQueryException upon failure */ @@ -906,15 +967,12 @@ public int hashCode() { * *

Example of getting a dataset. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   Dataset dataset = bigquery.getDataset(datasetId);
-   * }
-   * 
+ *
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Dataset dataset = bigquery.getDataset(datasetId);
+   * }
* * @throws BigQueryException upon failure */ @@ -923,21 +981,17 @@ public int hashCode() { /** * Lists the project's datasets. This method returns partial information on each dataset: ({@link * Dataset#getDatasetId()}, {@link Dataset#getFriendlyName()} and {@link - * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset(String, - * DatasetOption...)} or {@link #getDataset(DatasetId, DatasetOption...)}. + * Dataset#getGeneratedId()}). To get complete information use {@link #getDataset}. * *
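The new `DatasetView` and `DatasetUpdateMode` enums and the `datasetView`/`updateMode`/`accessPolicyVersion` options introduced in this diff likewise lack an example. A short sketch of a partial read followed by an ACL-only update (the dataset name is illustrative):

```java
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQuery.DatasetOption;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Dataset;

public class DatasetViewSketch {
  public static void main(String[] args) {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // Fetch only the ACL part of the dataset; version 3 keeps conditional
    // bindings intact instead of flattening them into "withcond" role strings.
    Dataset dataset =
        bigquery.getDataset(
            "my_dataset_name",
            DatasetOption.datasetView(BigQuery.DatasetView.ACL),
            DatasetOption.accessPolicyVersion(3));
    // Patch only the ACL fields on write, leaving the dataset's metadata untouched.
    bigquery.update(dataset, DatasetOption.updateMode(BigQuery.DatasetUpdateMode.UPDATE_ACL));
  }
}
```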

Example of listing datasets, specifying the page size. * - *

-   * {
-   *   @code
-   *   // List datasets in the default project
-   *   Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
-   *   for (Dataset dataset : datasets.iterateAll()) {
-   *     // do something with the dataset
-   *   }
+   * 
{@code
+   * // List datasets in the default project
+   * Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
+   * for (Dataset dataset : datasets.iterateAll()) {
+   *   // do something with the dataset
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -961,22 +1015,18 @@ public int hashCode() { /** * Lists the datasets in the provided project. This method returns partial information on each * dataset: ({@link Dataset#getDatasetId()}, {@link Dataset#getFriendlyName()} and {@link - * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset(String, - * DatasetOption...)} or {@link #getDataset(DatasetId, DatasetOption...)}. + * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset}. * *

Example of listing datasets in a project, specifying the page size. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   // List datasets in a specified project
-   *   Page<{@link Dataset}> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
-   *   for (Dataset dataset : datasets.iterateAll()) {
-   *     // do something with the dataset
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * // List datasets in a specified project
+   * Page<Dataset> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
+   * for (Dataset dataset : datasets.iterateAll()) {
+   *   // do something with the dataset
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -987,18 +1037,15 @@ public int hashCode() { * *

Example of deleting a dataset from its id, even if non-empty. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
-   *   if (deleted) {
-   *     // the dataset was deleted
-   *   } else {
-   *     // the dataset was not found
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+   * if (deleted) {
+   *   // the dataset was deleted
+   * } else {
+   *   // the dataset was not found
    * }
-   * 
+ * }
* * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -1010,20 +1057,17 @@ public int hashCode() { * *

Example of deleting a dataset, even if non-empty. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
-   *   if (deleted) {
-   *     // the dataset was deleted
-   *   } else {
-   *     // the dataset was not found
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+   * if (deleted) {
+   *   // the dataset was deleted
+   * } else {
+   *   // the dataset was not found
    * }
-   * 
+ * }
* * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -1047,21 +1091,18 @@ public int hashCode() { * *

Example of deleting a table. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
-   *   boolean deleted = bigquery.delete(tableId);
-   *   if (deleted) {
-   *     // the table was deleted
-   *   } else {
-   *     // the table was not found
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(projectId, datasetName, tableName);
+   * boolean deleted = bigquery.delete(tableId);
+   * if (deleted) {
+   *   // the table was deleted
+   * } else {
+   *   // the table was not found
    * }
-   * 
+ * }
* * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -1073,21 +1114,18 @@ public int hashCode() { * *

Example of deleting a model. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_model_name";
-   *   ModelId modelId = ModelId.of(projectId, datasetName, modelName);
-   *   boolean deleted = bigquery.delete(modelId);
-   *   if (deleted) {
-   *     // the model was deleted
-   *   } else {
-   *     // the model was not found
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_model_name";
+   * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
+   * boolean deleted = bigquery.delete(modelId);
+   * if (deleted) {
+   *   // the model was deleted
+   * } else {
+   *   // the model was not found
    * }
-   * 
+ * }
* * @return {@code true} if model was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -1132,19 +1170,15 @@ public int hashCode() { * * - *
-   * {
-   *   @code
-   *   // String datasetName = "my_dataset_name";
-   *   // String tableName = "my_table_name";
-   *   // String newDescription = "new_description";
-   *
-   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
-   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
-   *   Table afterTable = bigquery.update(tableInfo);
+   * 
{@code
+   * // String datasetName = "my_dataset_name";
+   * // String tableName = "my_table_name";
+   * // String newDescription = "new_description";
    *
-   * }
-   * 
+ * Table beforeTable = bigquery.getTable(datasetName, tableName); + * TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build(); + * Table afterTable = bigquery.update(tableInfo); + * }
* * * @@ -1157,33 +1191,27 @@ public int hashCode() { * *

Example of updating a table by changing its description. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String newDescription = "new_description";
-   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
-   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
-   *   Table afterTable = bigquery.update(tableInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String newDescription = "new_description";
+   * Table beforeTable = bigquery.getTable(datasetName, tableName);
+   * TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
+   * Table afterTable = bigquery.update(tableInfo);
+   * }
* *

Example of updating a table by changing its expiration. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
-   *
-   *   // Set table to expire 5 days from now.
-   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
-   *   TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
-   *   Table afterTable = bigquery.update(tableInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Table beforeTable = bigquery.getTable(datasetName, tableName);
+   *
+   * // Set table to expire 5 days from now.
+   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
+   * TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
+   * Table afterTable = bigquery.update(tableInfo);
+   * }
* * @throws BigQueryException upon failure */ @@ -1194,33 +1222,27 @@ public int hashCode() { * *
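The expiration example above still computes the timestamp with Joda-Time's `DateTime`, even though this diff migrates the client from org.threeten.bp to java.time. An equivalent sketch using only java.time, assuming the same `bigquery`, `datasetName`, and `tableName` as the Javadoc snippet:

```java
import java.time.Duration;
import java.time.Instant;

// Set table to expire 5 days from now, without the Joda-Time dependency.
Table beforeTable = bigquery.getTable(datasetName, tableName);
long expirationMillis = Instant.now().plus(Duration.ofDays(5)).toEpochMilli();
TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
Table afterTable = bigquery.update(tableInfo);
```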

Example of updating a model by changing its description. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String modelName = "my_model_name";
-   *   String newDescription = "new_description";
-   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
-   *   ModelInfo modelInfo = beforeModel.toBuilder().setDescription(newDescription).build();
-   *   Model afterModel = bigquery.update(modelInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String modelName = "my_model_name";
+   * String newDescription = "new_description";
+   * Model beforeModel = bigquery.getModel(datasetName, modelName);
+   * ModelInfo modelInfo = beforeModel.toBuilder().setDescription(newDescription).build();
+   * Model afterModel = bigquery.update(modelInfo);
+   * }
* *

Example of updating a model by changing its expiration. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String modelName = "my_model_name";
-   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
-   *
-   *   // Set model to expire 5 days from now.
-   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
-   *   ModelInfo modelInfo = beforeModel.toBuilder().setExpirationTime(expirationMillis).build();
-   *   Model afterModel = bigquery.update(modelInfo);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String modelName = "my_model_name";
+   * Model beforeModel = bigquery.getModel(datasetName, modelName);
+   *
+   * // Set model to expire 5 days from now.
+   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
+   * ModelInfo modelInfo = beforeModel.toBuilder().setExpirationTime(expirationMillis).build();
+   * Model afterModel = bigquery.update(modelInfo);
+   * }
* * @throws BigQueryException upon failure */ @@ -1238,14 +1260,11 @@ public int hashCode() { * *

Example of getting a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   Table table = bigquery.getTable(datasetName, tableName);
-   * }
-   * 
+ *
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Table table = bigquery.getTable(datasetName, tableName);
+   * }
* * @throws BigQueryException upon failure */ @@ -1256,16 +1275,13 @@ public int hashCode() { * *

Example of getting a table. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
-   *   Table table = bigquery.getTable(tableId);
-   * }
-   * 
+ *
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(projectId, datasetName, tableName);
+   * Table table = bigquery.getTable(tableId);
+   * }
* * @throws BigQueryException upon failure */ @@ -1283,16 +1299,13 @@ public int hashCode() { * *

Example of getting a model. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   String modelName = "my_model_name";
-   *   ModelId modelId = ModelId.of(projectId, datasetName, tableName);
-   *   Model model = bigquery.getModel(modelId);
-   * }
-   * 
+ *
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * String modelName = "my_model_name";
+   * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
+   * Model model = bigquery.getModel(modelId);
+   * }
* * @throws BigQueryException upon failure */ @@ -1322,20 +1335,17 @@ public int hashCode() { * Lists the tables in the dataset. This method returns partial information on each table: ({@link * Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type, * which is part of {@link Table#getDefinition()}). To get complete information use either {@link - * #getTable(TableId, TableOption...)} or {@link #getTable(String, String, TableOption...)}. + * #getTable}. * *

Example of listing the tables in a dataset, specifying the page size. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
-   *   for (Table table : tables.iterateAll()) {
-   *     // do something with the table
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
+   * for (Table table : tables.iterateAll()) {
+   *   // do something with the table
    * }
-   * 
+   * }
    *
    * @throws BigQueryException upon failure
    */
@@ -1345,22 +1355,19 @@ public int hashCode() {
    * Lists the tables in the dataset. This method returns partial information on each table: ({@link
    * Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type,
    * which is part of {@link Table#getDefinition()}). To get complete information use either {@link
-   * #getTable(TableId, TableOption...)} or {@link #getTable(String, String, TableOption...)}.
+   * #getTable}.
    *
    * 

Example of listing the tables in a dataset. * - *

-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   Page<Table> tables = bigquery.listTables(datasetId, TableListOption.pageSize(100));
-   *   for (Table table : tables.iterateAll()) {
-   *     // do something with the table
-   *   }
+   * 
{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Page<Table> tables = bigquery.listTables(datasetId, TableListOption.pageSize(100)); + * for (Table table : tables.iterateAll()) { + * // do something with the table * } - * + * } * * @throws BigQueryException upon failure */ @@ -1383,33 +1390,30 @@ public int hashCode() { * *

Example of inserting rows into a table without running a load job. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Values of the row to insert
-   *   Map<String, Object> rowContent = new HashMap<>();
-   *   rowContent.put("booleanField", true);
-   *   // Bytes are passed in base64
-   *   rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
-   *   // Records are passed as a map
-   *   Map<String, Object> recordsContent = new HashMap<>();
-   *   recordsContent.put("stringField", "Hello, World!");
-   *   rowContent.put("recordField", recordsContent);
-   *   InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
-   *       // More rows can be added in the same RPC by invoking .addRow() on the
-   *       // builder
-   *       .build());
-   *   if (response.hasErrors()) {
-   *     // If any of the insertions failed, this lets you inspect the errors
-   *     for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
-   *       // inspect row error
-   *     }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Values of the row to insert
+   * Map<String, Object> rowContent = new HashMap<>();
+   * rowContent.put("booleanField", true);
+   * // Bytes are passed in base64
+   * rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
+   * // Records are passed as a map
+   * Map<String, Object> recordsContent = new HashMap<>();
+   * recordsContent.put("stringField", "Hello, World!");
+   * rowContent.put("recordField", recordsContent);
+   * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
+   *     // More rows can be added in the same RPC by invoking .addRow() on the
+   *     // builder
+   *     .build());
+   * if (response.hasErrors()) {
+   *   // If any of the insertions failed, this lets you inspect the errors
+   *   for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
+   *     // inspect row error
    *   }
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1420,20 +1424,17 @@ public int hashCode() { * *

Example of listing table rows, specifying the page size. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   // This example reads the result 100 rows per RPC call. If there's no need
-   *   // to limit the number,
-   *   // simply omit the option.
-   *   TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
-   *   for (FieldValueList row : tableData.iterateAll()) {
-   *     // do something with the row
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * // This example reads the result 100 rows per RPC call. If there's no need
+   * // to limit the number,
+   * // simply omit the option.
+   * TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
+   * for (FieldValueList row : tableData.iterateAll()) {
+   *   // do something with the row
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1444,21 +1445,18 @@ public int hashCode() { * *

Example of listing table rows, specifying the page size. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   TableId tableIdObject = TableId.of(datasetName, tableName);
-   *   // This example reads the result 100 rows per RPC call. If there's no need
-   *   // to limit the number,
-   *   // simply omit the option.
-   *   TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
-   *   for (FieldValueList row : tableData.iterateAll()) {
-   *     // do something with the row
-   *   }
+   * 
{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * TableId tableIdObject = TableId.of(datasetName, tableName);
+   * // This example reads the result 100 rows per RPC call. If there's no need
+   * // to limit the number,
+   * // simply omit the option.
+   * TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
+   * for (FieldValueList row : tableData.iterateAll()) {
+   *   // do something with the row
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1492,18 +1490,15 @@ TableResult listTableData( * *

Example of listing table rows with schema. * - *

-   * {
-   *   @code
-   *   Schema schema = Schema.of(Field.of("word", LegacySQLTypeName.STRING),
-   *       Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING),
-   *       Field.of("corpus_date", LegacySQLTypeName.STRING));
-   *   TableResult tableData = bigquery.listTableData(TableId.of("bigquery-public-data", "samples", "shakespeare"),
-   *       schema);
-   *   FieldValueList row = tableData.getValues().iterator().next();
-   *   System.out.println(row.get("word").getStringValue());
-   * }
-   * 
+ *
{@code
+   * Schema schema = Schema.of(Field.of("word", LegacySQLTypeName.STRING),
+   *     Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING),
+   *     Field.of("corpus_date", LegacySQLTypeName.STRING));
+   * TableResult tableData = bigquery.listTableData(TableId.of("bigquery-public-data", "samples", "shakespeare"),
+   *     schema);
+   * FieldValueList row = tableData.getValues().iterator().next();
+   * System.out.println(row.get("word").getStringValue());
+   * }
* * @throws BigQueryException upon failure */ @@ -1515,16 +1510,13 @@ TableResult listTableData( * *

Example of getting a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   Job job = bigquery.getJob(jobName);
-   *   if (job == null) {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * Job job = bigquery.getJob(jobName);
+   * if (job == null) {
+   *   // job was not found
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1536,17 +1528,14 @@ TableResult listTableData( * *

Example of getting a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   JobId jobIdObject = JobId.of(jobName);
-   *   Job job = bigquery.getJob(jobIdObject);
-   *   if (job == null) {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * JobId jobIdObject = JobId.of(jobName);
+   * Job job = bigquery.getJob(jobIdObject);
+   * if (job == null) {
+   *   // job was not found
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1557,15 +1546,12 @@ TableResult listTableData( * *

Example of listing jobs, specifying the page size. * - *

-   * {
-   *   @code
-   *   Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
-   *   for (Job job : jobs.iterateAll()) {
-   *     // do something with the job
-   *   }
+   * 
{@code
+   * Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
+   * for (Job job : jobs.iterateAll()) {
+   *   // do something with the job
    * }
-   * 
+ * }
* * @throws BigQueryException upon failure */ @@ -1573,25 +1559,21 @@ TableResult listTableData( /** * Sends a job cancel request. This call will return immediately. The job status can then be - * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String, - * JobOption...)}). + * checked by using {@link #getJob}. * *

If the location of the job is not "US" or "EU", {@link #cancel(JobId)} must be used instead. * *

Example of cancelling a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   boolean success = bigquery.cancel(jobName);
-   *   if (success) {
-   *     // job was cancelled
-   *   } else {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * boolean success = bigquery.cancel(jobName);
+   * if (success) {
+   *   // job was cancelled
+   * } else {
+   *   // job was not found
    * }
-   * 
+ * }
* * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1601,27 +1583,23 @@ TableResult listTableData( /** * Sends a job cancel request. This call will return immediately. The job status can then be - * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String, - * JobOption...)}). + * checked using {@link #getJob}. * *

If the location of the job is not "US" or "EU", the {@code jobId} must specify the job * location. * *

Example of cancelling a job. * - *

-   * {
-   *   @code
-   *   String jobName = "my_job_name";
-   *   JobId jobId = JobId.of(jobName);
-   *   boolean success = bigquery.cancel(jobId);
-   *   if (success) {
-   *     // job was cancelled
-   *   } else {
-   *     // job was not found
-   *   }
+   * 
{@code
+   * String jobName = "my_job_name";
+   * JobId jobId = JobId.of(jobName);
+   * boolean success = bigquery.cancel(jobId);
+   * if (success) {
+   *   // job was cancelled
+   * } else {
+   *   // job was not found
    * }
-   * 
+ * }
* * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1640,22 +1618,19 @@ TableResult listTableData( * *

Example of running a query. * - *

-   * {
-   *   @code
-   *   // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
-   *   String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
-   *   QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
-   *
-   *   // Print the results.
-   *   for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
-   *     for (FieldValue val : row) {
-   *       System.out.printf("%s,", val.toString());
-   *     }
-   *     System.out.printf("\n");
+   * 
{@code
+   * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+   * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+   * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
+   *
+   * // Print the results.
+   * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
+   *   for (FieldValue val : row) {
+   *     System.out.printf("%s,", val.toString());
    *   }
+   *   System.out.printf("\n");
    * }
-   * 
+ * }
* * This method supports query-related preview features via environmental variables (enabled by * setting the {@code QUERY_PREVIEW_ENABLED} environment variable to "TRUE"). Specifically, this @@ -1681,7 +1656,7 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options) *

If the location of the job is not "US" or "EU", the {@code jobId} must specify the job * location. * - *

This method cannot be used in conjuction with {@link QueryJobConfiguration#dryRun()} + *

This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()} * queries. Since dry-run queries are not actually executed, there's no way to retrieve results. * *

See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link @@ -1695,6 +1670,28 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options) TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options) throws InterruptedException, JobException; + /** + * Starts the query associated with the request, using the given JobId. It returns either + * TableResult for quick queries or Job object for long-running queries. + * + *

If the location of the job is not "US" or "EU", the {@code jobId} must specify the job + * location. + * + *

This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()} + * queries. Since dry-run queries are not actually executed, there's no way to retrieve results. + * + *

See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link + * QueryJobConfiguration}. + * + * @throws BigQueryException upon failure + * @throws InterruptedException if the current thread gets interrupted while waiting for the query + * to complete + * @throws JobException if the job completes unsuccessfully + */ + Object queryWithTimeout( + QueryJobConfiguration configuration, JobId jobId, Long timeoutMs, JobOption... options) + throws InterruptedException, JobException; + /** * Returns results of the query associated with the provided job. * @@ -1710,56 +1707,50 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... * *
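`queryWithTimeout` is new in this diff and intentionally returns `Object`: a `TableResult` when the query finishes quickly, or a `Job` handle when it keeps running. The Javadoc describes this but never shows the dispatch, so here is a hedged sketch; the 30-second value and the assumption that `timeoutMs` bounds how long the call waits before handing back the still-running `Job` are illustrative:

```java
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.FieldValueList;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobId;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;

public class QueryWithTimeoutSketch {
  public static void main(String[] args) throws Exception {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    QueryJobConfiguration configuration =
        QueryJobConfiguration.of(
            "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus");
    Object result = bigquery.queryWithTimeout(configuration, JobId.of(), 30_000L);
    if (result instanceof TableResult) {
      // Quick query: rows are already available.
      for (FieldValueList row : ((TableResult) result).iterateAll()) {
        System.out.println(row);
      }
    } else {
      // Long-running query: block on the Job, then fetch the results.
      Job job = ((Job) result).waitFor();
      for (FieldValueList row : job.getQueryResults().iterateAll()) {
        System.out.println(row);
      }
    }
  }
}
```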

Example of creating a channel with which to write to a table. * - *

-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String csvData = "StringValue1\nStringValue2\n";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
-   *       .setFormatOptions(FormatOptions.csv()).build();
-   *   TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
-   *   // Write data to writer
-   *   try {
-   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
-   *   } finally {
-   *     writer.close();
-   *   }
-   *   // Get load job
-   *   Job job = writer.getJob();
-   *   job = job.waitFor();
-   *   LoadStatistics stats = job.getStatistics();
-   *   return stats.getOutputRows();
+   * <pre>{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String csvData = "StringValue1\nStringValue2\n";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *     .setFormatOptions(FormatOptions.csv()).build();
+   * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
+   * // Write data to writer
+   * try {
+   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+   * } finally {
+   *   writer.close();
    * }
-   * </pre>
+   * // Get load job
+   * Job job = writer.getJob();
+   * job = job.waitFor();
+   * LoadStatistics stats = job.getStatistics();
+   * return stats.getOutputRows();
+   * }</pre>
    *
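One detail the example above leaves implicit: waitFor() also returns when the load job completed
unsuccessfully, so defensive code should inspect the job status before trusting the statistics.
A small sketch using the same public Job API:

    Job job = writer.getJob().waitFor();
    if (job.getStatus().getError() != null) {
      // The job finished, but with a server-side error; surface it.
      throw new RuntimeException("Load failed: " + job.getStatus().getError());
    }
    LoadStatistics stats = job.getStatistics();
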
    * <p>Example of writing a local file to a table.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
-   *   String location = "us";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
-   *       .setFormatOptions(FormatOptions.csv()).build();
-   *   // The location must be specified; other fields can be auto-detected.
-   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
-   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
-   *   // Write data to writer
-   *   try (OutputStream stream = Channels.newOutputStream(writer)) {
-   *     Files.copy(csvPath, stream);
-   *   }
-   *   // Get load job
-   *   Job job = writer.getJob();
-   *   job = job.waitFor();
-   *   LoadStatistics stats = job.getStatistics();
-   *   return stats.getOutputRows();
+   * <pre>{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
+   * String location = "us";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *     .setFormatOptions(FormatOptions.csv()).build();
+   * // The location must be specified; other fields can be auto-detected.
+   * JobId jobId = JobId.newBuilder().setLocation(location).build();
+   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+   * // Write data to writer
+   * try (OutputStream stream = Channels.newOutputStream(writer)) {
+   *   Files.copy(csvPath, stream);
    * }
-   * </pre>
+   * // Get load job
+   * Job job = writer.getJob();
+   * job = job.waitFor();
+   * LoadStatistics stats = job.getStatistics();
+   * return stats.getOutputRows();
+   * }</pre>
    *
    * @throws BigQueryException upon failure
    */
@@ -1772,32 +1763,29 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
    *
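The writer examples follow the same close-then-inspect sequence, and TableDataWriteChannel only
exposes the load job once the channel has been closed. A compact equivalent using
try-with-resources; StandardCharsets stands in for the Guava Charsets used above:

    TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
    try (OutputStream stream = Channels.newOutputStream(writer)) {
      stream.write(csvData.getBytes(StandardCharsets.UTF_8));
    }
    // getJob() is meaningful only after the channel is closed.
    Job job = writer.getJob().waitFor();
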
    * <p>Example of creating a channel with which to write to a table.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String csvData = "StringValue1\nStringValue2\n";
-   *   String location = "us";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
-   *       .setFormatOptions(FormatOptions.csv()).build();
-   *   // The location must be specified; other fields can be auto-detected.
-   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
-   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
-   *   // Write data to writer
-   *   try {
-   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
-   *   } finally {
-   *     writer.close();
-   *   }
-   *   // Get load job
-   *   Job job = writer.getJob();
-   *   job = job.waitFor();
-   *   LoadStatistics stats = job.getStatistics();
-   *   return stats.getOutputRows();
+   * <pre>{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String csvData = "StringValue1\nStringValue2\n";
+   * String location = "us";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
+   *     .setFormatOptions(FormatOptions.csv()).build();
+   * // The location must be specified; other fields can be auto-detected.
+   * JobId jobId = JobId.newBuilder().setLocation(location).build();
+   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+   * // Write data to writer
+   * try {
+   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
+   * } finally {
+   *   writer.close();
    * }
-   * </pre>
+   * // Get load job
+   * Job job = writer.getJob();
+   * job = job.waitFor();
+   * LoadStatistics stats = job.getStatistics();
+   * return stats.getOutputRows();
+   * }</pre>
    */
   TableDataWriteChannel writer(JobId jobId, WriteChannelConfiguration writeChannelConfiguration);
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryBaseService.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryBaseService.java
index aefb4329b..bdcefb3d9 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryBaseService.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryBaseService.java
@@ -26,11 +26,12 @@ protected BigQueryBaseService(ServiceOptions options) {
     super(options);
   }
 
-  public static final ExceptionHandler BIGQUERY_EXCEPTION_HANDLER =
+  public static final ExceptionHandler DEFAULT_BIGQUERY_EXCEPTION_HANDLER =
       ExceptionHandler.newBuilder()
           .abortOn(RuntimeException.class)
           .retryOn(java.net.ConnectException.class) // retry on Connection Exception
           .retryOn(java.net.UnknownHostException.class) // retry on UnknownHostException
+          .retryOn(java.net.SocketException.class) // retry on SocketException
           .addInterceptors(EXCEPTION_HANDLER_INTERCEPTOR)
           .build();
 }
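Two recurring patterns in the surrounding hunks are worth distilling. First, the renamed
DEFAULT_BIGQUERY_EXCEPTION_HANDLER above now also retries java.net.SocketException. The builder it
uses is public API from google-cloud-core, so an application can assemble a comparable handler the
same way; this sketch mirrors the diff but omits the library's internal interceptor:

    ExceptionHandler handler =
        ExceptionHandler.newBuilder()
            .abortOn(RuntimeException.class)
            .retryOn(java.net.ConnectException.class)
            .retryOn(java.net.UnknownHostException.class)
            .retryOn(java.net.SocketException.class)
            .build();

Second, the BigQueryImpl.java diff that follows wraps every RPC in one uniform OpenTelemetry
idiom: build a Span only when tracing is enabled, make it current for the duration of the call,
and always end it. Stripped of the BigQuery specifics (tracingEnabled, tracer, and doRpc are
stand-ins), the shape is:

    Span span = null;
    if (tracingEnabled && tracer != null) {
      span = tracer.spanBuilder("com.google.cloud.bigquery.BigQuery.createDataset").startSpan();
    }
    // A null resource in try-with-resources is simply skipped, so this is safe when tracing is off.
    try (Scope scope = span != null ? span.makeCurrent() : null) {
      doRpc();
    } finally {
      if (span != null) {
        span.end(); // end the span on success and failure alike
      }
    }
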
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java
index 62e6a9008..42f8900ad 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java
@@ -15,7 +15,6 @@
  */
 package com.google.cloud.bigquery;
 
-import static com.google.cloud.RetryHelper.runWithRetries;
 import static com.google.cloud.bigquery.PolicyHelper.convertFromApiPolicy;
 import static com.google.cloud.bigquery.PolicyHelper.convertToApiPolicy;
 import static com.google.common.base.Preconditions.checkArgument;
@@ -37,12 +36,12 @@
 import com.google.cloud.PageImpl;
 import com.google.cloud.PageImpl.NextPageFetcher;
 import com.google.cloud.Policy;
-import com.google.cloud.RetryHelper;
-import com.google.cloud.RetryHelper.RetryHelperException;
+import com.google.cloud.RetryOption;
 import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.BigQueryRetryHelper.BigQueryRetryHelperException;
 import com.google.cloud.bigquery.InsertAllRequest.RowToInsert;
-import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;
 import com.google.cloud.bigquery.spi.v2.BigQueryRpc;
+import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Function;
 import com.google.common.base.Strings;
@@ -52,6 +51,10 @@
 import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.trace.Span;
+import io.opentelemetry.context.Scope;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -59,8 +62,6 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import org.checkerframework.checker.nullness.qual.NonNull;
-import org.threeten.bp.Instant;
-import org.threeten.bp.temporal.ChronoUnit;
 
 final class BigQueryImpl extends BaseService implements BigQuery {
 
@@ -263,7 +264,11 @@ public Page getNextPage() {
     }
   }
 
-  private final BigQueryRpc bigQueryRpc;
+  private final HttpBigQueryRpc bigQueryRpc;
+
+  private static final BigQueryRetryConfig EMPTY_RETRY_CONFIG =
+      BigQueryRetryConfig.newBuilder().build();
+  private static final BigQueryRetryConfig DEFAULT_RETRY_CONFIG =
+      BigQueryRetryConfig.newBuilder()
+          .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG)
@@ -286,21 +291,39 @@ public Dataset create(DatasetInfo datasetInfo, DatasetOption... options) {
                 : datasetInfo.getDatasetId().getProject())
             .toPb();
     final Map optionsMap = optionMap(options);
-    try {
+    Span datasetCreate = null;
+    if (getOptions().isOpenTelemetryTracingEnabled()
+        && getOptions().getOpenTelemetryTracer() != null) {
+      datasetCreate =
+          getOptions()
+              .getOpenTelemetryTracer()
+              .spanBuilder("com.google.cloud.bigquery.BigQuery.createDataset")
+              .setAllAttributes(datasetInfo.getOtelAttributes())
+              .setAllAttributes(otelAttributesFromOptions(options))
+              .startSpan();
+    }
+    try (Scope datasetCreateScope = datasetCreate != null ? datasetCreate.makeCurrent() : null) {
       return Dataset.fromPb(
           this,
-          runWithRetries(
+          BigQueryRetryHelper.runWithRetries(
               new Callable() {
                 @Override
-                public com.google.api.services.bigquery.model.Dataset call() {
-                  return bigQueryRpc.create(datasetPb, optionsMap);
+                public com.google.api.services.bigquery.model.Dataset call() throws IOException {
+                  return bigQueryRpc.createSkipExceptionTranslation(datasetPb, optionsMap);
                 }
               },
               getOptions().getRetrySettings(),
-              BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
-              getOptions().getClock()));
-    } catch (RetryHelper.RetryHelperException e) {
+              getOptions().getResultRetryAlgorithm(),
+              getOptions().getClock(),
+              EMPTY_RETRY_CONFIG,
+              getOptions().isOpenTelemetryTracingEnabled(),
+              getOptions().getOpenTelemetryTracer()));
+    } catch (BigQueryRetryHelperException e) {
       throw BigQueryException.translateAndThrow(e);
+    } finally {
+      if (datasetCreate != null) {
+        datasetCreate.end();
+      }
     }
   }
 
@@ -315,21 +338,39 @@ public Table create(TableInfo tableInfo, TableOption... options) {
             .toPb();
     handleExternalTableSchema(tablePb);
     final Map optionsMap = optionMap(options);
-    try {
+    Span tableCreate = null;
+    if (getOptions().isOpenTelemetryTracingEnabled()
+        && getOptions().getOpenTelemetryTracer() != null) {
+      tableCreate =
+          getOptions()
+              .getOpenTelemetryTracer()
+              .spanBuilder("com.google.cloud.bigquery.BigQuery.createTable")
+              .setAllAttributes(tableInfo.getOtelAttributes())
+              .setAllAttributes(otelAttributesFromOptions(options))
+              .startSpan();
+    }
+    try (Scope tableCreateScope = tableCreate != null ? tableCreate.makeCurrent() : null) {
       return Table.fromPb(
           this,
-          runWithRetries(
+          BigQueryRetryHelper.runWithRetries(
              new Callable() {
                @Override
-                public com.google.api.services.bigquery.model.Table call() {
-                  return bigQueryRpc.create(tablePb, optionsMap);
+                public com.google.api.services.bigquery.model.Table call() throws IOException {
+                  return bigQueryRpc.createSkipExceptionTranslation(tablePb, optionsMap);
                }
              },
              getOptions().getRetrySettings(),
-              BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
-              getOptions().getClock()));
-    } catch (RetryHelper.RetryHelperException e) {
+              getOptions().getResultRetryAlgorithm(),
+              getOptions().getClock(),
+              EMPTY_RETRY_CONFIG,
+              getOptions().isOpenTelemetryTracingEnabled(),
+              getOptions().getOpenTelemetryTracer()));
+    } catch (BigQueryRetryHelperException e) {
       throw BigQueryException.translateAndThrow(e);
+    } finally {
+      if (tableCreate != null) {
+        tableCreate.end();
+      }
     }
   }
 
@@ -353,21 +394,39 @@ public Routine create(RoutineInfo routineInfo, RoutineOption...
options) { : routineInfo.getRoutineId().getProject()) .toPb(); final Map optionsMap = optionMap(options); - try { + Span routineCreate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineCreate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.createRoutine") + .setAllAttributes(routineInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope createRoutineScope = routineCreate != null ? routineCreate.makeCurrent() : null) { return Routine.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Routine call() { - return bigQueryRpc.create(routinePb, optionsMap); + public com.google.api.services.bigquery.model.Routine call() throws IOException { + return bigQueryRpc.createSkipExceptionTranslation(routinePb, optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (routineCreate != null) { + routineCreate.end(); + } } } @@ -403,6 +462,17 @@ Job create(JobInfo jobInfo, Supplier idProvider, JobOption... options) { final boolean idRandom = (jobInfo.getJobId() == null); final Map optionsMap = optionMap(options); + Span jobCreate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobCreate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.createJob") + .setAllAttributes(jobInfo.getJobId().getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } BigQueryException createException; // NOTE(pongad): This double-try structure is admittedly odd. // translateAndThrow itself throws, and pretends to return an exception only @@ -411,84 +481,98 @@ Job create(JobInfo jobInfo, Supplier idProvider, JobOption... options) { // Fixing this entails some work on BaseServiceException.translate. // Since that affects a bunch of APIs, we should fix this as a separate change. final JobId[] finalJobId = new JobId[1]; - try { + try (Scope jobCreateScope = jobCreate != null ? 
jobCreate.makeCurrent() : null) { try { - return Job.fromPb( - this, - BigQueryRetryHelper.runWithRetries( - new Callable() { - @Override - public com.google.api.services.bigquery.model.Job call() { - if (idRandom) { - // re-generate a new random job with the same jobInfo when jobId is not - // provided by the user - JobInfo recreatedJobInfo = - jobInfo.toBuilder().setJobId(idProvider.get()).build(); - com.google.api.services.bigquery.model.Job newJobPb = - recreatedJobInfo.setProjectId(getOptions().getProjectId()).toPb(); - finalJobId[0] = recreatedJobInfo.getJobId(); - return bigQueryRpc.create(newJobPb, optionsMap); - } else { - com.google.api.services.bigquery.model.Job jobPb = - jobInfo.setProjectId(getOptions().getProjectId()).toPb(); - return bigQueryRpc.create(jobPb, optionsMap); + try { + return Job.fromPb( + this, + BigQueryRetryHelper.runWithRetries( + new Callable() { + @Override + public com.google.api.services.bigquery.model.Job call() throws IOException { + if (idRandom) { + // re-generate a new random job with the same jobInfo when jobId is not + // provided by the user + JobInfo recreatedJobInfo = + jobInfo.toBuilder().setJobId(idProvider.get()).build(); + com.google.api.services.bigquery.model.Job newJobPb = + recreatedJobInfo.setProjectId(getOptions().getProjectId()).toPb(); + finalJobId[0] = recreatedJobInfo.getJobId(); + return bigQueryRpc.createSkipExceptionTranslation(newJobPb, optionsMap); + } else { + com.google.api.services.bigquery.model.Job jobPb = + jobInfo.setProjectId(getOptions().getProjectId()).toPb(); + return bigQueryRpc.createSkipExceptionTranslation(jobPb, optionsMap); + } } - } - }, - getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock(), - DEFAULT_RETRY_CONFIG)); - } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { - throw BigQueryException.translateAndThrow(e); + }, + getRetryOptions(optionsMap) != null + ? RetryOption.mergeToSettings( + getOptions().getRetrySettings(), getRetryOptions(optionsMap)) + : getOptions().getRetrySettings(), + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + getBigQueryRetryConfig(optionsMap) != null + ? getBigQueryRetryConfig(optionsMap) + : DEFAULT_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } catch (BigQueryException e) { + createException = e; } - } catch (BigQueryException e) { - createException = e; - } - - if (!idRandom) { - if (createException instanceof BigQueryException && createException.getCause() != null) { - /*GoogleJsonResponseException createExceptionCause = - (GoogleJsonResponseException) createException.getCause();*/ - - Pattern pattern = Pattern.compile(".*Already.*Exists:.*Job.*", Pattern.CASE_INSENSITIVE); - Matcher matcher = pattern.matcher(createException.getCause().getMessage()); - - if (matcher.find()) { - // If the Job ALREADY EXISTS, retrieve it. - Job job = this.getJob(jobInfo.getJobId(), JobOption.fields(JobField.STATISTICS)); - - long jobCreationTime = job.getStatistics().getCreationTime(); - long jobMinStaleTime = System.currentTimeMillis(); - long jobMaxStaleTime = - Instant.ofEpochMilli(jobMinStaleTime).minus(1, ChronoUnit.DAYS).toEpochMilli(); - - // Only return the job if it has been created in the past 24 hours. 
- // This is assuming any job older than 24 hours is a valid duplicate JobID - // and not a false positive like b/290419183 - if (jobCreationTime >= jobMaxStaleTime && jobCreationTime <= jobMinStaleTime) { - return job; + if (!idRandom) { + if (createException instanceof BigQueryException + && createException.getCause() != null + && createException.getCause().getMessage() != null) { + + Pattern pattern = Pattern.compile(".*Already.*Exists:.*Job.*", Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(createException.getCause().getMessage()); + + if (matcher.find()) { + // If the Job ALREADY EXISTS, retrieve it. + Job job = this.getJob(jobInfo.getJobId(), JobOption.fields(JobField.STATISTICS)); + + long jobCreationTime = job.getStatistics().getCreationTime(); + long jobMinStaleTime = System.currentTimeMillis(); + long jobMaxStaleTime = + java.time.Instant.ofEpochMilli(jobMinStaleTime) + .minus(1, java.time.temporal.ChronoUnit.DAYS) + .toEpochMilli(); + + // Only return the job if it has been created in the past 24 hours. + // This is assuming any job older than 24 hours is a valid duplicate JobID + // and not a false positive like b/290419183 + if (jobCreationTime >= jobMaxStaleTime && jobCreationTime <= jobMinStaleTime) { + return job; + } } } + throw createException; } - throw createException; - } - // If create RPC fails, it's still possible that the job has been successfully - // created, and get might work. - // We can only do this if we randomly generated the ID. Otherwise we might - // mistakenly fetch a job created by someone else. - Job job; - try { - job = getJob(finalJobId[0]); - } catch (BigQueryException e) { - throw createException; - } - if (job == null) { - throw createException; + // If create RPC fails, it's still possible that the job has been successfully + // created, and get might work. + // We can only do this if we randomly generated the ID. Otherwise we might + // mistakenly fetch a job created by someone else. + Job job; + try { + job = getJob(finalJobId[0]); + } catch (BigQueryException e) { + throw createException; + } + if (job == null) { + throw createException; + } + return job; + } finally { + if (jobCreate != null) { + jobCreate.end(); + } } - return job; } @Override @@ -500,25 +584,46 @@ public Dataset getDataset(String datasetId, DatasetOption... options) { public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) { final DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); final Map optionsMap = optionMap(options); - try { + Span datasetGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getDataset") + .setAllAttributes(completeDatasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetGetScope = datasetGet != null ? 
datasetGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Dataset answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Dataset call() { - return bigQueryRpc.getDataset( + public com.google.api.services.bigquery.model.Dataset call() throws IOException { + return bigQueryRpc.getDatasetSkipExceptionTranslation( completeDatasetId.getProject(), completeDatasetId.getDataset(), optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Dataset not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Dataset.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Dataset not found"); + } + return null; } - return answer == null ? null : Dataset.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (datasetGet != null) { + datasetGet.end(); + } } } @@ -529,7 +634,24 @@ public Page listDatasets(DatasetListOption... options) { @Override public Page listDatasets(String projectId, DatasetListOption... options) { - return listDatasets(projectId, getOptions(), optionMap(options)); + Span datasetsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listDatasets") + .setAttribute("bq.dataset.project_id", projectId) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetsListScope = datasetsList != null ? 
datasetsList.makeCurrent() : null) { + return listDatasets(projectId, getOptions(), optionMap(options)); + } finally { + if (datasetsList != null) { + datasetsList.end(); + } + } } private static Page listDatasets( @@ -538,18 +660,23 @@ private static Page listDatasets( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override public Tuple> - call() { - return serviceOptions.getBigQueryRpcV2().listDatasets(projectId, optionsMap); + call() throws IOException { + return serviceOptions + .getBigQueryRpcV2() + .listDatasetsSkipExceptionTranslation(projectId, optionsMap); } }, serviceOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); return new PageImpl<>( new DatasetPageFetcher(projectId, serviceOptions, cursor, optionsMap), @@ -562,7 +689,7 @@ public Dataset apply(com.google.api.services.bigquery.model.Dataset dataset) { return Dataset.fromPb(serviceOptions.getService(), dataset); } })); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -577,7 +704,7 @@ private static Page listProjects( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override @@ -601,7 +728,7 @@ public Project apply(com.google.api.services.bigquery.model.ProjectList.Projects return new Project(projects.getId(), projects.getFriendlyName()); } })); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -615,20 +742,41 @@ public boolean delete(String datasetId, DatasetDeleteOption... options) { public boolean delete(DatasetId datasetId, DatasetDeleteOption... options) { final DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); final Map optionsMap = optionMap(options); - try { - return runWithRetries( + Span datasetDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteDataset") + .setAllAttributes(datasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetDeleteScope = datasetDelete != null ? 
datasetDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteDataset( + public Boolean call() throws IOException { + return bigQueryRpc.deleteDatasetSkipExceptionTranslation( completeDatasetId.getProject(), completeDatasetId.getDataset(), optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (datasetDelete != null) { + datasetDelete.end(); + } } } @@ -644,22 +792,42 @@ public boolean delete(TableId tableId) { Strings.isNullOrEmpty(tableId.getProject()) ? getOptions().getProjectId() : tableId.getProject()); - try { - return runWithRetries( + Span tableDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteTable") + .setAllAttributes(tableId.getOtelAttributes()) + .startSpan(); + } + try (Scope tableDeleteScope = tableDelete != null ? tableDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteTable( + public Boolean call() throws IOException { + return bigQueryRpc.deleteTableSkipExceptionTranslation( completeTableId.getProject(), completeTableId.getDataset(), completeTableId.getTable()); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (tableDelete != null) { + tableDelete.end(); + } } } @@ -670,22 +838,42 @@ public boolean delete(ModelId modelId) { Strings.isNullOrEmpty(modelId.getProject()) ? getOptions().getProjectId() : modelId.getProject()); - try { - return runWithRetries( + Span modelDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteModel") + .setAllAttributes(modelId.getOtelAttributes()) + .startSpan(); + } + try (Scope modelDeleteScope = modelDelete != null ? 
modelDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteModel( + public Boolean call() throws IOException { + return bigQueryRpc.deleteModelSkipExceptionTranslation( completeModelId.getProject(), completeModelId.getDataset(), completeModelId.getModel()); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (modelDelete != null) { + modelDelete.end(); + } } } @@ -696,22 +884,42 @@ public boolean delete(RoutineId routineId) { Strings.isNullOrEmpty(routineId.getProject()) ? getOptions().getProjectId() : routineId.getProject()); - try { - return runWithRetries( + Span routineDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteRoutine") + .setAllAttributes(routineId.getOtelAttributes()) + .startSpan(); + } + try (Scope routineDeleteScope = routineDelete != null ? routineDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteRoutine( + public Boolean call() throws IOException { + return bigQueryRpc.deleteRoutineSkipExceptionTranslation( completeRoutineId.getProject(), completeRoutineId.getDataset(), completeRoutineId.getRoutine()); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (routineDelete != null) { + routineDelete.end(); + } } } @@ -722,20 +930,37 @@ public boolean delete(JobId jobId) { Strings.isNullOrEmpty(jobId.getProject()) ? 
getOptions().getProjectId() : jobId.getProject()); + Span jobDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteJob") + .setAllAttributes(completeJobId.getOtelAttributes()) + .startSpan(); + } try { - return runWithRetries( + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteJob( + public Boolean call() throws IOException { + return bigQueryRpc.deleteJobSkipExceptionTranslation( completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation()); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (jobDelete != null) { + jobDelete.end(); + } } } @@ -744,21 +969,39 @@ public Dataset update(DatasetInfo datasetInfo, DatasetOption... options) { final com.google.api.services.bigquery.model.Dataset datasetPb = datasetInfo.setProjectId(getOptions().getProjectId()).toPb(); final Map optionsMap = optionMap(options); - try { + Span datasetUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateDataset") + .setAllAttributes(datasetInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetUpdateScope = datasetUpdate != null ? datasetUpdate.makeCurrent() : null) { return Dataset.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Dataset call() { - return bigQueryRpc.patch(datasetPb, optionsMap); + public com.google.api.services.bigquery.model.Dataset call() throws IOException { + return bigQueryRpc.patchSkipExceptionTranslation(datasetPb, optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (datasetUpdate != null) { + datasetUpdate.end(); + } } } @@ -773,21 +1016,39 @@ public Table update(TableInfo tableInfo, TableOption... options) { .toPb(); handleExternalTableSchema(tablePb); final Map optionsMap = optionMap(options); - try { + Span tableUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateTable") + .setAllAttributes(tableInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tableUpdateScope = tableUpdate != null ? 
tableUpdate.makeCurrent() : null) { return Table.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Table call() { - return bigQueryRpc.patch(tablePb, optionsMap); + public com.google.api.services.bigquery.model.Table call() throws IOException { + return bigQueryRpc.patchSkipExceptionTranslation(tablePb, optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (tableUpdate != null) { + tableUpdate.end(); + } } } @@ -801,21 +1062,39 @@ public Model update(ModelInfo modelInfo, ModelOption... options) { : modelInfo.getModelId().getProject()) .toPb(); final Map optionsMap = optionMap(options); - try { + Span modelUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateModel") + .setAllAttributes(modelInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelUpdateScope = modelUpdate != null ? modelUpdate.makeCurrent() : null) { return Model.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Model call() { - return bigQueryRpc.patch(modelPb, optionsMap); + public com.google.api.services.bigquery.model.Model call() throws IOException { + return bigQueryRpc.patchSkipExceptionTranslation(modelPb, optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (modelUpdate != null) { + modelUpdate.end(); + } } } @@ -829,21 +1108,39 @@ public Routine update(RoutineInfo routineInfo, RoutineOption... options) { : routineInfo.getRoutineId().getProject()) .toPb(); final Map optionsMap = optionMap(options); - try { + Span routineUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateRoutine") + .setAllAttributes(routineInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routineUpdateScope = routineUpdate != null ? 
routineUpdate.makeCurrent() : null) { return Routine.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Routine call() { - return bigQueryRpc.update(routinePb, optionsMap); + public com.google.api.services.bigquery.model.Routine call() throws IOException { + return bigQueryRpc.updateSkipExceptionTranslation(routinePb, optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (routineUpdate != null) { + routineUpdate.end(); + } } } @@ -862,13 +1159,24 @@ public Table getTable(TableId tableId, TableOption... options) { ? getOptions().getProjectId() : tableId.getProject()); final Map optionsMap = optionMap(options); - try { + Span tableGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getTable") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tableGetScope = tableGet != null ? tableGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Table answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Table call() { - return bigQueryRpc.getTable( + public com.google.api.services.bigquery.model.Table call() throws IOException { + return bigQueryRpc.getTableSkipExceptionTranslation( completeTableId.getProject(), completeTableId.getDataset(), completeTableId.getTable(), @@ -876,14 +1184,24 @@ public com.google.api.services.bigquery.model.Table call() { } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Table not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Table.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Table not found"); + } + return null; } - return answer == null ? null : Table.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (tableGet != null) { + tableGet.end(); + } } } @@ -900,13 +1218,24 @@ public Model getModel(ModelId modelId, ModelOption... options) { ? 
getOptions().getProjectId() : modelId.getProject()); final Map optionsMap = optionMap(options); - try { + Span modelGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getModel") + .setAllAttributes(modelId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelGetScope = modelGet != null ? modelGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Model answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Model call() { - return bigQueryRpc.getModel( + public com.google.api.services.bigquery.model.Model call() throws IOException { + return bigQueryRpc.getModelSkipExceptionTranslation( completeModelId.getProject(), completeModelId.getDataset(), completeModelId.getModel(), @@ -914,14 +1243,24 @@ public com.google.api.services.bigquery.model.Model call() { } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Model not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Model.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Model not found"); + } + return null; } - return answer == null ? null : Model.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (modelGet != null) { + modelGet.end(); + } } } @@ -938,13 +1277,24 @@ public Routine getRoutine(RoutineId routineId, RoutineOption... options) { ? getOptions().getProjectId() : routineId.getProject()); final Map optionsMap = optionMap(options); - try { + Span routineGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getRoutine") + .setAllAttributes(routineId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routineGetScope = routineGet != null ? 
routineGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Routine answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Routine call() { - return bigQueryRpc.getRoutine( + public com.google.api.services.bigquery.model.Routine call() throws IOException { + return bigQueryRpc.getRoutineSkipExceptionTranslation( completeRoutineId.getProject(), completeRoutineId.getDataset(), completeRoutineId.getRoutine(), @@ -952,75 +1302,203 @@ public com.google.api.services.bigquery.model.Routine call() { } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Routine not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Routine.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Routine not found"); + } + return null; } - return answer == null ? null : Routine.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (routineGet != null) { + routineGet.end(); + } } } @Override public Page
listTables(String datasetId, TableListOption... options) { - return listTables( - DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + Span tablesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tablesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listTables") + .setAllAttributes(DatasetId.of(datasetId).getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tablesListScope = tablesList != null ? tablesList.makeCurrent() : null) { + return listTables( + DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + } finally { + if (tablesList != null) { + tablesList.end(); + } + } } @Override public Page
listTables(DatasetId datasetId, TableListOption... options) { DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); - return listTables(completeDatasetId, getOptions(), optionMap(options)); + Span tablesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tablesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listTables") + .setAllAttributes(completeDatasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tablesListScope = tablesList != null ? tablesList.makeCurrent() : null) { + return listTables(completeDatasetId, getOptions(), optionMap(options)); + } finally { + if (tablesList != null) { + tablesList.end(); + } + } } @Override public Page listModels(String datasetId, ModelListOption... options) { - return listModels( - DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + Span modelsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listModels") + .setAllAttributes(DatasetId.of(datasetId).getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelsListScope = modelsList != null ? modelsList.makeCurrent() : null) { + return listModels( + DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + } finally { + if (modelsList != null) { + modelsList.end(); + } + } } @Override public Page listModels(DatasetId datasetId, ModelListOption... options) { DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); - return listModels(completeDatasetId, getOptions(), optionMap(options)); + Span modelsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listModels") + .setAllAttributes(datasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelsListScope = modelsList != null ? modelsList.makeCurrent() : null) { + return listModels(completeDatasetId, getOptions(), optionMap(options)); + } finally { + if (modelsList != null) { + modelsList.end(); + } + } } @Override public Page listRoutines(String datasetId, RoutineListOption... options) { - return listRoutines( - DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + Span routinesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routinesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listRoutines") + .setAllAttributes(DatasetId.of(datasetId).getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routinesListScope = routinesList != null ? routinesList.makeCurrent() : null) { + return listRoutines( + DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + } finally { + if (routinesList != null) { + routinesList.end(); + } + } } @Override public Page listRoutines(DatasetId datasetId, RoutineListOption... 
options) { DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); - return listRoutines(completeDatasetId, getOptions(), optionMap(options)); + Span routinesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routinesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listRoutines") + .setAllAttributes(datasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routinesListScope = routinesList != null ? routinesList.makeCurrent() : null) { + return listRoutines(completeDatasetId, getOptions(), optionMap(options)); + } finally { + if (routinesList != null) { + routinesList.end(); + } + } } @Override public List listPartitions(TableId tableId) { - List partitions = new ArrayList(); - String partitionsTable = tableId.getTable() + "$__PARTITIONS_SUMMARY__"; - TableId metaTableId = - tableId.getProject() == null - ? TableId.of(tableId.getDataset(), partitionsTable) - : TableId.of(tableId.getProject(), tableId.getDataset(), partitionsTable); - Table metaTable = getTable(metaTableId); - Schema metaSchema = metaTable.getDefinition().getSchema(); - String partition_id = null; - for (Field field : metaSchema.getFields()) { - if (field.getName().equals("partition_id")) { - partition_id = field.getName(); - break; - } + Span listPartitions = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + listPartitions = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listPartitions") + .setAllAttributes(tableId.getOtelAttributes()) + .startSpan(); } - TableResult result = metaTable.list(metaSchema); - for (FieldValueList list : result.iterateAll()) { - partitions.add(list.get(partition_id).getStringValue()); + try (Scope listPartitionsScope = listPartitions != null ? listPartitions.makeCurrent() : null) { + List partitions = new ArrayList(); + String partitionsTable = tableId.getTable() + "$__PARTITIONS_SUMMARY__"; + TableId metaTableId = + tableId.getProject() == null + ? TableId.of(tableId.getDataset(), partitionsTable) + : TableId.of(tableId.getProject(), tableId.getDataset(), partitionsTable); + Table metaTable = getTable(metaTableId); + Schema metaSchema = metaTable.getDefinition().getSchema(); + String partition_id = null; + for (Field field : metaSchema.getFields()) { + if (field.getName().equals("partition_id")) { + partition_id = field.getName(); + break; + } + } + TableResult result = metaTable.list(metaSchema); + for (FieldValueList list : result.iterateAll()) { + partitions.add(list.get(partition_id).getStringValue()); + } + return partitions; + } finally { + if (listPartitions != null) { + listPartitions.end(); + } } - return partitions; } private static Page
listTables( @@ -1029,20 +1507,24 @@ private static Page
listTables( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override - public Tuple> - call() { + public Tuple> call() + throws IOException { return serviceOptions .getBigQueryRpcV2() - .listTables(datasetId.getProject(), datasetId.getDataset(), optionsMap); + .listTablesSkipExceptionTranslation( + datasetId.getProject(), datasetId.getDataset(), optionsMap); } }, serviceOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); Iterable
tables = Iterables.transform( @@ -1055,7 +1537,7 @@ public Table apply(com.google.api.services.bigquery.model.Table table) { }); return new PageImpl<>( new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, tables); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -1066,20 +1548,24 @@ private static Page listModels( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override - public Tuple> - call() { + public Tuple> call() + throws IOException { return serviceOptions .getBigQueryRpcV2() - .listModels(datasetId.getProject(), datasetId.getDataset(), optionsMap); + .listModelsSkipExceptionTranslation( + datasetId.getProject(), datasetId.getDataset(), optionsMap); } }, serviceOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); Iterable models = Iterables.transform( @@ -1092,7 +1578,7 @@ public Model apply(com.google.api.services.bigquery.model.Model model) { }); return new PageImpl<>( new ModelPageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, models); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -1103,20 +1589,24 @@ private static Page listRoutines( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override public Tuple> - call() { + call() throws IOException { return serviceOptions .getBigQueryRpcV2() - .listRoutines(datasetId.getProject(), datasetId.getDataset(), optionsMap); + .listRoutinesSkipExceptionTranslation( + datasetId.getProject(), datasetId.getDataset(), optionsMap); } }, serviceOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); Iterable routines = Iterables.transform( @@ -1129,7 +1619,7 @@ public Routine apply(com.google.api.services.bigquery.model.Routine routinePb) { }); return new PageImpl<>( new RoutinePageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, routines); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -1167,31 +1657,54 @@ public Rows apply(RowToInsert rowToInsert) { requestPb.setRows(rowsPb); TableDataInsertAllResponse responsePb; - if (allInsertIdsSet[0]) { - // allowing retries only if all row insertIds are set (used for deduplication) - try { + Span insertAll = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + insertAll = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.insertAll") + .setAllAttributes(request.getOtelAttributes()) + .startSpan(); + } + try (Scope insertAllScope = insertAll != null ? 
insertAll.makeCurrent() : null) { + if (allInsertIdsSet[0]) { + // allowing retries only if all row insertIds are set (used for deduplication) + try { + responsePb = + BigQueryRetryHelper.runWithRetries( + new Callable() { + @Override + public TableDataInsertAllResponse call() throws Exception { + return bigQueryRpc.insertAllSkipExceptionTranslation( + tableId.getProject(), + tableId.getDataset(), + tableId.getTable(), + requestPb); + } + }, + getOptions().getRetrySettings(), + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } else { + // Use insertAll that translate the exception as we are not retrying. responsePb = - runWithRetries( - new Callable() { - @Override - public TableDataInsertAllResponse call() throws Exception { - return bigQueryRpc.insertAll( - tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb); - } - }, - getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelperException e) { - throw BigQueryException.translateAndThrow(e); + bigQueryRpc.insertAll( + tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb); } - } else { - responsePb = - bigQueryRpc.insertAll( - tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb); - } - return InsertAllResponse.fromPb(responsePb); + return InsertAllResponse.fromPb(responsePb); + } finally { + if (insertAll != null) { + insertAll.end(); + } + } } @Override @@ -1215,11 +1728,28 @@ public TableResult listTableData( public TableResult listTableData(TableId tableId, Schema schema, TableDataListOption... options) { Tuple, Long> data = listTableData(tableId, schema, getOptions(), optionMap(options)); - return TableResult.newBuilder() - .setSchema(schema) - .setTotalRows(data.y()) - .setPageNoSchema(data.x()) - .build(); + Span tableDataList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableDataList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listTableData") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tableDataListScope = tableDataList != null ? tableDataList.makeCurrent() : null) { + return TableResult.newBuilder() + .setSchema(schema) + .setTotalRows(data.y()) + .setPageNoSchema(data.x()) + .build(); + } finally { + if (tableDataList != null) { + tableDataList.end(); + } + } } private static Tuple, Long> listTableData( @@ -1234,13 +1764,13 @@ private static Tuple, Long> listTableData( ? 
serviceOptions.getProjectId() : tableId.getProject()); TableDataList result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public TableDataList call() { + public TableDataList call() throws IOException { return serviceOptions .getBigQueryRpcV2() - .listTableData( + .listTableDataSkipExceptionTranslation( completeTableId.getProject(), completeTableId.getDataset(), completeTableId.getTable(), @@ -1248,8 +1778,11 @@ public TableDataList call() { } }, serviceOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.getPageToken(); Map pageOptionMap = Strings.isNullOrEmpty(cursor) ? optionsMap : optionMap(TableDataListOption.startIndex(0)); @@ -1257,15 +1790,15 @@ public TableDataList call() { new PageImpl<>( new TableDataPageFetcher(tableId, schema, serviceOptions, cursor, pageOptionMap), cursor, - transformTableData(result.getRows(), schema)), + transformTableData(result.getRows(), schema, serviceOptions.getUseInt64Timestamps())), result.getTotalRows()); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } private static Iterable transformTableData( - Iterable tableDataPb, final Schema schema) { + Iterable tableDataPb, final Schema schema, boolean useInt64Timestamps) { return ImmutableList.copyOf( Iterables.transform( tableDataPb != null ? tableDataPb : ImmutableList.of(), @@ -1274,7 +1807,7 @@ private static Iterable transformTableData( @Override public FieldValueList apply(TableRow rowPb) { - return FieldValueList.fromPb(rowPb.getF(), fields); + return FieldValueList.fromPb(rowPb.getF(), fields, useInt64Timestamps); } })); } @@ -1294,13 +1827,24 @@ public Job getJob(JobId jobId, JobOption... options) { jobId.getLocation() == null && getOptions().getLocation() != null ? getOptions().getLocation() : jobId.getLocation()); - try { + Span jobGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getJob") + .setAllAttributes(completeJobId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope jobGetScope = jobGet != null ? 
jobGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Job answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Job call() { - return bigQueryRpc.getJob( + public com.google.api.services.bigquery.model.Job call() throws IOException { + return bigQueryRpc.getJobSkipExceptionTranslation( completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation(), @@ -1308,48 +1852,82 @@ public com.google.api.services.bigquery.model.Job call() { } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Job not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Job.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Job not found"); + } + return null; } - return answer == null ? null : Job.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (jobGet != null) { + jobGet.end(); + } } } @Override public Page listJobs(JobListOption... options) { - return listJobs(getOptions(), optionMap(options)); + Span jobsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listJobs") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope jobsListScope = jobsList != null ? 
jobsList.makeCurrent() : null) { + return listJobs(getOptions(), optionMap(options)); + } finally { + if (jobsList != null) { + jobsList.end(); + } + } } private static Page listJobs( final BigQueryOptions serviceOptions, final Map optionsMap) { - Tuple> result = - runWithRetries( - new Callable>>() { - @Override - public Tuple> call() { - return serviceOptions - .getBigQueryRpcV2() - .listJobs(serviceOptions.getProjectId(), optionsMap); - } - }, - serviceOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - serviceOptions.getClock()); - String cursor = result.x(); - Iterable jobs = - Iterables.transform( - result.y(), - new Function() { - @Override - public Job apply(com.google.api.services.bigquery.model.Job job) { - return Job.fromPb(serviceOptions.getService(), job); - } - }); - return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); + try { + Tuple> result = + BigQueryRetryHelper.runWithRetries( + new Callable>>() { + @Override + public Tuple> call() + throws IOException { + return serviceOptions + .getBigQueryRpcV2() + .listJobsSkipExceptionTranslation(serviceOptions.getProjectId(), optionsMap); + } + }, + serviceOptions.getRetrySettings(), + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); + String cursor = result.x(); + Iterable jobs = + Iterables.transform( + result.y(), + new Function() { + @Override + public Job apply(com.google.api.services.bigquery.model.Job job) { + return Job.fromPb(serviceOptions.getService(), job); + } + }); + return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); + } catch (BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } } @Override @@ -1366,70 +1944,86 @@ public boolean cancel(JobId jobId) { jobId.getLocation() == null && getOptions().getLocation() != null ? getOptions().getLocation() : jobId.getLocation()); - try { - return runWithRetries( + Span jobCancel = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobCancel = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.cancelJob") + .setAllAttributes(completeJobId.getOtelAttributes()) + .startSpan(); + } + try (Scope jobCancelScope = jobCancel != null ? jobCancel.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.cancel( + public Boolean call() throws IOException { + return bigQueryRpc.cancelSkipExceptionTranslation( completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation()); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (jobCancel != null) { + jobCancel.end(); + } } } @Override public TableResult query(QueryJobConfiguration configuration, JobOption... 
options) throws InterruptedException, JobException { - Job.checkNotDryRun(configuration, "query"); - - if (getOptions().isQueryPreviewEnabled()) { - configuration = - configuration - .toBuilder() - .setJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL) - .build(); - } - - // If all parameters passed in configuration are supported by the query() method on the backend, - // put on fast path - QueryRequestInfo requestInfo = new QueryRequestInfo(configuration); - if (requestInfo.isFastQuerySupported(null)) { - String projectId = getOptions().getProjectId(); - QueryRequest content = requestInfo.toPb(); - if (getOptions().getLocation() != null) { - content.setLocation(getOptions().getLocation()); - } - return queryRpc(projectId, content, options); - } - // Otherwise, fall back to the existing create query job logic - return create(JobInfo.of(configuration), options).getQueryResults(); + return query(configuration, null, options); } - private TableResult queryRpc( - final String projectId, final QueryRequest content, JobOption... options) + private Object queryRpc(final String projectId, final QueryRequest content, JobOption... options) throws InterruptedException { com.google.api.services.bigquery.model.QueryResponse results; - try { + Span queryRpc = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + queryRpc = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.queryRpc") + .setAttribute("bq.query.project_id", projectId) + .setAllAttributes(otelAttributesFromQueryRequest(content)) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope queryRpcScope = queryRpc != null ? queryRpc.makeCurrent() : null) { results = BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.QueryResponse call() { - return bigQueryRpc.queryRpc(projectId, content); + public com.google.api.services.bigquery.model.QueryResponse call() + throws IOException { + return bigQueryRpc.queryRpcSkipExceptionTranslation(projectId, content); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, + getOptions().getResultRetryAlgorithm(), getOptions().getClock(), - DEFAULT_RETRY_CONFIG); + DEFAULT_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (queryRpc != null) { + queryRpc.end(); + } } if (results.getErrors() != null) { @@ -1457,7 +2051,7 @@ public com.google.api.services.bigquery.model.QueryResponse call() { // here, but this is left as future work. 
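Note: every public entry point touched by this change wraps its RPC in the same OpenTelemetry pattern: build a span only when tracing is enabled, make it current with try-with-resources, and end it in a finally block. A minimal sketch of that pattern, using a hypothetical withSpan helper that is not part of this diff:

    import io.opentelemetry.api.trace.Span;
    import io.opentelemetry.api.trace.Tracer;
    import io.opentelemetry.context.Scope;
    import java.util.concurrent.Callable;

    final class TraceSketch {
      // Hypothetical helper; the diff inlines this pattern at each call site.
      static <T> T withSpan(Tracer tracer, boolean enabled, String name, Callable<T> body)
          throws Exception {
        Span span = (enabled && tracer != null) ? tracer.spanBuilder(name).startSpan() : null;
        // Scope is AutoCloseable; try-with-resources tolerates a null resource.
        try (Scope unused = span != null ? span.makeCurrent() : null) {
          return body.call();
        } finally {
          if (span != null) {
            span.end(); // always end the span, even when the call throws
          }
        }
      }
    }
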
JobId jobId = JobId.fromPb(results.getJobReference()); Job job = getJob(jobId, options); - return job.getQueryResults(); + return job; } if (results.getPageToken() != null) { @@ -1471,7 +2065,8 @@ public com.google.api.services.bigquery.model.QueryResponse call() { // fetch next pages of results new QueryPageFetcher(jobId, schema, getOptions(), cursor, optionMap(options)), cursor, - transformTableData(results.getRows(), schema))) + transformTableData( + results.getRows(), schema, getOptions().getUseInt64Timestamps()))) .setJobId(jobId) .setQueryId(results.getQueryId()) .build(); @@ -1484,7 +2079,8 @@ public com.google.api.services.bigquery.model.QueryResponse call() { new PageImpl<>( new TableDataPageFetcher(null, schema, getOptions(), null, optionMap(options)), null, - transformTableData(results.getRows(), schema))) + transformTableData( + results.getRows(), schema, getOptions().getUseInt64Timestamps()))) // Return the JobID of the successful job .setJobId( results.getJobReference() != null ? JobId.fromPb(results.getJobReference()) : null) @@ -1495,38 +2091,100 @@ public com.google.api.services.bigquery.model.QueryResponse call() { @Override public TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options) throws InterruptedException, JobException { + Object result = queryWithTimeout(configuration, jobId, null, options); + if (result instanceof Job) { + return ((Job) result).getQueryResults(); + } + return (TableResult) result; + } + + @Override + public Object queryWithTimeout( + QueryJobConfiguration configuration, JobId jobId, Long timeoutMs, JobOption... options) + throws InterruptedException, JobException { Job.checkNotDryRun(configuration, "query"); - // If all parameters passed in configuration are supported by the query() method on the backend, - // put on fast path - QueryRequestInfo requestInfo = new QueryRequestInfo(configuration); - if (requestInfo.isFastQuerySupported(jobId)) { - // Be careful when setting the projectID in JobId, if a projectID is specified in the JobId, - // the job created by the query method will use that project. This may cause the query to - // fail with "Access denied" if the project do not have enough permissions to run the job. - - String projectId = - jobId.getProject() != null ? jobId.getProject() : getOptions().getProjectId(); - QueryRequest content = requestInfo.toPb(); - // Be careful when setting the location, if a location is specified in the BigQueryOption or - // JobId the job created by the query method will be in that location, even if the table to be - // queried is in a different location. This may cause the query to fail with - // "BigQueryException: Not found" - if (jobId.getLocation() != null) { - content.setLocation(jobId.getLocation()); - } else if (getOptions().getLocation() != null) { - content.setLocation(getOptions().getLocation()); - } + // If JobCreationMode is not explicitly set, update it with default value; + if (configuration.getJobCreationMode() == null) { + configuration = + configuration.toBuilder() + .setJobCreationMode(getOptions().getDefaultJobCreationMode()) + .build(); + } - return queryRpc(projectId, content, options); + Span querySpan = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + querySpan = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.queryWithTimeout") + .setAllAttributes(jobId != null ? 
jobId.getOtelAttributes() : null) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope queryScope = querySpan != null ? querySpan.makeCurrent() : null) { + // If all parameters passed in configuration are supported by the query() method on the + // backend, + // put on fast path + QueryRequestInfo requestInfo = + new QueryRequestInfo(configuration, getOptions().getUseInt64Timestamps()); + if (requestInfo.isFastQuerySupported(jobId)) { + // Be careful when setting the projectID in JobId, if a projectID is specified in the JobId, + // the job created by the query method will use that project. This may cause the query to + // fail with "Access denied" if the project do not have enough permissions to run the job. + + String projectId = + jobId != null && jobId.getProject() != null + ? jobId.getProject() + : getOptions().getProjectId(); + QueryRequest content = requestInfo.toPb(); + // Be careful when setting the location, if a location is specified in the BigQueryOption or + // JobId the job created by the query method will be in that location, even if the table to + // be + // queried is in a different location. This may cause the query to fail with + // "BigQueryException: Not found" + if (jobId != null && jobId.getLocation() != null) { + content.setLocation(jobId.getLocation()); + } else if (getOptions().getLocation() != null) { + content.setLocation(getOptions().getLocation()); + } + if (timeoutMs != null) { + content.setTimeoutMs(timeoutMs); + } + + return queryRpc(projectId, content, options); + } + return create(JobInfo.of(jobId, configuration), options).getQueryResults(); + } finally { + if (querySpan != null) { + querySpan.end(); + } } - return create(JobInfo.of(jobId, configuration), options).getQueryResults(); } @Override public QueryResponse getQueryResults(JobId jobId, QueryResultsOption... options) { Map optionsMap = optionMap(options); - return getQueryResults(jobId, getOptions(), optionsMap); + Span getQueryResults = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + getQueryResults = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getQueryResults") + .setAllAttributes(jobId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope getQueryResultsScope = + getQueryResults != null ? 
getQueryResults.makeCurrent() : null) { + return getQueryResults(jobId, getOptions(), optionsMap); + } finally { + if (getQueryResults != null) { + getQueryResults.end(); + } + } } private static QueryResponse getQueryResults( @@ -1545,10 +2203,10 @@ private static QueryResponse getQueryResults( BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public GetQueryResultsResponse call() { + public GetQueryResultsResponse call() throws IOException { return serviceOptions .getBigQueryRpcV2() - .getQueryResults( + .getQueryResultsSkipExceptionTranslation( completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation(), @@ -1556,9 +2214,11 @@ public GetQueryResultsResponse call() { } }, serviceOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, + serviceOptions.getResultRetryAlgorithm(), serviceOptions.getClock(), - DEFAULT_RETRY_CONFIG); + DEFAULT_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); TableSchema schemaPb = results.getSchema(); @@ -1575,7 +2235,7 @@ public GetQueryResultsResponse call() { .setTotalRows(results.getTotalRows() == null ? 0 : results.getTotalRows().longValue()) .setErrors(errors.build()) .build(); - } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -1602,21 +2262,40 @@ public Policy getIamPolicy(TableId tableId, IAMOption... options) { ? getOptions().getProjectId() : tableId.getProject()); - try { - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(options); + Span iamPolicyGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + iamPolicyGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getIamPolicy") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope iamPolicyGetScope = iamPolicyGet != null ? iamPolicyGet.makeCurrent() : null) { return convertFromApiPolicy( - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Policy call() { - return bigQueryRpc.getIamPolicy(completeTableId.getIAMResourceName(), optionsMap); + public com.google.api.services.bigquery.model.Policy call() throws IOException { + return bigQueryRpc.getIamPolicySkipExceptionTranslation( + completeTableId.getIAMResourceName(), optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (iamPolicyGet != null) { + iamPolicyGet.end(); + } } } @@ -1627,22 +2306,42 @@ public Policy setIamPolicy(TableId tableId, final Policy policy, IAMOption... op Strings.isNullOrEmpty(tableId.getProject()) ? 
getOptions().getProjectId() : tableId.getProject()); - try { - final Map optionsMap = optionMap(options); + + final Map optionsMap = optionMap(options); + Span iamPolicySet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + iamPolicySet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.setIamPolicy") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromPolicy(policy)) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope iamPolicySetScope = iamPolicySet != null ? iamPolicySet.makeCurrent() : null) { return convertFromApiPolicy( - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Policy call() { - return bigQueryRpc.setIamPolicy( + public com.google.api.services.bigquery.model.Policy call() throws IOException { + return bigQueryRpc.setIamPolicySkipExceptionTranslation( completeTableId.getIAMResourceName(), convertToApiPolicy(policy), optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (iamPolicySet != null) { + iamPolicySet.end(); + } } } @@ -1654,25 +2353,46 @@ public List testIamPermissions( Strings.isNullOrEmpty(tableId.getProject()) ? getOptions().getProjectId() : tableId.getProject()); - try { - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(options); + Span testIamPermissions = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + testIamPermissions = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.testIamPermissions") + .setAllAttributes(tableId.getOtelAttributes()) + .setAttribute("bq.iam.permissions", permissions.toString()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope testIamPermissionsScope = + testIamPermissions != null ? testIamPermissions.makeCurrent() : null) { com.google.api.services.bigquery.model.TestIamPermissionsResponse response = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() { - return bigQueryRpc.testIamPermissions( + public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() + throws IOException { + return bigQueryRpc.testIamPermissionsSkipExceptionTranslation( completeTableId.getIAMResourceName(), permissions, optionsMap); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); return response.getPermissions() == null ? 
ImmutableList.of() : ImmutableList.copyOf(response.getPermissions()); - } catch (RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (testIamPermissions != null) { + testIamPermissions.end(); + } } } @@ -1685,4 +2405,55 @@ public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() } return optionMap; } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + private static Attributes otelAttributesFromOptions(Option... options) { + Attributes attributes = Attributes.builder().build(); + for (Option option : options) { + attributes = + attributes.toBuilder() + .put("bq.option." + option.getRpcOption().toString(), option.getValue().toString()) + .build(); + } + return attributes; + } + + private static Attributes otelAttributesFromQueryRequest(QueryRequest request) { + return Attributes.builder() + .put("bq.query.dry_run", getFieldAsString(request.getDryRun())) + .put("bq.query.job_creation_mode", getFieldAsString(request.getJobCreationMode())) + .put("bq.query.kind", getFieldAsString(request.getKind())) + .put("bq.query.location", getFieldAsString(request.getLocation())) + .put("bq.query.request_id", getFieldAsString(request.getRequestId())) + .put("bq.query.use_query_cache", getFieldAsString(request.getUseQueryCache())) + .build(); + } + + private static Attributes otelAttributesFromPolicy(Policy policy) { + return Attributes.builder() + .put("bq.policy.version", getFieldAsString(policy.getVersion())) + .put("bq.policy.bindings", getFieldAsString(policy.getBindings())) + .build(); + } + + static BigQueryRetryConfig getBigQueryRetryConfig(Map options) { + return (BigQueryRetryConfig) + options.getOrDefault(BigQueryRpc.Option.BIGQUERY_RETRY_CONFIG, null); + } + + static RetryOption[] getRetryOptions(Map options) { + return (RetryOption[]) options.getOrDefault(BigQueryRpc.Option.RETRY_OPTIONS, null); + } + + private static boolean isRetryErrorCodeHttpNotFound(BigQueryRetryHelperException e) { + if (e.getCause() instanceof BigQueryException) { + if (((BigQueryException) e.getCause()).getCode() == HTTP_NOT_FOUND) { + return true; + } + } + return false; + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java index e53439f02..7adb58d3a 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java @@ -16,16 +16,18 @@ package com.google.cloud.bigquery; +import com.google.api.core.BetaApi; +import com.google.api.gax.retrying.ResultRetryAlgorithm; import com.google.cloud.ServiceDefaults; import com.google.cloud.ServiceOptions; import com.google.cloud.ServiceRpc; import com.google.cloud.TransportOptions; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; -import com.google.cloud.bigquery.spi.v2.BigQueryRpc; import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.cloud.http.HttpTransportOptions; -import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableSet; +import io.opentelemetry.api.trace.Tracer; import java.util.Set; public class BigQueryOptions extends ServiceOptions { @@ -34,11 +36,15 @@ public class BigQueryOptions extends ServiceOptions { 
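Note: with isRetryErrorCodeHttpNotFound in place, a 404 from the job APIs now surfaces as a sentinel value rather than a translated exception unless throwNotFound is set. A sketch of the caller-visible behavior (the job ID below is illustrative):

    import com.google.cloud.bigquery.BigQuery;
    import com.google.cloud.bigquery.BigQueryOptions;
    import com.google.cloud.bigquery.Job;
    import com.google.cloud.bigquery.JobId;

    public class NotFoundSketch {
      public static void main(String[] args) {
        BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
        // getJob(...) returns null for a missing job instead of throwing,
        // and cancel(...) returns false, unless setThrowNotFound(true) is used.
        Job job = bigquery.getJob(JobId.of("job-that-does-not-exist"));
        System.out.println(job == null ? "job not found" : job.getStatus().toString());
      }
    }
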
private static final int DEFAULT_READ_API_TIME_OUT = 60000; private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/bigquery"; private static final Set SCOPES = ImmutableSet.of(BIGQUERY_SCOPE); - private static final long serialVersionUID = -2437598817433266049L; + private static final long serialVersionUID = -2437598817433266048L; private final String location; // set the option ThrowNotFound when you want to throw the exception when the value not found private boolean setThrowNotFound; - private String queryPreviewEnabled = System.getenv("QUERY_PREVIEW_ENABLED"); + private boolean useInt64Timestamps; + private JobCreationMode defaultJobCreationMode = JobCreationMode.JOB_CREATION_MODE_UNSPECIFIED; + private boolean enableOpenTelemetryTracing; + private Tracer openTelemetryTracer; + private ResultRetryAlgorithm resultRetryAlgorithm; public static class DefaultBigQueryFactory implements BigQueryFactory { @@ -63,6 +69,10 @@ public ServiceRpc create(BigQueryOptions options) { public static class Builder extends ServiceOptions.Builder { private String location; + private boolean useInt64Timestamps; + private boolean enableOpenTelemetryTracing; + private Tracer openTelemetryTracer; + private ResultRetryAlgorithm resultRetryAlgorithm; private Builder() {} @@ -84,6 +94,38 @@ public Builder setLocation(String location) { return this; } + public Builder setUseInt64Timestamps(boolean useInt64Timestamps) { + this.useInt64Timestamps = useInt64Timestamps; + return this; + } + + /** + * Enables OpenTelemetry tracing functionality for this BigQuery instance + * + * @param enableOpenTelemetryTracing enables OpenTelemetry tracing if true + */ + @BetaApi + public Builder setEnableOpenTelemetryTracing(boolean enableOpenTelemetryTracing) { + this.enableOpenTelemetryTracing = enableOpenTelemetryTracing; + return this; + } + + /** + * Sets the OpenTelemetry tracer for this BigQuery instance to be tracer. 
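Note: taken together, the new builder options let a client opt in to OpenTelemetry tracing, a custom tracer, and INT64 timestamp decoding. A usage sketch (GlobalOpenTelemetry is one possible tracer source, not mandated by this change):

    import com.google.cloud.bigquery.BigQuery;
    import com.google.cloud.bigquery.BigQueryOptions;
    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.trace.Tracer;

    public class OptionsSketch {
      public static void main(String[] args) {
        Tracer tracer = GlobalOpenTelemetry.getTracer("bigquery-sample");
        // Build a client with the new options introduced in this change.
        BigQuery bigquery =
            BigQueryOptions.newBuilder()
                .setEnableOpenTelemetryTracing(true)
                .setOpenTelemetryTracer(tracer)
                .setUseInt64Timestamps(true)
                .build()
                .getService();
        System.out.println("tracing enabled: " + bigquery.getOptions().isOpenTelemetryTracingEnabled());
      }
    }
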
+ * + * @param tracer OpenTelemetry tracer to be used + */ + @BetaApi + public Builder setOpenTelemetryTracer(Tracer tracer) { + this.openTelemetryTracer = tracer; + return this; + } + + public Builder setResultRetryAlgorithm(ResultRetryAlgorithm resultRetryAlgorithm) { + this.resultRetryAlgorithm = resultRetryAlgorithm; + return this; + } + @Override public BigQueryOptions build() { return new BigQueryOptions(this); @@ -93,6 +135,14 @@ public BigQueryOptions build() { private BigQueryOptions(Builder builder) { super(BigQueryFactory.class, BigQueryRpcFactory.class, builder, new BigQueryDefaults()); this.location = builder.location; + this.useInt64Timestamps = builder.useInt64Timestamps; + this.enableOpenTelemetryTracing = builder.enableOpenTelemetryTracing; + this.openTelemetryTracer = builder.openTelemetryTracer; + if (builder.resultRetryAlgorithm != null) { + this.resultRetryAlgorithm = builder.resultRetryAlgorithm; + } else { + this.resultRetryAlgorithm = BigQueryBaseService.DEFAULT_BIGQUERY_EXCEPTION_HANDLER; + } } private static class BigQueryDefaults implements ServiceDefaults { @@ -124,31 +174,70 @@ protected Set getScopes() { return SCOPES; } - protected BigQueryRpc getBigQueryRpcV2() { - return (BigQueryRpc) getRpc(); + protected HttpBigQueryRpc getBigQueryRpcV2() { + return (HttpBigQueryRpc) getRpc(); } public String getLocation() { return location; } + @Deprecated public boolean isQueryPreviewEnabled() { - return queryPreviewEnabled != null && queryPreviewEnabled.equalsIgnoreCase("TRUE"); + return false; } public void setThrowNotFound(boolean setThrowNotFound) { this.setThrowNotFound = setThrowNotFound; } - @VisibleForTesting - public void setQueryPreviewEnabled(String queryPreviewEnabled) { - this.queryPreviewEnabled = queryPreviewEnabled; + public void setUseInt64Timestamps(boolean useInt64Timestamps) { + this.useInt64Timestamps = useInt64Timestamps; + } + + @Deprecated + public void setQueryPreviewEnabled(String queryPreviewEnabled) {} + + public void setDefaultJobCreationMode(JobCreationMode jobCreationMode) { + this.defaultJobCreationMode = jobCreationMode; } public boolean getThrowNotFound() { return setThrowNotFound; } + public boolean getUseInt64Timestamps() { + return useInt64Timestamps; + } + + public JobCreationMode getDefaultJobCreationMode() { + return defaultJobCreationMode; + } + + /** + * Returns whether this BigQuery instance has OpenTelemetry tracing enabled + * + * @return true if tracing is enabled, false if not + */ + @BetaApi("Span names and attributes are subject to change without notice") + public boolean isOpenTelemetryTracingEnabled() { + return enableOpenTelemetryTracing; + } + + /** + * Returns the OpenTelemetry tracer used by this BigQuery instance + * + * @return OpenTelemetry tracer object or {@code null} if not set + */ + @BetaApi("Span names and attributes are subject to change without notice") + public Tracer getOpenTelemetryTracer() { + return openTelemetryTracer; + } + + public ResultRetryAlgorithm getResultRetryAlgorithm() { + return resultRetryAlgorithm; + } + @SuppressWarnings("unchecked") @Override public Builder toBuilder() { diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java index e944efceb..b705e77c1 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java @@ -16,22 
+16,30 @@ package com.google.cloud.bigquery; +import com.google.cloud.bigquery.FieldValue.Attribute; import java.math.BigDecimal; import java.sql.Date; import java.sql.ResultSet; import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; import java.sql.Time; import java.sql.Timestamp; import java.time.LocalDateTime; import java.time.LocalTime; -import java.time.ZoneId; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.Map; -import java.util.TimeZone; import java.util.concurrent.BlockingQueue; import java.util.concurrent.TimeUnit; import org.apache.arrow.vector.util.JsonStringArrayList; import org.apache.arrow.vector.util.Text; +/** + * An implementation of BigQueryResult. + * + *
<p>
This class and the ResultSet it returns is not thread-safe. + */ public class BigQueryResultImpl implements BigQueryResult { private static final String NULL_CURSOR_MSG = @@ -109,10 +117,85 @@ private class BigQueryResultSet extends AbstractJdbcResultSet { private boolean hasReachedEnd = false; // flag which will be set to true when we have encountered a EndOfStream or when // curTup.isLast(). Ref: https://github.com/googleapis/java-bigquery/issues/2033 + private boolean wasNull = false; + + private class BigQueryArrayResult implements java.sql.Array { + List array; + + public BigQueryArrayResult(Object array) { + if (array instanceof Object[]) { + this.array = new ArrayList<>(Arrays.asList((Object[]) array)); + } else if (array instanceof FieldValueList) { + this.array = new ArrayList<>((FieldValueList) array); + } else { + this.array = (List) array; + } + } + + @Override + public String getBaseTypeName() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public int getBaseType() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getArray() throws SQLException { + return array; + } + + @Override + public Object getArray(java.util.Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getArray(long index, int count) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getArray(long index, int count, java.util.Map> map) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet(java.util.Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet(long index, int count) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet(long index, int count, java.util.Map> map) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void free() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + } @Override /*Advances the result set to the next row, returning false if no such row exists. 
Potentially blocking operation*/ public boolean next() throws SQLException { + if (buffer == null) { + return false; + } if (hasReachedEnd) { // if end of stream is reached then we can simply return false return false; } @@ -145,6 +228,62 @@ private boolean isEndOfStream(T cursor) { return cursor instanceof ConnectionImpl.EndOfFieldValueList; } + private Object getCurrentValueForReadApiData(String fieldName) throws SQLException { + Row curRow = (Row) cursor; + if (!curRow.hasField(fieldName)) { + throw new SQLException(String.format("Field %s not found", fieldName)); + } + return curRow.get(fieldName); + } + + @Override + public java.sql.Array getArray(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + if (fieldValue.getAttribute().equals(Attribute.REPEATED)) { + return new BigQueryArrayResult(fieldValue.getValue()); + } else { + wasNull = true; + return null; + } + } else { // Data received from Read API (Arrow) + Object currentVal = getCurrentValueForReadApiData(fieldName); + if (currentVal == null) { + wasNull = true; + return null; + } + wasNull = false; + return new BigQueryArrayResult(currentVal); + } + } + + @Override + public java.sql.Array getArray(int columnIndex) throws SQLException { + if (cursor == null) { + return null; + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return new BigQueryArrayResult(fieldValue.getValue()); + } else { + return getArray(schemaFieldList.get(columnIndex).getName()); + } + } + @Override public Object getObject(String fieldName) throws SQLException { if (fieldName == null) { @@ -154,13 +293,20 @@ public Object getObject(String fieldName) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) ? null : fieldValue.getValue(); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getValue(); } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; } - return curRow.get(fieldName); + wasNull = false; + return curVal; } } @@ -170,7 +316,12 @@ public Object getObject(int columnIndex) throws SQLException { return null; } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) ? 
null : fieldValue.getValue(); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getValue(); } else { // Data received from Read API (Arrow) return getObject(schemaFieldList.get(columnIndex).getName()); } @@ -186,23 +337,23 @@ public String getString(String fieldName) throws SQLException { } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; return null; - } else if (fieldValue - .getAttribute() - .equals(FieldValue.Attribute.REPEATED)) { // Case for Arrays + } + wasNull = false; + if (fieldValue.getAttribute().equals(FieldValue.Attribute.REPEATED)) { // Case for Arrays return fieldValue.getValue().toString(); } else { return fieldValue.getStringValue(); } } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); - } - Object currentVal = curRow.get(fieldName); + Object currentVal = getCurrentValueForReadApiData(fieldName); if (currentVal == null) { + wasNull = true; return null; - } else if (currentVal instanceof JsonStringArrayList) { // arrays + } + wasNull = false; + if (currentVal instanceof JsonStringArrayList) { // arrays JsonStringArrayList jsnAry = (JsonStringArrayList) currentVal; return jsnAry.toString(); } else if (currentVal instanceof LocalDateTime) { @@ -221,9 +372,12 @@ public String getString(int columnIndex) throws SQLException { return null; } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? null - : fieldValue.getStringValue(); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getStringValue(); } else { // Data received from Read API (Arrow) return getString(schemaFieldList.get(columnIndex).getName()); } @@ -239,27 +393,27 @@ public int getInt(String fieldName) throws SQLException { // java.sql.ResultSet definition } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) - ? 0 - : fieldValue.getNumericValue().intValue(); - } else { // Data received from Read API (Arrow) - - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0; } - Object curVal = curRow.get(fieldName); - if (curVal == null) { + wasNull = false; + return fieldValue.getNumericValue().intValue(); + } else { // Data received from Read API (Arrow) + Object currentVal = getCurrentValueForReadApiData(fieldName); + if (currentVal == null) { + wasNull = true; return 0; } - if (curVal instanceof Text) { // parse from text to int - return Integer.parseInt(((Text) curVal).toString()); - } else if (curVal + wasNull = false; + if (currentVal instanceof Text) { // parse from text to int + return Integer.parseInt((currentVal).toString()); + } else if (currentVal instanceof Long) { // incase getInt is called for a Long value. 
Loss of precision might occur - return ((Long) curVal).intValue(); + return ((Long) currentVal).intValue(); } - return ((BigDecimal) curVal).intValue(); + return ((BigDecimal) currentVal).intValue(); } } @@ -270,9 +424,11 @@ public int getInt(int columnIndex) throws SQLException { // java.sql.ResultSet definition } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? 0 - : fieldValue.getNumericValue().intValue(); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return 0; + } + return fieldValue.getNumericValue().intValue(); } else { // Data received from Read API (Arrow) return getInt(schemaFieldList.get(columnIndex).getName()); } @@ -287,20 +443,21 @@ public long getLong(String fieldName) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) - ? 0L - : fieldValue.getNumericValue().longValue(); - } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0L; } - Object curVal = curRow.get(fieldName); + wasNull = false; + return fieldValue.getNumericValue().longValue(); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); if (curVal == null) { + wasNull = true; return 0L; - } else { // value will be Long or BigDecimal, but are Number - return ((Number) curVal).longValue(); } + wasNull = false; + // value will be Long or BigDecimal, but are Number + return ((Number) curVal).longValue(); } } @@ -311,9 +468,12 @@ public long getLong(int columnIndex) throws SQLException { // java.sql.ResultSet definition } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? 0L - : fieldValue.getNumericValue().longValue(); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0L; + } + wasNull = false; + return fieldValue.getNumericValue().longValue(); } else { // Data received from Read API (Arrow) return getInt(schemaFieldList.get(columnIndex).getName()); } @@ -328,16 +488,20 @@ public double getDouble(String fieldName) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) - ? 0d - : fieldValue.getNumericValue().doubleValue(); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0.0d; + } + wasNull = false; + return fieldValue.getNumericValue().doubleValue(); } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return 0.0d; } - Object curVal = curRow.get(fieldName); - return curVal == null ? 
0.0d : new BigDecimal(curVal.toString()).doubleValue(); + wasNull = false; + return new BigDecimal(curVal.toString()).doubleValue(); } } @@ -348,9 +512,12 @@ public double getDouble(int columnIndex) throws SQLException { // java.sql.ResultSet definition } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? 0d - : fieldValue.getNumericValue().doubleValue(); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0.0d; + } + wasNull = false; + return fieldValue.getNumericValue().doubleValue(); } else { // Data received from Read API (Arrow) return getDouble(schemaFieldList.get(columnIndex).getName()); } @@ -365,10 +532,19 @@ public BigDecimal getBigDecimal(String fieldName) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) - ? null - : BigDecimal.valueOf(fieldValue.getNumericValue().doubleValue()); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return BigDecimal.valueOf(fieldValue.getNumericValue().doubleValue()); } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; + } + wasNull = false; return BigDecimal.valueOf(getDouble(fieldName)); } } @@ -379,9 +555,12 @@ public BigDecimal getBigDecimal(int columnIndex) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? 
null - : BigDecimal.valueOf(fieldValue.getNumericValue().doubleValue()); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return BigDecimal.valueOf(fieldValue.getNumericValue().doubleValue()); } else { // Data received from Read API (Arrow) return getBigDecimal(schemaFieldList.get(columnIndex).getName()); } @@ -396,14 +575,20 @@ public boolean getBoolean(String fieldName) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return fieldValue.getValue() != null && fieldValue.getBooleanValue(); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return false; + } + wasNull = false; + return fieldValue.getBooleanValue(); } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return false; } - Object curVal = curRow.get(fieldName); - return curVal != null && (Boolean) curVal; + wasNull = false; + return (Boolean) curVal; } } @@ -413,7 +598,12 @@ public boolean getBoolean(int columnIndex) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return fieldValue.getValue() != null && fieldValue.getBooleanValue(); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return false; + } + wasNull = false; + return fieldValue.getBooleanValue(); } else { // Data received from Read API (Arrow) return getBoolean(schemaFieldList.get(columnIndex).getName()); } @@ -428,16 +618,20 @@ public byte[] getBytes(String fieldName) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) - ? null - : fieldValue.getBytesValue(); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getBytesValue(); } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; } - Object curVal = curRow.get(fieldName); - return curVal == null ? null : (byte[]) curVal; + wasNull = false; + return (byte[]) curVal; } } @@ -447,9 +641,12 @@ public byte[] getBytes(int columnIndex) throws SQLException { return null; // if the value is SQL NULL, the value returned is null } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? 
null - : fieldValue.getBytesValue(); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getBytesValue(); } else { // Data received from Read API (Arrow) return getBytes(schemaFieldList.get(columnIndex).getName()); } @@ -464,21 +661,23 @@ public Timestamp getTimestamp(String fieldName) throws SQLException { return null; // if the value is SQL NULL, the value returned is null } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) - ? null - : new Timestamp( - fieldValue.getTimestampValue() - / 1000); // getTimestampValue returns time in microseconds, and TimeStamp - // expects it in millis + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return new Timestamp( + fieldValue.getTimestampValue() + / 1000); // getTimestampValue returns time in microseconds, and TimeStamp expects it + // in millis } else { - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; } - Object timeStampVal = curRow.get(fieldName); - return timeStampVal == null - ? null - : new Timestamp((Long) timeStampVal / 1000); // Timestamp is represented as a Long + wasNull = false; + return new Timestamp((Long) curVal / 1000); // Timestamp is represented as a Long } } @@ -488,12 +687,15 @@ public Timestamp getTimestamp(int columnIndex) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? null - : new Timestamp( - fieldValue.getTimestampValue() - / 1000); // getTimestampValue returns time in microseconds, and TimeStamp - // expects it in millis + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return new Timestamp( + fieldValue.getTimestampValue() + / 1000); // getTimestampValue returns time in microseconds, and TimeStamp expects it + // in millis } else { // Data received from Read API (Arrow) return getTimestamp(schemaFieldList.get(columnIndex).getName()); } @@ -508,61 +710,62 @@ public Time getTime(String fieldName) throws SQLException { return null; // if the value is SQL NULL, the value returned is null } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; return getTimeFromFieldVal(fieldValue); } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; } - Object timeStampObj = curRow.get(fieldName); - return timeStampObj == null - ? null - : new Time( - ((Long) timeStampObj) - / 1000); // Time.toString() will return 12:11:35 in GMT as 17:41:35 in - // (GMT+5:30). 
This can be offset using getTimeZoneOffset + wasNull = false; + return new Time( + ((Long) curVal) + / 1000); // Time.toString() will return 12:11:35 in GMT as 17:41:35 in (GMT+5:30). + // This can be offset using getTimeZoneOffset } } - private int getTimeZoneOffset() { - TimeZone timeZone = TimeZone.getTimeZone(ZoneId.systemDefault()); - return timeZone.getOffset(new java.util.Date().getTime()); // offset in seconds - } - @Override public Time getTime(int columnIndex) throws SQLException { if (cursor == null) { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; return getTimeFromFieldVal(fieldValue); } else { // Data received from Read API (Arrow) return getTime(schemaFieldList.get(columnIndex).getName()); } } + // Expects fieldValue.getValue() != null. private Time getTimeFromFieldVal(FieldValue fieldValue) throws SQLException { - if (fieldValue.getValue() != null) { - // Time ranges from 00:00:00 to 23:59:59.99999. in BigQuery. Parsing it to java.sql.Time - String strTime = fieldValue.getStringValue(); - String[] timeSplt = strTime.split(":"); - if (timeSplt.length != 3) { - throw new SQLException("Can not parse the value " + strTime + " to java.sql.Time"); - } - int hr = Integer.parseInt(timeSplt[0]); - int min = Integer.parseInt(timeSplt[1]); - int sec = 0, nanoSec = 0; - if (timeSplt[2].contains(".")) { - String[] secSplt = timeSplt[2].split("\\."); - sec = Integer.parseInt(secSplt[0]); - nanoSec = Integer.parseInt(secSplt[1]); - } else { - sec = Integer.parseInt(timeSplt[2]); - } - return Time.valueOf(LocalTime.of(hr, min, sec, nanoSec)); + // Time ranges from 00:00:00 to 23:59:59.99999. in BigQuery. Parsing it to java.sql.Time + String strTime = fieldValue.getStringValue(); + String[] timeSplt = strTime.split(":"); + if (timeSplt.length != 3) { + throw new SQLException("Can not parse the value " + strTime + " to java.sql.Time"); + } + int hr = Integer.parseInt(timeSplt[0]); + int min = Integer.parseInt(timeSplt[1]); + int sec, nanoSec = 0; + if (timeSplt[2].contains(".")) { + String[] secSplt = timeSplt[2].split("\\."); + sec = Integer.parseInt(secSplt[0]); + nanoSec = Integer.parseInt(secSplt[1]); } else { - return null; + sec = Integer.parseInt(timeSplt[2]); } + return Time.valueOf(LocalTime.of(hr, min, sec, nanoSec)); } @Override @@ -574,26 +777,26 @@ public Date getDate(String fieldName) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); - return (fieldValue == null || fieldValue.getValue() == null) - ? 
null - : Date.valueOf(fieldValue.getStringValue()); - } else { // Data received from Read API (Arrow) - Row curRow = (Row) cursor; - if (!curRow.hasField(fieldName)) { - throw new SQLException(String.format("Field %s not found", fieldName)); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; } - Object dateObj = curRow.get(fieldName); - if (dateObj == null) { + wasNull = false; + return Date.valueOf(fieldValue.getStringValue()); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; return null; - } else { - Integer dateInt = (Integer) dateObj; - long dateInMillis = - TimeUnit.DAYS.toMillis( - Long.valueOf( - dateInt)); // For example int 18993 represents 2022-01-01, converting time to - // milli seconds - return new Date(dateInMillis); } + wasNull = false; + Integer dateInt = (Integer) curVal; + long dateInMillis = + TimeUnit.DAYS.toMillis( + Long.valueOf( + dateInt)); // For example int 18993 represents 2022-01-01, converting time to + // milliseconds + return new Date(dateInMillis); } } @@ -603,13 +806,26 @@ public Date getDate(int columnIndex) throws SQLException { throw new BigQuerySQLException(NULL_CURSOR_MSG); } else if (cursor instanceof FieldValueList) { FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); - return (fieldValue == null || fieldValue.getValue() == null) - ? null - : Date.valueOf(fieldValue.getStringValue()); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return Date.valueOf(fieldValue.getStringValue()); } else { // Data received from Read API (Arrow) return getDate(schemaFieldList.get(columnIndex).getName()); } } + + /** + * Returns whether the last column read had a value of SQL NULL. Note that you must first call + * one of the getter methods on a column to try to read its value and then call the method + * wasNull to see if the value read was SQL NULL. + */ + @Override + public boolean wasNull() { + return wasNull; + } } @Override diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryAlgorithm.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryAlgorithm.java index 0429b7f00..7b168c12d 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryAlgorithm.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryAlgorithm.java @@ -27,13 +27,13 @@ import com.google.api.gax.retrying.TimedRetryAlgorithmWithContext; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import java.time.Duration; import java.util.Iterator; import java.util.UUID; import java.util.concurrent.CancellationException; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; -import org.threeten.bp.Duration; public class BigQueryRetryAlgorithm extends RetryAlgorithm { private final BigQueryRetryConfig bigQueryRetryConfig; @@ -67,7 +67,7 @@ public boolean shouldRetry( // Log retry info int attemptCount = nextAttemptSettings == null ? 0 : nextAttemptSettings.getAttemptCount(); Duration retryDelay = - nextAttemptSettings == null ? Duration.ZERO : nextAttemptSettings.getRetryDelay(); + nextAttemptSettings == null ? Duration.ZERO : nextAttemptSettings.getRetryDelayDuration(); String errorMessage = previousThrowable != null ?
previousThrowable.getMessage() : ""; // Implementing shouldRetryBasedOnBigQueryRetryConfig so that we can retry exceptions based on diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryHelper.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryHelper.java index 405a2371e..9c7083046 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryHelper.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryHelper.java @@ -25,6 +25,10 @@ import com.google.api.gax.retrying.RetryingFuture; import com.google.api.gax.retrying.TimedRetryAlgorithm; import com.google.cloud.RetryHelper; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import java.io.IOException; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.logging.Level; @@ -39,9 +43,18 @@ public static V runWithRetries( RetrySettings retrySettings, ResultRetryAlgorithm resultRetryAlgorithm, ApiClock clock, - BigQueryRetryConfig bigQueryRetryConfig) + BigQueryRetryConfig bigQueryRetryConfig, + boolean isOpenTelemetryEnabled, + Tracer openTelemetryTracer) throws RetryHelperException { - try { + Span runWithRetries = null; + if (isOpenTelemetryEnabled && openTelemetryTracer != null) { + runWithRetries = + openTelemetryTracer + .spanBuilder("com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries") + .startSpan(); + } + try (Scope runWithRetriesScope = runWithRetries != null ? runWithRetries.makeCurrent() : null) { // Suppressing should be ok as a workaround. Current and only ResultRetryAlgorithm // implementation does not use response at all, so ignoring its type is ok. @SuppressWarnings("unchecked") @@ -52,7 +65,16 @@ public static V runWithRetries( algorithm, bigQueryRetryConfig); } catch (Exception e) { + // Checks for IOException and translates it into BigQueryException. The BigQueryException + // constructor parses the IOException and translates it into internal code.
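A note on the wasNull bookkeeping added to the BigQueryResultImpl getters earlier in this diff: it follows java.sql.ResultSet semantics, where getters return a type default (null, false, 0) for SQL NULL and the caller distinguishes a real default from a NULL by calling wasNull() immediately afterwards. A minimal usage sketch, assuming an already-created Connection named `connection` and a hypothetical nullable BOOLEAN column named is_active:

    BigQueryResult result = connection.executeSelect("SELECT is_active FROM dataset.table");
    java.sql.ResultSet rs = result.getResultSet();
    while (rs.next()) {
      boolean active = rs.getBoolean("is_active"); // returns false for both SQL NULL and a real false
      if (rs.wasNull()) {
        // is_active was SQL NULL; `active` is only the type default
      }
    }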
+ if (e.getCause() instanceof IOException) { + throw new BigQueryRetryHelperException(new BigQueryException((IOException) e.getCause())); + } throw new BigQueryRetryHelperException(e.getCause()); + } finally { + if (runWithRetries != null) { + runWithRetries.end(); + } } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Connection.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Connection.java index afc8eb848..83ea0fc0d 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Connection.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Connection.java @@ -140,7 +140,7 @@ ListenableFuture executeSelectAsync(String sql) * @code * ConnectionSettings connectionSettings = * ConnectionSettings.newBuilder() - * ..setUseReadAPI(true) + * .setUseReadAPI(true) * .build(); * Connection connection = bigquery.createConnection(connectionSettings); * String selectQuery = diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java index 2d0367790..c3465c33a 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java @@ -16,25 +16,26 @@ package com.google.cloud.bigquery; -import static com.google.cloud.RetryHelper.runWithRetries; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; +import com.google.api.gax.core.FixedCredentialsProvider; import com.google.api.services.bigquery.model.GetQueryResultsResponse; import com.google.api.services.bigquery.model.JobConfigurationQuery; import com.google.api.services.bigquery.model.QueryParameter; import com.google.api.services.bigquery.model.QueryRequest; import com.google.api.services.bigquery.model.TableDataList; import com.google.api.services.bigquery.model.TableRow; -import com.google.cloud.RetryHelper; import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQueryRetryHelper.BigQueryRetryHelperException; import com.google.cloud.bigquery.JobStatistics.QueryStatistics; import com.google.cloud.bigquery.JobStatistics.SessionInfo; import com.google.cloud.bigquery.spi.v2.BigQueryRpc; import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; import com.google.cloud.bigquery.storage.v1.ArrowSchema; import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.BigQueryReadSettings; import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest; import com.google.cloud.bigquery.storage.v1.DataFormat; import com.google.cloud.bigquery.storage.v1.ReadRowsRequest; @@ -53,6 +54,7 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; import java.io.IOException; +import java.math.BigInteger; import java.util.AbstractList; import java.util.ArrayList; import java.util.Collections; @@ -94,10 +96,13 @@ class ConnectionImpl implements Connection { private final Logger logger = Logger.getLogger(this.getClass().getName()); private BigQueryReadClient bqReadClient; private static final long EXECUTOR_TIMEOUT_SEC = 10; + private static final long BIGQUERY_TIMEOUT_SEC = 10; private BlockingQueue> bufferFvl; // initialized lazily iff we end up using the tabledata.list end point private BlockingQueue bufferRow; // initialized lazily iff we end up 
using Read API + private static final BigQueryRetryConfig EMPTY_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder().build(); ConnectionImpl( ConnectionSettings connectionSettings, @@ -130,6 +135,7 @@ private int getBufferSize() { ? 20000 : Math.min(connectionSettings.getNumBufferedRows() * 2, 100000)); } + /** * Cancel method shuts down the pageFetcher and producerWorker threads gracefully using interrupt. * The pageFetcher thread will not request any subsequent threads after interrupting and @@ -145,8 +151,15 @@ public synchronized boolean close() throws BigQuerySQLException { flagEndOfStream(); // an End of Stream flag in the buffer so that the `ResultSet.next()` stops // advancing the cursor queryTaskExecutor.shutdownNow(); + boolean isBqReadClientTerminated = true; try { - if (queryTaskExecutor.awaitTermination(EXECUTOR_TIMEOUT_SEC, TimeUnit.SECONDS)) { + if (bqReadClient != null) { + bqReadClient.shutdownNow(); + isBqReadClientTerminated = + bqReadClient.awaitTermination(BIGQUERY_TIMEOUT_SEC, TimeUnit.SECONDS); + } + if (queryTaskExecutor.awaitTermination(EXECUTOR_TIMEOUT_SEC, TimeUnit.SECONDS) + && isBqReadClientTerminated) { return true; } // else queryTaskExecutor.isShutdown() will be returned outside this try block } catch (InterruptedException e) { @@ -156,7 +169,9 @@ public synchronized boolean close() throws BigQuerySQLException { e); // Logging InterruptedException instead of throwing the exception back, close method // will return queryTaskExecutor.isShutdown() } - return queryTaskExecutor.isShutdown(); // check if the executor has been shutdown + + return queryTaskExecutor.isShutdown() + && isBqReadClientTerminated; // check if the executor has been shut down } /** @@ -249,6 +264,7 @@ private BigQueryResult getExecuteSelectResponse( throw new BigQuerySQLException(e.getMessage(), e, e.getErrors()); } } + /** * Execute a SQL statement that returns a single ResultSet and returns a ListenableFuture to * process the response asynchronously.
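With the close() change above, closing a connection now shuts down the BigQueryReadClient in addition to the page-fetching executor, and only reports true once both have terminated within their timeouts. A sketch of the expected call pattern, assuming a ConnectionSettings instance has already been built (query and table names are illustrative):

    Connection connection = bigquery.createConnection(connectionSettings);
    BigQueryResult result = connection.executeSelect("SELECT name FROM dataset.table");
    // ... drain the ResultSet ...
    boolean fullyTerminated = connection.close();
    // false if either the executor or the read client did not stop
    // within EXECUTOR_TIMEOUT_SEC / BIGQUERY_TIMEOUT_SEC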
@@ -418,12 +434,15 @@ public ListenableFuture executeSelectAsync( @VisibleForTesting BigQueryResult getResultSet( GetQueryResultsResponse firstPage, JobId jobId, String sql, Boolean hasQueryParameters) { - return getSubsequentQueryResultsWithJob( - firstPage.getTotalRows().longValue(), - (long) firstPage.getRows().size(), - jobId, - firstPage, - hasQueryParameters); + if (firstPage.getTotalRows().compareTo(BigInteger.ZERO) > 0) { + return getSubsequentQueryResultsWithJob( + firstPage.getTotalRows().longValue(), + (long) firstPage.getRows().size(), + jobId, + firstPage, + hasQueryParameters); + } + return new BigQueryResultImpl(Schema.fromPb(firstPage.getSchema()), 0, null, null); } static class EndOfFieldValueList @@ -450,12 +469,17 @@ private BigQueryResult queryRpc( try { results = BigQueryRetryHelper.runWithRetries( - () -> bigQueryRpc.queryRpc(projectId, queryRequest), + () -> + bigQueryOptions + .getBigQueryRpcV2() + .queryRpcSkipExceptionTranslation(projectId, queryRequest), bigQueryOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, + bigQueryOptions.getResultRetryAlgorithm(), bigQueryOptions.getClock(), - retryConfig); - } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { + retryConfig, + false, + null); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } @@ -470,22 +494,29 @@ private BigQueryResult queryRpc( } // Query finished running and we can paginate all the results - if (results.getJobComplete() && results.getSchema() != null) { + // Results should be read using the high throughput read API if sufficiently large. + boolean resultsLargeEnoughForReadApi = + connectionSettings.getUseReadAPI() + && results.getTotalRows() != null + && results.getTotalRows().longValue() > connectionSettings.getMinResultSize(); + if (results.getJobComplete() && results.getSchema() != null && !resultsLargeEnoughForReadApi) { return processQueryResponseResults(results); } else { - // Query is long-running (> 10s) and hasn't completed yet, or query completed but didn't - // return the schema, fallback to jobs.insert path. Some operations don't return the schema - // and can be optimized here, but this is left as future work. - Long totalRows = results.getTotalRows() == null ? null : results.getTotalRows().longValue(); - Long pageRows = results.getRows() == null ? null : (long) (results.getRows().size()); + // Query is long-running (> 10s) and hasn't completed yet, query completed but didn't + // return the schema, or results are sufficiently large to use the high throughput read API, + // fallback to jobs.insert path. Some operations don't return the schema and can be optimized + // here, but this is left as future work. + JobId jobId = JobId.fromPb(results.getJobReference()); + GetQueryResultsResponse firstPage = getQueryResultsFirstPage(jobId); + Long totalRows = + firstPage.getTotalRows() == null ? null : firstPage.getTotalRows().longValue(); + Long pageRows = firstPage.getRows() == null ? 
null : (long) (firstPage.getRows().size()); logger.log( Level.WARNING, "\n" + String.format( "results.getJobComplete(): %s, isSchemaNull: %s , totalRows: %s, pageRows: %s", results.getJobComplete(), results.getSchema() == null, totalRows, pageRows)); - JobId jobId = JobId.fromPb(results.getJobReference()); - GetQueryResultsResponse firstPage = getQueryResultsFirstPage(jobId); return getSubsequentQueryResultsWithJob( totalRows, pageRows, jobId, firstPage, hasQueryParameters); } @@ -500,6 +531,7 @@ BigQueryResultStats getBigQueryResultSetStats(JobId jobId) { queryStatistics.getSessionInfo() == null ? null : queryStatistics.getSessionInfo(); return new BigQueryResultStatsImpl(queryStatistics, sessionInfo); } + /* This method processes the first page of GetQueryResultsResponse and then it uses tabledata.list */ @VisibleForTesting BigQueryResult tableDataList(GetQueryResultsResponse firstPage, JobId jobId) { @@ -891,21 +923,32 @@ private Job getQueryJobRpc(JobId jobId) { com.google.api.services.bigquery.model.Job jobPb; try { jobPb = - runWithRetries( + BigQueryRetryHelper.runWithRetries( () -> - bigQueryRpc.getQueryJob( - completeJobId.getProject(), - completeJobId.getJob(), - completeJobId.getLocation()), + bigQueryOptions + .getBigQueryRpcV2() + .getQueryJobSkipExceptionTranslation( + completeJobId.getProject(), + completeJobId.getJob(), + completeJobId.getLocation()), bigQueryOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - bigQueryOptions.getClock()); - if (bigQueryOptions.getThrowNotFound() && jobPb == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Query job not found"); + bigQueryOptions.getResultRetryAlgorithm(), + bigQueryOptions.getClock(), + EMPTY_RETRY_CONFIG, + false, + null); + } catch (BigQueryRetryHelperException e) { + if (e.getCause() instanceof BigQueryException) { + if (((BigQueryException) e.getCause()).getCode() == HTTP_NOT_FOUND) { + if (bigQueryOptions.getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Query job not found"); + } + return null; + } } - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); } + // getQueryJobSkipExceptionTranslation will never return null so this is safe. return Job.fromPb(bigQueryOptions.getService(), jobPb); } @@ -925,22 +968,25 @@ TableDataList tableDataListRpc(TableId destinationTable, String pageToken) { ? bigQueryOptions.getProjectId() : destinationTable.getProject()); TableDataList results = - runWithRetries( + BigQueryRetryHelper.runWithRetries( () -> bigQueryOptions .getBigQueryRpcV2() - .listTableDataWithRowLimit( + .listTableDataWithRowLimitSkipExceptionTranslation( completeTableId.getProject(), completeTableId.getDataset(), completeTableId.getTable(), connectionSettings.getMaxResultPerPage(), pageToken), bigQueryOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - bigQueryOptions.getClock()); + bigQueryOptions.getResultRetryAlgorithm(), + bigQueryOptions.getClock(), + EMPTY_RETRY_CONFIG, + false, + null); return results; - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -951,7 +997,12 @@ BigQueryResult highThroughPutRead( try { if (bqReadClient == null) { // if the read client isn't already initialized. Not thread safe.
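The queryRpc change above routes sufficiently large results away from the fast jobs.query path: when getUseReadAPI() is enabled and the total row count exceeds getMinResultSize(), the query falls through to the jobs.insert path, which can serve rows over the Storage Read API. A sketch of settings that opt in, assuming setMinResultSize is the builder counterpart of the getMinResultSize() used above (the threshold value is illustrative):

    ConnectionSettings settings =
        ConnectionSettings.newBuilder()
            .setUseReadAPI(true) // allow the high-throughput Read API
            .setMinResultSize(100000) // assumed setter for getMinResultSize()
            .build();
    Connection connection = bigquery.createConnection(settings);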
- bqReadClient = BigQueryReadClient.create(); + BigQueryReadSettings settings = + BigQueryReadSettings.newBuilder() + .setCredentialsProvider( + FixedCredentialsProvider.create(bigQueryOptions.getCredentials())) + .build(); + bqReadClient = BigQueryReadClient.create(settings); } String parent = String.format("projects/%s", destinationTable.getProject()); String srcTable = @@ -974,7 +1025,6 @@ BigQueryResult highThroughPutRead( // DO a regex check using order by and use multiple streams ; ReadSession readSession = bqReadClient.createReadSession(builder.build()); - bufferRow = new LinkedBlockingDeque<>(getBufferSize()); Map arrowNameToIndex = new HashMap<>(); // deserialize and populate the buffer async, so that the client isn't blocked @@ -985,6 +1035,7 @@ BigQueryResult highThroughPutRead( schema); logger.log(Level.INFO, "\n Using BigQuery Read API"); + stats.getQueryStatistics().setUseReadApi(true); return new BigQueryResultImpl(schema, totalRows, bufferRow, stats); } catch (IOException e) { @@ -1031,6 +1082,7 @@ private void processArrowStreamAsync( "\n" + Thread.currentThread().getName() + " Interrupted @ markLast", e); } + bqReadClient.shutdownNow(); // Shutdown the read client queryTaskExecutor.shutdownNow(); // Shutdown the thread pool } }; @@ -1066,7 +1118,9 @@ private ArrowRowReader(ArrowSchema arrowSchema, Map arrowNameTo loader = new VectorLoader(root); } - /** @param batch object returned from the ReadRowsResponse. */ + /** + * @param batch object returned from the ReadRowsResponse. + */ private void processRows( ArrowRecordBatch batch, BlockingQueue buffer, Schema schema) throws IOException { // deserialize the values and consume the hash of the values @@ -1123,6 +1177,7 @@ public void close() { allocator.close(); } } + /*Returns just the first page of GetQueryResultsResponse using the jobId*/ @VisibleForTesting GetQueryResultsResponse getQueryResultsFirstPage(JobId jobId) { @@ -1148,16 +1203,20 @@ GetQueryResultsResponse getQueryResultsFirstPage(JobId jobId) { results = BigQueryRetryHelper.runWithRetries( () -> - bigQueryRpc.getQueryResultsWithRowLimit( - completeJobId.getProject(), - completeJobId.getJob(), - completeJobId.getLocation(), - connectionSettings.getMaxResultPerPage(), - timeoutMs), + bigQueryOptions + .getBigQueryRpcV2() + .getQueryResultsWithRowLimitSkipExceptionTranslation( + completeJobId.getProject(), + completeJobId.getJob(), + completeJobId.getLocation(), + connectionSettings.getMaxResultPerPage(), + timeoutMs), bigQueryOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, + bigQueryOptions.getResultRetryAlgorithm(), bigQueryOptions.getClock(), - retryConfig); + retryConfig, + false, + null); if (results.getErrors() != null) { List bigQueryErrors = @@ -1168,7 +1227,7 @@ GetQueryResultsResponse getQueryResultsFirstPage(JobId jobId) { // with the case where there is a HTTP error throw new BigQueryException(bigQueryErrors); } - } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { + } catch (BigQueryRetryHelperException e) { logger.log(Level.WARNING, "\n Error occurred while calling getQueryResultsWithRowLimit", e); throw BigQueryException.translateAndThrow(e); } @@ -1413,11 +1472,16 @@ com.google.api.services.bigquery.model.Job createQueryJob( try { queryJob = BigQueryRetryHelper.runWithRetries( - () -> bigQueryRpc.createJobForQuery(jobPb), + () -> + bigQueryOptions + .getBigQueryRpcV2() + .createJobForQuerySkipExceptionTranslation(jobPb), bigQueryOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, + 
bigQueryOptions.getResultRetryAlgorithm(), bigQueryOptions.getClock(), - retryConfig); + retryConfig, + false, + null); } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { logger.log(Level.WARNING, "\n Error occurred while calling createJobForQuery", e); throw BigQueryException.translateAndThrow(e); @@ -1453,11 +1517,16 @@ com.google.api.services.bigquery.model.Job createDryRunJob(String sql) { try { dryRunJob = BigQueryRetryHelper.runWithRetries( - () -> bigQueryRpc.createJobForQuery(jobPb), + () -> + bigQueryOptions + .getBigQueryRpcV2() + .createJobForQuerySkipExceptionTranslation(jobPb), bigQueryOptions.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, + bigQueryOptions.getResultRetryAlgorithm(), bigQueryOptions.getClock(), - retryConfig); + retryConfig, + false, + null); } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java index a6206712e..7abd36b41 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java @@ -55,7 +55,8 @@ public static final class Builder { private String key; private String value; - private Builder() {}; + private Builder() {} + ; private Builder(ConnectionProperty properties) { this.key = properties.key; diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionSettings.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionSettings.java index a9aabe038..79bc3aac9 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionSettings.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionSettings.java @@ -221,7 +221,6 @@ Builder withDefaultValues() { * * @param useReadAPI or {@code true} for none */ - @Nullable public abstract Builder setUseReadAPI(Boolean useReadAPI); /** diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java index 37955fec0..54e612271 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java @@ -45,6 +45,7 @@ public final class CopyJobConfiguration extends JobConfiguration { private final EncryptionConfiguration destinationEncryptionConfiguration; private final Map labels; private final Long jobTimeoutMs; + private final String reservation; public static final class Builder extends JobConfiguration.Builder { @@ -58,6 +59,7 @@ public static final class Builder private EncryptionConfiguration destinationEncryptionConfiguration; private Map labels; private Long jobTimeoutMs; + private String reservation; private Builder() { super(Type.COPY); @@ -74,6 +76,7 @@ private Builder(CopyJobConfiguration jobConfiguration) { this.destinationEncryptionConfiguration = jobConfiguration.destinationEncryptionConfiguration; this.labels = jobConfiguration.labels; this.jobTimeoutMs = jobConfiguration.jobTimeoutMs; + this.reservation = jobConfiguration.reservation; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { @@ -113,6 +116,9 @@ private 
Builder(com.google.api.services.bigquery.model.JobConfiguration configur if (configurationPb.getJobTimeoutMs() != null) { this.jobTimeoutMs = configurationPb.getJobTimeoutMs(); } + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } } /** Sets the source tables to copy. */ @@ -201,6 +207,19 @@ public Builder setJobTimeoutMs(Long jobTimeoutMs) { return this; } + /** + * [Optional] The reservation that job would use. User can specify a reservation to execute the + * job. If reservation is not set, reservation is determined based on the rules defined by the + * reservation assignments. The expected format is + * `projects/{project}/locations/{location}/reservations/{reservation}`. + * + * @param reservation reservation or {@code null} for none + */ + public Builder setReservation(String reservation) { + this.reservation = reservation; + return this; + } + public CopyJobConfiguration build() { return new CopyJobConfiguration(this); } @@ -217,6 +236,7 @@ private CopyJobConfiguration(Builder builder) { this.destinationEncryptionConfiguration = builder.destinationEncryptionConfiguration; this.labels = builder.labels; this.jobTimeoutMs = builder.jobTimeoutMs; + this.reservation = builder.reservation; } /** Returns the source tables to copy. */ @@ -275,6 +295,11 @@ public Long getJobTimeoutMs() { return jobTimeoutMs; } + /** Returns the reservation associated with this job */ + public String getReservation() { + return reservation; + } + @Override public Builder toBuilder() { return new Builder(this); @@ -291,7 +316,8 @@ ToStringHelper toStringHelper() { .add("createDisposition", createDisposition) .add("writeDisposition", writeDisposition) .add("labels", labels) - .add("jobTimeoutMs", jobTimeoutMs); + .add("jobTimeoutMs", jobTimeoutMs) + .add("reservation", reservation); } @Override @@ -311,7 +337,8 @@ public int hashCode() { createDisposition, writeDisposition, labels, - jobTimeoutMs); + jobTimeoutMs, + reservation); } @Override @@ -366,6 +393,9 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (jobTimeoutMs != null) { jobConfiguration.setJobTimeoutMs(jobTimeoutMs); } + if (reservation != null) { + jobConfiguration.setReservation(reservation); + } jobConfiguration.setCopy(configurationPb); return jobConfiguration; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java index cbcce2173..b39c82a7e 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java @@ -26,12 +26,13 @@ */ public final class CsvOptions extends FormatOptions { - private static final long serialVersionUID = 2193570529308612708L; + private static final long serialVersionUID = 2193570529308612709L; private final Boolean allowJaggedRows; private final Boolean allowQuotedNewLines; private final String encoding; private final String fieldDelimiter; + private final String nullMarker; private final String quote; private final Long skipLeadingRows; private final Boolean preserveAsciiControlCharacters; @@ -42,6 +43,7 @@ public static final class Builder { private Boolean allowQuotedNewLines; private String encoding; private String fieldDelimiter; + private String nullMarker; private String quote; private Long skipLeadingRows; private Boolean preserveAsciiControlCharacters; @@ -53,6 +55,7 @@ private Builder(CsvOptions csvOptions) { 
this.allowQuotedNewLines = csvOptions.allowQuotedNewLines; this.encoding = csvOptions.encoding; this.fieldDelimiter = csvOptions.fieldDelimiter; + this.nullMarker = csvOptions.nullMarker; this.quote = csvOptions.quote; this.skipLeadingRows = csvOptions.skipLeadingRows; this.preserveAsciiControlCharacters = csvOptions.preserveAsciiControlCharacters; @@ -110,6 +113,18 @@ public Builder setFieldDelimiter(String fieldDelimiter) { return this; } + /** + * [Optional] Specifies a string that represents a null value in a CSV file. For example, if you + * specify \"\\N\", BigQuery interprets \"\\N\" as a null value when querying a CSV file. The + * default value is the empty string. If you set this property to a custom value, BigQuery + * throws an error if an empty string is present for all data types except for STRING and BYTE. + * For STRING and BYTE columns, BigQuery interprets the empty string as an empty value. + */ + public Builder setNullMarker(String nullMarker) { + this.nullMarker = nullMarker; + return this; + } + /** * Sets the value that is used to quote data sections in a CSV file. BigQuery converts the * string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split @@ -154,6 +169,7 @@ private CsvOptions(Builder builder) { this.allowQuotedNewLines = builder.allowQuotedNewLines; this.encoding = builder.encoding; this.fieldDelimiter = builder.fieldDelimiter; + this.nullMarker = builder.nullMarker; this.quote = builder.quote; this.skipLeadingRows = builder.skipLeadingRows; this.preserveAsciiControlCharacters = builder.preserveAsciiControlCharacters; @@ -192,6 +208,11 @@ public String getFieldDelimiter() { return fieldDelimiter; } + /** Returns the string that represents a null value in a CSV file. */ + public String getNullMarker() { + return nullMarker; + } + /** Returns the value that is used to quote data sections in a CSV file. 
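For the nullMarker option introduced above, a minimal sketch of CSV options that treat a literal \N as SQL NULL (the delimiter and marker values are illustrative):

    CsvOptions csvOptions =
        CsvOptions.newBuilder()
            .setFieldDelimiter(",")
            .setNullMarker("\\N") // a literal \N in the file is read as SQL NULL
            .build();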
*/ public String getQuote() { return quote; @@ -226,6 +247,7 @@ public String toString() { .add("allowQuotedNewLines", allowQuotedNewLines) .add("encoding", encoding) .add("fieldDelimiter", fieldDelimiter) + .add("nullMarker", nullMarker) .add("quote", quote) .add("skipLeadingRows", skipLeadingRows) .add("preserveAsciiControlCharacters", preserveAsciiControlCharacters) @@ -240,6 +262,7 @@ public int hashCode() { allowQuotedNewLines, encoding, fieldDelimiter, + nullMarker, quote, skipLeadingRows, preserveAsciiControlCharacters); @@ -258,6 +281,7 @@ com.google.api.services.bigquery.model.CsvOptions toPb() { csvOptions.setAllowQuotedNewlines(allowQuotedNewLines); csvOptions.setEncoding(encoding); csvOptions.setFieldDelimiter(fieldDelimiter); + csvOptions.setNullMarker(nullMarker); csvOptions.setQuote(quote); csvOptions.setSkipLeadingRows(skipLeadingRows); csvOptions.setPreserveAsciiControlCharacters(preserveAsciiControlCharacters); @@ -283,6 +307,9 @@ static CsvOptions fromPb(com.google.api.services.bigquery.model.CsvOptions csvOp if (csvOptions.getFieldDelimiter() != null) { builder.setFieldDelimiter(csvOptions.getFieldDelimiter()); } + if (csvOptions.getNullMarker() != null) { + builder.setNullMarker(csvOptions.getNullMarker()); + } if (csvOptions.getQuote() != null) { builder.setQuote(csvOptions.getQuote()); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java index daadb9a85..18606e701 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java @@ -164,6 +164,18 @@ public Builder setStorageBillingModel(String storageBillingModel) { return this; } + @Override + public Builder setMaxTimeTravelHours(Long maxTimeTravelHours) { + infoBuilder.setMaxTimeTravelHours(maxTimeTravelHours); + return this; + } + + @Override + public Builder setResourceTags(Map resourceTags) { + infoBuilder.setResourceTags(resourceTags); + return this; + } + @Override public Dataset build() { return new Dataset(bigquery, infoBuilder); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java index 0e2ad02b2..c06d257d7 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java @@ -20,6 +20,7 @@ import static com.google.common.base.Strings.isNullOrEmpty; import com.google.api.services.bigquery.model.DatasetReference; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -84,4 +85,11 @@ DatasetReference toPb() { static DatasetId fromPb(DatasetReference datasetRef) { return new DatasetId(datasetRef.getProjectId(), datasetRef.getDatasetId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.dataset.project", this.getProject()) + .put("bq.dataset.id", this.getDataset()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java index b5fabe97d..918761995 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java @@ -27,6 +27,7 @@ import 
com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.List; import java.util.Map; @@ -75,6 +76,8 @@ public Dataset apply(DatasetInfo datasetInfo) { private final String defaultCollation; private final ExternalDatasetReference externalDatasetReference; private final String storageBillingModel; + private final Long maxTimeTravelHours; + private final Annotations resourceTags; /** A builder for {@code DatasetInfo} objects. */ public abstract static class Builder { @@ -142,6 +145,12 @@ public abstract Builder setExternalDatasetReference( */ public abstract Builder setStorageBillingModel(String storageBillingModel); + /** + * Optional. Defines the time travel window in hours. The value can be from 48 to 168 hours (2 + * to 7 days). The default value is 168 hours if this is not set. The value may be {@code null}. + */ + public abstract Builder setMaxTimeTravelHours(Long maxTimeTravelHours); + /** * The default encryption key for all tables in the dataset. Once this property is set, all * newly-created partitioned tables in the dataset will have encryption key set to this value, @@ -177,6 +186,19 @@ public abstract Builder setDefaultEncryptionConfiguration( */ public abstract Builder setDefaultCollation(String defaultCollation); + /** + * Optional. The tags attached to this + * dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format, + * for example "123456789012/environment" where 123456789012 is the ID of the parent + * organization or project resource for this tag key. Tag value is expected to be the short + * name, for example "Production". + * + * @see Tag + * definitions for more details. + * @param resourceTags resourceTags or {@code null} for none + */ + public abstract Builder setResourceTags(Map resourceTags); + /** Creates a {@code DatasetInfo} object. 
*/ public abstract DatasetInfo build(); } @@ -200,6 +222,8 @@ static final class BuilderImpl extends Builder { private String defaultCollation; private ExternalDatasetReference externalDatasetReference; private String storageBillingModel; + private Long maxTimeTravelHours; + private Annotations resourceTags = Annotations.ZERO; BuilderImpl() {} @@ -221,6 +245,8 @@ static final class BuilderImpl extends Builder { this.defaultCollation = datasetInfo.defaultCollation; this.externalDatasetReference = datasetInfo.externalDatasetReference; this.storageBillingModel = datasetInfo.storageBillingModel; + this.maxTimeTravelHours = datasetInfo.maxTimeTravelHours; + this.resourceTags = datasetInfo.resourceTags; } BuilderImpl(com.google.api.services.bigquery.model.Dataset datasetPb) { @@ -260,6 +286,8 @@ public Acl apply(Dataset.Access accessPb) { ExternalDatasetReference.fromPb(datasetPb.getExternalDatasetReference()); } this.storageBillingModel = datasetPb.getStorageBillingModel(); + this.maxTimeTravelHours = datasetPb.getMaxTimeTravelHours(); + this.resourceTags = Annotations.fromPb(datasetPb.getResourceTags()); } @Override @@ -372,6 +400,18 @@ public Builder setStorageBillingModel(String storageBillingModel) { return this; } + @Override + public Builder setMaxTimeTravelHours(Long maxTimeTravelHours) { + this.maxTimeTravelHours = maxTimeTravelHours; + return this; + } + + @Override + public Builder setResourceTags(Map resourceTags) { + this.resourceTags = Annotations.fromUser(resourceTags); + return this; + } + @Override public DatasetInfo build() { return new DatasetInfo(this); @@ -396,6 +436,8 @@ public DatasetInfo build() { defaultCollation = builder.defaultCollation; externalDatasetReference = builder.externalDatasetReference; storageBillingModel = builder.storageBillingModel; + maxTimeTravelHours = builder.maxTimeTravelHours; + resourceTags = builder.resourceTags; } /** Returns the dataset identity. */ @@ -529,6 +571,29 @@ public String getStorageBillingModel() { return storageBillingModel; } + /** + * Returns the number of hours that deleted or updated data will be available to be queried for + * all tables in the dataset. + */ + public Long getMaxTimeTravelHours() { + return maxTimeTravelHours; + } + + /** + * Optional. The tags attached to this + * dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for + * example "123456789012/environment" where 123456789012 is the ID of the parent organization or + * project resource for this tag key. Tag value is expected to be the short name, for example + * "Production". + * + * @see Tag + * definitions for more details. + * @return value or {@code null} for none + */ + public Map getResourceTags() { + return resourceTags.userMap(); + } + /** * Returns information about the external metadata storage where the dataset is defined. Filled * out when the dataset type is EXTERNAL. 
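The two dataset-level options added above combine on the DatasetInfo builder; a small sketch with imports elided (the dataset name, tag key, and tag value are illustrative):

    DatasetInfo datasetInfo =
        DatasetInfo.newBuilder("my_dataset")
            .setMaxTimeTravelHours(48L) // 2 days; the allowed window is 48 to 168 hours
            .setResourceTags(Collections.singletonMap("123456789012/environment", "Production"))
            .build();
    Dataset dataset = bigquery.create(datasetInfo);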
@@ -562,6 +627,8 @@ public String toString() { .add("defaultCollation", defaultCollation) .add("externalDatasetReference", externalDatasetReference) .add("storageBillingModel", storageBillingModel) + .add("maxTimeTravelHours", maxTimeTravelHours) + .add("resourceTags", resourceTags) .toString(); } @@ -646,6 +713,10 @@ public Dataset.Access apply(Acl acl) { if (storageBillingModel != null) { datasetPb.setStorageBillingModel(storageBillingModel); } + if (maxTimeTravelHours != null) { + datasetPb.setMaxTimeTravelHours(maxTimeTravelHours); + } + datasetPb.setResourceTags(resourceTags.toPb()); return datasetPb; } @@ -676,6 +747,18 @@ public static DatasetInfo of(String datasetId) { return newBuilder(datasetId).build(); } + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getDatasetId().getOtelAttributes()) + .put("bq.dataset.last_modified", getFieldAsString(this.getLastModified())) + .put("bq.dataset.location", getFieldAsString(this.getLocation())) + .build(); + } + static DatasetInfo fromPb(Dataset datasetPb) { return new BuilderImpl(datasetPb).build(); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java index d307b8232..c1859c3f9 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java @@ -19,9 +19,12 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Strings.isNullOrEmpty; +import com.google.api.core.ApiFunction; import com.google.api.services.bigquery.model.ExternalDataConfiguration; import com.google.api.services.bigquery.model.Table; import com.google.auto.value.AutoValue; +import com.google.cloud.StringEnumType; +import com.google.cloud.StringEnumValue; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import java.util.List; @@ -57,6 +60,46 @@ public ExternalDataConfiguration apply(ExternalTableDefinition tableInfo) { private static final long serialVersionUID = -5951580238459622025L; + public static final class SourceColumnMatch extends StringEnumValue { + private static final long serialVersionUID = 818920627219751207L; + private static final ApiFunction CONSTRUCTOR = + new ApiFunction() { + @Override + public SourceColumnMatch apply(String constant) { + return new SourceColumnMatch(constant); + } + }; + + private static final StringEnumType type = + new StringEnumType(SourceColumnMatch.class, CONSTRUCTOR); + + public static final SourceColumnMatch POSITION = type.createAndRegister("POSITION"); + + public static final SourceColumnMatch NAME = type.createAndRegister("NAME"); + + private SourceColumnMatch(String constant) { + super(constant); + } + + /** + * Get the SourceColumnMatch for the given String constant, and throw an exception if the + * constant is not recognized. + */ + public static SourceColumnMatch valueOfStrict(String constant) { + return type.valueOfStrict(constant); + } + + /** Get the SourceColumnMatch for the given String constant, and allow unrecognized values. */ + public static SourceColumnMatch valueOf(String constant) { + return type.valueOf(constant); + } + + /** Return the known values for SourceColumnMatch. 
*/ public static SourceColumnMatch[] values() { return type.values(); } } @AutoValue.Builder public abstract static class Builder extends TableDefinition.Builder { @@ -167,7 +210,8 @@ public Builder setFormatOptions(FormatOptions formatOptions) { /** Sets the table Hive partitioning options. */ public Builder setHivePartitioningOptions(HivePartitioningOptions hivePartitioningOptions) { return setHivePartitioningOptionsInner(hivePartitioningOptions); - }; + } + ; /** * When creating an external table, the user can provide a reference file with the table schema. * @@ -180,6 +224,79 @@ public Builder setHivePartitioningOptions(HivePartitioningOptions hivePartitioni abstract Builder setHivePartitioningOptionsInner( HivePartitioningOptions hivePartitioningOptions); + public Builder setObjectMetadata(String objectMetadata) { + return setObjectMetadataInner(objectMetadata); + } + + abstract Builder setObjectMetadataInner(String objectMetadata); + + /** + * [Optional] Metadata Cache Mode for the table. Set this to enable caching of metadata from + * external data source. + * + * @see + * MetadataCacheMode + */ + public Builder setMetadataCacheMode(String metadataCacheMode) { + return setMetadataCacheModeInner(metadataCacheMode); + } + + abstract Builder setMetadataCacheModeInner(String metadataCacheMode); + + /** + * [Optional] Maximum staleness of data that could be returned when the table is queried. + * Staleness encoded as a string encoding of sql IntervalValue type. + * + * @see + * MaxStaleness + */ + public Builder setMaxStaleness(String maxStaleness) { + return setMaxStalenessInner(maxStaleness); + } + + abstract Builder setMaxStalenessInner(String maxStaleness); + + /** + * Time zone used when parsing timestamp values that do not have specific time zone information + * (e.g. 2024-04-20 12:34:56). The expected format is an IANA timezone string (e.g. + * America/Los_Angeles). + */ + public abstract Builder setTimeZone(String timeZone); + + /** Format used to parse DATE values. Supports C-style and SQL-style values. */ + public abstract Builder setDateFormat(String dateFormat); + + /** Format used to parse DATETIME values. Supports C-style and SQL-style values. */ + public abstract Builder setDatetimeFormat(String datetimeFormat); + + /** Format used to parse TIME values. Supports C-style and SQL-style values. */ + public abstract Builder setTimeFormat(String timeFormat); + + /** Format used to parse TIMESTAMP values. Supports C-style and SQL-style values. */ + public abstract Builder setTimestampFormat(String timestampFormat); + + /** + * Controls the strategy used to match loaded columns to the schema. If not set, a sensible + * default is chosen based on how the schema is provided. If autodetect is used, then columns + * are matched by name. Otherwise, columns are matched by position. This is done to keep the + * behavior backward-compatible. Acceptable values are: POSITION - matches by position. This + * assumes that the columns are ordered the same way as the schema. NAME - matches by name. This + * reads the header row as column names and reorders columns to match the field names in the + * schema. + */ + public abstract Builder setSourceColumnMatch(SourceColumnMatch sourceColumnMatch); + + /** + * A list of strings represented as SQL NULL value in a CSV file. null_marker and null_markers + * can't be set at the same time. If null_marker is set, null_markers has to be not set. If + * null_markers is set, null_marker has to be not set.
If both null_marker and null_markers are + * set at the same time, a user error would be thrown. Any strings listed in null_markers, + * including empty string would be interpreted as SQL NULL. This applies to all column types. + */ + public abstract Builder setNullMarkers(List nullMarkers); + /** Creates an {@code ExternalTableDefinition} object. */ @Override public abstract ExternalTableDefinition build(); @@ -219,7 +336,8 @@ abstract Builder setHivePartitioningOptionsInner( @Nullable public Boolean ignoreUnknownValues() { return getIgnoreUnknownValues(); - }; + } + ; @Nullable public abstract Boolean getIgnoreUnknownValues(); @@ -255,6 +373,52 @@ public String getFileSetSpecType() { @Nullable public abstract ImmutableList getSourceUrisImmut(); + /** + * Returns the object metadata. + * + * @see + * ObjectMetadata + */ + @Nullable + public String getObjectMetadata() { + return getObjectMetadataInner(); + } + + @Nullable + abstract String getObjectMetadataInner(); + + /** + * Returns the metadata cache mode. + * + * @see + * MetadataCacheMode + */ + @Nullable + public String getMetadataCacheMode() { + return getMetadataCacheModeInner(); + } + + @Nullable + abstract String getMetadataCacheModeInner(); + + /** + * Returns the maximum staleness of data that could be returned when the table is queried. + * Staleness encoded as a string encoding of sql IntervalValue type. + * + * @see + * MaxStaleness + */ + @Nullable + public String getMaxStaleness() { + return getMaxStalenessInner(); + } + + @Nullable + abstract String getMaxStalenessInner(); + /** * Returns the source format, and possibly some parsing options, of the external data. Supported * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. @@ -291,6 +455,37 @@ public HivePartitioningOptions getHivePartitioningOptions() { return getHivePartitioningOptionsInner(); } + /** + * Returns the time zone used when parsing timestamp values that don't have specific time zone + * information. + */ + @Nullable + public abstract String getTimeZone(); + + /** Returns the format used to parse DATE values. */ + @Nullable + public abstract String getDateFormat(); + + /** Returns the format used to parse DATETIME values. */ + @Nullable + public abstract String getDatetimeFormat(); + + /** Returns the format used to parse TIME values. */ + @Nullable + public abstract String getTimeFormat(); + + /** Returns the format used to parse TIMESTAMP values. */ + @Nullable + public abstract String getTimestampFormat(); + + /** Returns the strategy used to match loaded columns to the schema, either POSITION or NAME. */ + @Nullable + public abstract SourceColumnMatch getSourceColumnMatch(); + + /** Returns a list of strings represented as SQL NULL value in a CSV file. 
*/ + @Nullable + public abstract List getNullMarkers(); + @Nullable abstract HivePartitioningOptions getHivePartitioningOptionsInner(); @@ -301,6 +496,9 @@ public HivePartitioningOptions getHivePartitioningOptions() { com.google.api.services.bigquery.model.Table toPb() { Table tablePb = super.toPb(); tablePb.setExternalDataConfiguration(toExternalDataConfigurationPb()); + if (getMaxStaleness() != null) { + tablePb.setMaxStaleness(getMaxStaleness()); + } return tablePb; } @@ -362,6 +560,37 @@ com.google.api.services.bigquery.model.ExternalDataConfiguration toExternalDataC externalConfigurationPb.setFileSetSpecType(getFileSetSpecType()); } + if (getObjectMetadata() != null) { + externalConfigurationPb.setObjectMetadata(getObjectMetadata()); + } + + if (getMetadataCacheMode() != null) { + externalConfigurationPb.setMetadataCacheMode(getMetadataCacheMode()); + } + if (getTimeZone() != null) { + externalConfigurationPb.setTimeZone(getTimeZone()); + } + if (getDateFormat() != null) { + externalConfigurationPb.setDateFormat(getDateFormat()); + } + if (getDatetimeFormat() != null) { + externalConfigurationPb.setDatetimeFormat(getDatetimeFormat()); + } + if (getTimeFormat() != null) { + externalConfigurationPb.setTimeFormat(getTimeFormat()); + } + if (getTimestampFormat() != null) { + externalConfigurationPb.setTimestampFormat(getTimestampFormat()); + } + if (getSourceColumnMatch() != null) { + externalConfigurationPb + .getCsvOptions() + .setSourceColumnMatch(getSourceColumnMatch().toString()); + } + if (getNullMarkers() != null) { + externalConfigurationPb.getCsvOptions().setNullMarkers(getNullMarkers()); + } + return externalConfigurationPb; } @@ -426,6 +655,24 @@ public static Builder newBuilder(String sourceUri, FormatOptions format) { return newBuilder().setSourceUris(ImmutableList.of(sourceUri)).setFormatOptions(format); } + /** + * Creates a builder for an ExternalTableDefinition object. + * + * @param sourceUri the fully-qualified URIs that point to your data in Google Cloud. For Google + * Cloud Bigtable URIs: Exactly one URI can be specified and it has be a fully specified and + * valid HTTPS URL for a Google Cloud Bigtable table. Size limits related to load jobs apply + * to external data sources, plus an additional limit of 10 GB maximum size across all URIs. + * @return a builder for an ExternalTableDefinition object given source URIs and format + * @see Quota + * @see + * Source Format + */ + public static Builder newBuilder(String sourceUri) { + checkArgument(!isNullOrEmpty(sourceUri), "Provided sourceUri is null or empty"); + return newBuilder().setSourceUris(ImmutableList.of(sourceUri)); + } + /** * Creates an ExternalTableDefinition object. 
* @@ -534,6 +781,40 @@ static ExternalTableDefinition fromPb(Table tablePb) { if (externalDataConfiguration.getFileSetSpecType() != null) { builder.setFileSetSpecType(externalDataConfiguration.getFileSetSpecType()); } + if (externalDataConfiguration.getObjectMetadata() != null) { + builder.setObjectMetadata(externalDataConfiguration.getObjectMetadata()); + } + if (externalDataConfiguration.getMetadataCacheMode() != null) { + builder.setMetadataCacheMode(externalDataConfiguration.getMetadataCacheMode()); + } + if (tablePb.getMaxStaleness() != null) { + builder.setMaxStaleness(tablePb.getMaxStaleness()); + } + if (externalDataConfiguration.getTimeZone() != null) { + builder.setTimeZone(externalDataConfiguration.getTimeZone()); + } + if (externalDataConfiguration.getDateFormat() != null) { + builder.setDateFormat(externalDataConfiguration.getDateFormat()); + } + if (externalDataConfiguration.getDatetimeFormat() != null) { + builder.setDatetimeFormat(externalDataConfiguration.getDatetimeFormat()); + } + if (externalDataConfiguration.getTimeFormat() != null) { + builder.setTimeFormat(externalDataConfiguration.getTimeFormat()); + } + if (externalDataConfiguration.getTimestampFormat() != null) { + builder.setTimestampFormat(externalDataConfiguration.getTimestampFormat()); + } + if (externalDataConfiguration.getCsvOptions() != null) { + if (externalDataConfiguration.getCsvOptions().getSourceColumnMatch() != null) { + builder.setSourceColumnMatch( + SourceColumnMatch.valueOf( + externalDataConfiguration.getCsvOptions().getSourceColumnMatch())); + } + if (externalDataConfiguration.getCsvOptions().getNullMarkers() != null) { + builder.setNullMarkers(externalDataConfiguration.getCsvOptions().getNullMarkers()); + } + } } return builder.build(); } @@ -597,6 +878,39 @@ static ExternalTableDefinition fromExternalDataConfiguration( builder.setFileSetSpecType(externalDataConfiguration.getFileSetSpecType()); } + if (externalDataConfiguration.getObjectMetadata() != null) { + builder.setObjectMetadata(externalDataConfiguration.getObjectMetadata()); + } + + if (externalDataConfiguration.getMetadataCacheMode() != null) { + builder.setMetadataCacheMode(externalDataConfiguration.getMetadataCacheMode()); + } + if (externalDataConfiguration.getTimeZone() != null) { + builder.setTimeZone(externalDataConfiguration.getTimeZone()); + } + if (externalDataConfiguration.getDateFormat() != null) { + builder.setDateFormat(externalDataConfiguration.getDateFormat()); + } + if (externalDataConfiguration.getDatetimeFormat() != null) { + builder.setDatetimeFormat(externalDataConfiguration.getDatetimeFormat()); + } + if (externalDataConfiguration.getTimeFormat() != null) { + builder.setTimeFormat(externalDataConfiguration.getTimeFormat()); + } + if (externalDataConfiguration.getTimestampFormat() != null) { + builder.setTimestampFormat(externalDataConfiguration.getTimestampFormat()); + } + if (externalDataConfiguration.getCsvOptions() != null) { + if (externalDataConfiguration.getCsvOptions().getSourceColumnMatch() != null) { + builder.setSourceColumnMatch( + SourceColumnMatch.valueOf( + externalDataConfiguration.getCsvOptions().getSourceColumnMatch())); + } + if (externalDataConfiguration.getCsvOptions().getNullMarkers() != null) { + builder.setNullMarkers(externalDataConfiguration.getCsvOptions().getNullMarkers()); + } + } + return builder.build(); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java index 29a256e9e..d79959ee0 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java @@ -47,6 +47,7 @@ public final class ExtractJobConfiguration extends JobConfiguration { private final Boolean useAvroLogicalTypes; private final Map<String, String> labels; private final Long jobTimeoutMs; + private final String reservation; public static final class Builder extends JobConfiguration.Builder<ExtractJobConfiguration, Builder> { @@ -61,6 +62,7 @@ public static final class Builder private Boolean useAvroLogicalTypes; private Map<String, String> labels; private Long jobTimeoutMs; + private String reservation; private Builder() { super(Type.EXTRACT); @@ -78,6 +80,7 @@ private Builder(ExtractJobConfiguration jobInfo) { this.useAvroLogicalTypes = jobInfo.useAvroLogicalTypes; this.labels = jobInfo.labels; this.jobTimeoutMs = jobInfo.jobTimeoutMs; + this.reservation = jobInfo.reservation; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { @@ -101,6 +104,9 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur if (configurationPb.getJobTimeoutMs() != null) { this.jobTimeoutMs = configurationPb.getJobTimeoutMs(); } + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } } /** Sets the table to export. */ @@ -198,6 +204,19 @@ public Builder setJobTimeoutMs(Long jobTimeoutMs) { return this; } + /** + * [Optional] The reservation that the job would use. Users can specify a reservation to execute + * the job. If a reservation is not set, the reservation is determined based on the rules defined + * by the reservation assignments. The expected format is + * `projects/{project}/locations/{location}/reservations/{reservation}`. + * + * @param reservation reservation or {@code null} for none + */ + public Builder setReservation(String reservation) { + this.reservation = reservation; + return this; + } + public ExtractJobConfiguration build() { return new ExtractJobConfiguration(this); } @@ -215,6 +234,7 @@ private ExtractJobConfiguration(Builder builder) { this.useAvroLogicalTypes = builder.useAvroLogicalTypes; this.labels = builder.labels; this.jobTimeoutMs = builder.jobTimeoutMs; + this.reservation = builder.reservation; } /** Returns the table to export. */ @@ -274,6 +294,11 @@ public Long getJobTimeoutMs() { return jobTimeoutMs; } + /** Returns the reservation associated with this job. */ + public String getReservation() { + return reservation; + } + @Override public Builder toBuilder() { return new Builder(this); } @@ -291,7 +316,8 @@ ToStringHelper toStringHelper() { .add("compression", compression) .add("useAvroLogicalTypes", useAvroLogicalTypes) .add("labels", labels) - .add("jobTimeoutMs", jobTimeoutMs); + .add("jobTimeoutMs", jobTimeoutMs) + .add("reservation", reservation); } @Override @@ -313,7 +339,8 @@ public int hashCode() { compression, useAvroLogicalTypes, labels, - jobTimeoutMs); + jobTimeoutMs, + reservation); } @Override @@ -350,6 +377,9 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (jobTimeoutMs != null) { jobConfiguration.setJobTimeoutMs(jobTimeoutMs); } + if (reservation != null) { + jobConfiguration.setReservation(reservation); + } jobConfiguration.setExtract(extractConfigurationPb); return jobConfiguration; }
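A caller-side sketch of the reservation plumbing added above; the table, bucket, and reservation names are hypothetical.

ExtractJobConfiguration extractConfig =
    ExtractJobConfiguration.newBuilder(
            TableId.of("my_dataset", "my_table"), "gs://my-bucket/extract-*.csv")
        .setFormat("CSV")
        .setReservation("projects/my-project/locations/us/reservations/my-reservation")
        .build();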
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java index 9fbc2ab91..3c959a73f 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java @@ -201,7 +201,7 @@ public Builder setType(StandardSQLTypeName type, FieldList subFields) { /** Sets the mode of the field. When not specified, {@link Mode#NULLABLE} is used. */ public Builder setMode(Mode mode) { - this.mode = mode != null ? mode.name() : Data.nullOf(String.class); + this.mode = mode != null ? mode.name() : null; return this; }
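With the setMode change above, a null mode is now simply omitted from the serialized table schema rather than sent as an explicit JSON null via Data.nullOf(String.class). A minimal sketch:

Field nameField =
    Field.newBuilder("name", StandardSQLTypeName.STRING)
        .setMode(null) // the "mode" key is now absent from the generated TableFieldSchema
        .build();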
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldElementType.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldElementType.java index 43446e1d0..de601151e 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldElementType.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldElementType.java @@ -15,6 +15,7 @@ */ package com.google.cloud.bigquery; +import com.google.api.services.bigquery.model.QueryParameterType; import com.google.api.services.bigquery.model.TableFieldSchema; import com.google.auto.value.AutoValue; import java.io.Serializable; @@ -60,4 +61,14 @@ static FieldElementType fromPb(TableFieldSchema.RangeElementType rangeElementTyp } return null; } + + /** Creates an instance of FieldElementType from QueryParameterType with RangeElementType. */ + static FieldElementType fromPb(QueryParameterType queryParameterTypePb) { + // Treat a FieldElementType message without a Type subfield as invalid. + if ((queryParameterTypePb.getRangeElementType() != null) + && (queryParameterTypePb.getRangeElementType().getType() != null)) { + return newBuilder().setType(queryParameterTypePb.getRangeElementType().getType()).build(); + } + return null; + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java index ea68075f7..d11df4b95 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java @@ -26,6 +26,7 @@ import com.google.common.io.BaseEncoding; import java.io.Serializable; import java.math.BigDecimal; +import java.math.BigInteger; import java.math.RoundingMode; import java.time.Duration; import java.time.Instant; @@ -46,10 +47,11 @@ public class FieldValue implements Serializable { private static final int MICROSECONDS = 1000000; - private static final long serialVersionUID = 469098630191710061L; + private static final long serialVersionUID = 469098630191710062L; private final Attribute attribute; private final Object value; + private final Boolean useInt64Timestamps; /** The field value's attribute, giving information on the field's content type. */ public enum Attribute { @@ -67,12 +69,20 @@ public enum Attribute { REPEATED, /** A {@code FieldValue} for a field of type {@link LegacySQLTypeName#RECORD}. */ - RECORD + RECORD, + + /** A {@code FieldValue} for a field of type {@link LegacySQLTypeName#RANGE}. */ + RANGE } private FieldValue(Attribute attribute, Object value) { + this(attribute, value, false); + } + + private FieldValue(Attribute attribute, Object value, Boolean useInt64Timestamps) { this.attribute = checkNotNull(attribute); this.value = value; + this.useInt64Timestamps = useInt64Timestamps; } /** @@ -104,6 +114,10 @@ public Object getValue() { return value; } + public Boolean getUseInt64Timestamps() { + return useInt64Timestamps; + } + /** * Returns this field's value as a {@link String}. This method should only be used if the * corresponding field has primitive type ({@link LegacySQLTypeName#BYTES}, {@link @@ -120,6 +134,20 @@ public String getStringValue() { return (String) value; } + /** + * Returns this field's value as a {@link String}, or defaultValue if {@link #isNull()} returns + * {@code true}. See {@link #getStringValue()} for more details. + * + * @throws ClassCastException if the field is not a primitive type + */ + @SuppressWarnings("unchecked") + public String getStringValueOrDefault(String defaultValue) { + if (isNull()) { + return defaultValue; + } + return getStringValue(); + } + /** * Returns this field's value as a byte array. This method should only be used if the * corresponding field has primitive type ({@link LegacySQLTypeName#BYTES}). @@ -190,6 +218,9 @@ public boolean getBooleanValue() { */ @SuppressWarnings("unchecked") public long getTimestampValue() { + if (useInt64Timestamps) { + return new BigInteger(getStringValue()).longValue(); + } // timestamps are encoded in the format 1408452095.22 where the integer part is seconds since // epoch (e.g. 1408452095.22 == 2014-08-19 07:41:35.220 -05:00) BigDecimal secondsWithMicro = new BigDecimal(getStringValue()); @@ -229,6 +260,23 @@ public BigDecimal getNumericValue() { return new BigDecimal(getStringValue()); } + /** + * Returns this field's value as a {@link Range}. This method should only be used if the + * corresponding field has {@link LegacySQLTypeName#RANGE} type.
+ * + * @throws ClassCastException if the field is not a primitive type + * @throws IllegalArgumentException if the field's value could not be converted to {@link Range} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public Range getRangeValue() { + if (attribute == Attribute.RANGE) { + return (Range) value; + } + // Provide best effort to convert value to Range object. + return Range.of(getStringValue()); + } + /** * Returns this field's value as a list of {@link FieldValue}. This method should only be used if * the corresponding field has {@link Field.Mode#REPEATED} mode (i.e. {@link #getAttribute()} is @@ -283,12 +331,13 @@ public String toString() { return MoreObjects.toStringHelper(this) .add("attribute", attribute) .add("value", value) + .add("useInt64Timestamps", useInt64Timestamps) .toString(); } @Override public final int hashCode() { - return Objects.hash(attribute, value); + return Objects.hash(attribute, value, useInt64Timestamps); } @Override @@ -300,7 +349,9 @@ public final boolean equals(Object obj) { return false; } FieldValue other = (FieldValue) obj; - return attribute == other.attribute && Objects.equals(value, other.value); + return attribute == other.attribute + && Objects.equals(value, other.value) + && Objects.equals(useInt64Timestamps, other.useInt64Timestamps); } /** @@ -319,23 +370,38 @@ public final boolean equals(Object obj) { */ @BetaApi public static FieldValue of(Attribute attribute, Object value) { - return new FieldValue(attribute, value); + return of(attribute, value, false); + } + + @BetaApi + public static FieldValue of(Attribute attribute, Object value, Boolean useInt64Timestamps) { + return new FieldValue(attribute, value, useInt64Timestamps); } static FieldValue fromPb(Object cellPb) { - return fromPb(cellPb, null); + return fromPb(cellPb, null, false); } @SuppressWarnings("unchecked") - static FieldValue fromPb(Object cellPb, Field recordSchema) { + static FieldValue fromPb(Object cellPb, Field recordSchema, Boolean useInt64Timestamps) { if (Data.isNull(cellPb)) { - return FieldValue.of(Attribute.PRIMITIVE, null); + return FieldValue.of(Attribute.PRIMITIVE, null, useInt64Timestamps); } if (cellPb instanceof String) { - return FieldValue.of(Attribute.PRIMITIVE, cellPb); + if ((recordSchema != null) + && (recordSchema.getType() == LegacySQLTypeName.RANGE) + && (recordSchema.getRangeElementType() != null)) { + return FieldValue.of( + Attribute.RANGE, + Range.of((String) cellPb, recordSchema.getRangeElementType()), + useInt64Timestamps); + } + return FieldValue.of(Attribute.PRIMITIVE, cellPb, useInt64Timestamps); } if (cellPb instanceof List) { - return FieldValue.of(Attribute.REPEATED, FieldValueList.fromPb((List<Object>) cellPb, null)); + return FieldValue.of( + Attribute.REPEATED, + FieldValueList.fromPb((List<Object>) cellPb, null, useInt64Timestamps)); } if (cellPb instanceof Map) { Map<String, Object> cellMapPb = (Map<String, Object>) cellPb; @@ -343,12 +409,13 @@ static FieldValue fromPb(Object cellPb, Field recordSchema) { FieldList subFieldsSchema = recordSchema != null ? recordSchema.getSubFields() : null; return FieldValue.of( Attribute.RECORD, + FieldValueList.fromPb( + (List<Object>) cellMapPb.get("f"), subFieldsSchema, useInt64Timestamps)); - FieldValueList.fromPb((List<Object>) cellMapPb.get("f"), subFieldsSchema)); } // This should never be the case when we are processing a first level table field (i.e. a // row's field, not a record sub-field) if (cellMapPb.containsKey("v")) { - return FieldValue.fromPb(cellMapPb.get("v"), recordSchema); + return FieldValue.fromPb(cellMapPb.get("v"), recordSchema, useInt64Timestamps); } } throw new IllegalArgumentException("Unexpected table cell format");
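A sketch of how the two FieldValue additions above surface to callers; the literals are hypothetical.

// With useInt64Timestamps set, TIMESTAMP cells are parsed as int64 microseconds
// instead of the floating-point "seconds.micros" encoding.
FieldValue ts = FieldValue.of(FieldValue.Attribute.PRIMITIVE, "1704067200000000", true);
long micros = ts.getTimestampValue();

// RANGE cells decode to Range objects; getRangeValue() also makes a best-effort
// conversion from the string form for PRIMITIVE values.
FieldValue range = FieldValue.of(FieldValue.Attribute.RANGE, Range.of("[2020-01-01, 2020-12-31)"));
Range r = range.getRangeValue();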
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java index 5035bb164..18d2155a5 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java @@ -112,6 +112,10 @@ FieldValueList withSchema(FieldList schema) { } static FieldValueList fromPb(List<Object> rowPb, FieldList schema) { + return fromPb(rowPb, schema, false); + } + + static FieldValueList fromPb(List<Object> rowPb, FieldList schema, Boolean useInt64Timestamps) { List<FieldValue> row = new ArrayList<>(rowPb.size()); if (schema != null) { if (schema.size() != rowPb.size()) { @@ -120,11 +124,11 @@ static FieldValueList fromPb(List<Object> rowPb, FieldList schema) { Iterator<Field> schemaIter = schema.iterator(); Iterator<Object> rowPbIter = rowPb.iterator(); while (rowPbIter.hasNext() && schemaIter.hasNext()) { - row.add(FieldValue.fromPb(rowPbIter.next(), schemaIter.next())); + row.add(FieldValue.fromPb(rowPbIter.next(), schemaIter.next(), useInt64Timestamps)); } } else { for (Object cellPb : rowPb) { - row.add(FieldValue.fromPb(cellPb, null)); + row.add(FieldValue.fromPb(cellPb, null, useInt64Timestamps)); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java index 53952bc6c..ce9380098 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Collections; import java.util.HashMap; @@ -479,4 +480,15 @@ public boolean equals(Object obj) { && Objects.equals(skipInvalidRows, other.skipInvalidRows) && Objects.equals(templateSuffix, other.templateSuffix); } + + private static String getFieldAsString(Object field) { + return field == null ?
"null" : field.toString(); + } + + public Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.insert_all.table", getFieldAsString(this.getTable().getTable())) + .put("bq.insert_all.template_suffix", getFieldAsString(this.getTemplateSuffix())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java index d23e4ea52..88950b9fb 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java @@ -29,15 +29,18 @@ import com.google.cloud.bigquery.BigQuery.TableDataListOption; import com.google.cloud.bigquery.JobConfiguration.Type; import com.google.common.collect.ImmutableList; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import java.io.IOException; import java.io.ObjectInputStream; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; -import org.threeten.bp.Duration; /** * A Google BigQuery Job. @@ -52,20 +55,20 @@ public class Job extends JobInfo { private static final RetrySettings DEFAULT_JOB_WAIT_SETTINGS = RetrySettings.newBuilder() - .setTotalTimeout(Duration.ofHours(12L)) - .setInitialRetryDelay(Duration.ofSeconds(1L)) + .setTotalTimeoutDuration(Duration.ofHours(12L)) + .setInitialRetryDelayDuration(Duration.ofSeconds(1L)) .setRetryDelayMultiplier(2.0) .setJittered(true) - .setMaxRetryDelay(Duration.ofMinutes(1L)) + .setMaxRetryDelayDuration(Duration.ofMinutes(1L)) .build(); static final RetrySettings DEFAULT_QUERY_JOB_WAIT_SETTINGS = RetrySettings.newBuilder() - .setTotalTimeout(Duration.ofHours(12L)) - .setInitialRetryDelay(Duration.ofSeconds(3L)) + .setTotalTimeoutDuration(Duration.ofHours(12L)) + .setInitialRetryDelayDuration(Duration.ofSeconds(3L)) .setRetryDelayMultiplier(1.0) .setJittered(true) - .setMaxRetryDelay(Duration.ofSeconds(3L)) + .setMaxRetryDelayDuration(Duration.ofSeconds(3L)) .build(); static final QueryResultsOption[] DEFAULT_QUERY_WAIT_OPTIONS = { @@ -172,7 +175,21 @@ public Job build() { */ public boolean exists() { checkNotDryRun("exists"); - return bigquery.getJob(getJobId(), JobOption.fields()) != null; + Span exists = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + exists = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.exists") + .startSpan(); + } + try (Scope existsScope = exists != null ? exists.makeCurrent() : null) { + return bigquery.getJob(getJobId(), JobOption.fields()) != null; + } finally { + if (exists != null) { + exists.end(); + } + } } /** @@ -193,15 +210,38 @@ public boolean exists() { */ public boolean isDone() { checkNotDryRun("isDone"); - Job job = bigquery.getJob(getJobId(), JobOption.fields(BigQuery.JobField.STATUS)); - return job == null || JobStatus.State.DONE.equals(job.getStatus().getState()); + Span isDone = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + isDone = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.isDone") + .startSpan(); + } + try (Scope isDoneScope = isDone != null ? 
isDone.makeCurrent() : null) { + Job job = bigquery.getJob(getJobId(), JobOption.fields(BigQuery.JobField.STATUS)); + return job == null || JobStatus.State.DONE.equals(job.getStatus().getState()); + } finally { + if (isDone != null) { + isDone.end(); + } + } + } + + /** See {@link #waitFor(BigQueryRetryConfig, RetryOption...)} */ + public Job waitFor(RetryOption... waitOptions) throws InterruptedException { + return waitForInternal(DEFAULT_RETRY_CONFIG, waitOptions); } + /** * Blocks until this job completes its execution, either failing or succeeding. This method * returns current job's latest information. If the job no longer exists, this method returns * {@code null}. By default, the job status is checked using jittered exponential backoff with 1 * second as an initial delay, 2.0 as a backoff factor, 1 minute as maximum delay between polls, - * 12 hours as a total timeout and unlimited number of attempts. + * 12 hours as a total timeout and unlimited number of attempts. For query jobs, the job status + * check can be configured to retry on specific BigQuery error messages using {@link + * BigQueryRetryConfig}. This {@link BigQueryRetryConfig} configuration is not available for + * non-query jobs. * *
<p>
Example usage of {@code waitFor()}. * @@ -232,25 +272,68 @@ public boolean isDone() { * } * } * + *
<p>
Example usage of {@code waitFor()} with BigQuery retry configuration to retry on rate limit + * exceeded error messages for query jobs. + * + *
<pre>{@code
+   * Job completedJob =
+   *     job.waitFor(
+   *             BigQueryRetryConfig.newBuilder()
+   *                 .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG)
+   *                 .retryOnMessage(BigQueryErrorMessages.JOB_RATE_LIMIT_EXCEEDED_MSG)
+   *                 .retryOnRegEx(BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX)
+   *                 .build());
+   * if (completedJob == null) {
+   *   // job no longer exists
+   * } else if (completedJob.getStatus().getError() != null) {
+   *   // job failed, handle error
+   * } else {
+   *   // job completed successfully
+   * }
+   * }</pre>
+ * + * @param bigQueryRetryConfig configures retries for query jobs for BigQuery failures * @param waitOptions options to configure checking period and timeout * @throws BigQueryException upon failure, check {@link BigQueryException#getCause()} for details * @throws InterruptedException if the current thread gets interrupted while waiting for the job * to complete */ - public Job waitFor(RetryOption... waitOptions) throws InterruptedException { + public Job waitFor(BigQueryRetryConfig bigQueryRetryConfig, RetryOption... waitOptions) + throws InterruptedException { + return waitForInternal(bigQueryRetryConfig, waitOptions); + } + + private Job waitForInternal(BigQueryRetryConfig bigQueryRetryConfig, RetryOption... waitOptions) + throws InterruptedException { checkNotDryRun("waitFor"); - Object completedJobResponse; - if (getConfiguration().getType() == Type.QUERY) { - completedJobResponse = - waitForQueryResults( - RetryOption.mergeToSettings(DEFAULT_JOB_WAIT_SETTINGS, waitOptions), - DEFAULT_QUERY_WAIT_OPTIONS); - } else { - completedJobResponse = - waitForJob(RetryOption.mergeToSettings(DEFAULT_QUERY_JOB_WAIT_SETTINGS, waitOptions)); + Span waitFor = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + waitFor = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.waitFor") + .startSpan(); } + try (Scope waitForScope = waitFor != null ? waitFor.makeCurrent() : null) { + Object completedJobResponse; + if (getConfiguration().getType() == Type.QUERY) { + completedJobResponse = + waitForQueryResults( + RetryOption.mergeToSettings(DEFAULT_JOB_WAIT_SETTINGS, waitOptions), + bigQueryRetryConfig, + DEFAULT_QUERY_WAIT_OPTIONS); + } else { + completedJobResponse = + waitForJob(RetryOption.mergeToSettings(DEFAULT_QUERY_JOB_WAIT_SETTINGS, waitOptions)); + } - return completedJobResponse == null ? null : reload(); + return completedJobResponse == null ? null : reload(); + } finally { + if (waitFor != null) { + waitFor.end(); + } + } } /** @@ -267,81 +350,114 @@ public Job waitFor(RetryOption... waitOptions) throws InterruptedException { public TableResult getQueryResults(QueryResultsOption... options) throws InterruptedException, JobException { checkNotDryRun("getQueryResults"); - if (getConfiguration().getType() != Type.QUERY) { - throw new UnsupportedOperationException( - "Getting query results is supported only for " + Type.QUERY + " jobs"); - } - - List waitOptions = - new ArrayList<>(Arrays.asList(DEFAULT_QUERY_WAIT_OPTIONS)); - List listOptions = new ArrayList<>(); - for (QueryResultsOption option : options) { - switch (option.getRpcOption()) { - case MAX_RESULTS: - listOptions.add(TableDataListOption.pageSize((Long) option.getValue())); - break; - case PAGE_TOKEN: - listOptions.add(TableDataListOption.pageToken((String) option.getValue())); - break; - case START_INDEX: - listOptions.add(TableDataListOption.startIndex((Long) option.getValue())); - break; - case TIMEOUT: - waitOptions.add(QueryResultsOption.maxWaitTime((Long) option.getValue())); - break; - } + + Span getQueryResults = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getQueryResults = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.getQueryResults") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); } + try (Scope getQueryResultsScope = + getQueryResults != null ? 
getQueryResults.makeCurrent() : null) { - QueryResponse response = - waitForQueryResults( - DEFAULT_JOB_WAIT_SETTINGS, waitOptions.toArray(new QueryResultsOption[0])); - - // Get the job resource to determine if it has errored. - Job job = this; - if (job.getStatus() == null || !JobStatus.State.DONE.equals(job.getStatus().getState())) { - job = reload(); - } - if (job.getStatus() != null && job.getStatus().getError() != null) { - throw new BigQueryException( - job.getStatus().getExecutionErrors() == null - ? ImmutableList.of(job.getStatus().getError()) - : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); - } - - // If there are no rows in the result, this may have been a DDL query. - // Listing table data might fail, such as with CREATE VIEW queries. - // Avoid a tabledata.list API request by returning an empty TableResult. - if (response.getTotalRows() == 0) { - TableResult emptyTableResult = - TableResult.newBuilder() - .setSchema(response.getSchema()) - .setJobId(job.getJobId()) - .setTotalRows(0L) - .setPageNoSchema(new PageImpl(null, "", null)) - .build(); - return emptyTableResult; - } - - TableId table = - ((QueryJobConfiguration) getConfiguration()).getDestinationTable() == null - ? ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable() - : ((QueryJobConfiguration) getConfiguration()).getDestinationTable(); - TableResult tableResult = - bigquery.listTableData( - table, response.getSchema(), listOptions.toArray(new TableDataListOption[0])); - TableResult tableResultWithJobId = tableResult.toBuilder().setJobId(job.getJobId()).build(); - return tableResultWithJobId; + if (getConfiguration().getType() != Type.QUERY) { + throw new UnsupportedOperationException( + "Getting query results is supported only for " + Type.QUERY + " jobs"); + } + + List waitOptions = + new ArrayList<>(Arrays.asList(DEFAULT_QUERY_WAIT_OPTIONS)); + List listOptions = new ArrayList<>(); + for (QueryResultsOption option : options) { + switch (option.getRpcOption()) { + case MAX_RESULTS: + listOptions.add(TableDataListOption.pageSize((Long) option.getValue())); + break; + case PAGE_TOKEN: + listOptions.add(TableDataListOption.pageToken((String) option.getValue())); + break; + case START_INDEX: + listOptions.add(TableDataListOption.startIndex((Long) option.getValue())); + break; + case TIMEOUT: + waitOptions.add(QueryResultsOption.maxWaitTime((Long) option.getValue())); + break; + } + } + + QueryResponse response = + waitForQueryResults( + DEFAULT_JOB_WAIT_SETTINGS, + DEFAULT_RETRY_CONFIG, + waitOptions.toArray(new QueryResultsOption[0])); + + // Get the job resource to determine if it has errored. + Job job = this; + if (job.getStatus() == null || !JobStatus.State.DONE.equals(job.getStatus().getState())) { + job = reload(); + } + if (job.getStatus() != null && job.getStatus().getError() != null) { + throw new BigQueryException( + job.getStatus().getExecutionErrors() == null + ? ImmutableList.of(job.getStatus().getError()) + : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); + } + + // If there are no rows in the result, this may have been a DDL query. + // Listing table data might fail, such as with CREATE VIEW queries. + // Avoid a tabledata.list API request by returning an empty TableResult. 
+ if (response.getTotalRows() == 0) { + TableResult emptyTableResult = + TableResult.newBuilder() + .setSchema(response.getSchema()) + .setJobId(job.getJobId()) + .setTotalRows(0L) + .setPageNoSchema(new PageImpl(null, "", null)) + .build(); + return emptyTableResult; + } + + TableId table = + ((QueryJobConfiguration) getConfiguration()).getDestinationTable() == null + ? ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable() + : ((QueryJobConfiguration) getConfiguration()).getDestinationTable(); + TableResult tableResult = + bigquery.listTableData( + table, response.getSchema(), listOptions.toArray(new TableDataListOption[0])); + TableResult tableResultWithJobId = tableResult.toBuilder().setJobId(job.getJobId()).build(); + return tableResultWithJobId; + } finally { + if (getQueryResults != null) { + getQueryResults.end(); + } + } } private QueryResponse waitForQueryResults( - RetrySettings retrySettings, final QueryResultsOption... resultsOptions) + RetrySettings retrySettings, + BigQueryRetryConfig bigQueryRetryConfig, + final QueryResultsOption... resultsOptions) throws InterruptedException { if (getConfiguration().getType() != Type.QUERY) { throw new UnsupportedOperationException( "Waiting for query results is supported only for " + Type.QUERY + " jobs"); } - try { + Span waitForQueryResults = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + waitForQueryResults = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.waitForQueryResults") + .setAllAttributes(otelAttributesFromOptions(resultsOptions)) + .startSpan(); + } + try (Scope waitForQueryResultsScope = + waitForQueryResults != null ? waitForQueryResults.makeCurrent() : null) { return BigQueryRetryHelper.runWithRetries( new Callable() { @Override @@ -360,14 +476,43 @@ public boolean shouldRetry( } }, options.getClock(), - DEFAULT_RETRY_CONFIG); + bigQueryRetryConfig, + options.isOpenTelemetryTracingEnabled(), + options.getOpenTelemetryTracer()); } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (waitForQueryResults != null) { + waitForQueryResults.end(); + } } } private Job waitForJob(RetrySettings waitSettings) throws InterruptedException { - try { + Span waitForJob = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + waitForJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.waitForJob") + .setAttribute( + "bq.job.wait_settings.total_timeout", + getFieldAsString(waitSettings.getTotalTimeoutDuration())) + .setAttribute( + "bq.job.wait_settings.initial_retry_delay", + getFieldAsString(waitSettings.getInitialRetryDelayDuration())) + .setAttribute( + "bq.job.wait_settings.max_retry_delay", + getFieldAsString(waitSettings.getMaxRetryDelayDuration())) + .setAttribute( + "bq.job.wait_settings.initial_rpc_timeout", + getFieldAsString(waitSettings.getInitialRpcTimeoutDuration())) + .setAttribute( + "bq.job.wait_settings.max_rpc_timeout", + getFieldAsString(waitSettings.getMaxRpcTimeoutDuration())) + .startSpan(); + } + try (Scope waitForJobScope = waitForJob != null ? 
waitForJob.makeCurrent() : null) { return RetryHelper.poll( new Callable() { @Override @@ -392,6 +537,10 @@ public boolean shouldRetry(Throwable prevThrowable, Job prevResponse) { options.getClock()); } catch (ExecutionException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (waitForJob != null) { + waitForJob.end(); + } } } @@ -422,14 +571,31 @@ public boolean shouldRetry(Throwable prevThrowable, Job prevResponse) { */ public Job reload(JobOption... options) { checkNotDryRun("reload"); - Job job = bigquery.getJob(getJobId(), options); - if (job != null && job.getStatus().getError() != null) { - throw new BigQueryException( - job.getStatus().getExecutionErrors() == null - ? ImmutableList.of(job.getStatus().getError()) - : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); - } - return job; + Span reload = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + reload = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.reload") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + try (Scope reloadScope = reload != null ? reload.makeCurrent() : null) { + Job job = bigquery.getJob(getJobId(), options); + if (job != null && job.getStatus().getError() != null) { + throw new BigQueryException( + job.getStatus().getExecutionErrors() == null + ? ImmutableList.of(job.getStatus().getError()) + : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); + } + return job; + } finally { + if (reload != null) { + reload.end(); + } + } } /** @@ -451,7 +617,22 @@ public Job reload(JobOption... options) { */ public boolean cancel() { checkNotDryRun("cancel"); - return bigquery.cancel(getJobId()); + Span cancel = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + cancel = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.cancel") + .startSpan(); + } + + try (Scope cancelScope = cancel != null ? cancel.makeCurrent() : null) { + return bigquery.cancel(getJobId()); + } finally { + if (cancel != null) { + cancel.end(); + } + } } private void checkNotDryRun(String op) { @@ -515,4 +696,19 @@ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundE static Job fromPb(BigQuery bigquery, com.google.api.services.bigquery.model.Job jobPb) { return new Job(bigquery, new JobInfo.BuilderImpl(jobPb)); } + + private static Attributes otelAttributesFromOptions(Option... options) { + Attributes attributes = Attributes.builder().build(); + for (Option option : options) { + attributes = + attributes.toBuilder() + .put(option.getRpcOption().toString(), option.getValue().toString()) + .build(); + } + return attributes; + } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java index 612affd31..0bfa2572a 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java @@ -34,6 +34,7 @@ public class JobException extends RuntimeException { public JobId getId() { return id; } + /** * The errors reported by the job. 
* diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java index b966cebe1..4bfc2aa00 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java @@ -21,6 +21,7 @@ import com.google.api.services.bigquery.model.JobReference; import com.google.auto.value.AutoValue; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.UUID; import javax.annotation.Nullable; @@ -123,4 +124,16 @@ static JobId fromPb(JobReference jobRef) { .setLocation(jobRef.getLocation()) .build(); } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.job.id", getFieldAsString(this.getJob())) + .put("bq.job.location", getFieldAsString(this.getLocation())) + .put("bq.job.project", getFieldAsString(this.getProject())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java index 8f571ff55..fdf1b6e60 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java @@ -66,6 +66,12 @@ public enum WriteDisposition { /** Configures the job to overwrite the table data if table already exists. */ WRITE_TRUNCATE, + /** + * Configures the job to retain schema and constraints on an existing table, and truncate and + * replace data. + */ + WRITE_TRUNCATE_DATA, + /** Configures the job to append data to the table if it already exists. */ WRITE_APPEND, diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java index c600f6a19..c78378ab5 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java @@ -17,14 +17,17 @@ package com.google.cloud.bigquery; import com.google.api.core.ApiFunction; +import com.google.api.services.bigquery.model.ExportDataStatistics; import com.google.api.services.bigquery.model.JobConfiguration; import com.google.api.services.bigquery.model.JobStatistics2; import com.google.api.services.bigquery.model.JobStatistics3; import com.google.api.services.bigquery.model.JobStatistics4; import com.google.api.services.bigquery.model.JobStatistics5; import com.google.api.services.bigquery.model.QueryParameter; +import com.google.auto.value.AutoValue; import com.google.cloud.StringEnumType; import com.google.cloud.StringEnumValue; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects.ToStringHelper; @@ -32,12 +35,13 @@ import java.io.Serializable; import java.util.List; import java.util.Objects; +import javax.annotation.Nullable; import org.checkerframework.checker.nullness.compatqual.NullableDecl; /** A Google BigQuery Job statistics. 
*/ public abstract class JobStatistics implements Serializable { - private static final long serialVersionUID = 1433024714741660399L; + private static final long serialVersionUID = 1433024714741660400L; private final Long creationTime; private final Long endTime; @@ -48,6 +52,7 @@ public abstract class JobStatistics implements Serializable { private final List reservationUsage; private final TransactionInfo transactionInfo; private final SessionInfo sessionInfo; + private final Long totalSlotMs; /** A Google BigQuery Copy Job statistics. */ public static class CopyStatistics extends JobStatistics { @@ -387,23 +392,24 @@ static LoadStatistics fromPb(com.google.api.services.bigquery.model.JobStatistic /** A Google BigQuery Query Job statistics. */ public static class QueryStatistics extends JobStatistics { - private static final long serialVersionUID = 7539354109226732353L; + private static final long serialVersionUID = 7539354109226732354L; private final BiEngineStats biEngineStats; private final Integer billingTier; private final Boolean cacheHit; + private Boolean useReadApi; private final String ddlOperationPerformed; private final TableId ddlTargetTable; private final RoutineId ddlTargetRoutine; private final Long estimatedBytesProcessed; private final Long numDmlAffectedRows; private final DmlStats dmlStats; + private final ExportDataStats exportDataStats; private final List referencedTables; private final StatementType statementType; private final Long totalBytesBilled; private final Long totalBytesProcessed; private final Long totalPartitionsProcessed; - private final Long totalSlotMs; private final List queryPlan; private final List timeline; private final Schema schema; @@ -448,6 +454,44 @@ public StatementType apply(String constant) { public static final StatementType DROP_FUNCTION = type.createAndRegister("DROP_FUNCTION"); public static final StatementType DROP_PROCEDURE = type.createAndRegister("DROP_PROCEDURE"); public static final StatementType MERGE = type.createAndRegister("MERGE"); + public static final StatementType CREATE_MATERIALIZED_VIEW = + type.createAndRegister("CREATE_MATERIALIZED_VIEW"); + public static final StatementType CREATE_TABLE_FUNCTION = + type.createAndRegister("CREATE_TABLE_FUNCTION"); + public static final StatementType CREATE_ROW_ACCESS_POLICY = + type.createAndRegister("CREATE_ROW_ACCESS_POLICY"); + public static final StatementType CREATE_SCHEMA = type.createAndRegister("CREATE_SCHEMA"); + public static final StatementType CREATE_SNAPSHOT_TABLE = + type.createAndRegister("CREATE_SNAPSHOT_TABLE"); + public static final StatementType CREATE_SEARCH_INDEX = + type.createAndRegister("CREATE_SEARCH_INDEX"); + public static final StatementType DROP_EXTERNAL_TABLE = + type.createAndRegister("DROP_EXTERNAL_TABLE"); + + public static final StatementType DROP_MODEL = type.createAndRegister("DROP_MODEL"); + public static final StatementType DROP_MATERIALIZED_VIEW = + type.createAndRegister("DROP_MATERIALIZED_VIEW"); + + public static final StatementType DROP_TABLE_FUNCTION = + type.createAndRegister("DROP_TABLE_FUNCTION"); + public static final StatementType DROP_SEARCH_INDEX = + type.createAndRegister("DROP_SEARCH_INDEX"); + public static final StatementType DROP_SCHEMA = type.createAndRegister("DROP_SCHEMA"); + public static final StatementType DROP_SNAPSHOT_TABLE = + type.createAndRegister("DROP_SNAPSHOT_TABLE"); + public static final StatementType DROP_ROW_ACCESS_POLICY = + type.createAndRegister("DROP_ROW_ACCESS_POLICY"); + public static final StatementType 
ALTER_MATERIALIZED_VIEW = + type.createAndRegister("ALTER_MATERIALIZED_VIEW"); + public static final StatementType ALTER_SCHEMA = type.createAndRegister("ALTER_SCHEMA"); + public static final StatementType SCRIPT = type.createAndRegister("SCRIPT"); + public static final StatementType TRUNCATE_TABLE = type.createAndRegister("TRUNCATE_TABLE"); + public static final StatementType CREATE_EXTERNAL_TABLE = + type.createAndRegister("CREATE_EXTERNAL_TABLE"); + public static final StatementType EXPORT_DATA = type.createAndRegister("EXPORT_DATA"); + public static final StatementType EXPORT_MODEL = type.createAndRegister("EXPORT_MODEL"); + public static final StatementType LOAD_DATA = type.createAndRegister("LOAD_DATA"); + public static final StatementType CALL = type.createAndRegister("CALL"); private StatementType(String constant) { super(constant); @@ -472,6 +516,80 @@ public static StatementType[] values() { } } + /** + * Statistics for the EXPORT DATA statement as part of Query Job. EXTRACT JOB statistics are + * populated in ExtractStatistics. + */ + @AutoValue + public abstract static class ExportDataStats implements Serializable { + private static final long serialVersionUID = 1L; + + /** + * Returns number of destination files generated in case of EXPORT DATA statement only. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract Long getFileCount(); + + /** + * Returns number of destination rows generated in case of EXPORT DATA statement only. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract Long getRowCount(); + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_JobStatistics_QueryStatistics_ExportDataStats.Builder(); + } + + static ExportDataStats fromPb(ExportDataStatistics exportDataStatisticsPb) { + Builder builder = newBuilder(); + if (exportDataStatisticsPb.getFileCount() != null) { + builder.setFileCount(exportDataStatisticsPb.getFileCount()); + } + if (exportDataStatisticsPb.getRowCount() != null) { + builder.setRowCount(exportDataStatisticsPb.getRowCount()); + } + return builder.build(); + } + + ExportDataStatistics toPb() { + ExportDataStatistics exportDataStatisticsPb = new ExportDataStatistics(); + if (getFileCount() != null) { + exportDataStatisticsPb.setFileCount(getFileCount()); + } + if (getRowCount() != null) { + exportDataStatisticsPb.setRowCount(getRowCount()); + } + return exportDataStatisticsPb; + } + + @AutoValue.Builder + public abstract static class Builder { + + /** + * Number of destination files generated in case of EXPORT DATA statement only. + * + * @param fileCount fileCount or {@code null} for none + */ + public abstract Builder setFileCount(Long fileCount); + + /** + * Number of destination rows generated in case of EXPORT DATA statement only. + * + * @param rowCount rowCount or {@code null} for none + */ + public abstract Builder setRowCount(Long rowCount); + + /** Creates a {@code ExportDataStats} object. 
*/ + public abstract ExportDataStats build(); + } + } + static final class Builder extends JobStatistics.Builder { private BiEngineStats biEngineStats; @@ -483,12 +601,12 @@ static final class Builder extends JobStatistics.Builder referencedTables; private StatementType statementType; private Long totalBytesBilled; private Long totalBytesProcessed; private Long totalPartitionsProcessed; - private Long totalSlotMs; private List queryPlan; private List timeline; private Schema schema; @@ -520,7 +638,6 @@ private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsP this.totalBytesBilled = statisticsPb.getQuery().getTotalBytesBilled(); this.totalBytesProcessed = statisticsPb.getQuery().getTotalBytesProcessed(); this.totalPartitionsProcessed = statisticsPb.getQuery().getTotalPartitionsProcessed(); - this.totalSlotMs = statisticsPb.getQuery().getTotalSlotMs(); if (statisticsPb.getQuery().getStatementType() != null) { this.statementType = StatementType.valueOf(statisticsPb.getQuery().getStatementType()); } @@ -553,6 +670,10 @@ private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsP if (statisticsPb.getQuery().getDmlStats() != null) { this.dmlStats = DmlStats.fromPb(statisticsPb.getQuery().getDmlStats()); } + if (statisticsPb.getQuery().getExportDataStatistics() != null) { + this.exportDataStats = + ExportDataStats.fromPb(statisticsPb.getQuery().getExportDataStatistics()); + } } } @@ -601,6 +722,11 @@ Builder setDmlStats(DmlStats dmlStats) { return self(); } + Builder setExportDataStats(ExportDataStats exportDataStats) { + this.exportDataStats = exportDataStats; + return self(); + } + Builder setReferenceTables(List referencedTables) { this.referencedTables = referencedTables; return self(); @@ -631,11 +757,6 @@ Builder setTotalPartitionsProcessed(Long totalPartitionsProcessed) { return self(); } - Builder setTotalSlotMs(Long totalSlotMs) { - this.totalSlotMs = totalSlotMs; - return self(); - } - Builder setQueryPlan(List queryPlan) { this.queryPlan = queryPlan; return self(); @@ -677,18 +798,19 @@ private QueryStatistics(Builder builder) { this.biEngineStats = builder.biEngineStats; this.billingTier = builder.billingTier; this.cacheHit = builder.cacheHit; + this.useReadApi = false; this.ddlOperationPerformed = builder.ddlOperationPerformed; this.ddlTargetTable = builder.ddlTargetTable; this.ddlTargetRoutine = builder.ddlTargetRoutine; this.estimatedBytesProcessed = builder.estimatedBytesProcessed; this.numDmlAffectedRows = builder.numDmlAffectedRows; this.dmlStats = builder.dmlStats; + this.exportDataStats = builder.exportDataStats; this.referencedTables = builder.referencedTables; this.statementType = builder.statementType; this.totalBytesBilled = builder.totalBytesBilled; this.totalBytesProcessed = builder.totalBytesProcessed; this.totalPartitionsProcessed = builder.totalPartitionsProcessed; - this.totalSlotMs = builder.totalSlotMs; this.queryPlan = builder.queryPlan; this.timeline = builder.timeline; this.schema = builder.schema; @@ -716,6 +838,18 @@ public Boolean getCacheHit() { return cacheHit; } + /** Returns whether the query result is read from the high throughput ReadAPI. */ + @VisibleForTesting + public Boolean getUseReadApi() { + return useReadApi; + } + + /** Sets internal state to reflect the use of the high throughput ReadAPI. */ + @VisibleForTesting + public void setUseReadApi(Boolean useReadApi) { + this.useReadApi = useReadApi; + } + /** [BETA] For DDL queries, returns the operation applied to the DDL target table. 
*/ public String getDdlOperationPerformed() { return ddlOperationPerformed; @@ -749,6 +883,11 @@ public DmlStats getDmlStats() { return dmlStats; } + /** Detailed statistics for EXPORT DATA statement. */ + public ExportDataStats getExportDataStats() { + return exportDataStats; + } + /** * Referenced tables for the job. Queries that reference more than 50 tables will not have a * complete list. @@ -780,11 +919,6 @@ public Long getTotalPartitionsProcessed() { return totalPartitionsProcessed; } - /** Returns the slot-milliseconds consumed by the query. */ - public Long getTotalSlotMs() { - return totalSlotMs; - } - /** * Returns the query plan as a list of stages or {@code null} if a query plan is not available. * Each stage involves a number of steps that read from data sources, perform a series of @@ -890,7 +1024,6 @@ com.google.api.services.bigquery.model.JobStatistics toPb() { queryStatisticsPb.setTotalBytesBilled(totalBytesBilled); queryStatisticsPb.setTotalBytesProcessed(totalBytesProcessed); queryStatisticsPb.setTotalPartitionsProcessed(totalPartitionsProcessed); - queryStatisticsPb.setTotalSlotMs(totalSlotMs); if (ddlTargetTable != null) { queryStatisticsPb.setDdlTargetTable(ddlTargetTable.toPb()); } @@ -900,6 +1033,9 @@ com.google.api.services.bigquery.model.JobStatistics toPb() { if (dmlStats != null) { queryStatisticsPb.setDmlStats(dmlStats.toPb()); } + if (exportDataStats != null) { + queryStatisticsPb.setExportDataStatistics(exportDataStats.toPb()); + } if (referencedTables != null) { queryStatisticsPb.setReferencedTables( Lists.transform(referencedTables, TableId.TO_PB_FUNCTION)); @@ -1264,7 +1400,8 @@ public static class Builder { private String name; private Long slotMs; - private Builder() {}; + private Builder() {} + ; Builder setName(String name) { this.name = name; @@ -1350,7 +1487,8 @@ public static class Builder { private String transactionId; - private Builder() {}; + private Builder() {} + ; Builder setTransactionId(String transactionId) { this.transactionId = transactionId; @@ -1421,7 +1559,8 @@ public static class Builder { private String sessionId; - private Builder() {}; + private Builder() {} + ; Builder setSessionId(String sessionId) { this.sessionId = sessionId; @@ -1492,6 +1631,7 @@ abstract static class Builder> private List reservationUsage; private TransactionInfo transactionInfo; private SessionInfo sessionInfo; + private Long totalSlotMs; protected Builder() {} @@ -1501,6 +1641,9 @@ protected Builder(com.google.api.services.bigquery.model.JobStatistics statistic this.startTime = statisticsPb.getStartTime(); this.numChildJobs = statisticsPb.getNumChildJobs(); this.parentJobId = statisticsPb.getParentJobId(); + if (statisticsPb.getTotalSlotMs() != null) { + this.totalSlotMs = statisticsPb.getTotalSlotMs(); + } if (statisticsPb.getScriptStatistics() != null) { this.scriptStatistics = ScriptStatistics.fromPb(statisticsPb.getScriptStatistics()); } @@ -1536,6 +1679,11 @@ B setStartTime(Long startTime) { return self(); } + B setTotalSlotMs(Long totalSlotMs) { + this.totalSlotMs = totalSlotMs; + return self(); + } + abstract T build(); } @@ -1549,6 +1697,7 @@ protected JobStatistics(Builder builder) { this.reservationUsage = builder.reservationUsage; this.transactionInfo = builder.transactionInfo; this.sessionInfo = builder.sessionInfo; + this.totalSlotMs = builder.totalSlotMs; } /** Returns the creation time of the job in milliseconds since epoch. 
*/ @@ -1602,6 +1751,11 @@ public SessionInfo getSessionInfo() { return sessionInfo; } + /** Returns the slot-milliseconds for the job. */ + public Long getTotalSlotMs() { + return totalSlotMs; + } + ToStringHelper toStringHelper() { return MoreObjects.toStringHelper(this) .add("creationTime", creationTime) @@ -1612,7 +1766,8 @@ ToStringHelper toStringHelper() { .add("scriptStatistics", scriptStatistics) .add("reservationUsage", reservationUsage) .add("transactionInfo", transactionInfo) - .add("sessionInfo", sessionInfo); + .add("sessionInfo", sessionInfo) + .add("totalSlotMs", totalSlotMs); } @Override @@ -1630,7 +1785,8 @@ final int baseHashCode() { scriptStatistics, reservationUsage, transactionInfo, - sessionInfo); + sessionInfo, + totalSlotMs); } final boolean baseEquals(JobStatistics jobStatistics) { @@ -1645,6 +1801,7 @@ com.google.api.services.bigquery.model.JobStatistics toPb() { statistics.setStartTime(startTime); statistics.setNumChildJobs(numChildJobs); statistics.setParentJobId(parentJobId); + statistics.setTotalSlotMs(totalSlotMs); if (scriptStatistics != null) { statistics.setScriptStatistics(scriptStatistics.toPb()); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java index dec2583e9..334e3290b 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java @@ -46,56 +46,70 @@ public LegacySQLTypeName apply(String constant) { /** Variable-length binary data. */ public static final LegacySQLTypeName BYTES = type.createAndRegister("BYTES").setStandardType(StandardSQLTypeName.BYTES); + /** Variable-length character (Unicode) data. */ public static final LegacySQLTypeName STRING = type.createAndRegister("STRING").setStandardType(StandardSQLTypeName.STRING); + /** A 64-bit signed integer value. */ public static final LegacySQLTypeName INTEGER = type.createAndRegister("INTEGER").setStandardType(StandardSQLTypeName.INT64); + /** A 64-bit IEEE binary floating-point value. */ public static final LegacySQLTypeName FLOAT = type.createAndRegister("FLOAT").setStandardType(StandardSQLTypeName.FLOAT64); + /** * A decimal value with 38 digits of precision and 9 digits of scale. Note, support for this type * is limited in legacy SQL. */ public static final LegacySQLTypeName NUMERIC = type.createAndRegister("NUMERIC").setStandardType(StandardSQLTypeName.NUMERIC); + /** * A decimal value with 76+ digits of precision (the 77th digit is partial) and 38 digits of scale */ public static final LegacySQLTypeName BIGNUMERIC = type.createAndRegister("BIGNUMERIC").setStandardType(StandardSQLTypeName.BIGNUMERIC); + /** A Boolean value (true or false). */ public static final LegacySQLTypeName BOOLEAN = type.createAndRegister("BOOLEAN").setStandardType(StandardSQLTypeName.BOOL); + /** Represents an absolute point in time, with microsecond precision. */ public static final LegacySQLTypeName TIMESTAMP = type.createAndRegister("TIMESTAMP").setStandardType(StandardSQLTypeName.TIMESTAMP); + /** Represents a logical calendar date. Note, support for this type is limited in legacy SQL. */ public static final LegacySQLTypeName DATE = type.createAndRegister("DATE").setStandardType(StandardSQLTypeName.DATE); + /** Represents a set of geographic points, represented as a Well Known Text (WKT) string. 
*/ public static final LegacySQLTypeName GEOGRAPHY = type.createAndRegister("GEOGRAPHY").setStandardType(StandardSQLTypeName.GEOGRAPHY); + /** * Represents a time, independent of a specific date, to microsecond precision. Note, support for * this type is limited in legacy SQL. */ public static final LegacySQLTypeName TIME = type.createAndRegister("TIME").setStandardType(StandardSQLTypeName.TIME); + /** * Represents a year, month, day, hour, minute, second, and subsecond (microsecond precision). * Note, support for this type is limited in legacy SQL. */ public static final LegacySQLTypeName DATETIME = type.createAndRegister("DATETIME").setStandardType(StandardSQLTypeName.DATETIME); + /** A record type with a nested schema. */ public static final LegacySQLTypeName RECORD = type.createAndRegister("RECORD").setStandardType(StandardSQLTypeName.STRUCT); + /** Represents JSON data */ public static final LegacySQLTypeName JSON = type.createAndRegister("JSON").setStandardType(StandardSQLTypeName.JSON); + /** Represents duration or amount of time. */ public static final LegacySQLTypeName INTERVAL = type.createAndRegister("INTERVAL").setStandardType(StandardSQLTypeName.INTERVAL); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java index d4ed81044..58cf98670 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java @@ -16,6 +16,7 @@ package com.google.cloud.bigquery; +import com.google.api.core.InternalApi; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; import com.google.cloud.bigquery.JobInfo.WriteDisposition; @@ -99,17 +100,17 @@ interface Builder { Builder setIgnoreUnknownValues(Boolean ignoreUnknownValues); /** - * [Experimental] Sets options allowing the schema of the destination table to be updated as a - * side effect of the load job. Schema update options are supported in two cases: when - * writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination - * table is a partition of a table, specified by partition decorators. For normal tables, - * WRITE_TRUNCATE will always overwrite the schema. + * Sets options allowing the schema of the destination table to be updated as a side effect of + * the load job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a + * partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE + * will always overwrite the schema. */ + @InternalApi Builder setSchemaUpdateOptions(List schemaUpdateOptions); - /** - * [Experimental] Sets automatic inference of the options and schema for CSV and JSON sources. - */ + /** Sets automatic inference of the options and schema for CSV and JSON sources. */ + @InternalApi Builder setAutodetect(Boolean autodetect); /** Sets the time partitioning specification for the destination table. */ @@ -202,18 +203,19 @@ interface Builder { DatastoreBackupOptions getDatastoreBackupOptions(); /** - * [Experimental] Returns options allowing the schema of the destination table to be updated as a - * side effect of the load job. 
Schema update options are supported in two cases: when - * writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination - * table is a partition of a table, specified by partition decorators. For normal tables, - * WRITE_TRUNCATE will always overwrite the schema. + * Returns options allowing the schema of the destination table to be updated as a side effect of + * the load job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition + * of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always + * overwrite the schema. */ + @InternalApi List getSchemaUpdateOptions(); /** - * [Experimental] Returns whether automatic inference of the options and schema for CSV and JSON - * sources is set. + * Returns whether automatic inference of the options and schema for CSV and JSON sources is set. */ + @InternalApi Boolean getAutodetect(); /** Returns the time partitioning specification defined for the destination table. */ diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java index fefff3409..381942cd0 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java @@ -18,7 +18,10 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.api.core.ApiFunction; import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.cloud.StringEnumType; +import com.google.cloud.StringEnumValue; import com.google.common.base.MoreObjects.ToStringHelper; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; @@ -39,6 +42,7 @@ public final class LoadJobConfiguration extends JobConfiguration implements Load private final List sourceUris; private final String fileSetSpecType; + private final String columnNameCharacterMap; private final TableId destinationTable; private final List decimalTargetTypes; private final EncryptionConfiguration destinationEncryptionConfiguration; @@ -59,16 +63,66 @@ public final class LoadJobConfiguration extends JobConfiguration implements Load private final RangePartitioning rangePartitioning; private final HivePartitioningOptions hivePartitioningOptions; private final String referenceFileSchemaUri; - private final List connectionProperties; - private final Boolean createSession; + private final String reservation; + private final String timeZone; + private final String dateFormat; + private final String datetimeFormat; + private final String timeFormat; + private final String timestampFormat; + private final SourceColumnMatch sourceColumnMatch; + private final List nullMarkers; + + public static final class SourceColumnMatch extends StringEnumValue { + private static final long serialVersionUID = 818920627219751207L; + private static final ApiFunction CONSTRUCTOR = + new ApiFunction() { + @Override + public SourceColumnMatch apply(String constant) { + return new SourceColumnMatch(constant); + } + }; + + private static final StringEnumType type = + new StringEnumType(SourceColumnMatch.class, CONSTRUCTOR); + + public static final SourceColumnMatch SOURCE_COLUMN_MATCH_UNSPECIFIED = + type.createAndRegister("SOURCE_COLUMN_MATCH_UNSPECIFIED"); + public static final 
SourceColumnMatch POSITION = type.createAndRegister("POSITION"); + + public static final SourceColumnMatch NAME = type.createAndRegister("NAME"); + + private SourceColumnMatch(String constant) { + super(constant); + } + + /** + * Get the SourceColumnMatch for the given String constant, and throw an exception if the + * constant is not recognized. + */ + public static SourceColumnMatch valueOfStrict(String constant) { + return type.valueOfStrict(constant); + } + + /** Get the SourceColumnMatch for the given String constant, and allow unrecognized values. */ + public static SourceColumnMatch valueOf(String constant) { + return type.valueOf(constant); + } + + /** Return the known values for SourceColumnMatch. */ + public static SourceColumnMatch[] values() { + return type.values(); + } + } public static final class Builder extends JobConfiguration.Builder implements LoadConfiguration.Builder { private List sourceUris; private String fileSetSpecType; + private String columnNameCharacterMap; + private TableId destinationTable; private List decimalTargetTypes; private EncryptionConfiguration destinationEncryptionConfiguration; @@ -92,6 +146,14 @@ public static final class Builder extends JobConfiguration.Builder connectionProperties; private Boolean createSession; + private String reservation; + private String timeZone; + private String dateFormat; + private String datetimeFormat; + private String timeFormat; + private String timestampFormat; + private SourceColumnMatch sourceColumnMatch; + private List nullMarkers; private Builder() { super(Type.LOAD); @@ -110,6 +172,7 @@ private Builder(LoadJobConfiguration loadConfiguration) { this.ignoreUnknownValues = loadConfiguration.ignoreUnknownValues; this.sourceUris = loadConfiguration.sourceUris; this.fileSetSpecType = loadConfiguration.fileSetSpecType; + this.columnNameCharacterMap = loadConfiguration.columnNameCharacterMap; this.schemaUpdateOptions = loadConfiguration.schemaUpdateOptions; this.autodetect = loadConfiguration.autodetect; this.destinationEncryptionConfiguration = @@ -124,6 +187,14 @@ private Builder(LoadJobConfiguration loadConfiguration) { this.referenceFileSchemaUri = loadConfiguration.referenceFileSchemaUri; this.connectionProperties = loadConfiguration.connectionProperties; this.createSession = loadConfiguration.createSession; + this.reservation = loadConfiguration.reservation; + this.timeZone = loadConfiguration.timeZone; + this.dateFormat = loadConfiguration.dateFormat; + this.datetimeFormat = loadConfiguration.datetimeFormat; + this.timeFormat = loadConfiguration.timeFormat; + this.timestampFormat = loadConfiguration.timestampFormat; + this.sourceColumnMatch = loadConfiguration.sourceColumnMatch; + this.nullMarkers = loadConfiguration.nullMarkers; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { @@ -148,6 +219,7 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur this.nullMarker = loadConfigurationPb.getNullMarker(); } if (loadConfigurationPb.getAllowJaggedRows() != null + || loadConfigurationPb.getPreserveAsciiControlCharacters() != null || loadConfigurationPb.getAllowQuotedNewlines() != null || loadConfigurationPb.getEncoding() != null || loadConfigurationPb.getFieldDelimiter() != null @@ -158,6 +230,10 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur .setEncoding(loadConfigurationPb.getEncoding()) .setFieldDelimiter(loadConfigurationPb.getFieldDelimiter()) .setQuote(loadConfigurationPb.getQuote()); + if 
(loadConfigurationPb.getPreserveAsciiControlCharacters() != null) { + builder.setPreserveAsciiControlCharacters( + loadConfigurationPb.getPreserveAsciiControlCharacters()); + } if (loadConfigurationPb.getAllowJaggedRows() != null) { builder.setAllowJaggedRows(loadConfigurationPb.getAllowJaggedRows()); } @@ -181,6 +257,9 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur if (loadConfigurationPb.getFileSetSpecType() != null) { this.fileSetSpecType = loadConfigurationPb.getFileSetSpecType(); } + if (loadConfigurationPb.getColumnNameCharacterMap() != null) { + this.columnNameCharacterMap = loadConfigurationPb.getColumnNameCharacterMap(); + } if (loadConfigurationPb.getSchemaUpdateOptions() != null) { ImmutableList.Builder schemaUpdateOptionsBuilder = new ImmutableList.Builder<>(); @@ -227,6 +306,31 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur loadConfigurationPb.getConnectionProperties(), ConnectionProperty.FROM_PB_FUNCTION); } createSession = loadConfigurationPb.getCreateSession(); + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } + if (loadConfigurationPb.getTimeZone() != null) { + this.timeZone = loadConfigurationPb.getTimeZone(); + } + if (loadConfigurationPb.getDateFormat() != null) { + this.dateFormat = loadConfigurationPb.getDateFormat(); + } + if (loadConfigurationPb.getDatetimeFormat() != null) { + this.datetimeFormat = loadConfigurationPb.getDatetimeFormat(); + } + if (loadConfigurationPb.getTimeFormat() != null) { + this.timeFormat = loadConfigurationPb.getTimeFormat(); + } + if (loadConfigurationPb.getTimestampFormat() != null) { + this.timestampFormat = loadConfigurationPb.getTimestampFormat(); + } + if (loadConfigurationPb.getSourceColumnMatch() != null) { + this.sourceColumnMatch = + SourceColumnMatch.valueOf(loadConfigurationPb.getSourceColumnMatch()); + } + if (loadConfigurationPb.getNullMarkers() != null) { + this.nullMarkers = loadConfigurationPb.getNullMarkers(); + } } @Override @@ -323,6 +427,20 @@ public Builder setFileSetSpecType(String fileSetSpecType) { return this; } + /** + * [Optional] Character map supported for column names in CSV/Parquet loads. Defaults to STRICT + * and can be overridden by Project Config Service. Using this option with unsupported load + * formats will result in an error. + * + * @see + * ColumnNameCharacterMap + */ + public Builder setColumnNameCharacterMap(String columnNameCharacterMap) { + this.columnNameCharacterMap = columnNameCharacterMap; + return this; + } + /** * Defines the list of possible SQL data types to which the source decimal values are converted. * This list and the precision and the scale parameters of the decimal field determine the @@ -411,6 +529,75 @@ public Builder setCreateSession(Boolean createSession) { return this; } + /** + * [Optional] The reservation that the job would use. The user can specify a reservation to + * execute the job. If a reservation is not set, it is determined based on the rules defined by + * the reservation assignments. The expected format is + * `projects/{project}/locations/{location}/reservations/{reservation}`. + * + * @param reservation reservation or {@code null} for none + */ + public Builder setReservation(String reservation) { + this.reservation = reservation; + return this; + } + + /** + * [Experimental] Default time zone that will apply when parsing timestamp values that have no + * specific time zone.
+ */ + public Builder setTimeZone(String timeZone) { + this.timeZone = timeZone; + return this; + } + + /** Date format used for parsing DATE values. */ + public Builder setDateFormat(String dateFormat) { + this.dateFormat = dateFormat; + return this; + } + + /** Format used for parsing DATETIME values. */ + public Builder setDatetimeFormat(String datetimeFormat) { + this.datetimeFormat = datetimeFormat; + return this; + } + + /** Format used for parsing TIME values. */ + public Builder setTimeFormat(String timeFormat) { + this.timeFormat = timeFormat; + return this; + } + + /** Format used for parsing TIMESTAMP values. */ + public Builder setTimestampFormat(String timestampFormat) { + this.timestampFormat = timestampFormat; + return this; + } + + /** + * Controls the strategy used to match loaded columns to the schema. If not set, a sensible + * default is chosen based on how the schema is provided. If autodetect is used, then columns + * are matched by name. Otherwise, columns are matched by position. This is done to keep the + * behavior backward-compatible. + */ + public Builder setSourceColumnMatch(SourceColumnMatch sourceColumnMatch) { + this.sourceColumnMatch = sourceColumnMatch; + return this; + } + + /** + * A list of strings that represent SQL NULL values in a CSV file. null_marker and null_markers + * can't be set at the same time: if one is set, the other must be left unset, and setting both + * results in a user error. Any string listed in null_markers, including the empty string, is + * interpreted as SQL NULL. This applies to all column types. + */ + public Builder setNullMarkers(List nullMarkers) { + this.nullMarkers = nullMarkers; + return this; + } + @Override public LoadJobConfiguration build() { return new LoadJobConfiguration(this); @@ -421,6 +608,7 @@ private LoadJobConfiguration(Builder builder) { super(builder); this.sourceUris = builder.sourceUris; this.fileSetSpecType = builder.fileSetSpecType; + this.columnNameCharacterMap = builder.columnNameCharacterMap; this.destinationTable = builder.destinationTable; this.decimalTargetTypes = builder.decimalTargetTypes; this.createDisposition = builder.createDisposition; @@ -443,6 +631,14 @@ private LoadJobConfiguration(Builder builder) { this.referenceFileSchemaUri = builder.referenceFileSchemaUri; this.connectionProperties = builder.connectionProperties; this.createSession = builder.createSession; + this.reservation = builder.reservation; + this.timeZone = builder.timeZone; + this.dateFormat = builder.dateFormat; + this.datetimeFormat = builder.datetimeFormat; + this.timeFormat = builder.timeFormat; + this.timestampFormat = builder.timestampFormat; + this.sourceColumnMatch = builder.sourceColumnMatch; + this.nullMarkers = builder.nullMarkers; } @Override @@ -519,6 +715,17 @@ public String getFileSetSpecType() { return fileSetSpecType; } + /** + * Returns the column name character map used in CSV/Parquet loads.
+ * + * @see + * ColumnNameCharacterMap + */ + public String getColumnNameCharacterMap() { + return columnNameCharacterMap; + } + public List getDecimalTargetTypes() { return decimalTargetTypes; } @@ -578,6 +785,49 @@ public Boolean getCreateSession() { return createSession; } + /** Returns the reservation associated with this job. */ + public String getReservation() { + return reservation; + } + + /** + * Returns the time zone used when parsing timestamp values that don't have specific time zone + * information. + */ + public String getTimeZone() { + return timeZone; + } + + /** Returns the format used to parse DATE values. */ + public String getDateFormat() { + return dateFormat; + } + + /** Returns the format used to parse DATETIME values. */ + public String getDatetimeFormat() { + return datetimeFormat; + } + + /** Returns the format used to parse TIME values. */ + public String getTimeFormat() { + return timeFormat; + } + + /** Returns the format used to parse TIMESTAMP values. */ + public String getTimestampFormat() { + return timestampFormat; + } + + /** Returns the strategy used to match loaded columns to the schema, either POSITION or NAME. */ + public SourceColumnMatch getSourceColumnMatch() { + return sourceColumnMatch; + } + + /** Returns the list of strings that represent SQL NULL values in a CSV file. */ + public List getNullMarkers() { + return nullMarkers; + } + @Override public Builder toBuilder() { return new Builder(this); } @@ -598,6 +848,7 @@ ToStringHelper toStringHelper() { .add("ignoreUnknownValue", ignoreUnknownValues) .add("sourceUris", sourceUris) .add("fileSetSpecType", fileSetSpecType) + .add("columnNameCharacterMap", columnNameCharacterMap) .add("schemaUpdateOptions", schemaUpdateOptions) .add("autodetect", autodetect) .add("timePartitioning", timePartitioning) @@ -609,7 +860,15 @@ ToStringHelper toStringHelper() { .add("hivePartitioningOptions", hivePartitioningOptions) .add("referenceFileSchemaUri", referenceFileSchemaUri) .add("connectionProperties", connectionProperties) - .add("createSession", createSession); + .add("createSession", createSession) + .add("reservation", reservation) + .add("timeZone", timeZone) + .add("dateFormat", dateFormat) + .add("datetimeFormat", datetimeFormat) + .add("timeFormat", timeFormat) + .add("timestampFormat", timestampFormat) + .add("sourceColumnMatch", sourceColumnMatch) + .add("nullMarkers", nullMarkers); } @Override @@ -653,6 +912,7 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { .setAllowJaggedRows(csvOptions.allowJaggedRows()) .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) .setEncoding(csvOptions.getEncoding()) + .setPreserveAsciiControlCharacters(csvOptions.getPreserveAsciiControlCharacters()) .setQuote(csvOptions.getQuote()); if (csvOptions.getSkipLeadingRows() != null) { // todo(mziccard) remove checked cast or comment when #1044 is closed @@ -681,6 +941,9 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (fileSetSpecType != null) { loadConfigurationPb.setFileSetSpecType(fileSetSpecType); } + if (columnNameCharacterMap != null) { + loadConfigurationPb.setColumnNameCharacterMap(columnNameCharacterMap); + } if (decimalTargetTypes != null) { loadConfigurationPb.setDecimalTargetTypes(ImmutableList.copyOf(decimalTargetTypes)); } @@ -725,6 +988,30 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (createSession != null) { loadConfigurationPb.setCreateSession(createSession); } + if (reservation != null) { + jobConfiguration.setReservation(reservation); + }
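Putting the new load options together: a minimal sketch of how a caller might combine them through the builder above. The table, Cloud Storage URI, format strings, and reservation path are illustrative placeholders, and "V2" as a character-map value is an assumption from the REST enum (STRICT is the documented default).

    import com.google.cloud.bigquery.FormatOptions;
    import com.google.cloud.bigquery.LoadJobConfiguration;
    import com.google.cloud.bigquery.LoadJobConfiguration.SourceColumnMatch;
    import com.google.cloud.bigquery.TableId;
    import com.google.common.collect.ImmutableList;

    public class LoadOptionsSketch {
      public static LoadJobConfiguration example() {
        // Hypothetical destination table and source URI.
        TableId table = TableId.of("my_dataset", "my_table");
        return LoadJobConfiguration.newBuilder(table, "gs://my-bucket/input.csv", FormatOptions.csv())
            .setColumnNameCharacterMap("V2") // assumed enum value; relaxes column-name validation
            .setTimeZone("America/Los_Angeles") // applied to timestamps lacking an explicit zone
            .setDateFormat("YYYY-MM-DD") // illustrative format string
            .setTimestampFormat("YYYY-MM-DD HH24:MI:SS") // illustrative format string
            .setSourceColumnMatch(SourceColumnMatch.NAME) // match CSV columns by header name
            .setNullMarkers(ImmutableList.of("NULL", "\\N")) // each marker is read as SQL NULL
            .setReservation("projects/p/locations/us/reservations/r") // placeholder path
            .build();
      }
    }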
+ if (timeZone != null) { + loadConfigurationPb.setTimeZone(timeZone); + } + if (dateFormat != null) { + loadConfigurationPb.setDateFormat(dateFormat); + } + if (datetimeFormat != null) { + loadConfigurationPb.setDatetimeFormat(datetimeFormat); + } + if (timeFormat != null) { + loadConfigurationPb.setTimeFormat(timeFormat); + } + if (timestampFormat != null) { + loadConfigurationPb.setTimestampFormat(timestampFormat); + } + if (sourceColumnMatch != null) { + loadConfigurationPb.setSourceColumnMatch(sourceColumnMatch.toString()); + } + if (nullMarkers != null) { + loadConfigurationPb.setNullMarkers(nullMarkers); + } jobConfiguration.setLoad(loadConfigurationPb); return jobConfiguration; diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java index 9356c6935..adb497ffa 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java @@ -21,6 +21,7 @@ import com.google.api.services.bigquery.model.ModelReference; import com.google.common.base.Function; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -105,4 +106,12 @@ ModelReference toPb() { static ModelId fromPb(ModelReference modelRef) { return new ModelId(modelRef.getProjectId(), modelRef.getDatasetId(), modelRef.getModelId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.model.project", this.getProject()) + .put("bq.model.dataset", this.getDataset()) + .put("bq.model.id", this.getModel()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java index 922ed441c..3039483b5 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java @@ -26,6 +26,7 @@ import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Collections; import java.util.List; @@ -453,4 +454,19 @@ Model toPb() { static ModelInfo fromPb(Model modelPb) { return new BuilderImpl(modelPb).build(); } + + private static String getFieldAsString(Object field) { + return field == null ? 
"null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getModelId().getOtelAttributes()) + .put("bq.model.type", getFieldAsString(this.getModelType())) + .put("bq.model.creation_time", getFieldAsString(this.getCreationTime())) + .put("bq.model.last_modified_time", getFieldAsString(this.getLastModifiedTime())) + .put("bq.model.expiration_time", getFieldAsString(this.getExpirationTime())) + .put("bq.model.location", getFieldAsString(this.getLocation())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ParquetOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ParquetOptions.java index 174da41d8..cc85eeda6 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ParquetOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ParquetOptions.java @@ -21,10 +21,11 @@ public class ParquetOptions extends FormatOptions { - private static final long serialVersionUID = 1992L; + private static final long serialVersionUID = 1993L; private final Boolean enableListInference; private final Boolean enumAsString; + private final String mapTargetType; public Boolean getEnableListInference() { return enableListInference; @@ -34,16 +35,23 @@ public Boolean getEnumAsString() { return enumAsString; } + /** Returns how the Parquet map is represented. */ + public String getMapTargetType() { + return mapTargetType; + } + /** A builder for {@code ParquetOptions} objects. */ public static final class Builder { private Boolean enableListInference; private Boolean enumAsString; + private String mapTargetType; private Builder() {} private Builder(ParquetOptions parquetOptions) { this.enableListInference = parquetOptions.enableListInference; this.enumAsString = parquetOptions.enumAsString; + this.mapTargetType = parquetOptions.mapTargetType; } public Builder setEnableListInference(Boolean enableListInference) { @@ -56,10 +64,22 @@ public Builder setEnumAsString(Boolean enumAsString) { return this; } + /** + * [Optional] Indicates how to represent a Parquet map if present. + * + * @see + * MapTargetType + */ + public Builder setMapTargetType(String mapTargetType) { + this.mapTargetType = mapTargetType; + return this; + } + public ParquetOptions build() { return new ParquetOptions(this); } } + /** Returns a builder for the {@link ParquetOptions} object. 
*/ public Builder toBuilder() { return new Builder(this); @@ -69,6 +89,7 @@ public Builder toBuilder() { super(FormatOptions.PARQUET); enableListInference = builder.enableListInference; enumAsString = builder.enumAsString; + mapTargetType = builder.mapTargetType; } @Override @@ -76,12 +97,13 @@ public String toString() { return MoreObjects.toStringHelper(this) .add("enableListInference", enableListInference) .add("enumAsString", enumAsString) + .add("mapTargetType", mapTargetType) .toString(); } @Override public final int hashCode() { - return Objects.hash(enableListInference, enumAsString); + return Objects.hash(enableListInference, enumAsString, mapTargetType); } @Override @@ -93,7 +115,9 @@ public final boolean equals(Object obj) { return false; } ParquetOptions other = (ParquetOptions) obj; - return enableListInference == other.enableListInference && enumAsString == other.enumAsString; + return enableListInference == other.enableListInference + && enumAsString == other.enumAsString + && Objects.equals(mapTargetType, ((ParquetOptions) obj).getMapTargetType()); } /** Returns a builder for a {@link ParquetOptions} object. */ @@ -110,6 +134,9 @@ static ParquetOptions fromPb( if (parquetOptions.getEnumAsString() != null) { builder.setEnumAsString(parquetOptions.getEnumAsString()); } + if (parquetOptions.getMapTargetType() != null) { + builder.setMapTargetType(parquetOptions.getMapTargetType()); + } return builder.build(); } @@ -122,6 +149,9 @@ com.google.api.services.bigquery.model.ParquetOptions toPb() { if (enumAsString != null) { parquetOptions.setEnumAsString(enumAsString); } + if (mapTargetType != null) { + parquetOptions.setMapTargetType(mapTargetType); + } return parquetOptions; } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java index 0ad85137b..a62fbb500 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java @@ -74,6 +74,7 @@ public final class QueryJobConfiguration extends JobConfiguration { // maxResults is only used for fast query path private final Long maxResults; private final JobCreationMode jobCreationMode; + private final String reservation; /** * Priority levels for a query. If not specified the priority is assumed to be {@link @@ -96,7 +97,7 @@ public enum Priority { } /** Job Creation Mode provides different options on job creation. */ - enum JobCreationMode { + public enum JobCreationMode { /** Unspecified JobCreationMode, defaults to JOB_CREATION_REQUIRED. */ JOB_CREATION_MODE_UNSPECIFIED, /** Default. Job creation is always required. 
*/ @@ -142,6 +143,7 @@ public static final class Builder private List connectionProperties; private Long maxResults; private JobCreationMode jobCreationMode; + private String reservation; private Builder() { super(Type.QUERY); @@ -178,13 +180,14 @@ private Builder(QueryJobConfiguration jobConfiguration) { this.connectionProperties = jobConfiguration.connectionProperties; this.maxResults = jobConfiguration.maxResults; this.jobCreationMode = jobConfiguration.jobCreationMode; + this.reservation = jobConfiguration.reservation; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { this(); JobConfigurationQuery queryConfigurationPb = configurationPb.getQuery(); this.query = queryConfigurationPb.getQuery(); - // Allows to get undeclaredqueryparameters in jobstatistics2 + // Allows getting undeclared query parameters in jobstatistics if (queryConfigurationPb.getQueryParameters() == null && queryConfigurationPb.getParameterMode() != null) { parameterMode = queryConfigurationPb.getParameterMode(); @@ -285,6 +288,9 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur queryConfigurationPb.getConnectionProperties(), ConnectionProperty.FROM_PB_FUNCTION); } + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } } /** Sets the BigQuery SQL query to execute. */ @@ -677,11 +683,24 @@ public Builder setMaxResults(Long maxResults) { * Provides different options on job creation. If not specified, the job creation mode is assumed * to be {@link JobCreationMode#JOB_CREATION_REQUIRED}. */ - Builder setJobCreationMode(JobCreationMode jobCreationMode) { + public Builder setJobCreationMode(JobCreationMode jobCreationMode) { this.jobCreationMode = jobCreationMode; return this; } + /** + * [Optional] The reservation that the job would use. The user can specify a reservation to + * execute the job. If a reservation is not set, it is determined based on the rules defined by + * the reservation assignments. The expected format is + * `projects/{project}/locations/{location}/reservations/{reservation}`. + * + * @param reservation reservation or {@code null} for none + */ + public Builder setReservation(String reservation) { + this.reservation = reservation; + return this; + } + public QueryJobConfiguration build() { return new QueryJobConfiguration(this); } @@ -727,6 +746,7 @@ private QueryJobConfiguration(Builder builder) { this.connectionProperties = builder.connectionProperties; this.maxResults = builder.maxResults; this.jobCreationMode = builder.jobCreationMode; + this.reservation = builder.reservation; } /** @@ -939,10 +959,15 @@ public Long getMaxResults() { } /** Returns the job creation mode.
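A short sketch of the two newly public knobs used together. The reservation path is a placeholder, and JOB_CREATION_OPTIONAL pairs with the isFastQuerySupported change later in this diff, which routes jobs away from the fast path only when creation is required.

    import com.google.cloud.bigquery.QueryJobConfiguration;
    import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;

    public class QueryReservationSketch {
      public static QueryJobConfiguration example() {
        return QueryJobConfiguration.newBuilder("SELECT 17 AS answer")
            // Optional job creation lets the fast query path skip creating a server-side job.
            .setJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL)
            .setReservation("projects/p/locations/us/reservations/r") // placeholder path
            .build();
      }
    }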
*/ - JobCreationMode getJobCreationMode() { + public JobCreationMode getJobCreationMode() { return jobCreationMode; } + /** Returns the reservation associated with this job. */ + public String getReservation() { + return reservation; + } + @Override public Builder toBuilder() { return new Builder(this); } @@ -978,7 +1003,8 @@ ToStringHelper toStringHelper() { .add("labels", labels) .add("rangePartitioning", rangePartitioning) .add("connectionProperties", connectionProperties) - .add("jobCreationMode", jobCreationMode); + .add("jobCreationMode", jobCreationMode) + .add("reservation", reservation); } @Override @@ -1016,7 +1042,8 @@ public int hashCode() { jobTimeoutMs, labels, rangePartitioning, - connectionProperties); + connectionProperties, + reservation); } @Override @@ -1125,6 +1152,9 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { queryConfigurationPb.setConnectionProperties( Lists.transform(connectionProperties, ConnectionProperty.TO_PB_FUNCTION)); } + if (reservation != null) { + configurationPb.setReservation(reservation); + } configurationPb.setQuery(queryConfigurationPb); return configurationPb; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java index 85ebd2957..0487c3f7c 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java @@ -16,12 +16,14 @@ package com.google.cloud.bigquery; -import static org.threeten.bp.temporal.ChronoField.HOUR_OF_DAY; -import static org.threeten.bp.temporal.ChronoField.MINUTE_OF_HOUR; -import static org.threeten.bp.temporal.ChronoField.NANO_OF_SECOND; -import static org.threeten.bp.temporal.ChronoField.SECOND_OF_MINUTE; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import com.google.api.core.ObsoleteApi; import com.google.api.services.bigquery.model.QueryParameterType; +import com.google.api.services.bigquery.model.RangeValue; import com.google.auto.value.AutoValue; import com.google.cloud.Timestamp; import com.google.common.base.Function; @@ -32,17 +34,17 @@ import com.google.gson.JsonObject; import java.io.Serializable; import java.math.BigDecimal; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.Nullable; -import org.threeten.bp.Instant; -import org.threeten.bp.ZoneOffset; -import org.threeten.bp.format.DateTimeFormatter; -import org.threeten.bp.format.DateTimeFormatterBuilder; -import org.threeten.bp.format.DateTimeParseException; import org.threeten.extra.PeriodDuration; /** @@ -141,6 +143,13 @@ public Builder setStructValues(Map structValues) { abstract Builder setStructValuesInner(Map structValues); + /** Sets the range value. The type must be set to RANGE. */ + public Builder setRangeValues(Range range) { + return setRangeValuesInner(range); + } + + abstract Builder setRangeValuesInner(Range range); + /** Sets the parameter data type.
*/ public abstract Builder setType(StandardSQLTypeName type); @@ -184,6 +193,15 @@ public Map getStructValues() { @Nullable abstract Map getStructValuesInner(); + /** Returns the range value of this parameter. */ + @Nullable + public Range getRangeValues() { + return getRangeValuesInner(); + } + + @Nullable + abstract Range getRangeValuesInner(); + /** Returns the data type of this parameter. */ public abstract StandardSQLTypeName getType(); @@ -282,7 +300,12 @@ public static QueryParameterValue bytes(byte[] value) { return of(value, StandardSQLTypeName.BYTES); } - /** Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. */ + /** + * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. + * + * @param value Microseconds since epoch, e.g. 1733945416000000 corresponds to 2024-12-11 + * 19:30:16Z + */ public static QueryParameterValue timestamp(Long value) { return of(value, StandardSQLTypeName.TIMESTAMP); } @@ -328,11 +351,23 @@ public static QueryParameterValue interval(String value) { return of(value, StandardSQLTypeName.INTERVAL); } - /** Creates a {@code QueryParameterValue} object with a type of INTERVAL. */ + /** + * Creates a {@code QueryParameterValue} object with a type of INTERVAL. This method is obsolete. + * Use {@link #interval(String)} instead. + */ + @ObsoleteApi("Use interval(String) instead") public static QueryParameterValue interval(PeriodDuration value) { return of(value, StandardSQLTypeName.INTERVAL); } + /** Creates a {@code QueryParameterValue} object with a type of RANGE. */ + public static QueryParameterValue range(Range value) { + return QueryParameterValue.newBuilder() + .setRangeValues(value) + .setType(StandardSQLTypeName.RANGE) + .build(); + } + /** * Creates a {@code QueryParameterValue} object with a type of ARRAY, and an array element type * based on the given class.
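An end-to-end sketch of the new RANGE parameter support, combining the range(Range) factory above with the Range type introduced later in this diff. FieldElementType.newBuilder().setType("DATE") follows the builder conventions used elsewhere in this codebase and should be checked against the released API.

    import com.google.cloud.bigquery.FieldElementType;
    import com.google.cloud.bigquery.QueryParameterValue;
    import com.google.cloud.bigquery.Range;

    public class RangeParameterSketch {
      public static QueryParameterValue example() {
        Range range =
            Range.newBuilder()
                .setType(FieldElementType.newBuilder().setType("DATE").build())
                .setStart("2024-01-01")
                .setEnd(null) // a null endpoint marks this side of the range as unbounded
                .build();
        return QueryParameterValue.range(range);
      }

      public static Range parsedExample() {
        // The string form parsed by Range.of(...) is "[start, end)";
        // UNBOUNDED or NULL marks an open endpoint.
        return Range.of("[2024-01-01, UNBOUNDED)");
      }
    }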
@@ -442,6 +477,8 @@ private static String valueToStringOrNull(T value, StandardSQLTypeName type) throw new IllegalArgumentException("Cannot convert STRUCT to String value"); case ARRAY: throw new IllegalArgumentException("Cannot convert ARRAY to String value"); + case RANGE: + throw new IllegalArgumentException("Cannot convert RANGE to String value"); case TIMESTAMP: if (value instanceof Long) { Timestamp timestamp = Timestamp.ofTimeMicroseconds((Long) value); @@ -517,6 +554,22 @@ com.google.api.services.bigquery.model.QueryParameterValue toValuePb() { } valuePb.setStructValues(structValues); } + if (getType() == StandardSQLTypeName.RANGE) { + RangeValue rangeValue = new RangeValue(); + if (!getRangeValues().getStart().isNull()) { + com.google.api.services.bigquery.model.QueryParameterValue startValue = + new com.google.api.services.bigquery.model.QueryParameterValue(); + startValue.setValue(getRangeValues().getStart().getStringValue()); + rangeValue.setStart(startValue); + } + if (!getRangeValues().getEnd().isNull()) { + com.google.api.services.bigquery.model.QueryParameterValue endValue = + new com.google.api.services.bigquery.model.QueryParameterValue(); + endValue.setValue(getRangeValues().getEnd().getStringValue()); + rangeValue.setEnd(endValue); + } + valuePb.setRangeValue(rangeValue); + } return valuePb; } @@ -544,6 +597,13 @@ QueryParameterType toTypePb() { } typePb.setStructTypes(structTypes); } + if (getType() == StandardSQLTypeName.RANGE + && getRangeValues() != null + && getRangeValues().getType() != null) { + QueryParameterType rangeTypePb = new QueryParameterType(); + rangeTypePb.setType(getRangeValues().getType().getType()); + typePb.setRangeElementType(rangeTypePb); + } return typePb; } @@ -592,6 +652,21 @@ static QueryParameterValue fromPb( } valueBuilder.setStructValues(structValues); } + } else if (type == StandardSQLTypeName.RANGE) { + Range.Builder range = Range.newBuilder(); + if (valuePb.getRangeValue() != null) { + com.google.api.services.bigquery.model.RangeValue rangeValuePb = valuePb.getRangeValue(); + if (rangeValuePb.getStart() != null && rangeValuePb.getStart().getValue() != null) { + range.setStart(valuePb.getRangeValue().getStart().getValue()); + } + if (rangeValuePb.getEnd() != null && rangeValuePb.getEnd().getValue() != null) { + range.setEnd(valuePb.getRangeValue().getEnd().getValue()); + } + } + if (typePb.getRangeElementType() != null && typePb.getRangeElementType().getType() != null) { + range.setType(FieldElementType.fromPb(typePb)); + } + valueBuilder.setRangeValues(range.build()); } else { valueBuilder.setValue(valuePb == null ? 
"" : valuePb.getValue()); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java index 00a11f723..588b7cae8 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java @@ -16,6 +16,7 @@ package com.google.cloud.bigquery; +import com.google.api.services.bigquery.model.DataFormatOptions; import com.google.api.services.bigquery.model.QueryParameter; import com.google.api.services.bigquery.model.QueryRequest; import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; @@ -42,8 +43,10 @@ final class QueryRequestInfo { private final Boolean useQueryCache; private final Boolean useLegacySql; private final JobCreationMode jobCreationMode; + private final DataFormatOptions formatOptions; + private final String reservation; - QueryRequestInfo(QueryJobConfiguration config) { + QueryRequestInfo(QueryJobConfiguration config, Boolean useInt64Timestamps) { this.config = config; this.connectionProperties = config.getConnectionProperties(); this.defaultDataset = config.getDefaultDataset(); @@ -58,6 +61,8 @@ final class QueryRequestInfo { this.useLegacySql = config.useLegacySql(); this.useQueryCache = config.useQueryCache(); this.jobCreationMode = config.getJobCreationMode(); + this.formatOptions = new DataFormatOptions().setUseInt64Timestamp(useInt64Timestamps); + this.reservation = config.getReservation(); } boolean isFastQuerySupported(JobId jobId) { @@ -81,7 +86,8 @@ boolean isFastQuerySupported(JobId jobId) { && config.getTableDefinitions() == null && config.getTimePartitioning() == null && config.getUserDefinedFunctions() == null - && config.getWriteDisposition() == null; + && config.getWriteDisposition() == null + && config.getJobCreationMode() != JobCreationMode.JOB_CREATION_REQUIRED; } QueryRequest toPb() { @@ -122,6 +128,12 @@ QueryRequest toPb() { if (jobCreationMode != null) { request.setJobCreationMode(jobCreationMode.toString()); } + if (formatOptions != null) { + request.setFormatOptions(formatOptions); + } + if (reservation != null) { + request.setReservation(reservation); + } return request; } @@ -141,6 +153,8 @@ public String toString() { .add("useQueryCache", useQueryCache) .add("useLegacySql", useLegacySql) .add("jobCreationMode", jobCreationMode) + .add("formatOptions", formatOptions.getUseInt64Timestamp()) + .add("reservation", reservation) .toString(); } @@ -159,7 +173,9 @@ public int hashCode() { createSession, useQueryCache, useLegacySql, - jobCreationMode); + jobCreationMode, + formatOptions, + reservation); } @Override diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Range.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Range.java new file mode 100644 index 000000000..8d244fbeb --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Range.java @@ -0,0 +1,127 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.auto.value.AutoValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.common.collect.ImmutableMap; +import java.io.Serializable; +import javax.annotation.Nullable; + +@AutoValue +public abstract class Range implements Serializable { + private static final long serialVersionUID = 1L; + + /** Returns the start value of the range. A null value represents an unbounded start. */ + public FieldValue getStart() { + // The supported Range types [DATE, TIME, TIMESTAMP] are all Attribute.PRIMITIVE. + return FieldValue.of(Attribute.PRIMITIVE, getStartInner()); + } + + @Nullable + abstract String getStartInner(); + + /** Returns the end value of the range. A null value represents an unbounded end. */ + public FieldValue getEnd() { + // The supported Range types [DATE, TIME, TIMESTAMP] are all Attribute.PRIMITIVE. + return FieldValue.of(Attribute.PRIMITIVE, getEndInner()); + } + + @Nullable + abstract String getEndInner(); + + /** Returns the start and end values of this range. */ + public ImmutableMap getValues() { + ImmutableMap.Builder result = ImmutableMap.builder(); + if (!getStart().isNull()) { + result.put("start", getStart().getStringValue()); + } + if (!getEnd().isNull()) { + result.put("end", getEnd().getStringValue()); + } + return result.build(); + } + + /** Returns the type of the range. */ + @Nullable + public abstract FieldElementType getType(); + + public abstract Range.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + + public Range.Builder setStart(String start) { + return setStartInner(start); + } + + abstract Range.Builder setStartInner(String start); + + public Range.Builder setEnd(String end) { + return setEndInner(end); + } + + abstract Range.Builder setEndInner(String end); + + public abstract Range.Builder setType(FieldElementType type); + + public abstract Range build(); + } + + /** Creates a range builder. Supported StandardSQLTypeName are [DATE, DATETIME, TIMESTAMP] */ + public static Builder newBuilder() { + return new AutoValue_Range.Builder(); + } + + public static Range of(String value) throws IllegalArgumentException { + return of(value, null); + } + + /** + * Creates an instance of {@code Range} from a string representation. + * + *
The expected string format is: "[start, end)", where start and end are the string representations + * of values of the range's element type [DATE, DATETIME, TIMESTAMP]. + */ + public static Range of(String value, FieldElementType type) throws IllegalArgumentException { + checkNotNull(value); + Range.Builder builder = newBuilder(); + if (type != null) { + builder.setType(type); + } + String[] startEnd = value.split(", ", 2); // Expect an extra space after ','. + if (startEnd.length != 2) { + throw new IllegalArgumentException( + String.format("Expected Range value string to be [start, end) and got %s", value)); + } + + String start = startEnd[0].substring(1); // Ignore the [ + String end = startEnd[1].substring(0, startEnd[1].length() - 1); // Ignore the ) + if (start.equalsIgnoreCase("UNBOUNDED") || (start.equalsIgnoreCase("NULL"))) { + builder.setStart(null); + } else { + builder.setStart(start); + } + if (end.equalsIgnoreCase("UNBOUNDED") || (end.equalsIgnoreCase("NULL"))) { + builder.setEnd(null); + } else { + builder.setEnd(end); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ReadClientConnectionConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ReadClientConnectionConfiguration.java index e0805a11e..03cc2140e 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ReadClientConnectionConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ReadClientConnectionConfiguration.java @@ -31,21 +31,18 @@ public abstract static class Builder { * Sets the total row count to page row count ratio used to determine whether to use the * BigQueryStorage Read client to fetch result sets after the first page. */ - @Nullable public abstract Builder setTotalToPageRowCountRatio(Long ratio); /** * Sets the minimum number of table rows in the query results used to determine whether to use * the BigQueryStorage Read client to fetch result sets after the first page. */ - @Nullable public abstract Builder setMinResultSize(Long numRows); /** * Sets the maximum number of table rows allowed in buffer before streaming them to the * BigQueryResult. */ - @Nullable public abstract Builder setBufferSize(Long bufferSize); /** Creates a {@code ReadClientConnectionConfiguration} object. */ @@ -53,12 +50,15 @@ public abstract static class Builder { } /** Returns the totalToPageRowCountRatio in this configuration. */ + @Nullable public abstract Long getTotalToPageRowCountRatio(); /** Returns the minResultSize in this configuration. */ + @Nullable public abstract Long getMinResultSize(); /** Returns the bufferSize in this configuration.
*/ + @Nullable public abstract Long getBufferSize(); public abstract Builder toBuilder(); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java index c5c3dadb3..a632fc5a1 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java @@ -21,6 +21,7 @@ import com.google.api.services.bigquery.model.RoutineReference; import com.google.common.base.Function; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -108,4 +109,12 @@ static RoutineId fromPb(RoutineReference routineRef) { return new RoutineId( routineRef.getProjectId(), routineRef.getDatasetId(), routineRef.getRoutineId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.routine.project", this.getProject()) + .put("bq.routine.dataset", this.getDataset()) + .put("bq.routine.id", this.getRoutine()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java index 7cea99f93..c13b90b41 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java @@ -24,6 +24,7 @@ import com.google.common.base.MoreObjects; import com.google.common.base.Strings; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Collections; import java.util.List; @@ -429,7 +430,8 @@ public String getBody() { /** Returns the Remote function specific options. */ public RemoteFunctionOptions getRemoteFunctionOptions() { return remoteFunctionOptions; - }; + } + ; /** Returns the data governance type of the routine, e.g. DATA_MASKING. */ public String getDataGovernanceType() { @@ -543,4 +545,17 @@ Routine toPb() { static RoutineInfo fromPb(Routine routinePb) { return new BuilderImpl(routinePb).build(); } + + private static String getFieldAsString(Object field) { + return field == null ? 
"null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getRoutineId().getOtelAttributes()) + .put("bq.routine.type", getFieldAsString(this.getRoutineType())) + .put("bq.routine.creation_time", getFieldAsString(this.getCreationTime())) + .put("bq.routine.last_modified_time", getFieldAsString(this.getLastModifiedTime())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java index 0f9632aea..dde4c0818 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java @@ -16,12 +16,13 @@ package com.google.cloud.bigquery; -import static com.google.cloud.RetryHelper.runWithRetries; - import com.google.cloud.BaseWriteChannel; import com.google.cloud.RestorableState; -import com.google.cloud.RetryHelper; import com.google.cloud.WriteChannel; +import com.google.cloud.bigquery.BigQueryRetryHelper.BigQueryRetryHelperException; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; +import java.io.IOException; import java.util.List; import java.util.Objects; import java.util.concurrent.Callable; @@ -34,6 +35,9 @@ public class TableDataWriteChannel extends BaseWriteChannel { + private static final BigQueryRetryConfig EMPTY_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder().build(); + private Job job; TableDataWriteChannel( @@ -48,23 +52,43 @@ public class TableDataWriteChannel @Override protected void flushBuffer(final int length, final boolean last) { - try { + Span flushBuffer = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + flushBuffer = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.TableDataWriteChannel.flushBuffer") + .setAttribute("bq.table_data_write_channel.flush_buffer.length", length) + .setAttribute("bq.table_data_write_channel.flush_buffer.last", last) + .startSpan(); + } + + try (Scope flushBufferScope = flushBuffer != null ? flushBuffer.makeCurrent() : null) { com.google.api.services.bigquery.model.Job jobPb = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Job call() { + public com.google.api.services.bigquery.model.Job call() throws IOException { return getOptions() .getBigQueryRpcV2() - .write(getUploadId(), getBuffer(), 0, getPosition(), length, last); + .writeSkipExceptionTranslation( + getUploadId(), getBuffer(), 0, getPosition(), length, last); } }, getOptions().getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - getOptions().getClock()); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); job = jobPb != null ? 
Job.fromPb(getOptions().getService(), jobPb) : null; - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (flushBuffer != null) { + flushBuffer.end(); + } } } @@ -77,24 +101,42 @@ private static String open( final BigQueryOptions options, final JobId jobId, final WriteChannelConfiguration writeChannelConfiguration) { - try { - return runWithRetries( + Span open = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + open = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.TableDataWriteChannel.open") + .setAllAttributes(jobId.getOtelAttributes()) + .setAllAttributes(writeChannelConfiguration.getDestinationTable().getOtelAttributes()) + .startSpan(); + } + + try (Scope openScope = open != null ? open.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public String call() { + public String call() throws IOException { return options .getBigQueryRpcV2() - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setConfiguration(writeChannelConfiguration.toPb()) .setJobReference(jobId.toPb())); } }, options.getRetrySettings(), - BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER, - options.getClock()); - } catch (RetryHelper.RetryHelperException e) { + options.getResultRetryAlgorithm(), + options.getClock(), + EMPTY_RETRY_CONFIG, + options.isOpenTelemetryTracingEnabled(), + options.getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (open != null) { + open.end(); + } } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java index b74055d4f..fcc208453 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java @@ -22,6 +22,7 @@ import com.google.api.services.bigquery.model.TableReference; import com.google.common.base.Function; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -114,4 +115,12 @@ TableReference toPb() { static TableId fromPb(TableReference tableRef) { return new TableId(tableRef.getProjectId(), tableRef.getDatasetId(), tableRef.getTableId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.table.project", this.getProject()) + .put("bq.table.dataset", this.getDataset()) + .put("bq.table.id", this.getTable()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java index 6e050d064..b3236f4c3 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java @@ -25,6 +25,7 @@ import com.google.api.services.bigquery.model.Table; import com.google.common.base.Function; import com.google.common.base.MoreObjects; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.math.BigInteger; import java.util.Map; @@ -763,4 +764,19 @@ Table toPb() { static TableInfo fromPb(Table tablePb) { return new BuilderImpl(tablePb).build(); } + + private static String getFieldAsString(Object 
field) { + return field == null ? "null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getTableId().getOtelAttributes()) + .put("bq.table.creation_time", getFieldAsString(this.getCreationTime())) + .put("bq.table.expiration_time", getFieldAsString(this.getExpirationTime())) + .put("bq.table.last_modified_time", getFieldAsString(this.getLastModifiedTime())) + .put("bq.table.num_bytes", getFieldAsString(this.getNumBytes())) + .put("bq.table.num_rows", getFieldAsString(this.getNumRows())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java index 203a91fab..42044596b 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java @@ -16,7 +16,6 @@ package com.google.cloud.bigquery; -import com.google.api.core.InternalApi; import com.google.api.gax.paging.Page; import com.google.auto.value.AutoValue; import com.google.common.base.Function; @@ -27,7 +26,6 @@ import java.util.Objects; import javax.annotation.Nullable; -@InternalApi @AutoValue public abstract class TableResult implements Page, Serializable { @@ -63,6 +61,11 @@ public static Builder newBuilder() { @Nullable public abstract Schema getSchema(); + /** + * Returns the total number of rows in the complete result set, which can be more than the number + * of rows in the first page of results. If no rows are returned, this value can still be greater + * than 0 if any rows were affected by the query, such as INSERT, UPDATE, or DELETE queries. + */ public abstract long getTotalRows(); public abstract Page getPageNoSchema(); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java index 3da2e3ec1..e7f57a254 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java @@ -45,7 +45,12 @@ enum Option { STATE_FILTER("stateFilter"), TIMEOUT("timeoutMs"), REQUESTED_POLICY_VERSION("requestedPolicyVersion"), - TABLE_METADATA_VIEW("view"); + TABLE_METADATA_VIEW("view"), + RETRY_OPTIONS("retryOptions"), + BIGQUERY_RETRY_CONFIG("bigQueryRetryConfig"), + ACCESS_POLICY_VERSION("accessPolicyVersion"), + DATASET_VIEW("datasetView"), + DATASET_UPDATE_MODE("datasetUpdateMode"); private final String value; @@ -226,6 +231,7 @@ Tuple> listModels( Tuple> listRoutines( String projectId, String datasetId, Map options); + /** * Deletes the requested routine. 
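The same protected getOtelAttributes() hook now exists on ModelId/ModelInfo, RoutineId/RoutineInfo, and TableId/TableInfo. A sketch of how such an attribute bag lands on a span, using only stock OpenTelemetry API calls; the tracer name, span name, and attribute values are illustrative.

    import io.opentelemetry.api.GlobalOpenTelemetry;
    import io.opentelemetry.api.common.Attributes;
    import io.opentelemetry.api.trace.Span;
    import io.opentelemetry.api.trace.Tracer;

    public class OtelAttributesSketch {
      public static void trace() {
        Tracer tracer = GlobalOpenTelemetry.getTracer("com.google.cloud.bigquery");
        // Shape mirrors TableId.getOtelAttributes(); values are placeholders.
        Attributes attributes =
            Attributes.builder()
                .put("bq.table.project", "my-project")
                .put("bq.table.dataset", "my_dataset")
                .put("bq.table.id", "my_table")
                .build();
        Span span =
            tracer.spanBuilder("bq.example.operation").setAllAttributes(attributes).startSpan();
        try {
          // traced work goes here
        } finally {
          span.end();
        }
      }
    }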
* diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java index e2a5749ec..ed5092502 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java @@ -44,6 +44,9 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanKind; import java.io.IOException; import java.math.BigInteger; import java.util.List; @@ -70,6 +73,7 @@ public Dataset apply(DatasetList.Datasets datasetPb) { .setFriendlyName(datasetPb.getFriendlyName()) .setId(datasetPb.getId()) .setKind(datasetPb.getKind()) + .setLocation(datasetPb.getLocation()) .setLabels(datasetPb.getLabels()); } }; @@ -115,13 +119,7 @@ private void validateRPC() throws BigQueryException, IOException { @Override public Dataset getDataset(String projectId, String datasetId, Map options) { try { - validateRPC(); - return bigquery - .datasets() - .get(projectId, datasetId) - .setFields(Option.FIELDS.getString(options)) - .setPrettyPrint(false) - .execute(); + return getDatasetSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -131,31 +129,105 @@ public Dataset getDataset(String projectId, String datasetId, Map opt } } + @InternalApi("internal to java-bigquery") + public Dataset getDatasetSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Datasets.Get bqGetRequest = + bigquery + .datasets() + .get(projectId, datasetId) + .setFields(Option.FIELDS.getString(options)) + .setPrettyPrint(false); + if (options.containsKey(Option.ACCESS_POLICY_VERSION)) { + bqGetRequest.setAccessPolicyVersion((Integer) options.get(Option.ACCESS_POLICY_VERSION)); + } + if (options.containsKey(Option.DATASET_VIEW)) { + bqGetRequest.setDatasetView(options.get(Option.DATASET_VIEW).toString()); + } + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "GetDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + Dataset dataset = bqGetRequest.execute(); + if (getDataset != null) { + getDataset.setAttribute("bq.rpc.response.dataset.id", dataset.getId()); + getDataset.end(); + } + return dataset; + } + @Override public Tuple> listDatasets(String projectId, Map options) { try { - validateRPC(); - DatasetList datasetsList = - bigquery - .datasets() - .list(projectId) - .setPrettyPrint(false) - .setAll(Option.ALL_DATASETS.getBoolean(options)) - .setFilter(Option.LABEL_FILTER.getString(options)) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - 
.execute(); - Iterable datasets = datasetsList.getDatasets(); - return Tuple.of( - datasetsList.getNextPageToken(), - Iterables.transform( - datasets != null ? datasets : ImmutableList.of(), - LIST_TO_DATASET)); + return listDatasetsSkipExceptionTranslation(projectId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listDatasetsSkipExceptionTranslation( + String projectId, Map options) throws IOException { + validateRPC(); + Bigquery.Datasets.List datasetsListRequest = + bigquery + .datasets() + .list(projectId) + .setPrettyPrint(false) + .setAll(Option.ALL_DATASETS.getBoolean(options)) + .setFilter(Option.LABEL_FILTER.getString(options)) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + datasetsListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listDatasets = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listDatasets = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listDatasets") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "ListDatasets") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", datasetsListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + DatasetList datasetsList = datasetsListRequest.execute(); + Iterable datasets = datasetsList.getDatasets(); + if (listDatasets != null) { + listDatasets.setAttribute("bq.rpc.next_page_token", datasetsList.getNextPageToken()); + listDatasets.end(); + } + return Tuple.of( + datasetsList.getNextPageToken(), + Iterables.transform( + datasets != null ? 
datasets : ImmutableList.of(), + LIST_TO_DATASET)); + } + @Override public Tuple> listProjects(Map options) { try { @@ -179,96 +251,249 @@ public Tuple> listProjects(Map @Override public Dataset create(Dataset dataset, Map options) { try { - validateRPC(); - return bigquery - .datasets() - .insert(dataset.getDatasetReference().getProjectId(), dataset) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(dataset, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Dataset createSkipExceptionTranslation(Dataset dataset, Map options) + throws IOException { + validateRPC(); + Bigquery.Datasets.Insert bqCreateRequest = + bigquery + .datasets() + .insert(dataset.getDatasetReference().getProjectId(), dataset) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + if (options.containsKey(Option.ACCESS_POLICY_VERSION)) { + bqCreateRequest.setAccessPolicyVersion((Integer) options.get(Option.ACCESS_POLICY_VERSION)); + } + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "InsertDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Dataset datasetResponse = bqCreateRequest.execute(); + if (createDataset != null) { + createDataset.setAttribute("bq.rpc.response.dataset.id", datasetResponse.getId()); + createDataset.end(); + } + return datasetResponse; + } + @Override public Table create(Table table, Map options) { try { - validateRPC(); - // unset the type, as it is output only - table.setType(null); - TableReference reference = table.getTableReference(); - return bigquery - .tables() - .insert(reference.getProjectId(), reference.getDatasetId(), table) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(table, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Table createSkipExceptionTranslation(Table table, Map options) + throws IOException { + validateRPC(); + // unset the type, as it is output only + table.setType(null); + TableReference reference = table.getTableReference(); + Bigquery.Tables.Insert bqCreateRequest = + bigquery + .tables() + .insert(reference.getProjectId(), reference.getDatasetId(), table) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "InsertTable") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } 
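// [Editorial aside: illustrative sketch, not part of the patch.] Every RPC wrapper in this
// file repeats the lifecycle visible around this point: build the request, conditionally
// start a CLIENT span, execute, record response attributes, and end the span only if it was
// started. The self-contained sketch below reproduces that conditional-span pattern against
// the real OpenTelemetry API; the tracer name, the SpanPatternSketch class, and its doWork()
// stand-in are hypothetical and not taken from this patch.
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.api.trace.Tracer;

class SpanPatternSketch {
  // Hypothetical tracer lookup; HttpBigQueryRpc instead reads one from this.options.
  private static final Tracer TRACER = GlobalOpenTelemetry.getTracer("span-pattern-sketch");

  private static String doWork() {
    return "table-id"; // stands in for bqCreateRequest.execute()
  }

  static String tracedCall(boolean tracingEnabled) {
    Span span = null;
    if (tracingEnabled) { // mirrors the isOpenTelemetryTracingEnabled() && tracer != null check
      span =
          TRACER
              .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createTable")
              .setSpanKind(SpanKind.CLIENT)
              .setAttribute("bq.rpc.service", "TableService")
              .setAttribute("bq.rpc.method", "InsertTable")
              .setAttribute("bq.rpc.system", "http")
              .startSpan();
    }
    String result = doWork();
    if (span != null) {
      span.setAttribute("bq.rpc.response.table.id", result);
      span.end(); // ended only when started, exactly as in the patch
    }
    return result;
  }
}
// One design note: as in the patch itself, an exception thrown by the execute step leaves the
// span un-ended; wrapping the call in try/finally would also close spans on error paths.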
+ Table tableResponse = bqCreateRequest.execute(); + if (createTable != null) { + createTable.setAttribute("bq.rpc.response.table.id", tableResponse.getId()); + createTable.end(); + } + return tableResponse; + } + @Override public Routine create(Routine routine, Map options) { try { - validateRPC(); - RoutineReference reference = routine.getRoutineReference(); - return bigquery - .routines() - .insert(reference.getProjectId(), reference.getDatasetId(), routine) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(routine, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Routine createSkipExceptionTranslation(Routine routine, Map options) + throws IOException { + validateRPC(); + RoutineReference reference = routine.getRoutineReference(); + Bigquery.Routines.Insert bqCreateRequest = + bigquery + .routines() + .insert(reference.getProjectId(), reference.getDatasetId(), routine) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "InsertRoutine") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Routine routineResponse = bqCreateRequest.execute(); + if (createRoutine != null) { + createRoutine.setAttribute( + "bq.rpc.response.routine.id", routineResponse.getRoutineReference().getRoutineId()); + createRoutine.end(); + } + return routineResponse; + } + @Override public Job create(Job job, Map options) { try { - validateRPC(); - String projectId = - job.getJobReference() != null - ? job.getJobReference().getProjectId() - : this.options.getProjectId(); - return bigquery - .jobs() - .insert(projectId, job) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(job, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Job createSkipExceptionTranslation(Job job, Map options) throws IOException { + validateRPC(); + String projectId = + job.getJobReference() != null + ? 
job.getJobReference().getProjectId() + : this.options.getProjectId(); + Bigquery.Jobs.Insert bqCreateRequest = + bigquery + .jobs() + .insert(projectId, job) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "InsertJob") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Job jobResponse = bqCreateRequest.execute(); + if (createJob != null) { + createJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + createJob.setAttribute( + "bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + createJob.end(); + } + return jobResponse; + } + @Override public Job createJobForQuery(Job job) { try { - validateRPC(); - String projectId = - job.getJobReference() != null - ? job.getJobReference().getProjectId() - : this.options.getProjectId(); - return bigquery.jobs().insert(projectId, job).setPrettyPrint(false).execute(); + return createJobForQuerySkipExceptionTranslation(job); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Job createJobForQuerySkipExceptionTranslation(Job job) throws IOException { + validateRPC(); + String projectId = + job.getJobReference() != null + ? job.getJobReference().getProjectId() + : this.options.getProjectId(); + Bigquery.Jobs.Insert bqCreateRequest = + bigquery.jobs().insert(projectId, job).setPrettyPrint(false); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createJobForQuery") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "InsertJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + Job jobResponse = bqCreateRequest.execute(); + if (createJob != null) { + createJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + createJob.setAttribute( + "bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + createJob.end(); + } + return jobResponse; + } + @Override public boolean deleteDataset(String projectId, String datasetId, Map options) { try { - validateRPC(); - bigquery - .datasets() - .delete(projectId, datasetId) - .setPrettyPrint(false) - .setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options)) - .execute(); - return true; + return deleteDatasetSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -278,53 +503,150 @@ public boolean deleteDataset(String projectId, String datasetId, Map } } + @InternalApi("internal to java-bigquery") + public boolean deleteDatasetSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + 
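// [Editorial aside: illustrative sketch, not part of the patch.] The method opening just
// above shows one instance of the split applied to every RPC in this file: the public
// @Override keeps its existing contract (IOException surfaces as an unchecked
// BigQueryException via translate(ex)), while the new *SkipExceptionTranslation variant
// throws the raw IOException so internal callers such as retry helpers can inspect it.
// Below is a minimal model of that split; RpcSplitSketch, fetch(), and toUnchecked() are
// hypothetical stand-ins for HttpBigQueryRpc, the RPC body, and translate(ex).
import java.io.IOException;

class RpcSplitSketch {
  private static RuntimeException toUnchecked(IOException ex) {
    return new RuntimeException("translated: " + ex.getMessage(), ex);
  }

  private static String fetch(boolean fail) throws IOException {
    if (fail) {
      throw new IOException("backend unavailable");
    }
    return "payload";
  }

  // Internal variant: the raw IOException propagates to the caller unchanged.
  public String getThingSkipExceptionTranslation(boolean fail) throws IOException {
    return fetch(fail);
  }

  // Public variant: behavior is identical to the pre-refactor method, now a thin
  // wrapper that delegates and translates the checked exception.
  public String getThing(boolean fail) {
    try {
      return getThingSkipExceptionTranslation(fail);
    } catch (IOException ex) {
      throw toUnchecked(ex);
    }
  }
}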
validateRPC(); + Bigquery.Datasets.Delete bqDeleteRequest = + bigquery + .datasets() + .delete(projectId, datasetId) + .setPrettyPrint(false) + .setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options)); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "DeleteDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteDataset != null) { + deleteDataset.end(); + } + return true; + } + @Override public Dataset patch(Dataset dataset, Map options) { try { - validateRPC(); - DatasetReference reference = dataset.getDatasetReference(); - return bigquery - .datasets() - .patch(reference.getProjectId(), reference.getDatasetId(), dataset) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return patchSkipExceptionTranslation(dataset, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Dataset patchSkipExceptionTranslation(Dataset dataset, Map options) + throws IOException { + validateRPC(); + DatasetReference reference = dataset.getDatasetReference(); + Bigquery.Datasets.Patch bqPatchRequest = + bigquery + .datasets() + .patch(reference.getProjectId(), reference.getDatasetId(), dataset) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + if (options.containsKey(Option.ACCESS_POLICY_VERSION)) { + bqPatchRequest.setAccessPolicyVersion((Integer) options.get(Option.ACCESS_POLICY_VERSION)); + } + if (options.containsKey(Option.DATASET_UPDATE_MODE)) { + bqPatchRequest.setUpdateMode(options.get(Option.DATASET_UPDATE_MODE).toString()); + } + bqPatchRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span patchDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + patchDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.patchDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "PatchDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Dataset datasetResponse = bqPatchRequest.execute(); + if (patchDataset != null) { + patchDataset.setAttribute("bq.rpc.response.dataset.id", datasetResponse.getId()); + patchDataset.end(); + } + return datasetResponse; + } + @Override public Table patch(Table table, Map options) { try { - validateRPC(); - // unset the type, as it is output only - table.setType(null); - TableReference reference = table.getTableReference(); - return bigquery - .tables() - .patch(reference.getProjectId(), reference.getDatasetId(), reference.getTableId(), table) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .setAutodetectSchema(BigQueryRpc.Option.AUTODETECT_SCHEMA.getBoolean(options)) - .execute(); + return patchSkipExceptionTranslation(table, options); } catch 
(IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Table patchSkipExceptionTranslation(Table table, Map options) + throws IOException { + validateRPC(); + // unset the type, as it is output only + table.setType(null); + TableReference reference = table.getTableReference(); + Bigquery.Tables.Patch bqPatchRequest = + bigquery + .tables() + .patch( + reference.getProjectId(), reference.getDatasetId(), reference.getTableId(), table) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)) + .setAutodetectSchema(BigQueryRpc.Option.AUTODETECT_SCHEMA.getBoolean(options)); + + bqPatchRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span patchTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + patchTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.patchTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "PatchTable") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Table tableResponse = bqPatchRequest.execute(); + if (patchTable != null) { + patchTable.setAttribute("bq.rpc.response.table.id", tableResponse.getId()); + patchTable.end(); + } + return tableResponse; + } + @Override public Table getTable( String projectId, String datasetId, String tableId, Map options) { try { - validateRPC(); - return bigquery - .tables() - .get(projectId, datasetId, tableId) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .setView(getTableMetadataOption(options)) - .execute(); + return getTableSkipExceptionTranslation(projectId, datasetId, tableId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -334,6 +656,45 @@ public Table getTable( } } + @InternalApi("internal to java-bigquery") + public Table getTableSkipExceptionTranslation( + String projectId, String datasetId, String tableId, Map options) + throws IOException { + validateRPC(); + Bigquery.Tables.Get bqGetRequest = + bigquery + .tables() + .get(projectId, datasetId, tableId) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)) + .setView(getTableMetadataOption(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "GetTable") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Table tableResponse = bqGetRequest.execute(); + if (getTable != null) { + getTable.setAttribute("bq.rpc.response.table.id", tableResponse.getId()); + getTable.end(); + } + return tableResponse; + } + private String getTableMetadataOption(Map options) { if (options.containsKey(Option.TABLE_METADATA_VIEW)) { return options.get(Option.TABLE_METADATA_VIEW).toString(); @@ -345,45 +706,76 @@ private String getTableMetadataOption(Map options) { public Tuple> listTables( String 
projectId, String datasetId, Map options) { try { - validateRPC(); - TableList tableList = - bigquery - .tables() - .list(projectId, datasetId) - .setPrettyPrint(false) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .execute(); - Iterable tables = tableList.getTables(); - return Tuple.of( - tableList.getNextPageToken(), - Iterables.transform( - tables != null ? tables : ImmutableList.of(), - new Function() { - @Override - public Table apply(TableList.Tables tablePb) { - return new Table() - .setFriendlyName(tablePb.getFriendlyName()) - .setId(tablePb.getId()) - .setKind(tablePb.getKind()) - .setTableReference(tablePb.getTableReference()) - .setType(tablePb.getType()) - .setCreationTime(tablePb.getCreationTime()) - .setTimePartitioning(tablePb.getTimePartitioning()) - .setRangePartitioning(tablePb.getRangePartitioning()); - } - })); + return listTablesSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listTablesSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Tables.List tableListRequest = + bigquery + .tables() + .list(projectId, datasetId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + tableListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listTables = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listTables = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listTables") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "ListTables") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", tableListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + TableList tableResponse = tableListRequest.execute(); + if (listTables != null) { + listTables.setAttribute("bq.rpc.next_page_token", tableResponse.getNextPageToken()); + listTables.end(); + } + + Iterable tables = tableResponse.getTables(); + return Tuple.of( + tableResponse.getNextPageToken(), + Iterables.transform( + tables != null ? 
tables : ImmutableList.of(), + new Function() { + @Override + public Table apply(TableList.Tables tablePb) { + return new Table() + .setFriendlyName(tablePb.getFriendlyName()) + .setId(tablePb.getId()) + .setKind(tablePb.getKind()) + .setTableReference(tablePb.getTableReference()) + .setType(tablePb.getType()) + .setCreationTime(tablePb.getCreationTime()) + .setTimePartitioning(tablePb.getTimePartitioning()) + .setRangePartitioning(tablePb.getRangePartitioning()) + .setClustering(tablePb.getClustering()) + .setLabels(tablePb.getLabels()); + } + })); + } + @Override public boolean deleteTable(String projectId, String datasetId, String tableId) { try { - validateRPC(); - bigquery.tables().delete(projectId, datasetId, tableId).execute(); - return true; + return deleteTableSkipExceptionTranslation(projectId, datasetId, tableId); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -393,34 +785,92 @@ public boolean deleteTable(String projectId, String datasetId, String tableId) { } } + @InternalApi("internal to java-bigquery") + public boolean deleteTableSkipExceptionTranslation( + String projectId, String datasetId, String tableId) throws IOException { + validateRPC(); + Bigquery.Tables.Delete bqDeleteRequest = + bigquery.tables().delete(projectId, datasetId, tableId); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "DeleteTable") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteTable != null) { + deleteTable.end(); + } + return true; + } + @Override public Model patch(Model model, Map options) { try { - validateRPC(); - // unset the type, as it is output only - ModelReference reference = model.getModelReference(); - return bigquery - .models() - .patch(reference.getProjectId(), reference.getDatasetId(), reference.getModelId(), model) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return patchSkipExceptionTranslation(model, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Model patchSkipExceptionTranslation(Model model, Map options) + throws IOException { + validateRPC(); + // unset the type, as it is output only + ModelReference reference = model.getModelReference(); + Bigquery.Models.Patch bqPatchRequest = + bigquery + .models() + .patch( + reference.getProjectId(), reference.getDatasetId(), reference.getModelId(), model) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqPatchRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span patchModel = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + patchModel = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.patchModel") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "PatchModel") + 
.setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Model modelResponse = bqPatchRequest.execute(); + if (patchModel != null) { + patchModel.setAttribute( + "bq.rpc.response.model.id", modelResponse.getModelReference().getModelId()); + patchModel.end(); + } + return modelResponse; + } + @Override public Model getModel( String projectId, String datasetId, String modelId, Map options) { try { - validateRPC(); - return bigquery - .models() - .get(projectId, datasetId, modelId) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return getModelSkipExceptionTranslation(projectId, datasetId, modelId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -430,33 +880,101 @@ public Model getModel( } } + @InternalApi("internal to java-bigquery") + public Model getModelSkipExceptionTranslation( + String projectId, String datasetId, String modelId, Map options) + throws IOException { + validateRPC(); + Bigquery.Models.Get bqGetRequest = + bigquery + .models() + .get(projectId, datasetId, modelId) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getModel = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getModel = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getModel") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "GetModel") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Model modelResponse = bqGetRequest.execute(); + if (getModel != null) { + getModel.setAttribute( + "bq.rpc.response.model.id", modelResponse.getModelReference().getModelId()); + getModel.end(); + } + return modelResponse; + } + @Override public Tuple> listModels( String projectId, String datasetId, Map options) { try { - validateRPC(); - ListModelsResponse modelList = - bigquery - .models() - .list(projectId, datasetId) - .setPrettyPrint(false) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .execute(); - Iterable models = - modelList.getModels() != null ? 
modelList.getModels() : ImmutableList.of(); - return Tuple.of(modelList.getNextPageToken(), models); + return listModelsSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listModelsSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Models.List modelListRequest = + bigquery + .models() + .list(projectId, datasetId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + modelListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listModels = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listModels = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listModels") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "ListModels") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", modelListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + ListModelsResponse modelResponse = modelListRequest.execute(); + if (listModels != null) { + listModels.setAttribute("bq.rpc.next_page_token", modelResponse.getNextPageToken()); + listModels.end(); + } + + Iterable models = + modelResponse.getModels() != null ? modelResponse.getModels() : ImmutableList.of(); + return Tuple.of(modelResponse.getNextPageToken(), models); + } + @Override public boolean deleteModel(String projectId, String datasetId, String modelId) { try { - validateRPC(); - bigquery.models().delete(projectId, datasetId, modelId).execute(); - return true; + return deleteModelSkipExceptionTranslation(projectId, datasetId, modelId); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -466,34 +984,94 @@ public boolean deleteModel(String projectId, String datasetId, String modelId) { } } + @InternalApi("internal to java-bigquery") + public boolean deleteModelSkipExceptionTranslation( + String projectId, String datasetId, String modelId) throws IOException { + validateRPC(); + Bigquery.Models.Delete bqDeleteRequest = + bigquery.models().delete(projectId, datasetId, modelId); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteModels = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteModels = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteModel") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "DeleteModel") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteModels != null) { + deleteModels.end(); + } + return true; + } + @Override public Routine update(Routine routine, Map options) { try { - validateRPC(); - RoutineReference reference = routine.getRoutineReference(); - return bigquery - .routines() - .update( - reference.getProjectId(), reference.getDatasetId(), reference.getRoutineId(), routine) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) 
- .execute(); + return updateSkipExceptionTranslation(routine, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Routine updateSkipExceptionTranslation(Routine routine, Map options) + throws IOException { + validateRPC(); + RoutineReference reference = routine.getRoutineReference(); + Bigquery.Routines.Update bqUpdateRequest = + bigquery + .routines() + .update( + reference.getProjectId(), + reference.getDatasetId(), + reference.getRoutineId(), + routine) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqUpdateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span updateRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + updateRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.updateRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "UpdateRoutine") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Routine routineResponse = bqUpdateRequest.execute(); + if (updateRoutine != null) { + updateRoutine.setAttribute( + "bq.rpc.response.routine.id", routineResponse.getRoutineReference().getRoutineId()); + updateRoutine.end(); + } + return routineResponse; + } + @Override public Routine getRoutine( String projectId, String datasetId, String routineId, Map options) { try { - validateRPC(); - return bigquery - .routines() - .get(projectId, datasetId, routineId) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return getRoutineSkipExceptionTranslation(projectId, datasetId, routineId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -503,35 +1081,102 @@ public Routine getRoutine( } } + @InternalApi("internal to java-bigquery") + public Routine getRoutineSkipExceptionTranslation( + String projectId, String datasetId, String routineId, Map options) + throws IOException { + validateRPC(); + Bigquery.Routines.Get bqGetRequest = + bigquery + .routines() + .get(projectId, datasetId, routineId) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "GetRoutine") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Routine routineResponse = bqGetRequest.execute(); + if (getRoutine != null) { + getRoutine.setAttribute( + "bq.rpc.response.routine.id", routineResponse.getRoutineReference().getRoutineId()); + getRoutine.end(); + } + return routineResponse; + } + @Override public Tuple> listRoutines( String projectId, String datasetId, Map options) { try { - validateRPC(); - ListRoutinesResponse routineList = - bigquery - .routines() - .list(projectId, datasetId) - .setPrettyPrint(false) - 
.setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .execute(); - Iterable routines = - routineList.getRoutines() != null - ? routineList.getRoutines() - : ImmutableList.of(); - return Tuple.of(routineList.getNextPageToken(), routines); + return listRoutinesSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listRoutinesSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Routines.List routineListRequest = + bigquery + .routines() + .list(projectId, datasetId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + routineListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listRoutines = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listRoutines = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listRoutines") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "ListRoutines") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", routineListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + ListRoutinesResponse routineResponse = routineListRequest.execute(); + if (listRoutines != null) { + listRoutines.setAttribute("bq.rpc.next_page_token", routineResponse.getNextPageToken()); + listRoutines.end(); + } + Iterable routines = + routineResponse.getRoutines() != null + ? 
routineResponse.getRoutines() + : ImmutableList.of(); + return Tuple.of(routineResponse.getNextPageToken(), routines); + } + @Override public boolean deleteRoutine(String projectId, String datasetId, String routineId) { try { - validateRPC(); - bigquery.routines().delete(projectId, datasetId, routineId).execute(); - return true; + return deleteRoutineSkipExceptionTranslation(projectId, datasetId, routineId); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -541,42 +1186,135 @@ public boolean deleteRoutine(String projectId, String datasetId, String routineI } } + @InternalApi("internal to java-bigquery") + public boolean deleteRoutineSkipExceptionTranslation( + String projectId, String datasetId, String routineId) throws IOException { + validateRPC(); + Bigquery.Routines.Delete bqDeleteRequest = + bigquery.routines().delete(projectId, datasetId, routineId); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "DeleteRoutine") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteRoutine != null) { + deleteRoutine.end(); + } + return true; + } + @Override public TableDataInsertAllResponse insertAll( String projectId, String datasetId, String tableId, TableDataInsertAllRequest request) { try { - validateRPC(); - return bigquery - .tabledata() - .insertAll(projectId, datasetId, tableId, request) - .setPrettyPrint(false) - .execute(); + return insertAllSkipExceptionTranslation(projectId, datasetId, tableId, request); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public TableDataInsertAllResponse insertAllSkipExceptionTranslation( + String projectId, String datasetId, String tableId, TableDataInsertAllRequest request) + throws IOException { + validateRPC(); + Bigquery.Tabledata.InsertAll insertAllRequest = + bigquery + .tabledata() + .insertAll(projectId, datasetId, tableId, request) + .setPrettyPrint(false); + + insertAllRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span insertAll = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + insertAll = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.insertAll") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableDataService") + .setAttribute("bq.rpc.method", "InsertAll") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + TableDataInsertAllResponse insertAllResponse = insertAllRequest.execute(); + if (insertAll != null) { + insertAll.end(); + } + return insertAllResponse; + } + @Override public TableDataList listTableData( String projectId, String datasetId, String tableId, Map options) { try { - validateRPC(); - return bigquery - .tabledata() - .list(projectId, datasetId, tableId) - .setPrettyPrint(false) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) -
.setStartIndex( - Option.START_INDEX.getLong(options) != null - ? BigInteger.valueOf(Option.START_INDEX.getLong(options)) - : null) - .execute(); + return listTableDataSkipExceptionTranslation(projectId, datasetId, tableId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public TableDataList listTableDataSkipExceptionTranslation( + String projectId, String datasetId, String tableId, Map options) + throws IOException { + validateRPC(); + Bigquery.Tabledata.List bqListRequest = + bigquery + .tabledata() + .list(projectId, datasetId, tableId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)) + .setStartIndex( + Option.START_INDEX.getLong(options) != null + ? BigInteger.valueOf(Option.START_INDEX.getLong(options)) + : null); + + bqListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listTableData = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listTableData = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listTableData") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableDataService") + .setAttribute("bq.rpc.method", "List") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", bqListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + TableDataList bqListResponse = bqListRequest.execute(); + if (listTableData != null) { + listTableData.end(); + } + return bqListResponse; + } + @Override public TableDataList listTableDataWithRowLimit( String projectId, @@ -585,30 +1323,59 @@ public TableDataList listTableDataWithRowLimit( Integer maxResultPerPage, String pageToken) { try { - validateRPC(); - return bigquery - .tabledata() - .list(projectId, datasetId, tableId) - .setPrettyPrint(false) - .setMaxResults(Long.valueOf(maxResultPerPage)) - .setPageToken(pageToken) - .execute(); + return listTableDataWithRowLimitSkipExceptionTranslation( + projectId, datasetId, tableId, maxResultPerPage, pageToken); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public TableDataList listTableDataWithRowLimitSkipExceptionTranslation( + String projectId, + String datasetId, + String tableId, + Integer maxResultPerPage, + String pageToken) + throws IOException { + validateRPC(); + Bigquery.Tabledata.List bqListRequest = + bigquery + .tabledata() + .list(projectId, datasetId, tableId) + .setPrettyPrint(false) + .setMaxResults(Long.valueOf(maxResultPerPage)) + .setPageToken(pageToken); + + bqListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listTableData = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listTableData = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listTableDataWithRowLimit") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableDataService") + .setAttribute("bq.rpc.method", "List") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", bqListRequest.getPageToken()) + .startSpan(); + } + TableDataList bqListResponse = bqListRequest.execute(); + if (listTableData != null) { + listTableData.end(); + } + return 
bqListResponse; + } + @Override public Job getJob(String projectId, String jobId, String location, Map options) { try { - validateRPC(); - return bigquery - .jobs() - .get(projectId, jobId) - .setPrettyPrint(false) - .setLocation(location) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return getJobSkipExceptionTranslation(projectId, jobId, location, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -618,16 +1385,49 @@ public Job getJob(String projectId, String jobId, String location, Map } } + @InternalApi("internal to java-bigquery") + public Job getJobSkipExceptionTranslation( + String projectId, String jobId, String location, Map options) throws IOException { + validateRPC(); + Bigquery.Jobs.Get bqGetRequest = + bigquery + .jobs() + .get(projectId, jobId) + .setPrettyPrint(false) + .setLocation(location) + .setFields(Option.FIELDS.getString(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetJob") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Job jobResponse = bqGetRequest.execute(); + if (getJob != null) { + getJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + getJob.setAttribute("bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + getJob.end(); + } + return jobResponse; + } + @Override public Job getQueryJob(String projectId, String jobId, String location) { try { - validateRPC(); - return bigquery - .jobs() - .get(projectId, jobId) - .setPrettyPrint(false) - .setLocation(location) - .execute(); + return getQueryJobSkipExceptionTranslation(projectId, jobId, location); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -637,72 +1437,129 @@ public Job getQueryJob(String projectId, String jobId, String location) { } } + @InternalApi("internal to java-bigquery") + public Job getQueryJobSkipExceptionTranslation(String projectId, String jobId, String location) + throws IOException { + validateRPC(); + Bigquery.Jobs.Get bqGetRequest = + bigquery.jobs().get(projectId, jobId).setPrettyPrint(false).setLocation(location); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getQueryJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + Job jobResponse = bqGetRequest.execute(); + if (getJob != null) { + getJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + getJob.setAttribute("bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + getJob.end(); + } + return jobResponse; + } + @Override public Tuple> listJobs(String projectId, Map options) { try { - validateRPC(); - Bigquery.Jobs.List request = - bigquery - .jobs() - .list(projectId) -
.setPrettyPrint(false) - .setAllUsers(Option.ALL_USERS.getBoolean(options)) - .setFields(Option.FIELDS.getString(options)) - .setStateFilter(Option.STATE_FILTER.>get(options)) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .setProjection(DEFAULT_PROJECTION) - .setParentJobId(Option.PARENT_JOB_ID.getString(options)); - if (Option.MIN_CREATION_TIME.getLong(options) != null) { - request.setMinCreationTime(BigInteger.valueOf(Option.MIN_CREATION_TIME.getLong(options))); - } - if (Option.MAX_CREATION_TIME.getLong(options) != null) { - request.setMaxCreationTime(BigInteger.valueOf(Option.MAX_CREATION_TIME.getLong(options))); - } - JobList jobsList = request.execute(); - - Iterable jobs = jobsList.getJobs(); - return Tuple.of( - jobsList.getNextPageToken(), - Iterables.transform( - jobs != null ? jobs : ImmutableList.of(), - new Function() { - @Override - public Job apply(JobList.Jobs jobPb) { - JobStatus statusPb = - jobPb.getStatus() != null ? jobPb.getStatus() : new JobStatus(); - if (statusPb.getState() == null) { - statusPb.setState(jobPb.getState()); - } - if (statusPb.getErrorResult() == null) { - statusPb.setErrorResult(jobPb.getErrorResult()); - } - return new Job() - .setConfiguration(jobPb.getConfiguration()) - .setId(jobPb.getId()) - .setJobReference(jobPb.getJobReference()) - .setKind(jobPb.getKind()) - .setStatistics(jobPb.getStatistics()) - .setStatus(statusPb) - .setUserEmail(jobPb.getUserEmail()); - } - })); + return listJobsSkipExceptionTranslation(projectId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listJobsSkipExceptionTranslation( + String projectId, Map options) throws IOException { + validateRPC(); + Bigquery.Jobs.List listJobsRequest = + bigquery + .jobs() + .list(projectId) + .setPrettyPrint(false) + .setAllUsers(Option.ALL_USERS.getBoolean(options)) + .setFields(Option.FIELDS.getString(options)) + .setStateFilter(Option.STATE_FILTER.>get(options)) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)) + .setProjection(DEFAULT_PROJECTION) + .setParentJobId(Option.PARENT_JOB_ID.getString(options)); + if (Option.MIN_CREATION_TIME.getLong(options) != null) { + listJobsRequest.setMinCreationTime( + BigInteger.valueOf(Option.MIN_CREATION_TIME.getLong(options))); + } + if (Option.MAX_CREATION_TIME.getLong(options) != null) { + listJobsRequest.setMaxCreationTime( + BigInteger.valueOf(Option.MAX_CREATION_TIME.getLong(options))); + } + listJobsRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listJobs = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listJobs = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listJobs") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "ListJobs") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", listJobsRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + JobList jobsList = listJobsRequest.execute(); + if (listJobs != null) { + listJobs.setAttribute("bq.rpc.next_page_token", jobsList.getNextPageToken()); + listJobs.end(); + } + + Iterable jobs = jobsList.getJobs(); + return Tuple.of( + jobsList.getNextPageToken(), + Iterables.transform( + 
jobs != null ? jobs : ImmutableList.of(), + new Function() { + @Override + public Job apply(JobList.Jobs jobPb) { + JobStatus statusPb = + jobPb.getStatus() != null ? jobPb.getStatus() : new JobStatus(); + if (statusPb.getState() == null) { + statusPb.setState(jobPb.getState()); + } + if (statusPb.getErrorResult() == null) { + statusPb.setErrorResult(jobPb.getErrorResult()); + } + return new Job() + .setConfiguration(jobPb.getConfiguration()) + .setId(jobPb.getId()) + .setJobReference(jobPb.getJobReference()) + .setKind(jobPb.getKind()) + .setStatistics(jobPb.getStatistics()) + .setStatus(statusPb) + .setUserEmail(jobPb.getUserEmail()); + } + })); + } + @Override public boolean cancel(String projectId, String jobId, String location) { try { - validateRPC(); - bigquery - .jobs() - .cancel(projectId, jobId) - .setLocation(location) - .setPrettyPrint(false) - .execute(); - return true; + return cancelSkipExceptionTranslation(projectId, jobId, location); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -712,95 +1569,249 @@ public boolean cancel(String projectId, String jobId, String location) { } } + @InternalApi("internal to java-bigquery") + public boolean cancelSkipExceptionTranslation(String projectId, String jobId, String location) + throws IOException { + validateRPC(); + Bigquery.Jobs.Cancel bqCancelRequest = + bigquery.jobs().cancel(projectId, jobId).setLocation(location).setPrettyPrint(false); + + bqCancelRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span cancelJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + cancelJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.cancelJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "CancelJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqCancelRequest.execute(); + if (cancelJob != null) { + cancelJob.end(); + } + return true; + } + @Override public boolean deleteJob(String projectId, String jobName, String location) { try { - validateRPC(); - bigquery - .jobs() - .delete(projectId, jobName) - .setLocation(location) - .setPrettyPrint(false) - .execute(); - return true; + return deleteJobSkipExceptionTranslation(projectId, jobName, location); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public boolean deleteJobSkipExceptionTranslation( + String projectId, String jobName, String location) throws IOException { + validateRPC(); + Bigquery.Jobs.Delete bqDeleteRequest = + bigquery.jobs().delete(projectId, jobName).setLocation(location).setPrettyPrint(false); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "DeleteJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteJob != null) { + deleteJob.end(); + } + return true; + } + @Override public 
GetQueryResultsResponse getQueryResults( String projectId, String jobId, String location, Map options) { try { - validateRPC(); - return bigquery - .jobs() - .getQueryResults(projectId, jobId) - .setPrettyPrint(false) - .setLocation(location) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .setStartIndex( - Option.START_INDEX.getLong(options) != null - ? BigInteger.valueOf(Option.START_INDEX.getLong(options)) - : null) - .setTimeoutMs(Option.TIMEOUT.getLong(options)) - .execute(); + return getQueryResultsSkipExceptionTranslation(projectId, jobId, location, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public GetQueryResultsResponse getQueryResultsSkipExceptionTranslation( + String projectId, String jobId, String location, Map options) throws IOException { + validateRPC(); + Bigquery.Jobs.GetQueryResults queryRequest = + bigquery + .jobs() + .getQueryResults(projectId, jobId) + .setPrettyPrint(false) + .setLocation(location) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)) + .setStartIndex( + Option.START_INDEX.getLong(options) != null + ? BigInteger.valueOf(Option.START_INDEX.getLong(options)) + : null) + .setTimeoutMs(Option.TIMEOUT.getLong(options)); + + queryRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getQueryResults = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getQueryResults = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getQueryResults") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetQueryResults") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", queryRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + GetQueryResultsResponse queryResponse = queryRequest.execute(); + if (getQueryResults != null) { + getQueryResults.end(); + } + return queryResponse; + } + @Override public GetQueryResultsResponse getQueryResultsWithRowLimit( String projectId, String jobId, String location, Integer maxResultPerPage, Long timeoutMs) { try { - validateRPC(); - return bigquery - .jobs() - .getQueryResults(projectId, jobId) - .setPrettyPrint(false) - .setLocation(location) - .setMaxResults(Long.valueOf(maxResultPerPage)) - .setTimeoutMs(timeoutMs) - .execute(); + return getQueryResultsWithRowLimitSkipExceptionTranslation( + projectId, jobId, location, maxResultPerPage, timeoutMs); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public GetQueryResultsResponse getQueryResultsWithRowLimitSkipExceptionTranslation( + String projectId, String jobId, String location, Integer maxResultPerPage, Long timeoutMs) + throws IOException { + validateRPC(); + Bigquery.Jobs.GetQueryResults queryRequest = + bigquery + .jobs() + .getQueryResults(projectId, jobId) + .setPrettyPrint(false) + .setLocation(location) + .setMaxResults(Long.valueOf(maxResultPerPage)) + .setTimeoutMs(timeoutMs); + + queryRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getQueryResults = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getQueryResults = 
+ this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getQueryResultsWithRowLimit") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetQueryResults") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", queryRequest.getPageToken()) + .startSpan(); + } + + GetQueryResultsResponse queryResponse = queryRequest.execute(); + if (getQueryResults != null) { + getQueryResults.end(); + } + return queryResponse; + } + @Override public QueryResponse queryRpc(String projectId, QueryRequest content) { try { - validateRPC(); - return bigquery.jobs().query(projectId, content).execute(); + return queryRpcSkipExceptionTranslation(projectId, content); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public QueryResponse queryRpcSkipExceptionTranslation(String projectId, QueryRequest content) + throws IOException { + validateRPC(); + Bigquery.Jobs.Query queryRequest = bigquery.jobs().query(projectId, content); + queryRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span queryRpc = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + queryRpc = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.queryRpc") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "Query") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + + QueryResponse queryResponse = queryRequest.execute(); + if (queryRpc != null) { + queryRpc.end(); + } + return queryResponse; + } + @Override public String open(Job loadJob) { try { - String builder = options.getResolvedApiaryHost("bigquery"); - if (!builder.endsWith("/")) { - builder += "/"; - } - builder += BASE_RESUMABLE_URI + options.getProjectId() + "/jobs"; - GenericUrl url = new GenericUrl(builder); - url.set("uploadType", "resumable"); - JsonFactory jsonFactory = bigquery.getJsonFactory(); - HttpRequestFactory requestFactory = bigquery.getRequestFactory(); - HttpRequest httpRequest = - requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob)); - httpRequest.getHeaders().set("X-Upload-Content-Value", "application/octet-stream"); - HttpResponse response = httpRequest.execute(); - return response.getHeaders().getLocation(); + return openSkipExceptionTranslation(loadJob); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public String openSkipExceptionTranslation(Job loadJob) throws IOException { + String builder = options.getResolvedApiaryHost("bigquery"); + if (!builder.endsWith("/")) { + builder += "/"; + } + builder += BASE_RESUMABLE_URI + options.getProjectId() + "/jobs"; + GenericUrl url = new GenericUrl(builder); + url.set("uploadType", "resumable"); + JsonFactory jsonFactory = bigquery.getJsonFactory(); + HttpRequestFactory requestFactory = bigquery.getRequestFactory(); + HttpRequest httpRequest = - requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob)); + requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob)); + httpRequest.getHeaders().set("X-Upload-Content-Value", "application/octet-stream"); + HttpResponse response = httpRequest.execute(); + return response.getHeaders().getLocation(); + }
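The open/write pair here implements the standard Google resumable-upload handshake: open POSTs the job configuration to the upload endpoint with uploadType=resumable and returns the session URI from the Location response header, and write then PUTs byte ranges against that URI with Content-Range headers (HTTP 308 while the upload is incomplete, 200/201 on the final chunk). A self-contained sketch of the handshake; the host and project id are illustrative and authentication is omitted:

```java
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.http.json.JsonHttpContent;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.services.bigquery.model.Job;

public class ResumableOpenSketch {
  public static void main(String[] args) throws Exception {
    // Illustrative endpoint; the real code derives host and project from BigQueryOptions.
    GenericUrl url =
        new GenericUrl(
            "https://bigquery.googleapis.com/upload/bigquery/v2/projects/my-project/jobs");
    url.set("uploadType", "resumable"); // ask the backend to open an upload session

    HttpRequestFactory requestFactory = new NetHttpTransport().createRequestFactory();
    HttpRequest request =
        requestFactory.buildPostRequest(
            url, new JsonHttpContent(GsonFactory.getDefaultInstance(), new Job()));
    request.getHeaders().set("X-Upload-Content-Value", "application/octet-stream");

    // The Location header carries the session URI that the subsequent
    // Content-Range PUTs (the write() path) are issued against.
    String sessionUri = request.execute().getHeaders().getLocation();
    System.out.println(sessionUri);
  }
}
```

+ @Override public Job write( String uploadId, @@ -810,101 +1821,200 @@ public Job write( int length, boolean 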
last) { try { - if (length == 0) { - return null; - } - GenericUrl url = new GenericUrl(uploadId); - HttpRequest httpRequest = - bigquery - .getRequestFactory() - .buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); - httpRequest.setParser(bigquery.getObjectParser()); - long limit = destOffset + length; - StringBuilder range = new StringBuilder("bytes "); - range.append(destOffset).append('-').append(limit - 1).append('/'); - if (last) { - range.append(limit); - } else { - range.append('*'); - } - httpRequest.getHeaders().setContentRange(range.toString()); - int code; - String message; - IOException exception = null; - HttpResponse response = null; - try { - response = httpRequest.execute(); - code = response.getStatusCode(); - message = response.getStatusMessage(); - } catch (HttpResponseException ex) { - exception = ex; - code = ex.getStatusCode(); - message = ex.getStatusMessage(); - } - if (!last && code != HTTP_RESUME_INCOMPLETE - || last && !(code == HTTP_OK || code == HTTP_CREATED)) { - if (exception != null) { - throw exception; - } - throw new BigQueryException(code, message); - } - return last && response != null ? response.parseAs(Job.class) : null; + return writeSkipExceptionTranslation( + uploadId, toWrite, toWriteOffset, destOffset, length, last); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Job writeSkipExceptionTranslation( + String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, boolean last) + throws IOException { + if (length == 0) { + return null; + } + GenericUrl url = new GenericUrl(uploadId); + HttpRequest httpRequest = + bigquery + .getRequestFactory() + .buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); + httpRequest.setParser(bigquery.getObjectParser()); + long limit = destOffset + length; + StringBuilder range = new StringBuilder("bytes "); + range.append(destOffset).append('-').append(limit - 1).append('/'); + if (last) { + range.append(limit); + } else { + range.append('*'); + } + httpRequest.getHeaders().setContentRange(range.toString()); + int code; + String message; + IOException exception = null; + HttpResponse response = null; + try { + response = httpRequest.execute(); + code = response.getStatusCode(); + message = response.getStatusMessage(); + } catch (HttpResponseException ex) { + exception = ex; + code = ex.getStatusCode(); + message = ex.getStatusMessage(); + } + if (!last && code != HTTP_RESUME_INCOMPLETE + || last && !(code == HTTP_OK || code == HTTP_CREATED)) { + if (exception != null) { + throw exception; + } + throw new BigQueryException(code, message); + } + return last && response != null ? 
response.parseAs(Job.class) : null; + } + @Override public Policy getIamPolicy(String resourceId, Map options) { try { - validateRPC(); - GetIamPolicyRequest policyRequest = new GetIamPolicyRequest(); - if (null != Option.REQUESTED_POLICY_VERSION.getLong(options)) { - policyRequest = - policyRequest.setOptions( - new GetPolicyOptions() - .setRequestedPolicyVersion( - Option.REQUESTED_POLICY_VERSION.getLong(options).intValue())); - } - return bigquery - .tables() - .getIamPolicy(resourceId, policyRequest) - .setPrettyPrint(false) - .execute(); + return getIamPolicySkipExceptionTranslation(resourceId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Policy getIamPolicySkipExceptionTranslation(String resourceId, Map options) + throws IOException { + validateRPC(); + GetIamPolicyRequest policyRequest = new GetIamPolicyRequest(); + if (null != Option.REQUESTED_POLICY_VERSION.getLong(options)) { + policyRequest = + policyRequest.setOptions( + new GetPolicyOptions() + .setRequestedPolicyVersion( + Option.REQUESTED_POLICY_VERSION.getLong(options).intValue())); + } + Bigquery.Tables.GetIamPolicy bqGetRequest = + bigquery.tables().getIamPolicy(resourceId, policyRequest).setPrettyPrint(false); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getIamPolicy = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getIamPolicy = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getIamPolicy") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "GetIamPolicy") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + Policy bqGetResponse = bqGetRequest.execute(); + if (getIamPolicy != null) { + getIamPolicy.end(); + } + return bqGetResponse; + } + @Override public Policy setIamPolicy(String resourceId, Policy policy, Map options) { try { - validateRPC(); - SetIamPolicyRequest policyRequest = new SetIamPolicyRequest().setPolicy(policy); - return bigquery - .tables() - .setIamPolicy(resourceId, policyRequest) - .setPrettyPrint(false) - .execute(); + return setIamPolicySkipExceptionTranslation(resourceId, policy, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Policy setIamPolicySkipExceptionTranslation( + String resourceId, Policy policy, Map options) throws IOException { + validateRPC(); + SetIamPolicyRequest policyRequest = new SetIamPolicyRequest().setPolicy(policy); + Bigquery.Tables.SetIamPolicy bqSetRequest = + bigquery.tables().setIamPolicy(resourceId, policyRequest).setPrettyPrint(false); + + bqSetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span setIamPolicy = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + setIamPolicy = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.setIamPolicy") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "SetIamPolicy") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + Policy bqSetResponse = bqSetRequest.execute(); + if 
(setIamPolicy != null) { + setIamPolicy.end(); + } + return bqSetResponse; + } + @Override public TestIamPermissionsResponse testIamPermissions( String resourceId, List permissions, Map options) { try { - validateRPC(); - TestIamPermissionsRequest permissionsRequest = - new TestIamPermissionsRequest().setPermissions(permissions); - return bigquery - .tables() - .testIamPermissions(resourceId, permissionsRequest) - .setPrettyPrint(false) - .execute(); + return testIamPermissionsSkipExceptionTranslation(resourceId, permissions, options); } catch (IOException ex) { throw translate(ex); } } + + @InternalApi("internal to java-bigquery") + public TestIamPermissionsResponse testIamPermissionsSkipExceptionTranslation( + String resourceId, List permissions, Map options) throws IOException { + validateRPC(); + TestIamPermissionsRequest permissionsRequest = + new TestIamPermissionsRequest().setPermissions(permissions); + Bigquery.Tables.TestIamPermissions bqTestRequest = + bigquery.tables().testIamPermissions(resourceId, permissionsRequest).setPrettyPrint(false); + + bqTestRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span testIamPermissions = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + testIamPermissions = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.testIamPermissions") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "TestIamPermissions") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + TestIamPermissionsResponse bqTestResponse = bqTestRequest.execute(); + if (testIamPermissions != null) { + testIamPermissions.end(); + } + return bqTestResponse; + } + + private static Attributes otelAttributesFromOptions(Map options) { + AttributesBuilder attributes = Attributes.builder(); + for (Map.Entry entry : options.entrySet()) { + attributes.put(entry.getKey().toString(), entry.getValue().toString()); + } + return attributes.build(); + } }
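A note on the helper that closes HttpBigQueryRpc: OpenTelemetry's Attributes type is immutable, so per-request option attributes have to be accumulated on a single AttributesBuilder and built once; calling toBuilder() on an already-built instance inside the loop would create a fresh builder on every iteration and discard each put. The helper accordingly assumes an io.opentelemetry.api.common.AttributesBuilder import alongside Attributes. A self-contained sketch of the same accumulation, with an illustrative class name and option key:

```java
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.common.AttributesBuilder;
import java.util.HashMap;
import java.util.Map;

public class OtelAttributesSketch {
  // Accumulate on one mutable builder and build once; a per-entry
  // toBuilder() on an immutable Attributes would drop every intermediate put.
  static Attributes fromOptions(Map<?, ?> options) {
    AttributesBuilder builder = Attributes.builder();
    for (Map.Entry<?, ?> entry : options.entrySet()) {
      builder.put(String.valueOf(entry.getKey()), String.valueOf(entry.getValue()));
    }
    return builder.build();
  }

  public static void main(String[] args) {
    Map<String, Object> options = new HashMap<>();
    options.put("max_results", 100L);
    System.out.println(fromOptions(options)); // e.g. {max_results="100"}
  }
}
```

diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java index 784ca984f..4b1767362 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java @@ -24,10 +24,10 @@ import com.google.cloud.http.HttpTransportOptions; import java.io.IOException; import java.io.InputStream; +import java.time.Duration; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; -import org.threeten.bp.Duration; /** * Utility to create a remote BigQuery configuration for testing. 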
BigQuery options can be obtained @@ -95,8 +95,7 @@ public static RemoteBigQueryHelper create(String projectId, InputStream keyStrea try { HttpTransportOptions transportOptions = BigQueryOptions.getDefaultHttpTransportOptions(); transportOptions = - transportOptions - .toBuilder() + transportOptions.toBuilder() .setConnectTimeout(connectTimeout) .setReadTimeout(connectTimeout) .build(); @@ -123,8 +122,7 @@ public static RemoteBigQueryHelper create(String projectId, InputStream keyStrea public static RemoteBigQueryHelper create() { HttpTransportOptions transportOptions = BigQueryOptions.getDefaultHttpTransportOptions(); transportOptions = - transportOptions - .toBuilder() + transportOptions.toBuilder() .setConnectTimeout(connectTimeout) .setReadTimeout(connectTimeout) .build(); @@ -144,13 +142,13 @@ private static RetrySettings retrySettings() { long totalTimeOut = 120000L; return RetrySettings.newBuilder() .setMaxAttempts(maxAttempts) - .setMaxRetryDelay(Duration.ofMillis(maxRetryDelay)) - .setTotalTimeout(Duration.ofMillis(totalTimeOut)) - .setInitialRetryDelay(Duration.ofMillis(initialRetryDelay)) + .setMaxRetryDelayDuration(Duration.ofMillis(maxRetryDelay)) + .setTotalTimeoutDuration(Duration.ofMillis(totalTimeOut)) + .setInitialRetryDelayDuration(Duration.ofMillis(initialRetryDelay)) .setRetryDelayMultiplier(retryDelayMultiplier) - .setInitialRpcTimeout(Duration.ofMillis(totalTimeOut)) + .setInitialRpcTimeoutDuration(Duration.ofMillis(totalTimeOut)) .setRpcTimeoutMultiplier(retryDelayMultiplier) - .setMaxRpcTimeout(Duration.ofMillis(totalTimeOut)) + .setMaxRpcTimeoutDuration(Duration.ofMillis(totalTimeOut)) .build(); } diff --git a/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/reflect-config.json b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/reflect-config.json index 8f6d928da..921440df6 100644 --- a/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/reflect-config.json +++ b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/reflect-config.json @@ -29,9 +29,62 @@ "name":"java.nio.DirectByteBuffer", "methods":[{"name":"","parameterTypes":["long","int"] }] }, + + { + "name":"org.apache.arrow.memory.BaseAllocator", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.BaseAllocator$Config", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.DefaultAllocationManagerOption", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.netty.NettyAllocationManager$1", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.netty.DefaultAllocationManagerFactory", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, 
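Context for the allocator entries in this file: Arrow resolves its buffer-allocation backend reflectively at startup. DefaultAllocationManagerOption effectively walks the candidate factory classes by name and reads their static FACTORY field, which the earlier config exposed as "fields":[{"name":"FACTORY"}]. Under GraalVM native-image that lookup only succeeds if each candidate class is registered for reflection, hence the broader registrations here. A hedged sketch of the lookup these entries exist to satisfy, using the netty factory as the example:

```java
import java.lang.reflect.Field;
import org.apache.arrow.memory.AllocationManager;

public class ArrowFactoryLookupSketch {
  public static void main(String[] args) throws Exception {
    // Approximation of Arrow's DefaultAllocationManagerOption lookup:
    // resolve a factory class by name, then read its static FACTORY field
    // reflectively. Under native-image this only works when the class is
    // listed in reflect-config.json.
    Class<?> clazz =
        Class.forName("org.apache.arrow.memory.netty.DefaultAllocationManagerFactory");
    Field factoryField = clazz.getDeclaredField("FACTORY");
    AllocationManager.Factory factory = (AllocationManager.Factory) factoryField.get(null);
    System.out.println("Resolved allocation manager factory: " + factory);
  }
}
```

+ { 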
+ "name":"org.apache.arrow.memory.RootAllocator", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, { "name":"org.apache.arrow.memory.DefaultAllocationManagerFactory", - "fields":[{"name":"FACTORY"}] + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true }, { "name":"org.apache.arrow.vector.types.pojo.ArrowType", @@ -50,6 +103,18 @@ "allDeclaredFields":true, "queryAllDeclaredMethods":true }, + { + "name": "org.apache.arrow.vector.types.pojo.DictionaryEncoding", + "allDeclaredFields": true + }, + { + "name": "org.apache.arrow.vector.types.pojo.Field", + "allDeclaredFields": true + }, + { + "name": "org.apache.arrow.vector.types.pojo.Schema", + "allDeclaredFields": true + }, { "name":"io.netty.buffer.AbstractReferenceCountedByteBuf", "fields":[{"name":"refCnt"}] diff --git a/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/resource-config.json b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/resource-config.json index dfdcc58c4..c4710244e 100644 --- a/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/resource-config.json +++ b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/resource-config.json @@ -1,6 +1,26 @@ { "resources":{ - "includes":[{ - "pattern":"\\Qorg/apache/arrow/memory/DefaultAllocationManagerFactory.class\\E" - }]} + "includes":[ + { + "pattern":"\\Qorg/apache/arrow/memory/DefaultAllocationManagerFactory.class\\E" + }, + { + "pattern":"\\Qorg/apache/arrow/memory/netty/DefaultAllocationManagerFactory.class\\E" + }, + { + "pattern":"\\Qorg/apache/arrow/memory/unsafe/DefaultAllocationManagerFactory.class\\E" + } + ] + }, + "globs":[ + { + "glob": "org/apache/arrow/memory/DefaultAllocationManagerFactory.class" + }, + { + "glob": "org/apache/arrow/memory/netty/DefaultAllocationManagerFactory.class" + }, + { + "glob": "org/apache/arrow/memory/unsafe/DefaultAllocationManagerFactory.class" + } + ] } \ No newline at end of file diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java index 30866c2b6..0b53f32ff 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java @@ -23,6 +23,7 @@ import com.google.cloud.bigquery.Acl.Domain; import com.google.cloud.bigquery.Acl.Entity; import com.google.cloud.bigquery.Acl.Entity.Type; +import com.google.cloud.bigquery.Acl.Expr; import com.google.cloud.bigquery.Acl.Group; import com.google.cloud.bigquery.Acl.IamMember; import com.google.cloud.bigquery.Acl.Role; @@ -136,4 +137,13 @@ public void testOf() { assertEquals(routine, acl.getEntity()); assertEquals(null, acl.getRole()); } + + @Test + public void testOfWithCondition() { + Expr expr = new Expr("expression", "title", "description", "location"); + Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER, expr); + Dataset.Access pb = acl.toPb(); + assertEquals(acl, Acl.fromPb(pb)); + assertEquals(acl.getCondition(), expr); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java index 41915e283..8a2094b55 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java @@ -21,7 +21,10 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -29,9 +32,14 @@ import com.google.api.client.http.HttpHeaders; import com.google.api.client.http.HttpResponseException; import com.google.cloud.BaseServiceException; +import com.google.cloud.ExceptionHandler; import com.google.cloud.RetryHelper.RetryHelperException; +import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import java.io.IOException; import java.net.SocketTimeoutException; +import java.util.HashMap; +import java.util.Map; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; @@ -158,4 +166,87 @@ public void testTranslateAndThrow() throws Exception { verify(exceptionMock, times(2)).getCause(); } } + + @Test + public void testDefaultExceptionHandler() throws java.io.IOException { + BigQueryOptions defaultOptions = + BigQueryOptions.newBuilder().setProjectId("project-id").build(); + DatasetInfo info = DatasetInfo.newBuilder("dataset").build(); + Dataset dataset = null; + + final com.google.api.services.bigquery.model.Dataset datasetPb = + info.setProjectId(defaultOptions.getProjectId()).toPb(); + final Map optionsMap = new HashMap<>(); + + BigQueryOptions mockOptions = spy(defaultOptions); + HttpBigQueryRpc bigQueryRpcMock = mock(HttpBigQueryRpc.class); + doReturn(bigQueryRpcMock).when(mockOptions).getBigQueryRpcV2(); + // java.net.SocketException is retry-able in the default exception handler. 
+ doThrow(java.net.SocketException.class) + .when(bigQueryRpcMock) + .createSkipExceptionTranslation(datasetPb, optionsMap); + + BigQuery bigquery = mockOptions.getService(); + try { + dataset = bigquery.create(info); + } catch (BigQueryException e) { + assertEquals(e.getCause().getClass(), java.net.SocketException.class); + assertNull(dataset); + } finally { + verify(bigQueryRpcMock, times(6)).createSkipExceptionTranslation(datasetPb, optionsMap); + } + } + + @Test + public void testCustomExceptionHandler() throws java.io.IOException { + BigQueryOptions defaultOptions = + BigQueryOptions.newBuilder() + .setProjectId("project-id") + .setResultRetryAlgorithm( + ExceptionHandler.newBuilder() + .abortOn(RuntimeException.class) + .retryOn(java.util.EmptyStackException.class) + .addInterceptors(BigQueryBaseService.EXCEPTION_HANDLER_INTERCEPTOR) + .build()) + .build(); + DatasetInfo info = DatasetInfo.newBuilder("dataset").build(); + Dataset dataset = null; + + final com.google.api.services.bigquery.model.Dataset datasetPb = + info.setProjectId(defaultOptions.getProjectId()).toPb(); + final Map optionsMap = new HashMap<>(); + + BigQueryOptions mockOptions = spy(defaultOptions); + HttpBigQueryRpc bigQueryRpcRetryMock = mock(HttpBigQueryRpc.class); + doReturn(bigQueryRpcRetryMock).when(mockOptions).getBigQueryRpcV2(); + doThrow(java.util.EmptyStackException.class) + .when(bigQueryRpcRetryMock) + .createSkipExceptionTranslation(datasetPb, optionsMap); + + BigQuery bigquery = mockOptions.getService(); + try { + dataset = bigquery.create(info); + } catch (BigQueryException e) { + assertEquals(e.getCause().getClass(), java.util.EmptyStackException.class); + assertNull(dataset); + } finally { + verify(bigQueryRpcRetryMock, times(6)).createSkipExceptionTranslation(datasetPb, optionsMap); + } + + BigQueryOptions mockOptionsAbort = spy(defaultOptions); + HttpBigQueryRpc bigQueryRpcAbortMock = mock(HttpBigQueryRpc.class); + doReturn(bigQueryRpcAbortMock).when(mockOptionsAbort).getBigQueryRpcV2(); + doThrow(RuntimeException.class) + .when(bigQueryRpcAbortMock) + .createSkipExceptionTranslation(datasetPb, optionsMap); + bigquery = mockOptionsAbort.getService(); + try { + dataset = bigquery.create(info); + } catch (BigQueryException e) { + assertEquals(e.getCause().getClass(), RuntimeException.class); + assertNull(dataset); + } finally { + verify(bigQueryRpcAbortMock, times(1)).createSkipExceptionTranslation(datasetPb, optionsMap); + } + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java index 62160bef2..393455e36 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java @@ -27,18 +27,23 @@ import com.google.api.services.bigquery.model.*; import com.google.api.services.bigquery.model.JobStatistics; import com.google.cloud.Policy; +import com.google.cloud.RetryOption; import com.google.cloud.ServiceOptions; import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery.DatasetOption; import com.google.cloud.bigquery.BigQuery.JobOption; import com.google.cloud.bigquery.BigQuery.QueryResultsOption; import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import 
com.google.common.base.Function; import com.google.common.base.Supplier; import com.google.common.collect.*; import java.io.IOException; import java.math.BigInteger; +import java.net.ConnectException; +import java.net.UnknownHostException; import java.util.Collections; import java.util.List; import java.util.Map; @@ -48,6 +53,7 @@ import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; +import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; @RunWith(MockitoJUnitRunner.class) @@ -81,16 +87,22 @@ public class BigQueryImplTest { Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), Acl.of(new Acl.View(TableId.of(PROJECT, "dataset", "table")))); private static final DatasetInfo DATASET_INFO = - DatasetInfo.newBuilder(DATASET).setAcl(ACCESS_RULES).setDescription("description").build(); + DatasetInfo.newBuilder(DATASET) + .setAcl(ACCESS_RULES) + .setDescription("description") + .setLocation(LOCATION) + .build(); private static final DatasetInfo DATASET_INFO_WITH_PROJECT = DatasetInfo.newBuilder(PROJECT, DATASET) .setAcl(ACCESS_RULES_WITH_PROJECT) .setDescription("description") + .setLocation(LOCATION) .build(); private static final DatasetInfo OTHER_DATASET_INFO = DatasetInfo.newBuilder(PROJECT, OTHER_DATASET) .setAcl(ACCESS_RULES) .setDescription("other description") + .setLocation(LOCATION) .build(); private static final TableId TABLE_ID = TableId.of(DATASET, TABLE); private static final TableId OTHER_TABLE_ID = TableId.of(PROJECT, DATASET, OTHER_TABLE); @@ -120,6 +132,7 @@ public class BigQueryImplTest { .setField("timestampField"); private static final TimePartitioning TIME_PARTITIONING_NULL_TYPE = TimePartitioning.fromPb(PB_TIMEPARTITIONING); + private static final ImmutableMap LABELS = ImmutableMap.of("key", "value"); private static final StandardTableDefinition TABLE_DEFINITION_WITH_PARTITIONING = StandardTableDefinition.newBuilder() .setSchema(TABLE_SCHEMA) @@ -143,6 +156,8 @@ public class BigQueryImplTest { TableInfo.of(TABLE_ID, TABLE_DEFINITION_WITH_RANGE_PARTITIONING); private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_DEFINITION); private static final TableInfo OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_DEFINITION); + private static final TableInfo OTHER_TABLE_WITH_LABELS_INFO = + TableInfo.newBuilder(OTHER_TABLE_ID, TABLE_DEFINITION).setLabels(LABELS).build(); private static final TableInfo TABLE_INFO_WITH_PROJECT = TableInfo.of(TABLE_ID_WITH_PROJECT, TABLE_DEFINITION); private static final TableInfo MODEL_TABLE_INFO_WITH_PROJECT = @@ -489,7 +504,7 @@ public class BigQueryImplTest { .build(); private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; - private BigQueryRpc bigqueryRpcMock; + private HttpBigQueryRpc bigqueryRpcMock; private BigQuery bigquery; private static final String RATE_LIMIT_ERROR_MSG = "Job exceeded rate limits: Your table exceeded quota for table update operations. 
For more information, see https://cloud.google.com/bigquery/docs/troubleshoot-quotas"; @@ -525,7 +540,7 @@ private BigQueryOptions createBigQueryOptionsForProjectWithLocation( @Before public void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); - bigqueryRpcMock = mock(BigQueryRpc.class); + bigqueryRpcMock = mock(HttpBigQueryRpc.class); when(rpcFactoryMock.create(any(BigQueryOptions.class))).thenReturn(bigqueryRpcMock); options = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); } @@ -537,21 +552,22 @@ public void testGetOptions() { } @Test - public void testCreateDataset() { + public void testCreateDataset() throws IOException { DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.create(datasetInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.createSkipExceptionTranslation(datasetInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(datasetInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Dataset dataset = bigquery.create(datasetInfo); assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetInfo)), dataset); - verify(bigqueryRpcMock).create(datasetInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(datasetInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testCreateDatasetWithSelectedFields() { - when(bigqueryRpcMock.create(eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) + public void testCreateDatasetWithSelectedFields() throws IOException { + when(bigqueryRpcMock.createSkipExceptionTranslation( + eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.create(DATASET_INFO, DATASET_OPTION_FIELDS); @@ -562,36 +578,54 @@ public void testCreateDatasetWithSelectedFields() { assertEquals(28, selector.length()); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).create(eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation( + eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); + } + + @Test + public void testCreateDatasetWithAccessPolicy() throws IOException { + DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); + when(bigqueryRpcMock.createSkipExceptionTranslation( + datasetInfo.toPb(), optionMap(datasetOption))) + .thenReturn(datasetInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Dataset dataset = bigquery.create(datasetInfo, datasetOption); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetInfo)), dataset); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation(datasetInfo.toPb(), optionMap(datasetOption)); } @Test - public void testGetDataset() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + public void testGetDataset() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DATASET); assertEquals( new Dataset(bigquery, new 
DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetNotFoundWhenThrowIsDisabled() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + public void testGetDatasetNotFoundWhenThrowIsDisabled() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); options.setThrowNotFound(false); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DATASET); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetNotFoundWhenThrowIsEnabled() { - when(bigqueryRpcMock.getDataset(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS)) - .thenReturn(null) + public void testGetDatasetNotFoundWhenThrowIsEnabled() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( + PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Dataset not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -601,35 +635,39 @@ public void testGetDatasetNotFoundWhenThrowIsEnabled() { } catch (BigQueryException ex) { Assert.assertNotNull(ex.getMessage()); } - verify(bigqueryRpcMock).getDataset(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getDatasetSkipExceptionTranslation(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetFromDatasetId() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + public void testGetDatasetFromDatasetId() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DatasetId.of(DATASET)); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetFromDatasetIdWithProject() { + public void testGetDatasetFromDatasetIdWithProject() throws IOException { DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); DatasetId datasetId = DatasetId.of(OTHER_PROJECT, DATASET); - when(bigqueryRpcMock.getDataset(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( + OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(datasetInfo.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(datasetId); assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetInfo)), dataset); - verify(bigqueryRpcMock).getDataset(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getDatasetSkipExceptionTranslation(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetWithSelectedFields() { - when(bigqueryRpcMock.getDataset(eq(PROJECT), eq(DATASET), capturedOptions.capture())) + public void 
testGetDatasetWithSelectedFields() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( + eq(PROJECT), eq(DATASET), capturedOptions.capture())) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DATASET, DATASET_OPTION_FIELDS); @@ -640,11 +678,12 @@ public void testGetDatasetWithSelectedFields() { assertEquals(28, selector.length()); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(eq(PROJECT), eq(DATASET), capturedOptions.capture()); + verify(bigqueryRpcMock) + .getDatasetSkipExceptionTranslation(eq(PROJECT), eq(DATASET), capturedOptions.capture()); } @Test - public void testListDatasets() { + public void testListDatasets() throws IOException { bigquery = options.getService(); ImmutableList datasetList = ImmutableList.of( @@ -652,16 +691,17 @@ public void testListDatasets() { new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listDatasets(PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listDatasets(); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals( datasetList.toArray(), Iterables.toArray(page.getValues(), DatasetInfo.class)); - verify(bigqueryRpcMock).listDatasets(PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListDatasetsWithProjects() { + public void testListDatasetsWithProjects() throws IOException { bigquery = options.getService(); ImmutableList datasetList = ImmutableList.of( @@ -669,30 +709,32 @@ public void testListDatasetsWithProjects() { bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO.setProjectId(OTHER_PROJECT)))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listDatasets(OTHER_PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(OTHER_PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listDatasets(OTHER_PROJECT); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals( datasetList.toArray(), Iterables.toArray(page.getValues(), DatasetInfo.class)); - verify(bigqueryRpcMock).listDatasets(OTHER_PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(OTHER_PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListEmptyDatasets() { + public void testListEmptyDatasets() throws IOException { ImmutableList datasets = ImmutableList.of(); Tuple> result = Tuple.>of(null, datasets); - when(bigqueryRpcMock.listDatasets(PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); bigquery = options.getService(); Page page = bigquery.listDatasets(); assertNull(page.getNextPageToken()); assertArrayEquals( ImmutableList.of().toArray(), Iterables.toArray(page.getValues(), Dataset.class)); - verify(bigqueryRpcMock).listDatasets(PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListDatasetsWithOptions() { + public void 
testListDatasetsWithOptions() throws IOException { bigquery = options.getService(); ImmutableList datasetList = ImmutableList.of( @@ -700,71 +742,83 @@ public void testListDatasetsWithOptions() { new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listDatasets(PROJECT, DATASET_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(PROJECT, DATASET_LIST_OPTIONS)) + .thenReturn(result); Page page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN, DATASET_LIST_PAGE_SIZE); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals( datasetList.toArray(), Iterables.toArray(page.getValues(), DatasetInfo.class)); - verify(bigqueryRpcMock).listDatasets(PROJECT, DATASET_LIST_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(PROJECT, DATASET_LIST_OPTIONS); } @Test - public void testDeleteDataset() { - when(bigqueryRpcMock.deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(true); + public void testDeleteDataset() throws IOException { + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(DATASET)); - verify(bigqueryRpcMock).deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteDatasetFromDatasetId() { - when(bigqueryRpcMock.deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(true); + public void testDeleteDatasetFromDatasetId() throws IOException { + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(DatasetId.of(DATASET))); - verify(bigqueryRpcMock).deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteDatasetFromDatasetIdWithProject() { + public void testDeleteDatasetFromDatasetIdWithProject() throws IOException { DatasetId datasetId = DatasetId.of(OTHER_PROJECT, DATASET); - when(bigqueryRpcMock.deleteDataset(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(true); + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation( + OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(datasetId)); - verify(bigqueryRpcMock).deleteDataset(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteDatasetWithOptions() { - when(bigqueryRpcMock.deleteDataset(PROJECT, DATASET, DATASET_DELETE_OPTIONS)).thenReturn(true); + public void testDeleteDatasetWithOptions() throws IOException { + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation( + PROJECT, DATASET, DATASET_DELETE_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(DATASET, DATASET_DELETE_CONTENTS)); - verify(bigqueryRpcMock).deleteDataset(PROJECT, DATASET, DATASET_DELETE_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, DATASET_DELETE_OPTIONS); } @Test - public void testUpdateDataset() { + public void 
testUpdateDataset() throws IOException { DatasetInfo updatedDatasetInfo = - DATASET_INFO - .setProjectId(OTHER_PROJECT) - .toBuilder() + DATASET_INFO.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") .build(); - when(bigqueryRpcMock.patch(updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.patchSkipExceptionTranslation( + updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updatedDatasetInfo.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.update(updatedDatasetInfo); assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfo)), dataset); - verify(bigqueryRpcMock).patch(updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .patchSkipExceptionTranslation(updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateDatasetWithSelectedFields() { + public void testUpdateDatasetWithSelectedFields() throws IOException { DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().setDescription("newDescription").build(); DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); - when(bigqueryRpcMock.patch(eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture())) + when(bigqueryRpcMock.patchSkipExceptionTranslation( + eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture())) .thenReturn(updatedDatasetInfoWithProject.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.update(updatedDatasetInfo, DATASET_OPTION_FIELDS); @@ -776,23 +830,25 @@ public void testUpdateDatasetWithSelectedFields() { assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfoWithProject)), dataset); verify(bigqueryRpcMock) - .patch(eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture()); + .patchSkipExceptionTranslation( + eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture()); } @Test - public void testCreateTable() { + public void testCreateTable() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.create(tableInfo.toPb(), EMPTY_RPC_OPTIONS)).thenReturn(tableInfo.toPb()); + when(bigqueryRpcMock.createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.create(tableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).create(tableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS); }
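The stubbing pattern used throughout this test class: the mock targets the concrete HttpBigQueryRpc rather than the BigQueryRpc interface, because the *SkipExceptionTranslation variants live there and declare the checked IOException, which is also why each migrated test now declares throws IOException. A minimal hypothetical sketch, reusing this file's identifiers where possible:

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc;
import java.io.IOException;
import java.util.Collections;

public class SkipTranslationStubSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical stub: the raw RPC layer throws the checked IOException;
    // translation to BigQueryException happens above it, in the retry loop.
    HttpBigQueryRpc rpc = mock(HttpBigQueryRpc.class);
    when(rpc.getDatasetSkipExceptionTranslation(
            "my-project", "my_dataset", Collections.emptyMap()))
        .thenThrow(new IOException("transient network failure"));
  }
}
```

@Test - public void tesCreateExternalTable() { + public void testCreateExternalTable() throws IOException { TableInfo createTableInfo = TableInfo.of(TABLE_ID, ExternalTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build()) .setProjectId(OTHER_PROJECT); @@ -800,32 +856,34 @@ com.google.api.services.bigquery.model.Table expectedCreateInput = createTableInfo.toPb().setSchema(TABLE_SCHEMA.toPb()); expectedCreateInput.getExternalDataConfiguration().setSchema(null); - when(bigqueryRpcMock.create(expectedCreateInput, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.createSkipExceptionTranslation(expectedCreateInput, EMPTY_RPC_OPTIONS)) .thenReturn(createTableInfo.toPb()); BigQueryOptions bigQueryOptions = 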
createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.create(createTableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(createTableInfo)), table); - verify(bigqueryRpcMock).create(expectedCreateInput, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(expectedCreateInput, EMPTY_RPC_OPTIONS); } @Test - public void testCreateTableWithoutProject() { + public void testCreateTableWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); tableInfo.toBuilder().setTableId(tableId); - when(bigqueryRpcMock.create(tableInfo.toPb(), EMPTY_RPC_OPTIONS)).thenReturn(tableInfo.toPb()); + when(bigqueryRpcMock.createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.create(tableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).create(tableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testCreateTableWithSelectedFields() { - when(bigqueryRpcMock.create(eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) + public void testCreateTableWithSelectedFields() throws IOException { + when(bigqueryRpcMock.createSkipExceptionTranslation( + eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS); @@ -835,34 +893,40 @@ public void testCreateTableWithSelectedFields() { assertTrue(selector.contains("etag")); assertEquals(31, selector.length()); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).create(eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation( + eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); } @Test - public void testGetTable() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + public void testGetTable() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.getTable(DATASET, TABLE); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetModel() { - when(bigqueryRpcMock.getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) + public void testGetModel() throws IOException { + when(bigqueryRpcMock.getModelSkipExceptionTranslation( + PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) .thenReturn(MODEL_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Model model = bigquery.getModel(DATASET, MODEL); assertEquals(new Model(bigquery, new ModelInfo.BuilderImpl(MODEL_INFO_WITH_PROJECT)), model); - verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, 
EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getModelSkipExceptionTranslation(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); } @Test - public void testGetModelNotFoundWhenThrowIsEnabled() { + public void testGetModelNotFoundWhenThrowIsEnabled() throws IOException { String expected = "Model not found"; - when(bigqueryRpcMock.getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) - .thenReturn(null) + when(bigqueryRpcMock.getModelSkipExceptionTranslation( + PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, expected)); options.setThrowNotFound(true); bigquery = options.getService(); @@ -871,39 +935,45 @@ public void testGetModelNotFoundWhenThrowIsEnabled() { } catch (BigQueryException ex) { assertEquals(expected, ex.getMessage()); } - verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getModelSkipExceptionTranslation(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); } @Test - public void testListPartition() { - when(bigqueryRpcMock.getTable( + public void testListPartition() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( PROJECT, DATASET, "table$__PARTITIONS_SUMMARY__", EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_PARTITIONS.toPb()); - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_WITH_PARTITIONS); bigquery = options.getService(); List partition = bigquery.listPartitions(TABLE_ID_WITH_PROJECT); assertEquals(3, partition.size()); verify(bigqueryRpcMock) - .getTable(PROJECT, DATASET, "table$__PARTITIONS_SUMMARY__", EMPTY_RPC_OPTIONS); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + .getTableSkipExceptionTranslation( + PROJECT, DATASET, "table$__PARTITIONS_SUMMARY__", EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableNotFoundWhenThrowIsDisabled() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + public void testGetTableNotFoundWhenThrowIsDisabled() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); options.setThrowNotFound(false); bigquery = options.getService(); Table table = bigquery.getTable(DATASET, TABLE); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableNotFoundWhenThrowIsEnabled() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS)) - .thenReturn(null) + public void testGetTableNotFoundWhenThrowIsEnabled() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Table not found")); options.setThrowNotFound(true); bigquery = options.getService(); @@ -913,49 +983,57 @@ public void testGetTableNotFoundWhenThrowIsEnabled() { } catch (BigQueryException ex) { Assert.assertNotNull(ex.getMessage()); } - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS); + 
verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS); } @Test - public void testGetTableFromTableId() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + public void testGetTableFromTableId() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.getTable(TABLE_ID); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableFromTableIdWithProject() { + public void testGetTableFromTableIdWithProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT); TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.getTable(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.getTable(tableId); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).getTable(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableFromTableIdWithoutProject() { + public void testGetTableFromTableIdWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.getTable(tableId); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableWithSelectedFields() { - when(bigqueryRpcMock.getTable(eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture())) + public void testGetTableWithSelectedFields() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture())) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS); @@ -966,11 +1044,12 @@ public void testGetTableWithSelectedFields() { assertEquals(31, selector.length()); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); verify(bigqueryRpcMock) - .getTable(eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture()); + .getTableSkipExceptionTranslation( + eq(PROJECT), eq(DATASET), eq(TABLE), 
capturedOptions.capture()); } @Test - public void testListTables() { + public void testListTables() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -979,60 +1058,67 @@ public void testListTables() { new Table(bigquery, new TableInfo.BuilderImpl(MODEL_TABLE_INFO_WITH_PROJECT))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Table>
page = bigquery.listTables(DATASET); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListTablesReturnedParameters() { + public void testListTablesReturnedParameters() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PARTITIONS))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page<Table>
page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListTablesReturnedParametersNullType() { + public void testListTablesReturnedParametersNullType() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PARTITIONS_NULL_TYPE))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page<Table>
page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListTablesWithRangePartitioning() { + public void testListTablesWithRangePartitioning() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_RANGE_PARTITIONING))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page<Table>
page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListTablesFromDatasetId() { + public void testListTablesFromDatasetId() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1040,30 +1126,51 @@ public void testListTablesFromDatasetId() { new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Table>
page = bigquery.listTables(DatasetId.of(DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListTablesFromDatasetIdWithProject() { + public void testListTablesFromDatasetIdWithProject() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO.setProjectId(OTHER_PROJECT)))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation( + OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Table>
page = bigquery.listTables(DatasetId.of(OTHER_PROJECT, DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); + } + + @Test + public void testListTablesWithLabels() throws IOException { + bigquery = options.getService(); + ImmutableList<Table>
tableList = + ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_WITH_LABELS_INFO))); + Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = + Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); + Page<Table>
page = bigquery.listTables(DATASET); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); + verify(bigqueryRpcMock).listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + assertEquals(LABELS, page.getValues().iterator().next().getLabels()); } @Test - public void testListTablesWithOptions() { + public void testListTablesWithOptions() throws IOException { bigquery = options.getService(); ImmutableList<Table>
tableList = ImmutableList.of( @@ -1071,15 +1178,17 @@ public void testListTablesWithOptions() { new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page<Table>
page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListModels() { + public void testListModels() throws IOException { bigquery = options.getService(); ImmutableList<Model> modelList = ImmutableList.of( @@ -1087,15 +1196,16 @@ new Model(bigquery, new ModelInfo.BuilderImpl(OTHER_MODEL_INFO))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Model>> result = Tuple.of(CURSOR, Iterables.transform(modelList, ModelInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Model> page = bigquery.listModels(DATASET); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(modelList.toArray(), Iterables.toArray(page.getValues(), Model.class)); - verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListModelsWithModelId() { + public void testListModelsWithModelId() throws IOException { bigquery = options.getService(); ImmutableList<Model> modelList = ImmutableList.of( @@ -1103,92 +1213,98 @@ new Model(bigquery, new ModelInfo.BuilderImpl(OTHER_MODEL_INFO))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Model>> result = Tuple.of(CURSOR, Iterables.transform(modelList, ModelInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Model> page = bigquery.listModels(DatasetId.of(DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(modelList.toArray(), Iterables.toArray(page.getValues(), Model.class)); - verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteTable() { - when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); + public void testDeleteTable() throws IOException { + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(TABLE_ID)); - verify(bigqueryRpcMock).deleteTable(PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE); } @Test - public void testDeleteTableFromTableId() { - when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); + public void testDeleteTableFromTableId() throws IOException { + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(TABLE_ID)); - verify(bigqueryRpcMock).deleteTable(PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE); } @Test - public void testDeleteTableFromTableIdWithProject() { + public void testDeleteTableFromTableIdWithProject() throws IOException { TableId tableId = 
TABLE_ID.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.deleteTable(OTHER_PROJECT, DATASET, TABLE)).thenReturn(true); + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE)) + .thenReturn(true); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); assertTrue(bigquery.delete(tableId)); - verify(bigqueryRpcMock).deleteTable(OTHER_PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE); } @Test - public void testDeleteTableFromTableIdWithoutProject() { + public void testDeleteTableFromTableIdWithoutProject() throws IOException { TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); - when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) + .thenReturn(true); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); assertTrue(bigquery.delete(tableId)); - verify(bigqueryRpcMock).deleteTable(PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE); } @Test - public void testDeleteModel() { - when(bigqueryRpcMock.deleteModel(PROJECT, DATASET, MODEL)).thenReturn(true); + public void testDeleteModel() throws IOException { + when(bigqueryRpcMock.deleteModelSkipExceptionTranslation(PROJECT, DATASET, MODEL)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(ModelId.of(DATASET, MODEL))); - verify(bigqueryRpcMock).deleteModel(PROJECT, DATASET, MODEL); + verify(bigqueryRpcMock).deleteModelSkipExceptionTranslation(PROJECT, DATASET, MODEL); } @Test - public void testUpdateModel() { + public void testUpdateModel() throws IOException { ModelInfo updateModelInfo = - MODEL_INFO_WITH_PROJECT - .setProjectId(OTHER_PROJECT) - .toBuilder() + MODEL_INFO_WITH_PROJECT.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") .build(); - when(bigqueryRpcMock.patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.patchSkipExceptionTranslation(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updateModelInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Model actualModel = bigquery.update(updateModelInfo); assertEquals(new Model(bigquery, new ModelInfo.BuilderImpl(updateModelInfo)), actualModel); - verify(bigqueryRpcMock).patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .patchSkipExceptionTranslation(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateTable() { + public void testUpdateTable() throws IOException { TableInfo updatedTableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT).toBuilder().setDescription("newDescription").build(); - when(bigqueryRpcMock.patch(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.patchSkipExceptionTranslation(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updatedTableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.update(updatedTableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfo)), table); - 
verify(bigqueryRpcMock).patch(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .patchSkipExceptionTranslation(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateExternalTableWithNewSchema() { + public void testUpdateExternalTableWithNewSchema() throws IOException { TableInfo updatedTableInfo = TableInfo.of(TABLE_ID, ExternalTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build()) .setProjectId(OTHER_PROJECT); @@ -1196,35 +1312,37 @@ public void testUpdateExternalTableWithNewSchema() { com.google.api.services.bigquery.model.Table expectedPatchInput = updatedTableInfo.toPb().setSchema(TABLE_SCHEMA.toPb()); expectedPatchInput.getExternalDataConfiguration().setSchema(null); - when(bigqueryRpcMock.patch(expectedPatchInput, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.patchSkipExceptionTranslation(expectedPatchInput, EMPTY_RPC_OPTIONS)) .thenReturn(updatedTableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.update(updatedTableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfo)), table); - verify(bigqueryRpcMock).patch(expectedPatchInput, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).patchSkipExceptionTranslation(expectedPatchInput, EMPTY_RPC_OPTIONS); } @Test - public void testUpdateTableWithoutProject() { + public void testUpdateTableWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); tableInfo.toBuilder().setTableId(tableId); - when(bigqueryRpcMock.patch(tableInfo.toPb(), EMPTY_RPC_OPTIONS)).thenReturn(tableInfo.toPb()); + when(bigqueryRpcMock.patchSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.update(tableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).patch(tableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).patchSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateTableWithSelectedFields() { + public void testUpdateTableWithSelectedFields() throws IOException { TableInfo updatedTableInfo = TABLE_INFO.toBuilder().setDescription("newDescription").build(); TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); - when(bigqueryRpcMock.patch(eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture())) + when(bigqueryRpcMock.patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture())) .thenReturn(updatedTableInfoWithProject.toPb()); bigquery = options.getService(); Table table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS); @@ -1236,15 +1354,17 @@ public void testUpdateTableWithSelectedFields() { assertEquals( new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), table); verify(bigqueryRpcMock) - .patch(eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture()); + .patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture()); } @Test - public void testUpdateTableWithAutoDetectSchema() { + public void testUpdateTableWithAutoDetectSchema() throws IOException { 
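For orientation before the auto-detect test body: TableOption.autodetectSchema(true) reaches the RPC layer as an ordinary options-map entry, which is what the captured options in this test assert on. Roughly, as a sketch only (assuming this test file's optionMap helper and an AUTODETECT_SCHEMA option key, neither of which is shown in this hunk):

  Map<BigQueryRpc.Option, Object> opts =
      optionMap(BigQuery.TableOption.autodetectSchema(true));
  // Expected: opts maps BigQueryRpc.Option.AUTODETECT_SCHEMA to true, which the
  // patchSkipExceptionTranslation stub then observes via capturedOptions.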
TableInfo updatedTableInfo = TABLE_INFO.toBuilder().setDescription("newDescription").build(); TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); - when(bigqueryRpcMock.patch(eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture())) + when(bigqueryRpcMock.patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture())) .thenReturn(updatedTableInfoWithProject.toPb()); bigquery = options.getService(); Table table = bigquery.update(updatedTableInfo, BigQuery.TableOption.autodetectSchema(true)); @@ -1254,11 +1374,12 @@ public void testUpdateTableWithAutoDetectSchema() { assertEquals( new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), table); verify(bigqueryRpcMock) - .patch(eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture()); + .patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture()); } @Test - public void testInsertAllWithRowIdShouldRetry() { + public void testInsertAllWithRowIdShouldRetry() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1293,12 +1414,11 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { new TableDataInsertAllResponse.InsertErrors() .setIndex(0L) .setErrors(ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); - when(bigqueryRpcMock.insertAll(PROJECT, DATASET, TABLE, requestPb)) + when(bigqueryRpcMock.insertAllSkipExceptionTranslation(PROJECT, DATASET, TABLE, requestPb)) .thenThrow(new BigQueryException(500, "InternalError")) .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -1307,7 +1427,8 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { assertNull(response.getErrorsFor(1L)); assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage()); - verify(bigqueryRpcMock, times(2)).insertAll(PROJECT, DATASET, TABLE, requestPb); + verify(bigqueryRpcMock, times(2)) + .insertAllSkipExceptionTranslation(PROJECT, DATASET, TABLE, requestPb); } @Test @@ -1341,8 +1462,7 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { when(bigqueryRpcMock.insertAll(PROJECT, DATASET, TABLE, requestPb)) .thenThrow(new BigQueryException(500, "InternalError")); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -1356,7 +1476,7 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { } @Test - public void testInsertAllWithProject() { + public void testInsertAllWithProject() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1392,7 +1512,8 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { new TableDataInsertAllResponse.InsertErrors() .setIndex(0L) .setErrors(ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); - when(bigqueryRpcMock.insertAll(OTHER_PROJECT, DATASET, TABLE, requestPb)) + when(bigqueryRpcMock.insertAllSkipExceptionTranslation( + OTHER_PROJECT, DATASET, TABLE, requestPb)) .thenReturn(responsePb); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); @@ -1402,11 +1523,12 @@ public 
TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { assertNull(response.getErrorsFor(1L)); assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage()); - verify(bigqueryRpcMock).insertAll(OTHER_PROJECT, DATASET, TABLE, requestPb); + verify(bigqueryRpcMock) + .insertAllSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE, requestPb); } @Test - public void testInsertAllWithProjectInTable() { + public void testInsertAllWithProjectInTable() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1442,7 +1564,8 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { new TableDataInsertAllResponse.InsertErrors() .setIndex(0L) .setErrors(ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); - when(bigqueryRpcMock.insertAll("project-different-from-option", DATASET, TABLE, requestPb)) + when(bigqueryRpcMock.insertAllSkipExceptionTranslation( + "project-different-from-option", DATASET, TABLE, requestPb)) .thenReturn(responsePb); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); @@ -1452,47 +1575,56 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { assertNull(response.getErrorsFor(1L)); assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage()); - verify(bigqueryRpcMock).insertAll("project-different-from-option", DATASET, TABLE, requestPb); + verify(bigqueryRpcMock) + .insertAllSkipExceptionTranslation( + "project-different-from-option", DATASET, TABLE, requestPb); } @Test - public void testListTableData() { - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + public void testListTableData() throws IOException { + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_PB); bigquery = options.getService(); Page page = bigquery.listTableData(DATASET, TABLE); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testListTableDataFromTableId() { - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + public void testListTableDataFromTableId() throws IOException { + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_PB); bigquery = options.getService(); Page page = bigquery.listTableData(TableId.of(DATASET, TABLE)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testListTableDataFromTableIdWithProject() { + public void testListTableDataFromTableIdWithProject() throws IOException { TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.listTableData(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + 
when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_PB); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Page page = bigquery.listTableData(tableId); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testListTableDataWithOptions() { - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) + public void testListTableDataWithOptions() throws IOException { + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) .thenReturn(TABLE_DATA_PB); bigquery = options.getService(); Page page = @@ -1504,14 +1636,15 @@ public void testListTableDataWithOptions() { TABLE_DATA_LIST_START_INDEX); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); } @Test - public void testListTableDataWithNextPage() { + public void testListTableDataWithNextPage() throws IOException { doReturn(TABLE_DATA_PB) .when(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); bigquery = options.getService(); TableResult page = bigquery.listTableData( @@ -1521,7 +1654,8 @@ public void testListTableDataWithNextPage() { TABLE_DATA_LIST_PAGE_TOKEN, TABLE_DATA_LIST_START_INDEX); assertEquals(CURSOR, page.getNextPageToken()); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); Map SECOND_TABLE_DATA_LIST_OPTIONS = ImmutableMap.of(BigQueryRpc.Option.PAGE_TOKEN, CURSOR, BigQueryRpc.Option.START_INDEX, 0L); @@ -1534,11 +1668,14 @@ public void testListTableDataWithNextPage() { new TableRow().setF(ImmutableList.of(new TableCell().setV("Value3"))), new TableRow().setF(ImmutableList.of(new TableCell().setV("Value4")))))) .when(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); assertTrue(page.hasNextPage()); page = page.getNextPage(); assertNull(page.getNextPageToken()); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); } // The "minimally initialized" Job that lets Job.fromPb run without throwing. 
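The helper's body sits outside the changed hunks, so it is not shown here; a plausible shape, strictly a sketch built from this file's own constants (the exact fields are an assumption, not the PR's code):

  private static com.google.api.services.bigquery.model.Job newJobPb() {
    // Just enough proto state for Job.fromPb to dispatch on: a job reference
    // plus a query configuration, with status and statistics left absent.
    return JobInfo.of(QueryJobConfiguration.of("SELECT 1")).toPb();
  }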
@@ -1550,23 +1687,46 @@ private static com.google.api.services.bigquery.model.Job newJobPb() { } @Test - public void testCreateJobSuccess() { + public void testCreateJobSuccess() throws IOException { String id = "testCreateJobSuccess-id"; JobId jobId = JobId.of(id); String query = "SELECT * in FOO"; - when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenReturn(newJobPb()); bigquery = options.getService(); assertThat(bigquery.create(JobInfo.of(jobId, QueryJobConfiguration.of(query)))).isNotNull(); assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); - verify(bigqueryRpcMock).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + } + + @Test + public void testCreateJobFailureShouldRetryExceptionHandlerExceptions() throws IOException { + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + .thenThrow(new UnknownHostException()) + .thenThrow(new ConnectException()) + .thenReturn(newJobPb()); + + bigquery = options.getService(); + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + + ((BigQueryImpl) bigquery).create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY)); + verify(bigqueryRpcMock, times(3)) + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); } @Test - public void testCreateJobFailureShouldRetry() { - when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + public void testCreateJobFailureShouldRetry() throws IOException { + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -1578,19 +1738,148 @@ public void testCreateJobFailureShouldRetry() { bigquery = options.getService(); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); ((BigQueryImpl) bigquery).create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY)); - verify(bigqueryRpcMock, times(6)).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + verify(bigqueryRpcMock, times(6)) + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + } + + @Test + public void testCreateJobWithBigQueryRetryConfigFailureShouldRetry() throws IOException { + // Validate create job with BigQueryRetryConfig that retries on rate limit error message. 
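The config built just below retries only when the failure's message matches a registered message or regex; in outline the predicate looks like this (a sketch, not the library's actual BigQueryRetryAlgorithm code, and the getter names are assumptions):

  static boolean retriableMessage(Throwable error, BigQueryRetryConfig config) {
    String message = error.getMessage() == null ? "" : error.getMessage();
    for (String retriable : config.getRetriableErrorMessages()) {
      if (message.contains(retriable)) {
        return true; // e.g. the rate-limit-exceeded message registered below
      }
    }
    for (String regEx : config.getRetriableRegExes()) {
      if (message.toLowerCase().matches(regEx.toLowerCase())) {
        return true;
      }
    }
    return false;
  }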
+ JobOption bigQueryRetryConfigOption = + JobOption.bigQueryRetryConfig( + BigQueryRetryConfig.newBuilder() + .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG) + .retryOnMessage(BigQueryErrorMessages.JOB_RATE_LIMIT_EXCEEDED_MSG) + .retryOnRegEx(BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX) + .build()); + + Map<BigQueryRpc.Option, Object> bigQueryRpcOptions = optionMap(bigQueryRetryConfigOption); + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(bigQueryRpcOptions))) + .thenThrow( + new BigQueryException( + 400, RATE_LIMIT_ERROR_MSG)) // retry based on RATE_LIMIT_EXCEEDED_MSG + .thenThrow(new BigQueryException(200, RATE_LIMIT_ERROR_MSG)) + .thenReturn(newJobPb()); + + bigquery = options.getService(); + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + + ((BigQueryImpl) bigquery) + .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), bigQueryRetryConfigOption); + verify(bigqueryRpcMock, times(3)) + .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions)); } @Test - public void testCreateJobWithSelectedFields() { - when(bigqueryRpcMock.create( + public void testCreateJobWithBigQueryRetryConfigFailureShouldNotRetry() throws IOException { + // Validate create job with BigQueryRetryConfig that does not retry on rate limit error message. + JobOption bigQueryRetryConfigOption = + JobOption.bigQueryRetryConfig(BigQueryRetryConfig.newBuilder().build()); + + Map<BigQueryRpc.Option, Object> bigQueryRpcOptions = optionMap(bigQueryRetryConfigOption); + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(bigQueryRpcOptions))) + .thenThrow(new BigQueryException(400, RATE_LIMIT_ERROR_MSG)); + + // Job create will still attempt to retrieve the job even when the create call returns a + // failure. + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + nullable(String.class), nullable(String.class), nullable(String.class), Mockito.any())) + .thenThrow(new BigQueryException(500, "InternalError")); + + bigquery = options.getService(); + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + + try { + ((BigQueryImpl) bigquery) + .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), bigQueryRetryConfigOption); + fail("JobException expected"); + } catch (BigQueryException e) { + assertNotNull(e.getMessage()); + } + // Verify that job creation is attempted only once and not retried since the error message + // does not match. + verify(bigqueryRpcMock, times(1)) + .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions)); + } + + @Test + public void testCreateJobWithRetryOptionsFailureShouldRetry() throws IOException { + // Validate create job with RetryOptions. 
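For context, a JobOption built from RetryOption values simply folds into the call's RetrySettings via the standard com.google.cloud.RetryOption API, so maxAttempts(4) in the test below leaves room for the three stubbed failures plus the final success (a sketch):

  RetrySettings merged =
      RetryOption.mergeToSettings(
          ServiceOptions.getDefaultRetrySettings(), RetryOption.maxAttempts(4));
  // merged.getMaxAttempts() == 4: one initial attempt plus up to three retries,
  // which is what the times(4) verification below relies on.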
+ JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(4)); + Map bigQueryRpcOptions = optionMap(retryOptions); + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(bigQueryRpcOptions))) + .thenThrow(new BigQueryException(500, "InternalError")) + .thenThrow(new BigQueryException(502, "Bad Gateway")) + .thenThrow(new BigQueryException(503, "Service Unavailable")) + .thenReturn(newJobPb()); + + bigquery = options.getService(); + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + + ((BigQueryImpl) bigquery) + .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), retryOptions); + verify(bigqueryRpcMock, times(4)) + .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions)); + } + + @Test + public void testCreateJobWithRetryOptionsFailureShouldNotRetry() throws IOException { + // Validate create job with RetryOptions that only attempts once (no retry). + JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(1)); + Map bigQueryRpcOptions = optionMap(retryOptions); + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(bigQueryRpcOptions))) + .thenThrow(new BigQueryException(500, "InternalError")) + .thenReturn(newJobPb()); + + // Job create will attempt to retrieve the job even in the case when the job is created in a + // returned failure. + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + nullable(String.class), nullable(String.class), nullable(String.class), Mockito.any())) + .thenThrow(new BigQueryException(500, "InternalError")); + + bigquery = options.getService(); + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + + try { + ((BigQueryImpl) bigquery) + .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), retryOptions); + fail("JobException expected"); + } catch (BigQueryException e) { + assertNotNull(e.getMessage()); + } + verify(bigqueryRpcMock, times(1)) + .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions)); + } + + @Test + public void testCreateJobWithSelectedFields() throws IOException { + when(bigqueryRpcMock.createSkipExceptionTranslation( any(com.google.api.services.bigquery.model.Job.class), capturedOptions.capture())) .thenReturn(newJobPb()); @@ -1605,16 +1894,18 @@ public void testCreateJobWithSelectedFields() { .asList() .containsExactly("jobReference", "configuration", "user_email"); verify(bigqueryRpcMock) - .create(any(com.google.api.services.bigquery.model.Job.class), capturedOptions.capture()); + .createSkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class), capturedOptions.capture()); } @Test - public void testCreateJobNoGet() { + public void testCreateJobNoGet() throws IOException { String id = "testCreateJobNoGet-id"; JobId jobId = JobId.of(id); String query = "SELECT * in FOO"; - when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenThrow(new BigQueryException(409, "already exists, for some reason")); bigquery = options.getService(); @@ -1624,11 +1915,12 @@ public void testCreateJobNoGet() { } catch (BigQueryException e) { assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); } - verify(bigqueryRpcMock).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + verify(bigqueryRpcMock) + 
.createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); } @Test - public void testCreateJobTryGet() { + public void testCreateJobTryGet() throws IOException { final String id = "testCreateJobTryGet-id"; String query = "SELECT * in FOO"; Supplier idProvider = @@ -1639,33 +1931,37 @@ public JobId get() { } }; - when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenThrow(new BigQueryException(409, "already exists, for some reason")); - when(bigqueryRpcMock.getJob( + when(bigqueryRpcMock.getJobSkipExceptionTranslation( any(String.class), eq(id), eq((String) null), eq(EMPTY_RPC_OPTIONS))) .thenReturn(newJobPb()); bigquery = options.getService(); ((BigQueryImpl) bigquery).create(JobInfo.of(QueryJobConfiguration.of(query)), idProvider); assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); - verify(bigqueryRpcMock).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); verify(bigqueryRpcMock) - .getJob(any(String.class), eq(id), eq((String) null), eq(EMPTY_RPC_OPTIONS)); + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation( + any(String.class), eq(id), eq((String) null), eq(EMPTY_RPC_OPTIONS)); } @Test - public void testCreateJobTryGetNotRandom() { + public void testCreateJobTryGetNotRandom() throws IOException { Map withStatisticOption = optionMap(JobOption.fields(STATISTICS)); final String id = "testCreateJobTryGet-id"; String query = "SELECT * in FOO"; - when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenThrow( new BigQueryException( 409, "already exists, for some reason", new RuntimeException("Already Exists: Job"))); - when(bigqueryRpcMock.getJob( + when(bigqueryRpcMock.getJobSkipExceptionTranslation( any(String.class), eq(id), eq((String) null), eq(withStatisticOption))) .thenReturn( newJobPb() @@ -1677,18 +1973,21 @@ public void testCreateJobTryGetNotRandom() { ((BigQueryImpl) bigquery).create(JobInfo.of(JobId.of(id), QueryJobConfiguration.of(query))); assertThat(job).isNotNull(); assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); - verify(bigqueryRpcMock).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); verify(bigqueryRpcMock) - .getJob(any(String.class), eq(id), eq((String) null), eq(withStatisticOption)); + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation( + any(String.class), eq(id), eq((String) null), eq(withStatisticOption)); } @Test - public void testCreateJobWithProjectId() { + public void testCreateJobWithProjectId() throws IOException { JobInfo jobInfo = JobInfo.newBuilder(QUERY_JOB_CONFIGURATION.setProjectId(OTHER_PROJECT)) .setJobId(JobId.of(OTHER_PROJECT, JOB)) .build(); - when(bigqueryRpcMock.create(eq(jobInfo.toPb()), capturedOptions.capture())) + when(bigqueryRpcMock.createSkipExceptionTranslation( + eq(jobInfo.toPb()), capturedOptions.capture())) .thenReturn(jobInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); @@ -1700,46 +1999,58 @@ public void testCreateJobWithProjectId() { assertTrue(selector.contains("configuration")); assertTrue(selector.contains("user_email")); assertEquals(37, selector.length()); - 
verify(bigqueryRpcMock).create(eq(jobInfo.toPb()), capturedOptions.capture()); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation(eq(jobInfo.toPb()), capturedOptions.capture()); } @Test - public void testGetJob() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + public void testDeleteJob() throws IOException { + JobId jobId = JobId.newBuilder().setJob(JOB).setProject(PROJECT).setLocation(LOCATION).build(); + when(bigqueryRpcMock.deleteJobSkipExceptionTranslation(PROJECT, JOB, LOCATION)) + .thenReturn(true); + bigquery = options.getService(); + assertTrue(bigquery.delete(jobId)); + verify(bigqueryRpcMock).deleteJobSkipExceptionTranslation(PROJECT, JOB, LOCATION); + } + + @Test + public void testGetJob() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); bigquery = options.getService(); Job job = bigquery.getJob(JOB); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobWithLocation() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) + public void testGetJobWithLocation() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); bigquery = options.getService(); Job job = bigquery.getJob(JOB); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobNotFoundWhenThrowIsDisabled() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + public void testGetJobNotFoundWhenThrowIsDisabled() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); options.setThrowNotFound(false); bigquery = options.getService(); Job job = bigquery.getJob(JOB); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobNotFoundWhenThrowIsEnabled() { - when(bigqueryRpcMock.getJob(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS)) - .thenReturn(null) - .thenThrow(new BigQueryException(404, "Job not found")); + public void testGetJobNotFoundWhenThrowIsEnabled() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS)) + .thenThrow(new IOException("Job not found")); options.setThrowNotFound(true); bigquery = options.getService(); try { @@ -1748,57 +2059,63 @@ public void testGetJobNotFoundWhenThrowIsEnabled() { } catch (BigQueryException ex) { Assert.assertNotNull(ex.getMessage()); } - verify(bigqueryRpcMock).getJob(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS); } @Test - public void 
testGetJobFromJobId() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + public void testGetJobFromJobId() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); bigquery = options.getService(); Job job = bigquery.getJob(JobId.of(JOB)); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobIdWithLocation() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) + public void testGetJobFromJobIdWithLocation() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); bigquery = options.getService(); Job job = bigquery.getJob(JobId.of(JOB)); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobIdWithProject() { + public void testGetJobFromJobIdWithProject() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); JobInfo jobInfo = COPY_JOB.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.getJob(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(jobInfo.toPb()); bigquery = options.getService(); Job job = bigquery.getJob(jobId); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(jobInfo)), job); - verify(bigqueryRpcMock).getJob(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobIdWithProjectWithLocation() { + public void testGetJobFromJobIdWithProjectWithLocation() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); JobInfo jobInfo = COPY_JOB.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.getJob(OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(jobInfo.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); bigquery = options.getService(); Job job = bigquery.getJob(jobId); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(jobInfo)), job); - verify(bigqueryRpcMock).getJob(OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); } @Test - public void testListJobs() { + public void testListJobs() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -1815,15 +2132,16 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { return job.toPb(); } })); - when(bigqueryRpcMock.listJobs(PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listJobsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listJobs(); 
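One detail worth flagging from the not-found test above: the stub now throws a raw IOException, yet the assertion still catches a BigQueryException, matching the wrapping the service layer presumably applies at the call site (a sketch of the assumed shape, not the PR's exact code; project, job, location, and optionsMap are placeholder names):

  try {
    return bigqueryRpc.getJobSkipExceptionTranslation(project, job, location, optionsMap);
  } catch (IOException ex) {
    // Exception translation now happens above the RPC layer, which is why these
    // tests stub the raw *SkipExceptionTranslation variants and declare IOException.
    throw new BigQueryException(ex);
  }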
assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(jobList.toArray(), Iterables.toArray(page.getValues(), Job.class)); - verify(bigqueryRpcMock).listJobs(PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listJobsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListJobsWithOptions() { + public void testListJobsWithOptions() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -1840,17 +2158,18 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { return job.toPb(); } })); - when(bigqueryRpcMock.listJobs(PROJECT, JOB_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listJobsSkipExceptionTranslation(PROJECT, JOB_LIST_OPTIONS)) + .thenReturn(result); Page page = bigquery.listJobs( JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, JOB_LIST_PAGE_TOKEN, JOB_LIST_PAGE_SIZE); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(jobList.toArray(), Iterables.toArray(page.getValues(), Job.class)); - verify(bigqueryRpcMock).listJobs(PROJECT, JOB_LIST_OPTIONS); + verify(bigqueryRpcMock).listJobsSkipExceptionTranslation(PROJECT, JOB_LIST_OPTIONS); } @Test - public void testListJobsWithSelectedFields() { + public void testListJobsWithSelectedFields() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -1867,7 +2186,8 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { return job.toPb(); } })); - when(bigqueryRpcMock.listJobs(eq(PROJECT), capturedOptions.capture())).thenReturn(result); + when(bigqueryRpcMock.listJobsSkipExceptionTranslation(eq(PROJECT), capturedOptions.capture())) + .thenReturn(result); Page page = bigquery.listJobs(JOB_LIST_OPTION_FIELD); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(jobList.toArray(), Iterables.toArray(page.getValues(), Job.class)); @@ -1880,36 +2200,37 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { assertTrue(selector.contains("errorResult")); assertTrue(selector.contains(")")); assertEquals(75, selector.length()); - verify(bigqueryRpcMock).listJobs(eq(PROJECT), capturedOptions.capture()); + verify(bigqueryRpcMock) + .listJobsSkipExceptionTranslation(eq(PROJECT), capturedOptions.capture()); } @Test - public void testCancelJob() { - when(bigqueryRpcMock.cancel(PROJECT, JOB, null)).thenReturn(true); + public void testCancelJob() throws IOException { + when(bigqueryRpcMock.cancelSkipExceptionTranslation(PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(JOB)); - verify(bigqueryRpcMock).cancel(PROJECT, JOB, null); + verify(bigqueryRpcMock).cancelSkipExceptionTranslation(PROJECT, JOB, null); } @Test - public void testCancelJobFromJobId() { - when(bigqueryRpcMock.cancel(PROJECT, JOB, null)).thenReturn(true); + public void testCancelJobFromJobId() throws IOException { + when(bigqueryRpcMock.cancelSkipExceptionTranslation(PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(JobId.of(PROJECT, JOB))); - verify(bigqueryRpcMock).cancel(PROJECT, JOB, null); + verify(bigqueryRpcMock).cancelSkipExceptionTranslation(PROJECT, JOB, null); } @Test - public void testCancelJobFromJobIdWithProject() { + public void testCancelJobFromJobIdWithProject() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); - when(bigqueryRpcMock.cancel(OTHER_PROJECT, JOB, null)).thenReturn(true); + when(bigqueryRpcMock.cancelSkipExceptionTranslation(OTHER_PROJECT, JOB, 
null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(jobId)); - verify(bigqueryRpcMock).cancel(OTHER_PROJECT, JOB, null); + verify(bigqueryRpcMock).cancelSkipExceptionTranslation(OTHER_PROJECT, JOB, null); } @Test - public void testQueryRequestCompleted() throws InterruptedException { + public void testQueryRequestCompleted() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb = new com.google.api.services.bigquery.model.Job() @@ -1929,13 +2250,13 @@ public void testQueryRequestCompleted() throws InterruptedException { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.create( + when(bigqueryRpcMock.createSkipExceptionTranslation( JOB_INFO.toPb(), Collections.emptyMap())) .thenReturn(jobResponsePb); - when(bigqueryRpcMock.getQueryResults( + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb); - when(bigqueryRpcMock.listTableData( + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, Collections.emptyMap())) .thenReturn( new TableDataList() @@ -1952,16 +2273,19 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) assertThat(row.get(1).getLongValue()).isEqualTo(1); } verify(bigqueryRpcMock) - .create(JOB_INFO.toPb(), Collections.emptyMap()); + .createSkipExceptionTranslation( + JOB_INFO.toPb(), Collections.emptyMap()); verify(bigqueryRpcMock) - .getQueryResults(PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); verify(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, Collections.emptyMap()); + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, Collections.emptyMap()); } @Test - public void testFastQueryRequestCompleted() throws InterruptedException { + public void testFastQueryRequestCompleted() throws InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse queryResponsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -1973,7 +2297,7 @@ public void testFastQueryRequestCompleted() throws InterruptedException { .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenReturn(queryResponsePb); bigquery = options.getService(); @@ -1996,11 +2320,12 @@ public void testFastQueryRequestCompleted() throws InterruptedException { assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); assertNull(requestPb.getLocation()); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQueryRequestCompletedWithLocation() throws InterruptedException { + public void testFastQueryRequestCompletedWithLocation() throws InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse queryResponsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2012,7 +2337,7 @@ public void testFastQueryRequestCompletedWithLocation() throws InterruptedExcept 
.setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenReturn(queryResponsePb); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); @@ -2036,11 +2361,12 @@ public void testFastQueryRequestCompletedWithLocation() throws InterruptedExcept assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); assertEquals(LOCATION, requestPb.getLocation()); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQueryMultiplePages() throws InterruptedException { + public void testFastQueryMultiplePages() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job responseJob = new com.google.api.services.bigquery.model.Job() @@ -2049,8 +2375,9 @@ public void testFastQueryMultiplePages() throws InterruptedException { .setId(JOB) .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")); responseJob.getConfiguration().getQuery().setDestinationTable(TABLE_ID.toPb()); - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)).thenReturn(responseJob); - when(bigqueryRpcMock.listTableData( + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + .thenReturn(responseJob); + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, optionMap(BigQuery.TableDataListOption.pageToken(CURSOR)))) .thenReturn( new TableDataList() @@ -2070,7 +2397,7 @@ PROJECT, DATASET, TABLE, optionMap(BigQuery.TableDataListOption.pageToken(CURSOR .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenReturn(queryResponsePb); bigquery = options.getService(); @@ -2086,15 +2413,16 @@ PROJECT, DATASET, TABLE, optionMap(BigQuery.TableDataListOption.pageToken(CURSOR requestPb.getDefaultDataset().getDatasetId()); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); verify(bigqueryRpcMock) - .listTableData( + .listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, optionMap(BigQuery.TableDataListOption.pageToken(CURSOR))); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQuerySlowDdl() throws InterruptedException { + public void testFastQuerySlowDdl() throws InterruptedException, IOException { // mock new fast query path response when running a query that takes more than 10s JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.QueryResponse queryResponsePb = @@ -2121,14 +2449,16 @@ public void testFastQuerySlowDdl() throws InterruptedException { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), 
requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenReturn(queryResponsePb); responseJob.getConfiguration().getQuery().setDestinationTable(TABLE_ID.toPb()); - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)).thenReturn(responseJob); - when(bigqueryRpcMock.getQueryResults( + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + .thenReturn(responseJob); + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(queryResultsResponsePb); - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(new TableDataList().setRows(ImmutableList.of(TABLE_ROW)).setTotalRows(1L)); bigquery = options.getService(); @@ -2147,15 +2477,18 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) requestPb.getDefaultDataset().getDatasetId()); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); verify(bigqueryRpcMock) - .getQueryResults(PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testQueryRequestCompletedOptions() throws InterruptedException { + public void testQueryRequestCompletedOptions() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb = new com.google.api.services.bigquery.model.Job() @@ -2175,7 +2508,7 @@ public void testQueryRequestCompletedOptions() throws InterruptedException { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.create( + when(bigqueryRpcMock.createSkipExceptionTranslation( JOB_INFO.toPb(), Collections.emptyMap())) .thenReturn(jobResponsePb); @@ -2183,10 +2516,10 @@ public void testQueryRequestCompletedOptions() throws InterruptedException { QueryResultsOption pageSizeOption = QueryResultsOption.pageSize(42L); optionMap.put(pageSizeOption.getRpcOption(), pageSizeOption.getValue()); - when(bigqueryRpcMock.getQueryResults( + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb); - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, optionMap)) + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, optionMap)) .thenReturn( new TableDataList() .setPageToken("") @@ -2203,14 +2536,17 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) assertThat(row.get(1).getLongValue()).isEqualTo(1); } verify(bigqueryRpcMock) - .create(JOB_INFO.toPb(), Collections.emptyMap()); + .createSkipExceptionTranslation( + JOB_INFO.toPb(), 
Collections.emptyMap()); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); verify(bigqueryRpcMock) - .getQueryResults(PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, optionMap); + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, optionMap); } @Test - public void testQueryRequestCompletedOnSecondAttempt() throws InterruptedException { + public void testQueryRequestCompletedOnSecondAttempt() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb1 = new com.google.api.services.bigquery.model.Job() @@ -2235,16 +2571,16 @@ public void testQueryRequestCompletedOnSecondAttempt() throws InterruptedExcepti .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.create( + when(bigqueryRpcMock.createSkipExceptionTranslation( JOB_INFO.toPb(), Collections.emptyMap())) .thenReturn(jobResponsePb1); - when(bigqueryRpcMock.getQueryResults( + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb1); - when(bigqueryRpcMock.getQueryResults( + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb2); - when(bigqueryRpcMock.listTableData( + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, Collections.emptyMap())) .thenReturn( new TableDataList() @@ -2261,17 +2597,44 @@ PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) assertThat(row.get(1).getLongValue()).isEqualTo(1); } verify(bigqueryRpcMock) - .create(JOB_INFO.toPb(), Collections.emptyMap()); + .createSkipExceptionTranslation( + JOB_INFO.toPb(), Collections.emptyMap()); verify(bigqueryRpcMock) - .getQueryResults(PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); verify(bigqueryRpcMock) - .getQueryResults(PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); verify(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, Collections.emptyMap()); + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, Collections.emptyMap()); } @Test - public void testGetQueryResults() { + public void testQueryWithTimeoutSetsTimeout() throws InterruptedException, IOException { + com.google.api.services.bigquery.model.QueryResponse queryResponsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setCacheHit(false) + .setJobComplete(true) + .setKind("bigquery#queryResponse") + .setPageToken(null) + .setRows(ImmutableList.of(TABLE_ROW)) + .setSchema(TABLE_SCHEMA.toPb()) + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) + .thenReturn(queryResponsePb); + + bigquery = options.getService(); + Object result = bigquery.queryWithTimeout(QUERY_JOB_CONFIGURATION_FOR_QUERY, null, 1000L); + assertTrue(result instanceof TableResult); + QueryRequest requestPb = requestPbCapture.getValue(); + assertEquals((Long) 1000L, requestPb.getTimeoutMs()); + } + + @Test + public 
void testGetQueryResults() throws IOException { JobId queryJob = JobId.of(JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2283,17 +2646,19 @@ public void testGetQueryResults() { .setPageToken(CURSOR) .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.getQueryResults(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(responsePb); bigquery = options.getService(); QueryResponse response = bigquery.getQueryResults(queryJob); assertEquals(true, response.getCompleted()); assertEquals(null, response.getSchema()); - verify(bigqueryRpcMock).getQueryResults(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetQueryResultsRetry() { + public void testGetQueryResultsRetry() throws IOException { JobId queryJob = JobId.of(JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2306,7 +2671,8 @@ public void testGetQueryResultsRetry() { .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.getQueryResults(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2319,8 +2685,7 @@ public void testGetQueryResultsRetry() { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2332,11 +2697,12 @@ public void testGetQueryResultsRetry() { // EMPTY_RPC_OPTIONS) as there is no // identifier in this method which can potentially differ and which can be used to // establish idempotency - verify(bigqueryRpcMock, times(6)).getQueryResults(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock, times(6)) + .getQueryResultsSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetQueryResultsWithProject() { + public void testGetQueryResultsWithProject() throws IOException { JobId queryJob = JobId.of(OTHER_PROJECT, JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2348,17 +2714,19 @@ public void testGetQueryResultsWithProject() { .setPageToken(CURSOR) .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.getQueryResults(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(responsePb); bigquery = options.getService(); QueryResponse response = bigquery.getQueryResults(queryJob); assertTrue(response.getCompleted()); assertEquals(null, response.getSchema()); - verify(bigqueryRpcMock).getQueryResults(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetQueryResultsWithOptions() { + public void testGetQueryResultsWithOptions() throws IOException { JobId queryJob = JobId.of(PROJECT, JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2369,7 +2737,8 @@ public void testGetQueryResultsWithOptions() {
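The retry test above pins down the convention these tests rely on: HTTP 500, 502, 503, and 504 are treated as transient, and getQueryResults is safe to re-issue because the request carries no per-attempt identifier. Below is a minimal hand-rolled sketch of that convention, for illustration only; it assumes BigQueryException exposes the HTTP status via getCode() (inherited from BaseServiceException), and the real client delegates retry policy to the RetrySettings configured on the options builder rather than a loop like this.

    import com.google.common.collect.ImmutableSet;
    import java.util.Set;

    final class RetrySketch {
      private static final Set<Integer> TRANSIENT_CODES = ImmutableSet.of(500, 502, 503, 504);

      interface Call<T> {
        T attempt(); // assumed to throw the unchecked BigQueryException on failure
      }

      static <T> T withRetries(Call<T> call, int maxAttempts) {
        BigQueryException last = null;
        for (int attempt = 0; attempt < maxAttempts; attempt++) {
          try {
            return call.attempt();
          } catch (BigQueryException e) {
            if (!TRANSIENT_CODES.contains(e.getCode())) {
              throw e; // a 501, for example, surfaces at once, as testNonRetryableException expects
            }
            last = e; // transient status; try again
          }
        }
        throw last; // attempts exhausted
      }
    }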
.setPageToken(CURSOR) .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.getQueryResults(PROJECT, JOB, null, QUERY_RESULTS_OPTIONS)) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, QUERY_RESULTS_OPTIONS)) .thenReturn(responsePb); bigquery = options.getService(); QueryResponse response = @@ -2381,34 +2750,34 @@ public void testGetQueryResultsWithOptions() { QUERY_RESULTS_OPTION_PAGE_TOKEN); assertEquals(true, response.getCompleted()); assertEquals(null, response.getSchema()); - verify(bigqueryRpcMock).getQueryResults(PROJECT, JOB, null, QUERY_RESULTS_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation(PROJECT, JOB, null, QUERY_RESULTS_OPTIONS); } @Test - public void testGetDatasetRetryableException() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + public void testGetDatasetRetryableException() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(500, "InternalError")) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); Dataset dataset = bigquery.getDataset(DATASET); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock, times(2)).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock, times(2)) + .getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testNonRetryableException() { + public void testNonRetryableException() throws IOException { String exceptionMessage = "Not Implemented"; - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(501, exceptionMessage)); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2418,17 +2787,16 @@ public void testNonRetryableException() { } catch (BigQueryException ex) { Assert.assertEquals(exceptionMessage, ex.getMessage()); } - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testRuntimeException() { + public void testRuntimeException() throws IOException { String exceptionMessage = "Artificial runtime exception"; - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new RuntimeException(exceptionMessage)); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2438,15 +2806,14 @@ public void testRuntimeException() { } catch (BigQueryException ex) { Assert.assertTrue(ex.getMessage().endsWith(exceptionMessage)); } - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test public void testQueryDryRun() throws Exception { // https://github.com/googleapis/google-cloud-java/issues/2479 try { - options - .toBuilder() + options.toBuilder() 
.setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService() @@ -2469,7 +2836,7 @@ public void testFastQuerySQLShouldRetry() throws Exception { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2477,8 +2844,7 @@ public void testFastQuerySQLShouldRetry() throws Exception { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2495,7 +2861,8 @@ public void testFastQuerySQLShouldRetry() throws Exception { } assertTrue(idempotent); - verify(bigqueryRpcMock, times(5)).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock, times(5)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test @@ -2510,7 +2877,7 @@ public void testFastQueryDMLShouldRetry() throws Exception { .setNumDmlAffectedRows(1L) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2518,8 +2885,7 @@ public void testFastQueryDMLShouldRetry() throws Exception { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2536,7 +2902,8 @@ public void testFastQueryDMLShouldRetry() throws Exception { } assertTrue(idempotent); - verify(bigqueryRpcMock, times(5)).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock, times(5)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test @@ -2551,7 +2918,7 @@ public void testFastQueryRateLimitIdempotency() throws Exception { .setNumDmlAffectedRows(1L) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2562,8 +2929,7 @@ public void testFastQueryRateLimitIdempotency() throws Exception { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2584,7 +2950,8 @@ public void testFastQueryRateLimitIdempotency() throws Exception { } assertTrue(idempotent); - verify(bigqueryRpcMock, times(6)).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock, times(6)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test @@ -2622,7 +2989,7 @@ public void testFastQueryDDLShouldRetry() throws Exception { .setTotalBytesProcessed(42L) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + 
when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2630,8 +2997,7 @@ public void testFastQueryDDLShouldRetry() throws Exception { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2648,11 +3014,12 @@ public void testFastQueryDDLShouldRetry() throws Exception { } assertTrue(idempotent); - verify(bigqueryRpcMock, times(5)).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock, times(5)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQueryBigQueryException() throws InterruptedException { + public void testFastQueryBigQueryException() throws InterruptedException, IOException { List errorProtoList = ImmutableList.of( new ErrorProto() @@ -2669,7 +3036,8 @@ public void testFastQueryBigQueryException() throws InterruptedException { .setPageToken(null) .setErrors(errorProtoList); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())).thenReturn(responsePb); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) + .thenReturn(responsePb); bigquery = options.getService(); try { @@ -2685,122 +3053,133 @@ public void testFastQueryBigQueryException() throws InterruptedException { QUERY_JOB_CONFIGURATION_FOR_QUERY.getDefaultDataset().getDataset(), requestPb.getDefaultDataset().getDatasetId()); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testCreateRoutine() { + public void testCreateRoutine() throws IOException { RoutineInfo routineInfo = ROUTINE_INFO.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.create(routineInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.createSkipExceptionTranslation(routineInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(routineInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Routine actualRoutine = bigquery.create(routineInfo); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(routineInfo)), actualRoutine); - verify(bigqueryRpcMock).create(routineInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(routineInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testGetRoutine() { - when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + public void testGetRoutine() throws IOException { + when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( + PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenReturn(ROUTINE_INFO.toPb()); bigquery = options.getService(); Routine routine = bigquery.getRoutine(DATASET, ROUTINE); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); - verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); } @Test - public void testGetRoutineWithRountineId() { - 
when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + public void testGetRoutineWithRountineId() throws IOException { + when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( + PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenReturn(ROUTINE_INFO.toPb()); bigquery = options.getService(); Routine routine = bigquery.getRoutine(ROUTINE_ID); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); - verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); } @Test - public void testGetRoutineWithEnabledThrowNotFoundException() { - when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) - .thenReturn(null) + public void testGetRoutineWithEnabledThrowNotFoundException() throws IOException { + when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( + PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Routine not found")); options.setThrowNotFound(true); bigquery = options.getService(); try { - Routine routine = bigquery.getRoutine(ROUTINE_ID); + bigquery.getRoutine(ROUTINE_ID); fail(); } catch (BigQueryException ex) { assertEquals("Routine not found", ex.getMessage()); } - verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); } @Test - public void testUpdateRoutine() { + public void testUpdateRoutine() throws IOException { RoutineInfo updatedRoutineInfo = - ROUTINE_INFO - .setProjectId(OTHER_PROJECT) - .toBuilder() + ROUTINE_INFO.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") .build(); - when(bigqueryRpcMock.update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.updateSkipExceptionTranslation( + updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updatedRoutineInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Routine routine = bigquery.update(updatedRoutineInfo); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(updatedRoutineInfo)), routine); - verify(bigqueryRpcMock).update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .updateSkipExceptionTranslation(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testListRoutines() { + public void testListRoutines() throws IOException { bigquery = options.getService(); ImmutableList routineList = ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listRoutines(DATASET); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); - verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListRoutinesWithDatasetId() { + public void testListRoutinesWithDatasetId() throws 
IOException { bigquery = options.getService(); ImmutableList routineList = ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listRoutines(DatasetId.of(PROJECT, DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); - verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteRoutine() { - when(bigqueryRpcMock.deleteRoutine(PROJECT, DATASET, ROUTINE)).thenReturn(true); + public void testDeleteRoutine() throws IOException { + when(bigqueryRpcMock.deleteRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(ROUTINE_ID)); - verify(bigqueryRpcMock).deleteRoutine(PROJECT, DATASET, ROUTINE); + verify(bigqueryRpcMock).deleteRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE); } @Test public void testWriteWithJob() throws IOException { bigquery = options.getService(); Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -2808,24 +3187,25 @@ public void testWriteWithJob() throws IOException { assertEquals(job, writer.getJob()); bigquery.writer(JOB_INFO.getJobId(), LOAD_CONFIGURATION); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test public void testWriteChannel() throws IOException { bigquery = options.getService(); Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -2833,43 +3213,47 @@ public void testWriteChannel() throws IOException { assertEquals(job, writer.getJob()); bigquery.writer(LOAD_CONFIGURATION); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new 
com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test - public void testGetIamPolicy() { + public void testGetIamPolicy() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final com.google.api.services.bigquery.model.Policy apiPolicy = PolicyHelper.convertToApiPolicy(SAMPLE_IAM_POLICY); - when(bigqueryRpcMock.getIamPolicy(resourceId, EMPTY_RPC_OPTIONS)).thenReturn(apiPolicy); + when(bigqueryRpcMock.getIamPolicySkipExceptionTranslation(resourceId, EMPTY_RPC_OPTIONS)) + .thenReturn(apiPolicy); bigquery = options.getService(); Policy policy = bigquery.getIamPolicy(TABLE_ID); assertEquals(policy, SAMPLE_IAM_POLICY); - verify(bigqueryRpcMock).getIamPolicy(resourceId, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getIamPolicySkipExceptionTranslation(resourceId, EMPTY_RPC_OPTIONS); } @Test - public void testSetIamPolicy() { + public void testSetIamPolicy() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final com.google.api.services.bigquery.model.Policy apiPolicy = PolicyHelper.convertToApiPolicy(SAMPLE_IAM_POLICY); - when(bigqueryRpcMock.setIamPolicy(resourceId, apiPolicy, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.setIamPolicySkipExceptionTranslation( + resourceId, apiPolicy, EMPTY_RPC_OPTIONS)) .thenReturn(apiPolicy); bigquery = options.getService(); Policy returnedPolicy = bigquery.setIamPolicy(TABLE_ID, SAMPLE_IAM_POLICY); assertEquals(returnedPolicy, SAMPLE_IAM_POLICY); - verify(bigqueryRpcMock).setIamPolicy(resourceId, apiPolicy, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .setIamPolicySkipExceptionTranslation(resourceId, apiPolicy, EMPTY_RPC_OPTIONS); } @Test - public void testTestIamPermissions() { + public void testTestIamPermissions() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final List checkedPermissions = ImmutableList.of("foo", "bar", "baz"); @@ -2877,16 +3261,19 @@ public void testTestIamPermissions() { final com.google.api.services.bigquery.model.TestIamPermissionsResponse response = new com.google.api.services.bigquery.model.TestIamPermissionsResponse() .setPermissions(grantedPermissions); - when(bigqueryRpcMock.testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) .thenReturn(response); bigquery = options.getService(); List perms = bigquery.testIamPermissions(TABLE_ID, checkedPermissions); assertEquals(perms, grantedPermissions); - verify(bigqueryRpcMock).testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); } @Test - public void testTestIamPermissionsWhenNoPermissionsGranted() { + public void testTestIamPermissionsWhenNoPermissionsGranted() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final List checkedPermissions = ImmutableList.of("foo", "bar", "baz"); @@ -2894,11 +3281,14 @@ public void 
testTestIamPermissionsWhenNoPermissionsGranted() { final com.google.api.services.bigquery.model.TestIamPermissionsResponse response = new com.google.api.services.bigquery.model.TestIamPermissionsResponse() .setPermissions(null); - when(bigqueryRpcMock.testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) .thenReturn(response); bigquery = options.getService(); List perms = bigquery.testIamPermissions(TABLE_ID, checkedPermissions); assertEquals(perms, ImmutableList.of()); - verify(bigqueryRpcMock).testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java new file mode 100644 index 000000000..ca150eb1b --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java @@ -0,0 +1,296 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
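The new BigQueryResultImplTest added below feeds rows into a bounded BlockingQueue and terminates the stream with an explicit end-of-buffer marker (an EndOfFieldValueList sentinel in one test, Row(null, true) in the other); the ResultSet drains the queue until it meets the marker. A toy sketch of that producer/consumer contract, with every name hypothetical:

    import java.util.concurrent.BlockingQueue;
    import java.util.concurrent.LinkedBlockingDeque;

    // Toy illustration of the buffer-plus-sentinel pattern: the consumer reads
    // until it sees the marker, mirroring ResultSet.next() returning false.
    public final class SentinelQueueDemo {
      private static final Object END_OF_STREAM = new Object(); // hypothetical poison pill

      public static void main(String[] args) throws InterruptedException {
        BlockingQueue<Object> buffer = new LinkedBlockingDeque<>(10);
        buffer.put("row-1");
        buffer.put("row-2");
        buffer.put(END_OF_STREAM); // producer signals "no more rows"

        Object item;
        while ((item = buffer.take()) != END_OF_STREAM) { // reference check on the sentinel
          System.out.println("consumed " + item);
        }
      }
    }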
+ */ + +package com.google.cloud.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.ConnectionImpl.EndOfFieldValueList; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.common.collect.ImmutableList; +import com.google.common.io.BaseEncoding; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalTime; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import org.apache.arrow.vector.util.Text; +import org.junit.Test; + +public class BigQueryResultImplTest { + + private static final Schema SCHEMA = + Schema.of( + Field.newBuilder("boolean", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("long", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("double", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("string", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("bytes", StandardSQLTypeName.BYTES).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("timestamp", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("time", StandardSQLTypeName.TIME).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("date", StandardSQLTypeName.DATE).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("intArray", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REPEATED) + .build(), + Field.newBuilder("stringArray", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REPEATED) + .build()); + + private static final FieldList FIELD_LIST_SCHEMA = + FieldList.of( + Field.of("boolean", LegacySQLTypeName.BOOLEAN), + Field.of("long", LegacySQLTypeName.INTEGER), + Field.of("double", LegacySQLTypeName.FLOAT), + Field.of("string", LegacySQLTypeName.STRING), + Field.of("bytes", LegacySQLTypeName.BYTES), + Field.of("timestamp", LegacySQLTypeName.TIMESTAMP), + Field.of("time", LegacySQLTypeName.TIME), + Field.of("date", LegacySQLTypeName.DATE), + Field.of("intArray", LegacySQLTypeName.INTEGER), + Field.of("stringArray", LegacySQLTypeName.STRING)); + + private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD}; + private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); + private static final Timestamp EXPECTED_TIMESTAMP = Timestamp.valueOf("2025-01-02 03:04:05.0"); + private static final String TIME = "20:21:22"; + private static final Time EXPECTED_TIME = Time.valueOf(LocalTime.of(20, 21, 22)); + private static final String DATE = "2020-01-21"; + private static final int DATE_INT = 0; + private static final Date EXPECTED_DATE = java.sql.Date.valueOf(DATE); + private static final ArrayList<Integer> EXPECTED_INT_ARRAY = + new ArrayList<>(Arrays.asList(0, 1, 2, 3, 4)); + private static final String[] STRING_ARRAY = {"str1", "str2", "str3"}; + private static final ArrayList<String> EXPECTED_STRING_ARRAY = + new ArrayList<>(Arrays.asList(STRING_ARRAY)); + private static final int BUFFER_SIZE = 10; + + @Test + public void testResultSetFieldValueList() throws InterruptedException, SQLException { + BlockingQueue<AbstractList<FieldValue>> buffer = new LinkedBlockingDeque<>(BUFFER_SIZE); + FieldValueList fieldValues = + FieldValueList.of(
ImmutableList.of( + FieldValue.of(Attribute.PRIMITIVE, "false"), + FieldValue.of(Attribute.PRIMITIVE, "1"), + FieldValue.of(Attribute.PRIMITIVE, "1.5"), + FieldValue.of(Attribute.PRIMITIVE, "string_value"), + FieldValue.of(Attribute.PRIMITIVE, BYTES_BASE64), + FieldValue.of( + Attribute.PRIMITIVE, + Long.toString(EXPECTED_TIMESTAMP.getTime() / 1000), + false), // getTime is in milliseconds. + FieldValue.of(Attribute.PRIMITIVE, TIME), + FieldValue.of(Attribute.PRIMITIVE, DATE), + FieldValue.of(Attribute.REPEATED, EXPECTED_INT_ARRAY), + FieldValue.of(Attribute.REPEATED, STRING_ARRAY)), + FIELD_LIST_SCHEMA); + buffer.put(fieldValues); + + FieldValueList nullValues = + FieldValueList.of( + ImmutableList.of( + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.REPEATED, null), + FieldValue.of(Attribute.REPEATED, null)), + FIELD_LIST_SCHEMA); + buffer.put(nullValues); + + buffer.put(new EndOfFieldValueList()); // End of buffer marker. + + BigQueryResultImpl<AbstractList<FieldValue>> bigQueryResult = + new BigQueryResultImpl<>(SCHEMA, 1, buffer, null); + ResultSet resultSet = bigQueryResult.getResultSet(); + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isEqualTo("string_value"); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getString("string")).isEqualTo("string_value"); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getInt("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getLong("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getDouble("double")).isEqualTo(1.5); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBigDecimal("double")).isEqualTo(BigDecimal.valueOf(1.5)); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBytes("bytes")).isEqualTo(BYTES); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTimestamp("timestamp")).isEqualTo(EXPECTED_TIMESTAMP); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTime("time").getTime()).isEqualTo(EXPECTED_TIME.getTime()); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getDate("date").getTime()).isEqualTo(EXPECTED_DATE.getTime()); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("intArray").getArray()).isEqualTo(EXPECTED_INT_ARRAY); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("stringArray").getArray()).isEqualTo(EXPECTED_STRING_ARRAY); + assertThat(resultSet.wasNull()).isFalse(); + + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getString("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getInt("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getLong("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDouble("double")).isEqualTo(0.0); + assertThat(resultSet.wasNull()).isTrue(); +
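The null-row assertions above and below encode standard JDBC semantics rather than anything BigQuery-specific: primitive getters such as getInt and getBoolean cannot return null, so they map SQL NULL to 0 or false, and the caller must consult wasNull() afterwards. A generic sketch of the resulting idiom, against plain java.sql:

    import java.sql.ResultSet;
    import java.sql.SQLException;

    final class NullableReads {
      // After a primitive getter, wasNull() is the only way to distinguish
      // a stored 0 from SQL NULL.
      static Long readNullableLong(ResultSet rs, String column) throws SQLException {
        long value = rs.getLong(column); // returns 0 when the column is NULL
        return rs.wasNull() ? null : value;
      }
    }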
assertThat(resultSet.getBigDecimal("double")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBytes("bytes")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTimestamp("timestamp")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTime("time")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDate("date")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("intArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("stringArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + + assertThat(resultSet.next()).isFalse(); + } + + @Test + public void testResultSetReadApi() throws InterruptedException, SQLException { + BlockingQueue<BigQueryResultImpl.Row> buffer = new LinkedBlockingDeque<>(BUFFER_SIZE); + + Map<String, Object> rowValues = new HashMap<>(); + rowValues.put("boolean", false); + rowValues.put("long", 1L); + rowValues.put("double", 1.5); + rowValues.put("string", new Text("string_value")); + rowValues.put("bytes", BYTES); + rowValues.put("timestamp", EXPECTED_TIMESTAMP.getTime() * 1000); + rowValues.put("time", EXPECTED_TIME.getTime() * 1000); + rowValues.put("date", DATE_INT); + rowValues.put("intArray", EXPECTED_INT_ARRAY); + rowValues.put("stringArray", STRING_ARRAY); + buffer.put(new BigQueryResultImpl.Row(rowValues)); + + Map<String, Object> nullValues = new HashMap<>(); + nullValues.put("boolean", null); + nullValues.put("long", null); + nullValues.put("double", null); + nullValues.put("string", null); + nullValues.put("bytes", null); + nullValues.put("timestamp", null); + nullValues.put("time", null); + nullValues.put("date", null); + nullValues.put("intArray", null); + nullValues.put("stringArray", null); + buffer.put(new BigQueryResultImpl.Row(nullValues)); + + buffer.put(new BigQueryResultImpl.Row(null, true)); // End of buffer marker. + + BigQueryResultImpl<BigQueryResultImpl.Row> bigQueryResult = + new BigQueryResultImpl<>(SCHEMA, 1, buffer, null); + ResultSet resultSet = bigQueryResult.getResultSet(); + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isEqualTo(new Text("string_value")); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getString("string")).isEqualTo("string_value"); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getInt("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getLong("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getDouble("double")).isEqualTo(1.5); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBigDecimal("double")).isEqualTo(BigDecimal.valueOf(1.5)); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBytes("bytes")).isEqualTo(BYTES); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTimestamp("timestamp")).isEqualTo(EXPECTED_TIMESTAMP); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTime("time").getTime()).isEqualTo(EXPECTED_TIME.getTime()); + assertThat(resultSet.wasNull()).isFalse(); + // Do not check the date value, as Date objects do not have a timezone but toString() applies the + // JVM default timezone, which causes flakes in non-UTC zones.
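The comment above describes a classic pitfall worth spelling out: java.sql.Date wraps a zone-independent epoch-millis instant, but toString() renders it in the JVM default time zone, so string-level comparisons flake outside UTC. Comparing getTime() values, as the first test does, sidesteps this. A small self-contained illustration using only the JDK:

    import java.sql.Date;
    import java.util.TimeZone;

    public final class DateZoneDemo {
      public static void main(String[] args) {
        Date date = Date.valueOf("2020-01-21"); // parsed in the JVM default zone
        long millis = date.getTime();           // zone-independent value, safe to assert on

        TimeZone.setDefault(TimeZone.getTimeZone("Pacific/Kiritimati"));
        // Same instant, but toString() may now print a different calendar day.
        System.out.println(millis + " -> " + new Date(millis));
      }
    }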
+ assertThat(resultSet.getDate("date")).isNotNull(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("intArray")).isNotNull(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("stringArray")).isNotNull(); + assertThat(resultSet.wasNull()).isFalse(); + + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getString("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getInt("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getLong("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDouble("double")).isEqualTo(0.0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBigDecimal("double")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBytes("bytes")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTimestamp("timestamp")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTime("time")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDate("date")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("intArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("stringArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + + assertThat(resultSet.next()).isFalse(); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java index 7a6cac30f..0cc680ddd 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java @@ -32,8 +32,7 @@ public class ColumnReferenceTest { public void testToBuilder() { compareColumnReferenceDefinition(COLUMN_REFERENCE, COLUMN_REFERENCE.toBuilder().build()); ColumnReference columnReference = - COLUMN_REFERENCE - .toBuilder() + COLUMN_REFERENCE.toBuilder() .setReferencingColumn("col1") .setReferencedColumn("col2") .build(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java index dff73d6bd..4edc6f05d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java @@ -27,9 +27,10 @@ import com.google.cloud.ServiceOptions; import com.google.cloud.Tuple; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; -import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.ListenableFuture; +import java.io.IOException; import java.math.BigInteger; import java.sql.SQLException; import java.util.AbstractList; @@ -50,7 +51,7 @@ public class ConnectionImplTest { private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; - private BigQueryRpc bigqueryRpcMock; + private HttpBigQueryRpc bigqueryRpcMock; private 
Connection connectionMock; private BigQuery bigquery; private ConnectionImpl connection; @@ -100,6 +101,15 @@ public class ConnectionImplTest { .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)) .setSchema(FAST_QUERY_TABLESCHEMA); + private static final GetQueryResultsResponse GET_QUERY_RESULTS_RESPONSE_EMPTY = + new GetQueryResultsResponse() + .setJobReference(QUERY_JOB.toPb()) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken(PAGE_TOKEN) + .setTotalBytesProcessed(0L) + .setTotalRows(BigInteger.valueOf(0L)) + .setSchema(FAST_QUERY_TABLESCHEMA); private static final GetQueryResultsResponse GET_QUERY_RESULTS_RESPONSE_NULL_SCHEMA = new GetQueryResultsResponse() @@ -133,7 +143,7 @@ private BigQueryOptions createBigQueryOptionsForProject( @Before public void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); - bigqueryRpcMock = mock(BigQueryRpc.class); + bigqueryRpcMock = mock(HttpBigQueryRpc.class); connectionMock = mock(Connection.class); when(rpcFactoryMock.create(any(BigQueryOptions.class))).thenReturn(bigqueryRpcMock); options = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); @@ -145,8 +155,7 @@ public void setUp() { .setNumBufferedRows(DEFAULT_PAGE_SIZE) .build(); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -155,10 +164,11 @@ public void setUp() { } @Test - public void testFastQuerySinglePage() throws BigQuerySQLException { + public void testFastQuerySinglePage() throws BigQuerySQLException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse().setSchema(FAST_QUERY_TABLESCHEMA).setJobComplete(true); - when(bigqueryRpcMock.queryRpc(any(String.class), any(QueryRequest.class))) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) .thenReturn(mockQueryRes); ConnectionImpl connectionSpy = Mockito.spy(connection); doReturn(BQ_RS_MOCK_RES) @@ -176,13 +186,14 @@ public void testFastQuerySinglePage() throws BigQuerySQLException { @Test // NOTE: This doesn't truly paginate.
Returns a response while mocking // processQueryResponseResults - public void testFastQueryMultiplePages() throws BigQuerySQLException { + public void testFastQueryMultiplePages() throws BigQuerySQLException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse() .setSchema(FAST_QUERY_TABLESCHEMA) .setJobComplete(true) .setPageToken(PAGE_TOKEN); - when(bigqueryRpcMock.queryRpc(any(String.class), any(QueryRequest.class))) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) .thenReturn(mockQueryRes); ConnectionImpl connectionSpy = Mockito.spy(connection); @@ -206,7 +217,7 @@ public void testClose() throws BigQuerySQLException { } @Test - public void testQueryDryRun() throws BigQuerySQLException { + public void testQueryDryRun() throws BigQuerySQLException, IOException { List queryParametersMock = ImmutableList.of( new QueryParameter().setParameterType(new QueryParameterType().setType("STRING"))); @@ -228,17 +239,19 @@ public void testQueryDryRun() throws BigQuerySQLException { new com.google.api.services.bigquery.model.Job() .setStatistics(jobStatsMock) .setConfiguration(jobConfig); - when(bigqueryRpcMock.createJobForQuery(any(com.google.api.services.bigquery.model.Job.class))) + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) .thenReturn(mockDryRunJob); BigQueryDryRunResult dryRunResult = connection.dryRun(DRY_RUN_SQL); assertEquals(1, dryRunResult.getQueryParameters().size()); assertEquals(QUERY_SCHEMA, dryRunResult.getSchema()); verify(bigqueryRpcMock, times(1)) - .createJobForQuery(any(com.google.api.services.bigquery.model.Job.class)); + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); } @Test - public void testQueryDryRunNoQueryParameters() throws BigQuerySQLException { + public void testQueryDryRunNoQueryParameters() throws BigQuerySQLException, IOException { com.google.api.services.bigquery.model.JobStatistics2 queryMock = new com.google.api.services.bigquery.model.JobStatistics2() .setSchema(FAST_QUERY_TABLESCHEMA); @@ -256,13 +269,15 @@ public void testQueryDryRunNoQueryParameters() throws BigQuerySQLException { new com.google.api.services.bigquery.model.Job() .setStatistics(jobStatsMock) .setConfiguration(jobConfig); - when(bigqueryRpcMock.createJobForQuery(any(com.google.api.services.bigquery.model.Job.class))) + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) .thenReturn(mockDryRunJob); BigQueryDryRunResult dryRunResult = connection.dryRun(DRY_RUN_SQL); assertEquals(0, dryRunResult.getQueryParameters().size()); assertEquals(QUERY_SCHEMA, dryRunResult.getSchema()); verify(bigqueryRpcMock, times(1)) - .createJobForQuery(any(com.google.api.services.bigquery.model.Job.class)); + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); } @Test @@ -349,8 +364,8 @@ public void testNextPageTask() throws InterruptedException { } @Test - public void testGetQueryResultsFirstPage() { - when(bigqueryRpcMock.getQueryResultsWithRowLimit( + public void testGetQueryResultsFirstPage() throws IOException { + when(bigqueryRpcMock.getQueryResultsWithRowLimitSkipExceptionTranslation( any(String.class), any(String.class), any(String.class), @@ -361,7 +376,7 @@ public void testGetQueryResultsFirstPage() { assertNotNull(response); 
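Throughout this file, the RPC layer is a full mock while ConnectionImpl itself is a Mockito spy whose internal helpers (isFastQuerySupported, processQueryResponseResults, getQueryResultsFirstPage) are stubbed with doReturn, so only the code path under test executes for real. A minimal sketch of that partial-mock idiom on a hypothetical class, using only real Mockito calls:

    import static org.mockito.Mockito.doReturn;
    import static org.mockito.Mockito.spy;

    class Service { // hypothetical, for illustration only
      String fetchRemote() { return "network call"; } // expensive collaborator
      String handle() { return "handled: " + fetchRemote(); } // method under test
    }

    class SpyIdiomDemo {
      public static void main(String[] args) {
        Service serviceSpy = spy(new Service());
        // doReturn/when (rather than when/thenReturn) avoids invoking the real
        // fetchRemote() while arming the stub on a spy.
        doReturn("stubbed").when(serviceSpy).fetchRemote();
        System.out.println(serviceSpy.handle()); // prints "handled: stubbed"
      }
    }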
assertEquals(GET_QUERY_RESULTS_RESPONSE, response); verify(bigqueryRpcMock, times(1)) - .getQueryResultsWithRowLimit( + .getQueryResultsWithRowLimitSkipExceptionTranslation( any(String.class), any(String.class), any(String.class), @@ -371,11 +386,10 @@ public void testGetQueryResultsFirstPage() { // calls executeSelect with a nonFast query and exercises createQueryJob @Test - public void testLegacyQuerySinglePage() throws BigQuerySQLException { + public void testLegacyQuerySinglePage() throws BigQuerySQLException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); com.google.api.services.bigquery.model.Job jobResponseMock = new com.google.api.services.bigquery.model.Job() - // .setConfiguration(QUERY_JOB.g) .setJobReference(QUERY_JOB.toPb()) .setId(JOB) .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")); @@ -392,18 +406,49 @@ public void testLegacyQuerySinglePage() throws BigQuerySQLException { any(JobId.class), any(GetQueryResultsResponse.class), any(Boolean.class)); - when(bigqueryRpcMock.createJobForQuery(any(com.google.api.services.bigquery.model.Job.class))) + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) .thenReturn(jobResponseMock); // RPC call in createQueryJob BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); assertEquals(res.getTotalRows(), 2); assertEquals(QUERY_SCHEMA, res.getSchema()); verify(bigqueryRpcMock, times(1)) - .createJobForQuery(any(com.google.api.services.bigquery.model.Job.class)); + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); + } + + // calls executeSelect with a nonFast query where the query returns an empty result. + @Test + public void testLegacyQuerySinglePageEmptyResults() throws SQLException, IOException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + com.google.api.services.bigquery.model.Job jobResponseMock = + new com.google.api.services.bigquery.model.Job() + .setJobReference(QUERY_JOB.toPb()) + .setId(JOB) + .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")); + // emulating a legacy query + doReturn(false).when(connectionSpy).isFastQuerySupported(); + doReturn(GET_QUERY_RESULTS_RESPONSE_EMPTY) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) + .thenReturn(jobResponseMock); // RPC call in createQueryJob + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 0); + assertEquals(QUERY_SCHEMA, res.getSchema()); + assertEquals( + false, + res.getResultSet() + .next()); // Validates that NPE does not occur when reading from empty ResultSet. 
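The empty-result test above protects the caller-side contract that ResultSet.next() simply returns false on its first call when there are no rows, so the standard consumption loop needs no special casing. In plain JDBC terms:

    import java.sql.ResultSet;
    import java.sql.SQLException;

    final class ResultSets {
      // With zero rows, next() is false immediately and the body never runs,
      // which is the property testLegacyQuerySinglePageEmptyResults pins down.
      static int countRows(ResultSet rs) throws SQLException {
        int rows = 0;
        while (rs.next()) {
          rows++;
        }
        return rows;
      }
    }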
+ verify(bigqueryRpcMock, times(1)) + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); } // exercises getSubsequentQueryResultsWithJob for fast running queries @Test - public void testFastQueryLongRunning() throws SQLException { + public void testFastQueryLongRunning() throws SQLException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); // emulating a fast query doReturn(true).when(connectionSpy).isFastQuerySupported(); @@ -423,17 +468,19 @@ public void testFastQueryLongRunning() throws SQLException { .setTotalRows(new BigInteger(String.valueOf(4L))) .setJobReference(QUERY_JOB.toPb()) .setRows(TABLE_ROWS); - when(bigqueryRpcMock.queryRpc(any(String.class), any(QueryRequest.class))) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) .thenReturn(mockQueryRes); BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); assertEquals(res.getTotalRows(), 2); assertEquals(QUERY_SCHEMA, res.getSchema()); - verify(bigqueryRpcMock, times(1)).queryRpc(any(String.class), any(QueryRequest.class)); + verify(bigqueryRpcMock, times(1)) + .queryRpcSkipExceptionTranslation(any(String.class), any(QueryRequest.class)); } @Test public void testFastQueryLongRunningAsync() - throws SQLException, ExecutionException, InterruptedException { + throws SQLException, ExecutionException, InterruptedException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); // emulating a fast query doReturn(true).when(connectionSpy).isFastQuerySupported(); @@ -453,7 +500,8 @@ public void testFastQueryLongRunningAsync() .setTotalRows(new BigInteger(String.valueOf(4L))) .setJobReference(QUERY_JOB.toPb()) .setRows(TABLE_ROWS); - when(bigqueryRpcMock.queryRpc(any(String.class), any(QueryRequest.class))) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) .thenReturn(mockQueryRes); ListenableFuture executeSelectFut = connectionSpy.executeSelectAsync(SQL_QUERY); @@ -462,15 +510,17 @@ public void testFastQueryLongRunningAsync() assertEquals(res.getTotalRows(), 2); assertEquals(QUERY_SCHEMA, res.getSchema()); assertTrue(exSelRes.getIsSuccessful()); - verify(bigqueryRpcMock, times(1)).queryRpc(any(String.class), any(QueryRequest.class)); + verify(bigqueryRpcMock, times(1)) + .queryRpcSkipExceptionTranslation(any(String.class), any(QueryRequest.class)); } @Test public void testFastQuerySinglePageAsync() - throws BigQuerySQLException, ExecutionException, InterruptedException { + throws BigQuerySQLException, ExecutionException, InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse().setSchema(FAST_QUERY_TABLESCHEMA).setJobComplete(true); - when(bigqueryRpcMock.queryRpc(any(String.class), any(QueryRequest.class))) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) .thenReturn(mockQueryRes); ConnectionImpl connectionSpy = Mockito.spy(connection); doReturn(BQ_RS_MOCK_RES) @@ -535,13 +585,14 @@ public void testExecuteSelectSlowWithParamsAsync() @Test public void testFastQueryMultiplePagesAsync() - throws BigQuerySQLException, ExecutionException, InterruptedException { + throws BigQuerySQLException, ExecutionException, InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse mockQueryRes = new QueryResponse() .setSchema(FAST_QUERY_TABLESCHEMA) .setJobComplete(true) .setPageToken(PAGE_TOKEN); - 
when(bigqueryRpcMock.queryRpc(any(String.class), any(QueryRequest.class))) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) .thenReturn(mockQueryRes); ConnectionImpl connectionSpy = Mockito.spy(connection); @@ -565,7 +616,7 @@ public void testFastQueryMultiplePagesAsync() @Test // Emulates first page response using getQueryResultsFirstPage(jobId) and then subsequent pages // using getSubsequentQueryResultsWithJob(). - public void testLegacyQueryMultiplePages() throws SQLException { + public void testLegacyQueryMultiplePages() throws SQLException, IOException { ConnectionImpl connectionSpy = Mockito.spy(connection); com.google.api.services.bigquery.model.JobStatistics jobStatistics = new com.google.api.services.bigquery.model.JobStatistics(); @@ -584,13 +635,15 @@ public void testLegacyQueryMultiplePages() throws SQLException { .setId(JOB) .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")) .setStatistics(jobStatistics); - when(bigqueryRpcMock.createJobForQuery(any(com.google.api.services.bigquery.model.Job.class))) + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) .thenReturn(jobResponseMock); // RPC call in createQueryJob BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); assertEquals(res.getTotalRows(), 2); assertEquals(QUERY_SCHEMA, res.getSchema()); verify(bigqueryRpcMock, times(1)) - .createJobForQuery(any(com.google.api.services.bigquery.model.Job.class)); + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); verify(connectionSpy, times(1)) .tableDataList(any(GetQueryResultsResponse.class), any(JobId.class)); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java index dfe76adaf..3f21bf1c0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java @@ -44,6 +44,7 @@ public class CopyJobConfigurationTest { EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); private static final Map<String, String> LABELS = ImmutableMap.of("job-name", "copy"); private static final Long TIMEOUT = 10L; + private static final String RESERVATION = "reservation"; private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = CopyJobConfiguration.newBuilder(DESTINATION_TABLE, SOURCE_TABLE) .setCreateDisposition(CREATE_DISPOSITION) @@ -51,6 +52,7 @@ public class CopyJobConfigurationTest { .setDestinationEncryptionConfiguration(COPY_JOB_ENCRYPTION_CONFIGURATION) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final CopyJobConfiguration COPY_JOB_CONFIGURATION_MULTIPLE_TABLES = CopyJobConfiguration.newBuilder(DESTINATION_TABLE, SOURCE_TABLES) @@ -58,6 +60,7 @@ public class CopyJobConfigurationTest { .setWriteDisposition(WRITE_DISPOSITION) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); @Test @@ -67,8 +70,7 @@ public void testToBuilder() { COPY_JOB_CONFIGURATION_MULTIPLE_TABLES, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toBuilder().build()); CopyJobConfiguration jobConfiguration = - COPY_JOB_CONFIGURATION - .toBuilder() + COPY_JOB_CONFIGURATION.toBuilder()
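// toBuilder() starts from a full copy of COPY_JOB_CONFIGURATION; only the destination table is overridden below.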
.setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", jobConfiguration.getDestinationTable().getTable()); @@ -143,8 +145,7 @@ public void testSetProjectId() { @Test public void testSetProjectIdDoNotOverride() { CopyJobConfiguration configuration = - COPY_JOB_CONFIGURATION_MULTIPLE_TABLES - .toBuilder() + COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toBuilder() .setSourceTables( Lists.transform( SOURCE_TABLES, @@ -183,5 +184,6 @@ private void compareCopyJobConfiguration( value.getDestinationEncryptionConfiguration()); assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getJobTimeoutMs(), value.getJobTimeoutMs()); + assertEquals(expected.getReservation(), value.getReservation()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java index fa05cddc5..fb0293a97 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java @@ -28,6 +28,7 @@ public class CsvOptionsTest { private static final Boolean ALLOW_QUOTED_NEWLINE = true; private static final Charset ENCODING = StandardCharsets.UTF_8; private static final String FIELD_DELIMITER = ","; + private static final String NULL_MARKER = "\\N"; private static final String QUOTE = "\""; private static final long SKIP_LEADING_ROWS = 42L; @@ -38,6 +39,7 @@ public class CsvOptionsTest { .setAllowQuotedNewLines(ALLOW_QUOTED_NEWLINE) .setEncoding(ENCODING) .setFieldDelimiter(FIELD_DELIMITER) + .setNullMarker(NULL_MARKER) .setQuote(QUOTE) .setSkipLeadingRows(SKIP_LEADING_ROWS) .setPreserveAsciiControlCharacters(PRESERVE_ASCII_CONTROL_CHARACTERS) @@ -65,6 +67,7 @@ public void testBuilder() { assertEquals(ALLOW_QUOTED_NEWLINE, CSV_OPTIONS.allowQuotedNewLines()); assertEquals(ENCODING.name(), CSV_OPTIONS.getEncoding()); assertEquals(FIELD_DELIMITER, CSV_OPTIONS.getFieldDelimiter()); + assertEquals(NULL_MARKER, CSV_OPTIONS.getNullMarker()); assertEquals(QUOTE, CSV_OPTIONS.getQuote()); assertEquals(SKIP_LEADING_ROWS, (long) CSV_OPTIONS.getSkipLeadingRows()); assertEquals( @@ -84,6 +87,7 @@ private void compareCsvOptions(CsvOptions expected, CsvOptions value) { assertEquals(expected.allowQuotedNewLines(), value.allowQuotedNewLines()); assertEquals(expected.getEncoding(), value.getEncoding()); assertEquals(expected.getFieldDelimiter(), value.getFieldDelimiter()); + assertEquals(expected.getNullMarker(), value.getNullMarker()); assertEquals(expected.getQuote(), value.getQuote()); assertEquals(expected.getSkipLeadingRows(), value.getSkipLeadingRows()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java index df62b7dac..1b75195ce 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -59,6 +60,12 @@ public class DatasetInfoTest { private static final EncryptionConfiguration DATASET_ENCRYPTION_CONFIGURATION = 
EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); private static final String STORAGE_BILLING_MODEL = "LOGICAL"; + private static final Long MAX_TIME_TRAVEL_HOURS_5_DAYS = 120L; + private static final Long MAX_TIME_TRAVEL_HOURS_7_DAYS = 168L; + private static final Map RESOURCE_TAGS = + ImmutableMap.of( + "example-key1", "example-value1", + "example-key2", "example-value2"); private static final ExternalDatasetReference EXTERNAL_DATASET_REFERENCE = ExternalDatasetReference.newBuilder() @@ -81,10 +88,11 @@ public class DatasetInfoTest { .setDefaultEncryptionConfiguration(DATASET_ENCRYPTION_CONFIGURATION) .setDefaultPartitionExpirationMs(DEFAULT_PARTITION__EXPIRATION) .setStorageBillingModel(STORAGE_BILLING_MODEL) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_7_DAYS) + .setResourceTags(RESOURCE_TAGS) .build(); private static final DatasetInfo DATASET_INFO_COMPLETE = - DATASET_INFO - .toBuilder() + DATASET_INFO.toBuilder() .setDatasetId(DATASET_ID_COMPLETE) .setAcl(ACCESS_RULES_COMPLETE) .build(); @@ -92,6 +100,8 @@ public class DatasetInfoTest { DATASET_INFO.toBuilder().setAcl(ACCESS_RULES_IAM_MEMBER).build(); private static final DatasetInfo DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE = DATASET_INFO.toBuilder().setExternalDatasetReference(EXTERNAL_DATASET_REFERENCE).build(); + private static final DatasetInfo DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS = + DATASET_INFO.toBuilder().setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_5_DAYS).build(); @Test public void testToBuilder() { @@ -100,8 +110,7 @@ public void testToBuilder() { DATASET_INFO_COMPLETE_WITH_IAM_MEMBER, DATASET_INFO_COMPLETE_WITH_IAM_MEMBER.toBuilder().build()); DatasetInfo datasetInfo = - DATASET_INFO - .toBuilder() + DATASET_INFO.toBuilder() .setDatasetId(DatasetId.of("dataset2")) .setDescription("description2") .build(); @@ -130,8 +139,7 @@ public void testToBuilderWithExternalDatasetReference() { .setConnection("connection2") .build(); DatasetInfo datasetInfo = - DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE - .toBuilder() + DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE.toBuilder() .setExternalDatasetReference(externalDatasetReference) .build(); assertEquals(externalDatasetReference, datasetInfo.getExternalDatasetReference()); @@ -173,6 +181,11 @@ public void testBuilder() { EXTERNAL_DATASET_REFERENCE, DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE.getExternalDatasetReference()); assertEquals(STORAGE_BILLING_MODEL, DATASET_INFO_COMPLETE.getStorageBillingModel()); + assertEquals(MAX_TIME_TRAVEL_HOURS_7_DAYS, DATASET_INFO.getMaxTimeTravelHours()); + assertEquals( + MAX_TIME_TRAVEL_HOURS_5_DAYS, + DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS.getMaxTimeTravelHours()); + assertEquals(RESOURCE_TAGS, DATASET_INFO.getResourceTags()); } @Test @@ -194,6 +207,7 @@ public void testOf() { assertTrue(datasetInfo.getLabels().isEmpty()); assertNull(datasetInfo.getExternalDatasetReference()); assertNull(datasetInfo.getStorageBillingModel()); + assertNull(datasetInfo.getMaxTimeTravelHours()); datasetInfo = DatasetInfo.of(DATASET_ID); assertEquals(DATASET_ID, datasetInfo.getDatasetId()); @@ -212,6 +226,7 @@ public void testOf() { assertTrue(datasetInfo.getLabels().isEmpty()); assertNull(datasetInfo.getExternalDatasetReference()); assertNull(datasetInfo.getStorageBillingModel()); + assertNull(datasetInfo.getMaxTimeTravelHours()); } @Test @@ -229,6 +244,16 @@ public void testSetProjectId() { assertEquals(DATASET_INFO_COMPLETE, DATASET_INFO.setProjectId("project")); } + @Test + public void 
testSetMaxTimeTravelHours() { + assertNotEquals( + DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS.getMaxTimeTravelHours(), + DATASET_INFO.getMaxTimeTravelHours()); + assertEquals( + DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS, + DATASET_INFO.toBuilder().setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_5_DAYS).build()); + } + private void compareDatasets(DatasetInfo expected, DatasetInfo value) { assertEquals(expected, value); assertEquals(expected.getDatasetId(), value.getDatasetId()); @@ -249,5 +274,7 @@ private void compareDatasets(DatasetInfo expected, DatasetInfo value) { expected.getDefaultPartitionExpirationMs(), value.getDefaultPartitionExpirationMs()); assertEquals(expected.getExternalDatasetReference(), value.getExternalDatasetReference()); assertEquals(expected.getStorageBillingModel(), value.getStorageBillingModel()); + assertEquals(expected.getMaxTimeTravelHours(), value.getMaxTimeTravelHours()); + assertEquals(expected.getResourceTags(), value.getResourceTags()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java index bc42976b2..d138e3cb5 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java @@ -67,6 +67,11 @@ public class DatasetTest { private static final DatasetInfo DATASET_INFO = DatasetInfo.newBuilder(DATASET_ID).build(); private static final Field FIELD = Field.of("FieldName", LegacySQLTypeName.INTEGER); private static final String STORAGE_BILLING_MODEL = "LOGICAL"; + private static final Long MAX_TIME_TRAVEL_HOURS = 168L; + private static final Map RESOURCE_TAGS = + ImmutableMap.of( + "example-key1", "example-value1", + "example-key2", "example-value2"); private static final StandardTableDefinition TABLE_DEFINITION = StandardTableDefinition.of(Schema.of(FIELD)); private static final ViewDefinition VIEW_DEFINITION = ViewDefinition.of("QUERY"); @@ -122,6 +127,8 @@ public void testBuilder() { .setSelfLink(SELF_LINK) .setLabels(LABELS) .setStorageBillingModel(STORAGE_BILLING_MODEL) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) + .setResourceTags(RESOURCE_TAGS) .build(); assertEquals(DATASET_ID, builtDataset.getDatasetId()); assertEquals(ACCESS_RULES, builtDataset.getAcl()); @@ -136,6 +143,8 @@ public void testBuilder() { assertEquals(SELF_LINK, builtDataset.getSelfLink()); assertEquals(LABELS, builtDataset.getLabels()); assertEquals(STORAGE_BILLING_MODEL, builtDataset.getStorageBillingModel()); + assertEquals(MAX_TIME_TRAVEL_HOURS, builtDataset.getMaxTimeTravelHours()); + assertEquals(RESOURCE_TAGS, builtDataset.getResourceTags()); } @Test @@ -344,6 +353,8 @@ public void testExternalDatasetReference() { .setLabels(LABELS) .setExternalDatasetReference(EXTERNAL_DATASET_REFERENCE) .setStorageBillingModel(STORAGE_BILLING_MODEL) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) + .setResourceTags(RESOURCE_TAGS) .build(); assertEquals( EXTERNAL_DATASET_REFERENCE, @@ -374,5 +385,7 @@ private void compareDatasetInfo(DatasetInfo expected, DatasetInfo value) { assertEquals(expected.getLastModified(), value.getLastModified()); assertEquals(expected.getExternalDatasetReference(), value.getExternalDatasetReference()); assertEquals(expected.getStorageBillingModel(), value.getStorageBillingModel()); + assertEquals(expected.getMaxTimeTravelHours(), value.getMaxTimeTravelHours()); + assertEquals(expected.getResourceTags(), value.getResourceTags()); } } diff 
--git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java index 3e67ad959..cb7578c75 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; +import com.google.cloud.bigquery.ExternalTableDefinition.SourceColumnMatch; import com.google.common.collect.ImmutableList; import java.util.List; import org.junit.Test; @@ -58,6 +59,16 @@ public class ExternalTableDefinitionTest { .setMode("AUTO") .setSourceUriPrefix(SOURCE_URIS.get(0)) .build(); + private static final String OBJECT_METADATA = "SIMPLE"; + private static final String METADATA_CACHE_MODE = "AUTOMATIC"; + private static final String MAX_STALENESS = "INTERVAL 15 MINUTE"; + private static final String TIME_ZONE = "America/Los_Angeles"; + private static final String DATE_FORMAT = "YYYY-MM-DD"; + private static final String DATETIME_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final String TIME_FORMAT = "HH:MI:SS"; + private static final String TIMESTAMP_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final SourceColumnMatch SOURCE_COLUMN_MATCH = SourceColumnMatch.POSITION; + private static final List NULL_MARKERS = ImmutableList.of("SQL NULL", "TEST_MARKER"); private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION = ExternalTableDefinition.newBuilder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) .setFileSetSpecType("FILE_SET_SPEC_TYPE_FILE_SYSTEM_MATCH") @@ -68,6 +79,16 @@ public class ExternalTableDefinitionTest { .setMaxBadRecords(MAX_BAD_RECORDS) .setAutodetect(AUTODETECT) .setHivePartitioningOptions(HIVE_PARTITIONING_OPTIONS) + .setObjectMetadata(OBJECT_METADATA) + .setMetadataCacheMode(METADATA_CACHE_MODE) + .setMaxStaleness(MAX_STALENESS) + .setTimeZone(TIME_ZONE) + .setDateFormat(DATE_FORMAT) + .setDatetimeFormat(DATETIME_FORMAT) + .setTimeFormat(TIME_FORMAT) + .setTimestampFormat(TIMESTAMP_FORMAT) + .setSourceColumnMatch(SOURCE_COLUMN_MATCH) + .setNullMarkers(NULL_MARKERS) .build(); private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION_AVRO = @@ -81,16 +102,14 @@ public void testToBuilder() { compareExternalTableDefinition( EXTERNAL_TABLE_DEFINITION, EXTERNAL_TABLE_DEFINITION.toBuilder().build()); ExternalTableDefinition externalTableDefinition = - EXTERNAL_TABLE_DEFINITION - .toBuilder() + EXTERNAL_TABLE_DEFINITION.toBuilder() .setCompression("NONE") .setConnectionId("00000") .build(); assertEquals("NONE", externalTableDefinition.getCompression()); assertEquals("00000", externalTableDefinition.getConnectionId()); externalTableDefinition = - externalTableDefinition - .toBuilder() + externalTableDefinition.toBuilder() .setCompression(COMPRESSION) .setConnectionId(CONNECTION_ID) .build(); @@ -127,6 +146,13 @@ public void testBuilder() { assertEquals(DECIMAL_TARGET_TYPES, EXTERNAL_TABLE_DEFINITION.getDecimalTargetTypes()); assertEquals(AUTODETECT, EXTERNAL_TABLE_DEFINITION.getAutodetect()); assertEquals(HIVE_PARTITIONING_OPTIONS, EXTERNAL_TABLE_DEFINITION.getHivePartitioningOptions()); + assertEquals(TIME_ZONE, EXTERNAL_TABLE_DEFINITION.getTimeZone()); + assertEquals(DATE_FORMAT, EXTERNAL_TABLE_DEFINITION.getDateFormat()); + assertEquals(DATETIME_FORMAT, EXTERNAL_TABLE_DEFINITION.getDatetimeFormat()); + 
assertEquals(TIME_FORMAT, EXTERNAL_TABLE_DEFINITION.getTimeFormat()); + assertEquals(TIMESTAMP_FORMAT, EXTERNAL_TABLE_DEFINITION.getTimestampFormat()); + assertEquals(SOURCE_COLUMN_MATCH, EXTERNAL_TABLE_DEFINITION.getSourceColumnMatch()); + assertEquals(NULL_MARKERS, EXTERNAL_TABLE_DEFINITION.getNullMarkers()); assertNotEquals(EXTERNAL_TABLE_DEFINITION, TableDefinition.Type.EXTERNAL); } @@ -167,5 +193,15 @@ private void compareExternalTableDefinition( assertEquals(expected.hashCode(), value.hashCode()); assertEquals(expected.getAutodetect(), value.getAutodetect()); assertEquals(expected.getHivePartitioningOptions(), value.getHivePartitioningOptions()); + assertEquals(expected.getObjectMetadata(), value.getObjectMetadata()); + assertEquals(expected.getMetadataCacheMode(), value.getMetadataCacheMode()); + assertEquals(expected.getMaxStaleness(), value.getMaxStaleness()); + assertEquals(expected.getTimeZone(), value.getTimeZone()); + assertEquals(expected.getDateFormat(), value.getDateFormat()); + assertEquals(expected.getDatetimeFormat(), value.getDatetimeFormat()); + assertEquals(expected.getTimeFormat(), value.getTimeFormat()); + assertEquals(expected.getTimestampFormat(), value.getTimestampFormat()); + assertEquals(expected.getSourceColumnMatch(), value.getSourceColumnMatch()); + assertEquals(expected.getNullMarkers(), value.getNullMarkers()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java index 95142a068..2bf1e80a2 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java @@ -43,6 +43,7 @@ public class ExtractJobConfigurationTest { private static final Map LABELS = ImmutableMap.of("test-job-name", "test-extract-job"); private static final Long TIMEOUT = 10L; + private static final String RESERVATION = "reservation"; private static final ExtractJobConfiguration EXTRACT_CONFIGURATION = ExtractJobConfiguration.newBuilder(TABLE_ID, DESTINATION_URIS) .setPrintHeader(PRINT_HEADER) @@ -51,6 +52,7 @@ public class ExtractJobConfigurationTest { .setFormat(FORMAT) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_ONE_URI = ExtractJobConfiguration.newBuilder(TABLE_ID, DESTINATION_URI) @@ -60,6 +62,7 @@ public class ExtractJobConfigurationTest { .setFormat(FORMAT) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_AVRO = ExtractJobConfiguration.newBuilder(TABLE_ID, DESTINATION_URI) @@ -70,6 +73,7 @@ public class ExtractJobConfigurationTest { .setUseAvroLogicalTypes(USEAVROLOGICALTYPES) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_MODEL = ExtractJobConfiguration.newBuilder(MODEL_ID, DESTINATION_URIS) @@ -80,6 +84,7 @@ public class ExtractJobConfigurationTest { .setUseAvroLogicalTypes(USEAVROLOGICALTYPES) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); @Test @@ -92,8 +97,7 @@ public void testToBuilder() { compareExtractJobConfiguration( EXTRACT_CONFIGURATION_MODEL, EXTRACT_CONFIGURATION_MODEL.toBuilder().build()); ExtractJobConfiguration modelJob = - 
EXTRACT_CONFIGURATION_MODEL - .toBuilder() + EXTRACT_CONFIGURATION_MODEL.toBuilder() .setSourceModel(ModelId.of("dataset", "newModel")) .build(); assertEquals("newModel", modelJob.getSourceModel().getModel()); @@ -102,8 +106,7 @@ public void testToBuilder() { compareExtractJobConfiguration( EXTRACT_CONFIGURATION_AVRO, EXTRACT_CONFIGURATION_AVRO.toBuilder().build()); ExtractJobConfiguration avroJob = - EXTRACT_CONFIGURATION_AVRO - .toBuilder() + EXTRACT_CONFIGURATION_AVRO.toBuilder() .setSourceTable(TableId.of("dataset", "avroTable")) .build(); assertEquals("avroTable", avroJob.getSourceTable().getTable()); @@ -187,6 +190,7 @@ public void testBuilder() { assertEquals(FORMAT, EXTRACT_CONFIGURATION_MODEL.getFormat()); assertEquals(LABELS, EXTRACT_CONFIGURATION_MODEL.getLabels()); assertEquals(TIMEOUT, EXTRACT_CONFIGURATION_MODEL.getJobTimeoutMs()); + assertEquals(RESERVATION, EXTRACT_CONFIGURATION_MODEL.getReservation()); } @Test @@ -223,15 +227,13 @@ public void testSetProjectId() { @Test public void testSetProjectIdDoNotOverride() { ExtractJobConfiguration configuration = - EXTRACT_CONFIGURATION - .toBuilder() + EXTRACT_CONFIGURATION.toBuilder() .setSourceTable(TABLE_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("do-not-update"); assertEquals(TEST_PROJECT_ID, configuration.getSourceTable().getProject()); ExtractJobConfiguration modelConfiguration = - EXTRACT_CONFIGURATION_MODEL - .toBuilder() + EXTRACT_CONFIGURATION_MODEL.toBuilder() .setSourceModel(MODEL_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("do-not-update"); @@ -260,5 +262,6 @@ private void compareExtractJobConfiguration( assertEquals(expected.getFormat(), value.getFormat()); assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getJobTimeoutMs(), value.getJobTimeoutMs()); + assertEquals(expected.getReservation(), value.getReservation()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java index 9b4590892..cf217b25c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java @@ -17,6 +17,7 @@ import static org.junit.Assert.assertEquals; +import com.google.api.services.bigquery.model.QueryParameterType; import org.junit.Test; public class FieldElementTypeTest { @@ -36,6 +37,11 @@ public void testBuilder() { @Test public void testFromAndPb() { assertEquals(FIELD_ELEMENT_TYPE, FieldElementType.fromPb(FIELD_ELEMENT_TYPE.toPb())); + assertEquals( + FIELD_ELEMENT_TYPE, + FieldElementType.fromPb( + new QueryParameterType() + .setRangeElementType(new QueryParameterType().setType("DATE")))); } private void compareFieldElementType(FieldElementType expected, FieldElementType value) { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java index 4db202813..ce431ca29 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java @@ -30,6 +30,8 @@ public class FieldTest { private static final String FIELD_NAME1 = "StringField"; private static final String FIELD_NAME2 = "IntegerField"; private static final String FIELD_NAME3 = "RecordField"; + private static final String FIELD_NAME4 = "NullModeField"; + 
private static final String FIELD_NAME5 = "NullModeField2"; private static final LegacySQLTypeName FIELD_TYPE1 = LegacySQLTypeName.STRING; private static final LegacySQLTypeName FIELD_TYPE2 = LegacySQLTypeName.INTEGER; private static final StandardSQLTypeName FIELD_TYPE1_STANDARD = StandardSQLTypeName.STRING; @@ -79,6 +81,10 @@ public class FieldTest { .setMode(FIELD_MODE3) .setDescription(FIELD_DESCRIPTION3) .build(); + private static final Field STANDARD_FIELD_SCHEMA4 = + Field.newBuilder(FIELD_NAME4, StandardSQLTypeName.INT64).setMode(null).build(); + private static final Field STANDARD_FIELD_SCHEMA5 = + Field.newBuilder(FIELD_NAME5, StandardSQLTypeName.STRING).build(); @Test public void testToBuilder() { @@ -96,6 +102,8 @@ public void testToBuilderWithStandardSQLTypeName() { compareFieldSchemas(STANDARD_FIELD_SCHEMA1, STANDARD_FIELD_SCHEMA1.toBuilder().build()); compareFieldSchemas(STANDARD_FIELD_SCHEMA2, STANDARD_FIELD_SCHEMA2.toBuilder().build()); compareFieldSchemas(STANDARD_FIELD_SCHEMA3, STANDARD_FIELD_SCHEMA3.toBuilder().build()); + compareFieldSchemas(STANDARD_FIELD_SCHEMA4, STANDARD_FIELD_SCHEMA4.toBuilder().build()); + compareFieldSchemas(STANDARD_FIELD_SCHEMA5, STANDARD_FIELD_SCHEMA5.toBuilder().build()); Field field = STANDARD_FIELD_SCHEMA1.toBuilder().setDescription("New Description").build(); assertEquals("New Description", field.getDescription()); field = field.toBuilder().setDescription(FIELD_DESCRIPTION1).build(); @@ -162,6 +170,8 @@ public void testBuilderWithStandardSQLTypeName() { assertEquals(FIELD_TYPE3, STANDARD_FIELD_SCHEMA3.getType()); assertEquals(FIELD_MODE3, STANDARD_FIELD_SCHEMA3.getMode()); assertEquals(FIELD_DESCRIPTION3, STANDARD_FIELD_SCHEMA3.getDescription()); + assertEquals(null, STANDARD_FIELD_SCHEMA4.getMode()); + assertEquals(null, STANDARD_FIELD_SCHEMA5.getMode()); assertEquals( FieldList.of(STANDARD_FIELD_SCHEMA1, STANDARD_FIELD_SCHEMA2), STANDARD_FIELD_SCHEMA3.getSubFields()); @@ -181,6 +191,8 @@ public void testToAndFromPbWithStandardSQLTypeName() { compareFieldSchemas(STANDARD_FIELD_SCHEMA1, Field.fromPb(STANDARD_FIELD_SCHEMA1.toPb())); compareFieldSchemas(STANDARD_FIELD_SCHEMA2, Field.fromPb(STANDARD_FIELD_SCHEMA2.toPb())); compareFieldSchemas(STANDARD_FIELD_SCHEMA3, Field.fromPb(STANDARD_FIELD_SCHEMA3.toPb())); + compareFieldSchemas(STANDARD_FIELD_SCHEMA4, Field.fromPb(STANDARD_FIELD_SCHEMA4.toPb())); + compareFieldSchemas(STANDARD_FIELD_SCHEMA5, Field.fromPb(STANDARD_FIELD_SCHEMA5.toPb())); Field field = Field.newBuilder(FIELD_NAME1, FIELD_TYPE1).build(); compareFieldSchemas(field, Field.fromPb(field.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java index 7d10a9750..5ade7c229 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import com.google.api.client.util.Data; @@ -52,6 +53,12 @@ public class FieldValueListTest { Field.of("tenth", LegacySQLTypeName.NUMERIC), Field.of("eleventh", LegacySQLTypeName.BIGNUMERIC)); + private final FieldList schemaLosslessTimestamp = + FieldList.of( + Field.of("first", LegacySQLTypeName.BOOLEAN), + Field.of("second", 
LegacySQLTypeName.INTEGER), + Field.of("third", LegacySQLTypeName.TIMESTAMP)); + private final Map<String, String> integerPb = ImmutableMap.of("v", "1"); private final Map<String, String> floatPb = ImmutableMap.of("v", "1.5"); private final Map<String, String> stringPb = ImmutableMap.of("v", "string"); @@ -68,10 +75,15 @@ public class FieldValueListTest { "v", "99999999999999999999999999999999999999.99999999999999999999999999999999999999"); private final FieldValue booleanFv = FieldValue.of(Attribute.PRIMITIVE, "false"); + private final FieldValue booleanLosslessTimestampFv = + FieldValue.of(Attribute.PRIMITIVE, "false", true); private final FieldValue integerFv = FieldValue.of(Attribute.PRIMITIVE, "1"); + private final FieldValue integerLosslessTimestampFv = + FieldValue.of(Attribute.PRIMITIVE, "1", true); private final FieldValue floatFv = FieldValue.of(Attribute.PRIMITIVE, "1.5"); private final FieldValue stringFv = FieldValue.of(Attribute.PRIMITIVE, "string"); private final FieldValue timestampFv = FieldValue.of(Attribute.PRIMITIVE, "42"); + private final FieldValue losslessTimestampFv = FieldValue.of(Attribute.PRIMITIVE, "42", true); private final FieldValue bytesFv = FieldValue.of(Attribute.PRIMITIVE, BYTES_BASE64); private final FieldValue nullFv = FieldValue.of(Attribute.PRIMITIVE, null); private final FieldValue repeatedFv = @@ -117,11 +129,25 @@ public class FieldValueListTest { bigNumericFv), schema); + private final List<Map<String, String>> fieldValuesLosslessTimestampPb = + ImmutableList.of(booleanPb, integerPb, timestampPb); + private final FieldValueList fieldValuesLosslessTimestamp = + FieldValueList.of( + ImmutableList.of( + booleanLosslessTimestampFv, integerLosslessTimestampFv, losslessTimestampFv), + schemaLosslessTimestamp); + @Test public void testFromPb() { assertEquals(fieldValues, FieldValueList.fromPb(fieldValuesPb, schema)); // Schema does not influence values equality assertEquals(fieldValues, FieldValueList.fromPb(fieldValuesPb, null)); + + assertNotEquals(fieldValues, FieldValueList.fromPb(fieldValuesPb, null, true)); + + assertEquals( + fieldValuesLosslessTimestamp, + FieldValueList.fromPb(fieldValuesLosslessTimestampPb, null, true)); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java index 90cb69061..4ec527f7c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java @@ -20,6 +20,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import com.google.api.client.util.Data; import com.google.api.services.bigquery.model.TableCell; @@ -55,6 +56,9 @@ public class FieldValueTest { private static final Map<String, String> BYTES_FIELD = ImmutableMap.of("v", BYTES_BASE64); private static final Map<String, String> NULL_FIELD = ImmutableMap.of("v", Data.nullOf(String.class)); + + private static final Map<String, String> RANGE_FIELD = ImmutableMap.of("v", "[start, end)"); + private static final Map<String, Object> REPEATED_FIELD = ImmutableMap.of("v", ImmutableList.of(INTEGER_FIELD, INTEGER_FIELD)); private static final Map<String, Object> RECORD_FIELD = @@ -74,12 +78,14 @@ public void testFromPb() { value = FieldValue.fromPb(GEOGRAPHY_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals("POINT(-122.350220 47.649154)", value.getStringValue()); + assertEquals("POINT(-122.350220 47.649154)",
value.getStringValueOrDefault(null)); value = FieldValue.fromPb(NUMERIC_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals(new BigDecimal("123456789.123456789"), value.getNumericValue()); value = FieldValue.fromPb(STRING_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals("string", value.getStringValue()); + assertEquals("string", value.getStringValueOrDefault(null)); value = FieldValue.fromPb(TIMESTAMP_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals(42000000, value.getTimestampValue()); @@ -89,16 +95,21 @@ public void testFromPb() { PeriodDuration.of(Period.of(3, 2, 1), Duration.parse("PT12H34M56.789S")); assertEquals(periodDuration, value.getPeriodDuration()); assertEquals("P3Y2M1DT12H34M56.789S", value.getStringValue()); + assertEquals("P3Y2M1DT12H34M56.789S", value.getStringValueOrDefault(null)); value = FieldValue.fromPb(INTERVAL_FIELD_2); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); periodDuration = PeriodDuration.of(Period.of(3, 2, 1), Duration.parse("PT12H34M56.789S")); assertEquals(periodDuration, value.getPeriodDuration()); assertEquals("3-2 1 12:34:56.789", value.getStringValue()); + assertEquals("3-2 1 12:34:56.789", value.getStringValueOrDefault(null)); value = FieldValue.fromPb(BYTES_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertArrayEquals(BYTES, value.getBytesValue()); value = FieldValue.fromPb(NULL_FIELD); assertNull(value.getValue()); + value = FieldValue.fromPb(RANGE_FIELD); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); + assertEquals(Range.of(RANGE_FIELD.get("v")), value.getRangeValue()); value = FieldValue.fromPb(REPEATED_FIELD); assertEquals(FieldValue.Attribute.REPEATED, value.getAttribute()); assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.getRepeatedValue().get(0)); @@ -107,6 +118,10 @@ public void testFromPb() { assertEquals(FieldValue.Attribute.RECORD, value.getAttribute()); assertEquals(FieldValue.fromPb(FLOAT_FIELD), value.getRepeatedValue().get(0)); assertEquals(FieldValue.fromPb(TIMESTAMP_FIELD), value.getRepeatedValue().get(1)); + value = FieldValue.fromPb(NULL_FIELD); + assertTrue(value.isNull()); + assertEquals(null, value.getStringValueOrDefault(null)); + assertEquals("defaultValue", value.getStringValueOrDefault("defaultValue")); } @Test @@ -117,6 +132,25 @@ public void testTimestamp() { assertEquals(expected, received); } + @Test + public void testInt64Timestamp() { + FieldValue lossyFieldValue = + FieldValue.of(FieldValue.Attribute.PRIMITIVE, "1.9954383398377106E10"); + long lossy = lossyFieldValue.getTimestampValue(); + + FieldValue losslessFieldValue = + FieldValue.of(FieldValue.Attribute.PRIMITIVE, "19954383398377106", true); + long lossless = losslessFieldValue.getTimestampValue(); + + assertEquals(lossy, lossless); + + FieldValue fieldValue = + FieldValue.of(FieldValue.Attribute.PRIMITIVE, "19954383398377106", true); + long received = fieldValue.getTimestampValue(); + long expected = 19954383398377106L; + assertEquals(expected, received); + } + @Test public void testEquals() { FieldValue booleanValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, "false"); @@ -156,6 +190,10 @@ public void testEquals() { assertEquals(nullValue, FieldValue.fromPb(NULL_FIELD)); assertEquals(nullValue.hashCode(), FieldValue.fromPb(NULL_FIELD).hashCode()); + FieldValue rangeValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, "[start, end)"); + 
assertEquals(rangeValue, FieldValue.fromPb(RANGE_FIELD)); + assertEquals(rangeValue.hashCode(), FieldValue.fromPb(RANGE_FIELD).hashCode()); + FieldValue repeatedValue = FieldValue.of(FieldValue.Attribute.REPEATED, ImmutableList.of(integerValue, integerValue)); assertEquals(repeatedValue, FieldValue.fromPb(REPEATED_FIELD)); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java index 5cb2b418f..1ebd93ef4 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java @@ -54,8 +54,7 @@ public void testToBuilder() { .setReferencedColumn("to2") .build()); ForeignKey foreignKey = - FOREIGN_KEY - .toBuilder() + FOREIGN_KEY.toBuilder() .setName("test") .setReferencedTable(referencedTable) .setColumnReferences(columnReferences) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java index d286358ca..efbee79b6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java @@ -44,16 +44,14 @@ public void testToBuilder() { compareGoogleSheetsOptions( GOOGLE_SHEETS_OPTIONS_RANGE, GOOGLE_SHEETS_OPTIONS_RANGE.toBuilder().build()); GoogleSheetsOptions googleSheetsOptionsRange = - GOOGLE_SHEETS_OPTIONS_RANGE - .toBuilder() + GOOGLE_SHEETS_OPTIONS_RANGE.toBuilder() .setSkipLeadingRows(123) .setRange("sheet1!A1:A100") .build(); assertThat(googleSheetsOptionsRange.getSkipLeadingRows()).isEqualTo(123); assertThat(googleSheetsOptionsRange.getRange()).isEqualTo("sheet1!A1:A100"); googleSheetsOptionsRange = - googleSheetsOptionsRange - .toBuilder() + googleSheetsOptionsRange.toBuilder() .setSkipLeadingRows(SKIP_LEADING_ROWS) .setRange(RANGE) .build(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java index 24344514e..2a1353f5d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java @@ -23,6 +23,7 @@ import com.google.cloud.bigquery.JobStatistics.ExtractStatistics; import com.google.cloud.bigquery.JobStatistics.LoadStatistics; import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.ExportDataStats; import com.google.cloud.bigquery.JobStatistics.ReservationUsage; import com.google.cloud.bigquery.JobStatistics.ScriptStatistics; import com.google.cloud.bigquery.JobStatistics.ScriptStatistics.ScriptStackFrame; @@ -64,6 +65,13 @@ public class JobStatisticsTest { .setInsertedRowCount(INSERTED_ROW_COUNT) .setUpdatedRowCount(UPDATED_ROW_COUNT) .build(); + private static final Long EXPORT_DATA_STATS_ROW_COUNT = 3L; + private static final Long EXPORT_DATA_STATS_FILE_COUNT = 2L; + private static final ExportDataStats EXPORT_DATA_STATS = + ExportDataStats.newBuilder() + .setRowCount(EXPORT_DATA_STATS_ROW_COUNT) + .setFileCount(EXPORT_DATA_STATS_FILE_COUNT) + .build(); private static final QueryStatistics.StatementType STATEMENT_TYPE = QueryStatistics.StatementType.SELECT; private 
static final Long TOTAL_BYTES_BILLED = 24L; @@ -94,6 +102,7 @@ public class JobStatisticsTest { .setStartTime(START_TIME) .setCopiedRows(COPIED_ROW) .setCopiedLogicalBytes(COPIED_LOGICAL_BYTES) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final ExtractStatistics EXTRACT_STATISTICS = ExtractStatistics.newBuilder() @@ -102,6 +111,7 @@ public class JobStatisticsTest { .setStartTime(START_TIME) .setDestinationUriFileCounts(FILE_COUNT) .setInputBytes(INPUT_BYTES) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final LoadStatistics LOAD_STATISTICS = LoadStatistics.newBuilder() @@ -113,6 +123,7 @@ public class JobStatisticsTest { .setOutputBytes(OUTPUT_BYTES) .setOutputRows(OUTPUT_ROWS) .setBadRecords(BAD_RECORDS) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final LoadStatistics LOAD_STATISTICS_INCOMPLETE = LoadStatistics.newBuilder() @@ -122,6 +133,7 @@ public class JobStatisticsTest { .setInputBytes(INPUT_BYTES) .setInputFiles(INPUT_FILES) .setBadRecords(BAD_RECORDS) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final List SUBSTEPS1 = ImmutableList.of("substep1", "substep2"); private static final List SUBSTEPS2 = ImmutableList.of("substep3", "substep4"); @@ -189,6 +201,7 @@ public class JobStatisticsTest { .setEstimatedBytesProcessed(ESTIMATE_BYTES_PROCESSED) .setNumDmlAffectedRows(NUM_DML_AFFECTED_ROWS) .setDmlStats(DML_STATS) + .setExportDataStats(EXPORT_DATA_STATS) .setReferenceTables(REFERENCED_TABLES) .setStatementType(STATEMENT_TYPE) .setTotalBytesBilled(TOTAL_BYTES_BILLED) @@ -263,18 +276,21 @@ public void testBuilder() { assertEquals(CREATION_TIME, EXTRACT_STATISTICS.getCreationTime()); assertEquals(START_TIME, EXTRACT_STATISTICS.getStartTime()); assertEquals(END_TIME, EXTRACT_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, EXTRACT_STATISTICS.getTotalSlotMs()); assertEquals(FILE_COUNT, EXTRACT_STATISTICS.getDestinationUriFileCounts()); assertEquals(INPUT_BYTES, EXTRACT_STATISTICS.getInputBytes()); assertEquals(CREATION_TIME, COPY_STATISTICS.getCreationTime()); assertEquals(START_TIME, COPY_STATISTICS.getStartTime()); assertEquals(END_TIME, COPY_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, COPY_STATISTICS.getTotalSlotMs()); assertEquals(COPIED_LOGICAL_BYTES, COPY_STATISTICS.getCopiedLogicalBytes()); assertEquals(COPIED_ROW, COPY_STATISTICS.getCopiedRows()); assertEquals(CREATION_TIME, LOAD_STATISTICS.getCreationTime()); assertEquals(START_TIME, LOAD_STATISTICS.getStartTime()); assertEquals(END_TIME, LOAD_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, LOAD_STATISTICS.getTotalSlotMs()); assertEquals(INPUT_BYTES, LOAD_STATISTICS.getInputBytes()); assertEquals(INPUT_FILES, LOAD_STATISTICS.getInputFiles()); assertEquals(OUTPUT_BYTES, LOAD_STATISTICS.getOutputBytes()); @@ -284,6 +300,7 @@ public void testBuilder() { assertEquals(CREATION_TIME, QUERY_STATISTICS.getCreationTime()); assertEquals(START_TIME, QUERY_STATISTICS.getStartTime()); assertEquals(END_TIME, QUERY_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, QUERY_STATISTICS.getTotalSlotMs()); assertEquals(BI_ENGINE_STATS, QUERY_STATISTICS.getBiEngineStats()); assertEquals(BILLING_TIER, QUERY_STATISTICS.getBillingTier()); assertEquals(CACHE_HIT, QUERY_STATISTICS.getCacheHit()); @@ -293,12 +310,12 @@ public void testBuilder() { assertEquals(ESTIMATE_BYTES_PROCESSED, QUERY_STATISTICS.getEstimatedBytesProcessed()); assertEquals(NUM_DML_AFFECTED_ROWS, QUERY_STATISTICS.getNumDmlAffectedRows()); assertEquals(DML_STATS, QUERY_STATISTICS.getDmlStats()); + 
assertEquals(EXPORT_DATA_STATS, QUERY_STATISTICS.getExportDataStats()); assertEquals(REFERENCED_TABLES, QUERY_STATISTICS.getReferencedTables()); assertEquals(STATEMENT_TYPE, QUERY_STATISTICS.getStatementType()); assertEquals(TOTAL_BYTES_BILLED, QUERY_STATISTICS.getTotalBytesBilled()); assertEquals(TOTAL_BYTES_PROCESSED, QUERY_STATISTICS.getTotalBytesProcessed()); assertEquals(TOTAL_PARTITION_PROCESSED, QUERY_STATISTICS.getTotalPartitionsProcessed()); - assertEquals(TOTAL_SLOT_MS, QUERY_STATISTICS.getTotalSlotMs()); assertEquals(QUERY_PLAN, QUERY_STATISTICS.getQueryPlan()); assertEquals(TIMELINE, QUERY_STATISTICS.getTimeline()); @@ -448,6 +465,8 @@ private void compareQueryStatistics(QueryStatistics expected, QueryStatistics va assertEquals(expected.getMetadataCacheStats(), value.getMetadataCacheStats()); assertEquals(expected.getStatementType(), value.getStatementType()); assertEquals(expected.getTimeline(), value.getTimeline()); + assertEquals(expected.getDmlStats(), value.getDmlStats()); + assertEquals(expected.getExportDataStats(), value.getExportDataStats()); } private void compareStatistics(JobStatistics expected, JobStatistics value) { @@ -460,6 +479,7 @@ private void compareStatistics(JobStatistics expected, JobStatistics value) { assertEquals(expected.getNumChildJobs(), value.getNumChildJobs()); assertEquals(expected.getParentJobId(), value.getParentJobId()); assertEquals(expected.getScriptStatistics(), value.getScriptStatistics()); + assertEquals(expected.getTotalSlotMs(), value.getTotalSlotMs()); } private void compareScriptStatistics(ScriptStatistics expected, ScriptStatistics value) { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java index d10203444..f12d9fcaf 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java @@ -41,6 +41,7 @@ import com.google.cloud.bigquery.JobStatistics.QueryStatistics; import com.google.cloud.bigquery.JobStatus.State; import com.google.common.collect.ImmutableList; +import java.time.Duration; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; @@ -48,7 +49,6 @@ import org.junit.runner.RunWith; import org.mockito.junit.MockitoJUnitRunner; import org.mockito.junit.MockitoRule; -import org.threeten.bp.Duration; @RunWith(MockitoJUnitRunner.class) public class JobTest { @@ -65,6 +65,10 @@ public class JobTest { CopyStatistics.newBuilder().setCreationTimestamp(1L).setEndTime(3L).setStartTime(2L).build(); private static final CopyJobConfiguration COPY_CONFIGURATION = CopyJobConfiguration.of(TABLE_ID1, TABLE_ID2); + private static final QueryJobConfiguration DDL_QUERY_CONFIGURATION = + QueryJobConfiguration.newBuilder("CREATE VIEW").setDestinationTable(TABLE_ID1).build(); + private static final QueryJobConfiguration DRL_QUERY_CONFIGURATION = + QueryJobConfiguration.newBuilder("SELECT 1").setDestinationTable(TABLE_ID1).build(); private static final JobInfo JOB_INFO = JobInfo.newBuilder(COPY_CONFIGURATION) .setJobId(JOB_ID) @@ -79,12 +83,17 @@ public class JobTest { private static final RetryOption[] TEST_RETRY_OPTIONS = new RetryOption[] { - RetryOption.totalTimeout(Duration.ofSeconds(3)), - RetryOption.initialRetryDelay(Duration.ofMillis(1L)), + RetryOption.totalTimeoutDuration(Duration.ofSeconds(3)), + RetryOption.initialRetryDelayDuration(Duration.ofMillis(1L)), RetryOption.jittered(false), 
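// The *Duration variants take java.time.Duration; they replace the org.threeten.bp overloads this test used before the import change above.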
RetryOption.retryDelayMultiplier(1.0) }; + private static final BigQueryRetryConfig TEST_BIGQUERY_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder() + .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG) + .build(); + @Rule public MockitoRule rule; private BigQuery bigquery; @@ -191,8 +200,6 @@ public void testWaitFor() throws InterruptedException { @Test public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { - QueryJobConfiguration jobConfig = - QueryJobConfiguration.newBuilder("CREATE VIEW").setDestinationTable(TABLE_ID1).build(); QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -200,7 +207,7 @@ public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { .setStartTime(2L) .build(); JobInfo jobInfo = - JobInfo.newBuilder(jobConfig) + JobInfo.newBuilder(DDL_QUERY_CONFIGURATION) .setJobId(JOB_ID) .setStatistics(jobStatistics) .setJobId(JOB_ID) @@ -228,7 +235,7 @@ public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .thenReturn(completedQuery); when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); - job = this.job.toBuilder().setConfiguration(jobConfig).build(); + job = this.job.toBuilder().setConfiguration(DDL_QUERY_CONFIGURATION).build(); assertThat(job.waitFor(TEST_RETRY_OPTIONS)).isSameInstanceAs(completedJob); assertThat(job.getQueryResults().iterateAll()).isEmpty(); verify(bigquery, times(2)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); @@ -237,8 +244,6 @@ public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { @Test public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedException { - QueryJobConfiguration jobConfig = - QueryJobConfiguration.newBuilder("CREATE VIEW").setDestinationTable(TABLE_ID1).build(); QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -246,7 +251,7 @@ public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedExc .setStartTime(2L) .build(); JobInfo jobInfo = - JobInfo.newBuilder(jobConfig) + JobInfo.newBuilder(DDL_QUERY_CONFIGURATION) .setJobId(JOB_ID) .setStatistics(jobStatistics) .setJobId(JOB_ID) @@ -274,7 +279,7 @@ public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedExc when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .thenReturn(completedQuery); - job = this.job.toBuilder().setConfiguration(jobConfig).build(); + job = this.job.toBuilder().setConfiguration(DDL_QUERY_CONFIGURATION).build(); assertThat(job.waitFor(TEST_RETRY_OPTIONS)).isSameInstanceAs(completedJob); assertThat(job.getQueryResults().getSchema()) .isEqualTo(Schema.of(Field.of("field1", LegacySQLTypeName.BOOLEAN))); @@ -284,8 +289,6 @@ public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedExc @Test public void testWaitForAndGetQueryResults() throws InterruptedException { - QueryJobConfiguration jobConfig = - QueryJobConfiguration.newBuilder("SELECT 1").setDestinationTable(TABLE_ID1).build(); QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -293,7 +296,7 @@ public void testWaitForAndGetQueryResults() throws InterruptedException { .setStartTime(2L) .build(); JobInfo jobInfo = - JobInfo.newBuilder(jobConfig) + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) .setJobId(JOB_ID) 
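// Note: setJobId(JOB_ID) is repeated after setStatistics below; the duplicate call is redundant but harmless.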
.setStatistics(jobStatistics) .setJobId(JOB_ID) @@ -329,7 +332,7 @@ public void testWaitForAndGetQueryResults() throws InterruptedException { when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .thenReturn(completedQuery); when(bigquery.listTableData(eq(TABLE_ID1), any(Schema.class))).thenReturn(result); - job = this.job.toBuilder().setConfiguration(jobConfig).build(); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); assertThat(job.waitFor(TEST_RETRY_OPTIONS)).isSameInstanceAs(completedJob); assertThat(job.getQueryResults().iterateAll()).hasSize(0); verify(bigquery, times(2)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); @@ -399,13 +402,157 @@ public void testWaitForWithTimeout() throws InterruptedException { when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(runningJob); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(runningJob); try { - job.waitFor(concat(TEST_RETRY_OPTIONS, RetryOption.totalTimeout(Duration.ofMillis(3)))); + job.waitFor( + concat(TEST_RETRY_OPTIONS, RetryOption.totalTimeoutDuration(Duration.ofMillis(3)))); Assert.fail(); } catch (BigQueryException expected) { Assert.assertNotNull(expected.getMessage()); } } + @Test + public void testWaitForWithBigQueryRetryConfig() throws InterruptedException { + QueryStatistics jobStatistics = + QueryStatistics.newBuilder() + .setCreationTimestamp(1L) + .setEndTime(3L) + .setStartTime(2L) + .build(); + JobInfo jobInfo = + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) + .setJobId(JOB_ID) + .setStatistics(jobStatistics) + .setJobId(JOB_ID) + .setEtag(ETAG) + .setGeneratedId(GENERATED_ID) + .setSelfLink(SELF_LINK) + .setUserEmail(EMAIL) + .setStatus(JOB_STATUS) + .build(); + + when(bigquery.getOptions()).thenReturn(mockOptions); + when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); + Job completedJob = + expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); + QueryResponse completedQuery = + QueryResponse.newBuilder() + .setCompleted(true) + .setTotalRows(1) // Lies to force call of listTableData(). 
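// No failure is stubbed in this test, so the retry config's rate-limit message matcher never fires and a single getQueryResults attempt completes the wait.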
+ .setSchema(Schema.of(Field.of("_f0", LegacySQLTypeName.INTEGER))) + .setErrors(ImmutableList.of()) + .build(); + + when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); + when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) + .thenReturn(completedQuery); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); + assertThat(job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS)) + .isSameInstanceAs(completedJob); + verify(bigquery, times(1)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); + verify(bigquery).getJob(JOB_INFO.getJobId()); + } + + @Test + public void testWaitForWithBigQueryRetryConfigShouldRetry() throws InterruptedException { + QueryStatistics jobStatistics = + QueryStatistics.newBuilder() + .setCreationTimestamp(1L) + .setEndTime(3L) + .setStartTime(2L) + .build(); + JobInfo jobInfo = + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) + .setJobId(JOB_ID) + .setStatistics(jobStatistics) + .setJobId(JOB_ID) + .setEtag(ETAG) + .setGeneratedId(GENERATED_ID) + .setSelfLink(SELF_LINK) + .setUserEmail(EMAIL) + .setStatus(JOB_STATUS) + .build(); + + when(bigquery.getOptions()).thenReturn(mockOptions); + when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); + Job completedJob = + expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); + QueryResponse completedQuery = + QueryResponse.newBuilder() + .setCompleted(true) + .setTotalRows(1) // Lies to force call of listTableData(). + .setSchema(Schema.of(Field.of("_f0", LegacySQLTypeName.INTEGER))) + .setErrors(ImmutableList.of()) + .build(); + + when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); + BigQueryError bigQueryError = + new BigQueryError( + "testReasonRateLimitExceeded", "US", "testMessage: Exceeded rate limits:"); + + ImmutableList bigQueryErrorList = ImmutableList.of(bigQueryError); + BigQueryException bigQueryException = new BigQueryException(bigQueryErrorList); + when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) + .thenThrow(bigQueryException) + .thenReturn(completedQuery); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); + assertThat(job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS)) + .isSameInstanceAs(completedJob); + // Verify that getQueryResults is attempted twice. First during bigQueryException with "Exceeded + // rate limits" error message and the second successful attempt. + verify(bigquery, times(2)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); + verify(bigquery).getJob(JOB_INFO.getJobId()); + } + + @Test + public void testWaitForWithBigQueryRetryConfigErrorShouldNotRetry() throws InterruptedException { + QueryStatistics jobStatistics = + QueryStatistics.newBuilder() + .setCreationTimestamp(1L) + .setEndTime(3L) + .setStartTime(2L) + .build(); + JobInfo jobInfo = + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) + .setJobId(JOB_ID) + .setStatistics(jobStatistics) + .setJobId(JOB_ID) + .setEtag(ETAG) + .setGeneratedId(GENERATED_ID) + .setSelfLink(SELF_LINK) + .setUserEmail(EMAIL) + .setStatus(JOB_STATUS) + .build(); + + when(bigquery.getOptions()).thenReturn(mockOptions); + when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); + QueryResponse completedQuery = + QueryResponse.newBuilder() + .setCompleted(true) + .setTotalRows(1) // Lies to force call of listTableData(). 
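// This response is built but never returned here: the error thrown below does not carry a message matching the retry config, so waitFor surfaces it on the first attempt.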
+ .setSchema(Schema.of(Field.of("_f0", LegacySQLTypeName.INTEGER))) + .setErrors(ImmutableList.of()) + .build(); + + BigQueryError bigQueryError = + new BigQueryError("testReasonRateLimitExceeded", "US", "testMessage: do not retry error"); + + ImmutableList bigQueryErrorList = ImmutableList.of(bigQueryError); + BigQueryException bigQueryException = new BigQueryException(bigQueryErrorList); + when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) + .thenThrow(bigQueryException) + .thenReturn(completedQuery); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); + try { + job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS); + fail("JobException expected"); + } catch (BigQueryException e) { + assertNotNull(e.getErrors()); + } + // Verify that getQueryResults is attempted only once and not retried since the error message + // does not match. + verify(bigquery, times(1)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); + } + @Test public void testReload() { JobInfo updatedInfo = JOB_INFO.toBuilder().setEtag("etag").build(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java index 563a3f34a..d987eb28e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java @@ -21,6 +21,7 @@ import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; import com.google.cloud.bigquery.JobInfo.WriteDisposition; +import com.google.cloud.bigquery.LoadJobConfiguration.SourceColumnMatch; import com.google.cloud.bigquery.TimePartitioning.Type; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -37,6 +38,7 @@ public class LoadJobConfigurationTest { .setAllowJaggedRows(true) .setAllowQuotedNewLines(false) .setEncoding(StandardCharsets.UTF_8) + .setPreserveAsciiControlCharacters(true) .build(); private static final TableId TABLE_ID = TableId.of("dataset", "table"); private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; @@ -67,6 +69,7 @@ public class LoadJobConfigurationTest { private static final Map LABELS = ImmutableMap.of("test-job-name", "test-load-job"); private static final Long TIMEOUT = 10L; + private static final String RESERVATION = "reservation"; private static final RangePartitioning.Range RANGE = RangePartitioning.Range.newBuilder().setStart(1L).setInterval(2L).setEnd(10L).build(); private static final RangePartitioning RANGE_PARTITIONING = @@ -76,6 +79,13 @@ public class LoadJobConfigurationTest { private static final String KEY = "session_id"; private static final String VALUE = "session_id_1234567890"; + private static final String TIME_ZONE = "America/Los_Angeles"; + private static final String DATE_FORMAT = "YYYY-MM-DD"; + private static final String DATETIME_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final String TIME_FORMAT = "HH:MI:SS"; + private static final String TIMESTAMP_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final SourceColumnMatch SOURCE_COLUMN_MATCH = SourceColumnMatch.POSITION; + private static final List NULL_MARKERS = ImmutableList.of("SQL NULL", "TEST MARKER"); private static final ConnectionProperty CONNECTION_PROPERTY = 
ConnectionProperty.newBuilder().setKey(KEY).setValue(VALUE).build(); private static final List CONNECTION_PROPERTIES = @@ -92,6 +102,7 @@ public class LoadJobConfigurationTest { .setWriteDisposition(WRITE_DISPOSITION) .setFormatOptions(CSV_OPTIONS) .setFileSetSpecType("FILE_SET_SPEC_TYPE_FILE_SYSTEM_MATCH") + .setColumnNameCharacterMap("STRICT") .setIgnoreUnknownValues(IGNORE_UNKNOWN_VALUES) .setMaxBadRecords(MAX_BAD_RECORDS) .setSchema(TABLE_SCHEMA) @@ -107,6 +118,14 @@ public class LoadJobConfigurationTest { .setHivePartitioningOptions(HIVE_PARTITIONING_OPTIONS) .setConnectionProperties(CONNECTION_PROPERTIES) .setCreateSession(CREATE_SESSION) + .setReservation(RESERVATION) + .setTimeZone(TIME_ZONE) + .setDateFormat(DATE_FORMAT) + .setDatetimeFormat(DATETIME_FORMAT) + .setTimeFormat(TIME_FORMAT) + .setTimestampFormat(TIMESTAMP_FORMAT) + .setSourceColumnMatch(SOURCE_COLUMN_MATCH) + .setNullMarkers(NULL_MARKERS) .build(); private static final DatastoreBackupOptions BACKUP_OPTIONS = @@ -126,6 +145,7 @@ public class LoadJobConfigurationTest { .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) .setRangePartitioning(RANGE_PARTITIONING) + .setReservation(RESERVATION) .build(); private static final LoadJobConfiguration LOAD_CONFIGURATION_AVRO = LoadJobConfiguration.newBuilder(TABLE_ID, SOURCE_URIS) @@ -144,14 +164,14 @@ public class LoadJobConfigurationTest { .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) .setRangePartitioning(RANGE_PARTITIONING) + .setReservation(RESERVATION) .build(); @Test public void testToBuilder() { compareLoadJobConfiguration(LOAD_CONFIGURATION_CSV, LOAD_CONFIGURATION_CSV.toBuilder().build()); LoadJobConfiguration configurationCSV = - LOAD_CONFIGURATION_CSV - .toBuilder() + LOAD_CONFIGURATION_CSV.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationCSV.getDestinationTable().getTable()); @@ -161,8 +181,7 @@ public void testToBuilder() { compareLoadJobConfiguration( LOAD_CONFIGURATION_BACKUP, LOAD_CONFIGURATION_BACKUP.toBuilder().build()); LoadJobConfiguration configurationBackup = - LOAD_CONFIGURATION_BACKUP - .toBuilder() + LOAD_CONFIGURATION_BACKUP.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationBackup.getDestinationTable().getTable()); @@ -172,8 +191,7 @@ public void testToBuilder() { compareLoadJobConfiguration( LOAD_CONFIGURATION_AVRO, LOAD_CONFIGURATION_AVRO.toBuilder().build()); LoadJobConfiguration configurationAvro = - LOAD_CONFIGURATION_AVRO - .toBuilder() + LOAD_CONFIGURATION_AVRO.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationAvro.getDestinationTable().getTable()); @@ -224,8 +242,7 @@ public void testSetProjectId() { @Test public void testSetProjectIdDoNotOverride() { LoadConfiguration configuration = - LOAD_CONFIGURATION_CSV - .toBuilder() + LOAD_CONFIGURATION_CSV.toBuilder() .setDestinationTable(TABLE_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("do-not-update"); @@ -242,6 +259,7 @@ private void compareLoadJobConfiguration( assertEquals(expected, value); assertEquals(expected.hashCode(), value.hashCode()); assertEquals(expected.getFileSetSpecType(), value.getFileSetSpecType()); + assertEquals(expected.getColumnNameCharacterMap(), value.getColumnNameCharacterMap()); assertEquals(expected.toString(), value.toString()); assertEquals(expected.getDestinationTable(), value.getDestinationTable()); assertEquals(expected.getDecimalTargetTypes(), 
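The ParquetOptions changes a little further down add `mapTargetType` alongside list inference and enum handling. By the shape of those tests, `ARRAY_OF_STRUCT` is the one explicit value exercised, presumably asking BigQuery to land Parquet maps as repeated key/value structs; a sketch:

  ParquetOptions parquetOptions =
      ParquetOptions.newBuilder()
          .setEnableListInference(true)
          .setEnumAsString(true)
          .setMapTargetType("ARRAY_OF_STRUCT") // only value the updated tests cover
          .build();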
value.getDecimalTargetTypes()); @@ -268,5 +286,13 @@ private void compareLoadJobConfiguration( assertEquals(expected.getHivePartitioningOptions(), value.getHivePartitioningOptions()); assertEquals(expected.getConnectionProperties(), value.getConnectionProperties()); assertEquals(expected.getCreateSession(), value.getCreateSession()); + assertEquals(expected.getReservation(), value.getReservation()); + assertEquals(expected.getTimeZone(), value.getTimeZone()); + assertEquals(expected.getDateFormat(), value.getDateFormat()); + assertEquals(expected.getDatetimeFormat(), value.getDatetimeFormat()); + assertEquals(expected.getTimeFormat(), value.getTimeFormat()); + assertEquals(expected.getTimestampFormat(), value.getTimestampFormat()); + assertEquals(expected.getSourceColumnMatch(), value.getSourceColumnMatch()); + assertEquals(expected.getNullMarkers(), value.getNullMarkers()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java index 8812b2e27..c70ac3355 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java @@ -24,14 +24,22 @@ public class ParquetOptionsTest { private static final ParquetOptions OPTIONS = - ParquetOptions.newBuilder().setEnableListInference(true).setEnumAsString(true).build(); + ParquetOptions.newBuilder() + .setEnableListInference(true) + .setEnumAsString(true) + .setMapTargetType("ARRAY_OF_STRUCT") + .build(); @Test public void testToBuilder() { compareParquetOptions(OPTIONS, OPTIONS.toBuilder().build()); ParquetOptions parquetOptions = OPTIONS.toBuilder().setEnableListInference(true).build(); assertEquals(true, parquetOptions.getEnableListInference()); - parquetOptions = parquetOptions.toBuilder().setEnumAsString(true).build(); + parquetOptions = + parquetOptions.toBuilder() + .setEnumAsString(true) + .setMapTargetType("ARRAY_OF_STRUCT") + .build(); compareParquetOptions(OPTIONS, parquetOptions); } @@ -47,6 +55,7 @@ public void testBuilder() { assertEquals(FormatOptions.PARQUET, OPTIONS.getType()); assertEquals(true, OPTIONS.getEnableListInference()); assertEquals(true, OPTIONS.getEnumAsString()); + assertEquals("ARRAY_OF_STRUCT", OPTIONS.getMapTargetType()); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java index f71e152e6..f25aa47ed 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java @@ -112,6 +112,7 @@ public class QueryJobConfigurationTest { ImmutableMap.of("string", STRING_PARAMETER, "timestamp", TIMESTAMP_PARAMETER); private static final String PARAMETER_MODE = "POSITIONAL"; private static final JobCreationMode JOB_CREATION_MODE = JobCreationMode.JOB_CREATION_OPTIONAL; + private static final String RESERVATION = "reservation"; private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -139,16 +140,15 @@ public class QueryJobConfigurationTest { .setConnectionProperties(CONNECTION_PROPERTIES) .setPositionalParameters(POSITIONAL_PARAMETER) .setParameterMode(PARAMETER_MODE) + .setReservation(RESERVATION) 
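`setReservation` is threaded through load and query configurations alike, and the compare helpers now assert that it round-trips. The tests use the bare placeholder "reservation"; against the real service the value would be a reservation resource name, along the lines of this (hedged) sketch:

  QueryJobConfiguration reserved =
      QueryJobConfiguration.newBuilder("SELECT 1")
          // Illustrative resource name, not taken from the tests.
          .setReservation("projects/my-project/locations/US/reservations/my-reservation")
          .build();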
.build(); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER = - QUERY_JOB_CONFIGURATION - .toBuilder() + QUERY_JOB_CONFIGURATION.toBuilder() .setPositionalParameters(ImmutableList.of()) .addPositionalParameter(STRING_PARAMETER) .build(); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER = - QUERY_JOB_CONFIGURATION - .toBuilder() + QUERY_JOB_CONFIGURATION.toBuilder() .setPositionalParameters(ImmutableList.of()) .setNamedParameters(NAME_PARAMETER) .build(); @@ -190,6 +190,7 @@ public void testToPbAndFromPb() { assertNotNull(QUERY_JOB_CONFIGURATION.getConnectionProperties()); assertNotNull(QUERY_JOB_CONFIGURATION.getPositionalParameters()); assertNotNull(QUERY_JOB_CONFIGURATION.getNamedParameters()); + assertNotNull(QUERY_JOB_CONFIGURATION.getReservation()); compareQueryJobConfiguration( QUERY_JOB_CONFIGURATION, QueryJobConfiguration.fromPb(QUERY_JOB_CONFIGURATION.toPb())); QueryJobConfiguration job = QueryJobConfiguration.of(QUERY); @@ -206,8 +207,7 @@ public void testSetProjectId() { @Test public void testSetProjectIdDoNotOverride() { QueryJobConfiguration configuration = - QUERY_JOB_CONFIGURATION - .toBuilder() + QUERY_JOB_CONFIGURATION.toBuilder() .setDestinationTable(TABLE_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("update-only-on-dataset"); @@ -274,5 +274,6 @@ private void compareQueryJobConfiguration( assertEquals(expected.getConnectionProperties(), value.getConnectionProperties()); assertEquals(expected.getPositionalParameters(), value.getPositionalParameters()); assertEquals(expected.getNamedParameters(), value.getNamedParameters()); + assertEquals(expected.getReservation(), value.getReservation()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java index 0534865b2..75060a4f0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java @@ -17,17 +17,21 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; -import static org.threeten.bp.temporal.ChronoField.HOUR_OF_DAY; -import static org.threeten.bp.temporal.ChronoField.MINUTE_OF_HOUR; -import static org.threeten.bp.temporal.ChronoField.NANO_OF_SECOND; -import static org.threeten.bp.temporal.ChronoField.SECOND_OF_MINUTE; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; import com.google.api.services.bigquery.model.QueryParameterType; import com.google.common.collect.ImmutableMap; import com.google.gson.JsonObject; import java.math.BigDecimal; import java.text.ParseException; +import java.time.Instant; import java.time.Period; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; @@ -35,11 +39,6 @@ import java.util.Map; import org.junit.Assert; import org.junit.Test; -import org.threeten.bp.Instant; -import org.threeten.bp.ZoneOffset; -import org.threeten.bp.format.DateTimeFormatter; -import org.threeten.bp.format.DateTimeFormatterBuilder; -import org.threeten.bp.jdk8.Jdk8Methods; 
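The imports above trade the threeten-bp backport for `java.time`, so `Jdk8Methods.floorDiv`/`floorMod` become the `Math` equivalents that have shipped in the JDK since Java 8. The microseconds-to-`Instant` conversion the timestamp test performs is:

  long micros = 1_571_068_536_842_123L;                        // microseconds since epoch
  long secs = Math.floorDiv(micros, 1_000_000L);               // whole seconds
  int nanos = (int) Math.floorMod(micros, 1_000_000L) * 1_000; // leftover micros as nanos
  Instant instant = Instant.ofEpochSecond(secs, nanos);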
import org.threeten.extra.PeriodDuration; public class QueryParameterValueTest { @@ -338,8 +337,8 @@ public void testTimestampFromLong() { public void testTimestampWithFormatter() { long timestampInMicroseconds = 1571068536842L * 1000 + 123; long microseconds = 1_000_000; - long secs = Jdk8Methods.floorDiv(timestampInMicroseconds, microseconds); - int nano = (int) Jdk8Methods.floorMod(timestampInMicroseconds, microseconds) * 1000; + long secs = Math.floorDiv(timestampInMicroseconds, microseconds); + int nano = (int) Math.floorMod(timestampInMicroseconds, microseconds) * 1000; Instant instant = Instant.ofEpochSecond(secs, nano); String expected = TIMESTAMPFORMATTER.format(instant); assertThat(expected) @@ -619,4 +618,69 @@ private static void assertArrayDataEquals( assertThat(value.getArrayValues()).isNull(); } } + + @Test + public void testRange() { + testRangeDataEquals(null, null, FieldElementType.newBuilder().setType("DATE").build()); + testRangeDataEquals(null, "1971-02-03", FieldElementType.newBuilder().setType("DATE").build()); + testRangeDataEquals("1970-01-02", null, FieldElementType.newBuilder().setType("DATE").build()); + testRangeDataEquals( + "1970-01-02", "1971-02-03", FieldElementType.newBuilder().setType("DATE").build()); + + testRangeDataEquals(null, null, FieldElementType.newBuilder().setType("DATETIME").build()); + testRangeDataEquals( + null, + "2015-09-20 06:41:35.220000", + FieldElementType.newBuilder().setType("DATETIME").build()); + testRangeDataEquals( + "2014-08-19 05:41:35.220000", + null, + FieldElementType.newBuilder().setType("DATETIME").build()); + testRangeDataEquals( + "2014-08-19 05:41:35.220000", + "2015-09-20 06:41:35.220000", + FieldElementType.newBuilder().setType("DATETIME").build()); + + testRangeDataEquals(null, null, FieldElementType.newBuilder().setType("TIMESTAMP").build()); + testRangeDataEquals( + null, + "2015-09-20 13:41:35.220000+01:00", + FieldElementType.newBuilder().setType("TIMESTAMP").build()); + testRangeDataEquals( + "2014-08-19 12:41:35.220000+00:00", + null, + FieldElementType.newBuilder().setType("TIMESTAMP").build()); + testRangeDataEquals( + "2014-08-19 12:41:35.220000+00:00", + "2015-09-20 13:41:35.220000+01:00", + FieldElementType.newBuilder().setType("TIMESTAMP").build()); + } + + /** Helper method to test range QueryParameterValue and its permutations. 
*/ + private static void testRangeDataEquals(String start, String end, FieldElementType type) { + QueryParameterValue rangeField = + QueryParameterValue.range( + Range.newBuilder().setType(type).setStart(start).setEnd(end).build()); + QueryParameterType parameterType = rangeField.toTypePb(); + com.google.api.services.bigquery.model.QueryParameterValue parameterValue = + rangeField.toValuePb(); + QueryParameterValue queryParameterValue = + QueryParameterValue.fromPb(parameterValue, parameterType); + + assertThat(queryParameterValue.getType()).isEqualTo(StandardSQLTypeName.RANGE); + if (start == null) { + assertThat(queryParameterValue.getRangeValues().getStart().isNull()).isTrue(); + } else { + assertThat(queryParameterValue.getRangeValues().getStart().getStringValue()).isEqualTo(start); + } + if (end == null) { + assertThat(queryParameterValue.getRangeValues().getEnd().isNull()).isTrue(); + } else { + assertThat(queryParameterValue.getRangeValues().getEnd().getStringValue()).isEqualTo(end); + } + assertThat(queryParameterValue.getRangeValues().getType()).isEqualTo(type); + assertThat(queryParameterValue.getArrayValues()).isNull(); + assertThat(queryParameterValue.getStructValues()).isNull(); + assertThat(queryParameterValue.getValue()).isNull(); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java index 0d9464c76..ed9effe0b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java @@ -17,7 +17,9 @@ package com.google.cloud.bigquery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; import com.google.api.services.bigquery.model.QueryRequest; import com.google.cloud.bigquery.JobInfo.CreateDisposition; @@ -108,6 +110,7 @@ public class QueryRequestInfoTest { ImmutableMap.of("string", STRING_PARAMETER, "timestamp", TIMESTAMP_PARAMETER); private static final JobCreationMode jobCreationModeRequired = JobCreationMode.JOB_CREATION_REQUIRED; + private static final String RESERVATION = "reservation"; private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -135,8 +138,9 @@ public class QueryRequestInfoTest { .setPositionalParameters(POSITIONAL_PARAMETER) .setMaxResults(100L) .setJobCreationMode(jobCreationModeRequired) + .setReservation(RESERVATION) .build(); - QueryRequestInfo REQUEST_INFO = new QueryRequestInfo(QUERY_JOB_CONFIGURATION); + QueryRequestInfo REQUEST_INFO = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SUPPORTED = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -149,8 +153,10 @@ public class QueryRequestInfoTest { .setPositionalParameters(POSITIONAL_PARAMETER) .setCreateSession(CREATE_SESSION) .setMaxResults(100L) + .setReservation(RESERVATION) .build(); - QueryRequestInfo REQUEST_INFO_SUPPORTED = new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED); + QueryRequestInfo REQUEST_INFO_SUPPORTED = + new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED, false); @Test public void testIsFastQuerySupported() { @@ -171,8 +177,19 @@ public void testToPb() { @Test public void equalTo() { 
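The helper above round-trips a range parameter through `toTypePb`/`toValuePb` and back with `fromPb`. Constructing such a parameter on the client side looks like this sketch, using values from the DATE cases:

  QueryParameterValue dateRangeParam =
      QueryParameterValue.range(
          Range.newBuilder()
              .setType(FieldElementType.newBuilder().setType("DATE").build())
              .setStart("2020-01-01") // null here would mean an unbounded start
              .setEnd("2020-12-31")   // likewise for an unbounded end
              .build());
  // dateRangeParam.getType() resolves to StandardSQLTypeName.RANGE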
compareQueryRequestInfo( - new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED), REQUEST_INFO_SUPPORTED); - compareQueryRequestInfo(new QueryRequestInfo(QUERY_JOB_CONFIGURATION), REQUEST_INFO); + new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED, false), REQUEST_INFO_SUPPORTED); + compareQueryRequestInfo(new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false), REQUEST_INFO); + } + + @Test + public void testInt64Timestamp() { + QueryRequestInfo requestInfo = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, false); + QueryRequest requestPb = requestInfo.toPb(); + assertFalse(requestPb.getFormatOptions().getUseInt64Timestamp()); + + QueryRequestInfo requestInfoLosslessTs = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, true); + QueryRequest requestLosslessTsPb = requestInfoLosslessTs.toPb(); + assertTrue(requestLosslessTsPb.getFormatOptions().getUseInt64Timestamp()); } /* @@ -199,5 +216,7 @@ private void compareQueryRequestInfo(QueryRequestInfo expected, QueryRequestInfo assertEquals(expectedQueryReq.getUseQueryCache(), actualQueryReq.getUseQueryCache()); assertEquals(expectedQueryReq.getUseLegacySql(), actualQueryReq.getUseLegacySql()); assertEquals(expectedQueryReq.get("jobCreationMode"), actualQueryReq.get("jobCreationMode")); + assertEquals(expectedQueryReq.getFormatOptions(), actualQueryReq.getFormatOptions()); + assertEquals(expectedQueryReq.getReservation(), actualQueryReq.getReservation()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java new file mode 100644 index 000000000..2d98376b3 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java @@ -0,0 +1,118 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
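`QueryRequestInfo` now takes an explicit lossless-timestamp flag: when it is true, the request's format options set `useInt64Timestamp`, returning timestamps as int64 microseconds since the epoch. A decimal epoch-seconds value with six fractional digits sits at the edge of double precision, so the integer encoding avoids rounding in the last microsecond digit. In short:

  QueryRequestInfo lossless = new QueryRequestInfo(QUERY_JOB_CONFIGURATION, true);
  // on the wire: formatOptions.useInt64Timestamp = true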
+ */ +package com.google.cloud.bigquery; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import com.google.common.collect.ImmutableMap; +import org.junit.Test; + +public class RangeTest { + private static final Range RANGE_DATE = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .setStart("1970-01-02") + .setEnd("1970-03-04") + .build(); + + private static final Range RANGE_DATETIME = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .setStart("2014-08-19 05:41:35.220000") + .setEnd("2015-09-20 06:41:35.220000") + .build(); + + private static final Range RANGE_TIMESTAMP = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .build(); + + @Test + public void testOf() { + compareRange(null, null, Range.of("[null, NULL)")); + compareRange(null, null, Range.of("[unbounded, UNBOUNDED)")); + compareRange(null, null, Range.of("[nUlL, uNbOuNdEd)")); + + compareRange(null, "2020-12-31", Range.of("[null, 2020-12-31)")); + compareRange("2020-01-01", null, Range.of("[2020-01-01, null)")); + compareRange("2020-01-01", "2020-12-31", Range.of("[2020-01-01, 2020-12-31)")); + } + + @Test + public void testBuilder() { + assertEquals("1970-01-02", RANGE_DATE.getStart().getStringValue()); + assertEquals("1970-03-04", RANGE_DATE.getEnd().getStringValue()); + assertEquals(FieldElementType.newBuilder().setType("DATE").build(), RANGE_DATE.getType()); + + assertEquals("2014-08-19 05:41:35.220000", RANGE_DATETIME.getStart().getStringValue()); + assertEquals("2015-09-20 06:41:35.220000", RANGE_DATETIME.getEnd().getStringValue()); + assertEquals( + FieldElementType.newBuilder().setType("DATETIME").build(), RANGE_DATETIME.getType()); + + assertEquals("2014-08-19 12:41:35.220000+00:00", RANGE_TIMESTAMP.getStart().getStringValue()); + assertEquals("2015-09-20 13:41:35.220000+01:00", RANGE_TIMESTAMP.getEnd().getStringValue()); + assertEquals( + FieldElementType.newBuilder().setType("TIMESTAMP").build(), RANGE_TIMESTAMP.getType()); + } + + @Test + public void testToBuilder() { + compareRange(RANGE_DATE, RANGE_DATE.toBuilder().build()); + compareRange(RANGE_DATETIME, RANGE_DATETIME.toBuilder().build()); + compareRange(RANGE_TIMESTAMP, RANGE_TIMESTAMP.toBuilder().build()); + } + + @Test + public void testGetValues() { + compareRange(null, null, Range.of("[null, NULL)").getValues()); + compareRange(null, null, Range.of("[unbounded, UNBOUNDED)").getValues()); + compareRange(null, null, Range.of("[nUlL, uNbOuNdEd)").getValues()); + + compareRange(null, "2020-12-31", Range.of("[null, 2020-12-31)").getValues()); + compareRange("2020-01-01", null, Range.of("[2020-01-01, null)").getValues()); + compareRange("2020-01-01", "2020-12-31", Range.of("[2020-01-01, 2020-12-31)").getValues()); + } + + private static void compareRange(Range expected, Range value) { + assertEquals(expected.getStart(), value.getStart()); + assertEquals(expected.getEnd(), value.getEnd()); + assertEquals(expected.getType(), value.getType()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + } + + private static void compareRange(String expectedStart, String expectedEnd, Range range) { + if (expectedStart == null) { + assertTrue(range.getStart().isNull()); + } else { + assertEquals(expectedStart, range.getStart().getStringValue()); + } + if 
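`Range.of` parses BigQuery's half-open interval literal `[start, end)`, and `testOf` above shows that `null` and `unbounded` (in any letter case) mark an open side. For example:

  Range bounded = Range.of("[2020-01-01, 2020-12-31)"); // start inclusive, end exclusive
  Range openEnd = Range.of("[2020-01-01, UNBOUNDED)");
  // openEnd.getEnd().isNull() == true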
(expectedEnd == null) { + assertTrue(range.getEnd().isNull()); + } else { + assertEquals(expectedEnd, range.getEnd().getStringValue()); + } + } + + private static void compareRange( + String expectedStart, String expectedEnd, ImmutableMap values) { + assertEquals(expectedStart, values.get("start")); + assertEquals(expectedEnd, values.get("end")); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java index ae061b62f..145dc8914 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java @@ -52,8 +52,7 @@ public class RoutineInfoTest { private static final String BODY = "body"; private static final RoutineInfo ROUTINE_INFO = - RoutineInfo.of(ROUTINE_ID) - .toBuilder() + RoutineInfo.of(ROUTINE_ID).toBuilder() .setEtag(ETAG) .setRoutineType(ROUTINE_TYPE) .setCreationTime(CREATION_TIME) diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java index 05f3bbf41..7d0f57ef7 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java @@ -81,8 +81,7 @@ public void testToBuilder() { .build(); TableConstraints tableConstraints = - TABLE_CONSTRAINTS - .toBuilder() + TABLE_CONSTRAINTS.toBuilder() .setForeignKeys(Arrays.asList(foreignKey1, foreignKey2)) .setPrimaryKey(primaryKey) .build(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java index a959a8991..a90b5c4d7 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java @@ -31,9 +31,11 @@ import com.google.cloud.RestorableState; import com.google.cloud.WriteChannel; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; -import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import java.io.IOException; +import java.net.ConnectException; import java.net.SocketException; +import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Random; @@ -68,7 +70,7 @@ public class TableDataWriteChannelTest { private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; - private BigQueryRpc bigqueryRpcMock; + private HttpBigQueryRpc bigqueryRpcMock; private BigQueryFactory bigqueryFactoryMock; private BigQuery bigqueryMock; private Job job; @@ -81,7 +83,7 @@ public class TableDataWriteChannelTest { @Before public void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); - bigqueryRpcMock = mock(BigQueryRpc.class); + bigqueryRpcMock = mock(HttpBigQueryRpc.class); bigqueryFactoryMock = mock(BigQueryFactory.class); bigqueryMock = mock(BigQuery.class); when(bigqueryMock.getOptions()).thenReturn(options); @@ -97,8 +99,8 @@ public void setUp() { } @Test - public void testCreate() { - when(bigqueryRpcMock.open( + public void testCreate() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new 
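The mock switches from the `BigQueryRpc` interface to `HttpBigQueryRpc` because the channel now calls the `*SkipExceptionTranslation` variants: the transport layer throws raw `IOException`s, and the caller's retry loop classifies them before anything is wrapped in a `BigQueryException`. A hypothetical classifier mirroring the three failures these tests stub (not the library's actual internals):

  static boolean isTransient(IOException e) {
    return e instanceof SocketException       // connection reset or closed mid-request
        || e instanceof UnknownHostException  // transient DNS resolution failure
        || e instanceof ConnectException;     // TCP connect refused or timed out
  }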
com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) @@ -107,26 +109,27 @@ public void testCreate() { assertTrue(writer.isOpen()); assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); } @Test - public void testCreateRetryableError() { - BigQueryException exception = new BigQueryException(new SocketException("Socket closed")); - when(bigqueryRpcMock.open( + public void testCreateRetryableErrors() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) - .thenThrow(exception) + .thenThrow(new SocketException("Socket closed")) + .thenThrow(new UnknownHostException()) + .thenThrow(new ConnectException()) .thenReturn(UPLOAD_ID); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); assertTrue(writer.isOpen()); assertNull(writer.getJob()); - verify(bigqueryRpcMock, times(2)) - .open( + verify(bigqueryRpcMock, times(4)) + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); @@ -134,12 +137,11 @@ public void testCreateRetryableError() { @Test public void testCreateNonRetryableError() throws IOException { - RuntimeException ex = new RuntimeException("expected"); - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) - .thenThrow(ex); + .thenThrow(new RuntimeException("expected")); try (TableDataWriteChannel channel = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION)) { Assert.fail(); @@ -147,7 +149,7 @@ public void testCreateNonRetryableError() throws IOException { Assert.assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); } verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); @@ -155,7 +157,7 @@ public void testCreateNonRetryableError() throws IOException { @Test public void testWriteWithoutFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) @@ -164,7 +166,7 @@ public void testWriteWithoutFlush() throws IOException { assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); @@ -172,12 +174,12 @@ public void testWriteWithoutFlush() throws IOException { @Test public void testWriteWithFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) 
.setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -192,12 +194,12 @@ public void testWriteWithFlush() throws IOException { assertArrayEquals(buffer.array(), capturedBuffer.getValue()); assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write( + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -207,19 +209,22 @@ public void testWriteWithFlush() throws IOException { } @Test - public void testWritesAndFlush() throws IOException { - when(bigqueryRpcMock.open( + public void testWritesAndFlushRetryableErrors() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(DEFAULT_CHUNK_SIZE), eq(false))) + .thenThrow(new SocketException("Socket closed")) + .thenThrow(new UnknownHostException()) + .thenThrow(new ConnectException()) .thenReturn(null); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; @@ -235,12 +240,53 @@ public void testWritesAndFlush() throws IOException { } assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock, times(4)) + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), + capturedBuffer.capture(), + eq(0), + eq(0L), + eq(DEFAULT_CHUNK_SIZE), + eq(false)); + } + + @Test + public void testWritesAndFlushNonRetryableError() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb()))) + .thenReturn(UPLOAD_ID); + when(bigqueryRpcMock.writeSkipExceptionTranslation( + eq(UPLOAD_ID), + capturedBuffer.capture(), + eq(0), + eq(0L), + eq(DEFAULT_CHUNK_SIZE), + eq(false))) + .thenThrow(new RuntimeException("expected")); + try { + writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); + ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = randomBuffer(MIN_CHUNK_SIZE); + assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); + } + Assert.fail(); + } catch (RuntimeException expected) { + Assert.assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); + } verify(bigqueryRpcMock) - .write( + .openSkipExceptionTranslation( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock, times(1)) + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -251,12 +297,12 @@ public void testWritesAndFlush() throws IOException { @Test public void 
testCloseWithoutFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -266,23 +312,24 @@ public void testCloseWithoutFlush() throws IOException { assertTrue(!writer.isOpen()); assertEquals(job, writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test public void testCloseWithFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -294,23 +341,23 @@ public void testCloseWithFlush() throws IOException { assertTrue(!writer.isOpen()); assertEquals(job, writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write( + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), eq(true)); } @Test public void testWriteClosed() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -323,22 +370,23 @@ public void testWriteClosed() throws IOException { // expected } verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test public void testSaveAndRestore() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() 
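Outside the tests, the channel comes from the service rather than from the constructor, and `close()` is what sends the final chunk (`last=true`) and materializes the load job. A sketch of typical use:

  WriteChannelConfiguration writeConfig =
      WriteChannelConfiguration.newBuilder(TableId.of("dataset", "table"))
          .setFormatOptions(FormatOptions.csv())
          .build();
  TableDataWriteChannel channel = bigquery.writer(writeConfig);
  try {
    channel.write(ByteBuffer.wrap("a,b\n1,2\n".getBytes(StandardCharsets.UTF_8)));
  } finally {
    channel.close(); // flushes the last chunk; channel.getJob() is now non-null
  }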
.setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -359,12 +407,12 @@ public void testSaveAndRestore() throws IOException { assertArrayEquals(buffer2.array(), capturedBuffer.getAllValues().get(1)); assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getAllValues().get(1)); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock, times(2)) - .write( + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -375,12 +423,12 @@ public void testSaveAndRestore() throws IOException { @Test public void testSaveAndRestoreClosed() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -398,17 +446,18 @@ public void testSaveAndRestoreClosed() throws IOException { assertArrayEquals(new byte[0], capturedBuffer.getValue()); assertEquals(expectedWriterState, restoredWriter.capture()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test - public void testStateEquals() { - when(bigqueryRpcMock.open( + public void testStateEquals() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) @@ -424,7 +473,7 @@ public void testStateEquals() { assertEquals(state.hashCode(), state2.hashCode()); assertEquals(state.toString(), state2.toString()); verify(bigqueryRpcMock, times(2)) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java index 7b912ce2b..240f12185 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java @@ -124,8 +124,7 @@ public class WriteChannelConfigurationTest { public void testToBuilder() { compareLoadConfiguration(LOAD_CONFIGURATION_CSV, LOAD_CONFIGURATION_CSV.toBuilder().build()); WriteChannelConfiguration configuration = - LOAD_CONFIGURATION_CSV - .toBuilder() + LOAD_CONFIGURATION_CSV.toBuilder() 
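The save/restore tests lean on the `RestorableState` contract from google-cloud-core: a channel's position and buffered bytes can be captured mid-upload, serialized, and resumed later, possibly in another process. Roughly:

  RestorableState<WriteChannel> state = channel.capture(); // snapshot of position + buffer
  WriteChannel resumed = state.restore();                  // continues from the snapshot
  resumed.write(ByteBuffer.wrap(nextChunk));               // nextChunk: the remaining bytes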
.setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configuration.getDestinationTable().getTable()); @@ -134,8 +133,7 @@ public void testToBuilder() { compareLoadConfiguration(LOAD_CONFIGURATION_AVRO, LOAD_CONFIGURATION_AVRO.toBuilder().build()); WriteChannelConfiguration configurationAvro = - LOAD_CONFIGURATION_AVRO - .toBuilder() + LOAD_CONFIGURATION_AVRO.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationAvro.getDestinationTable().getTable()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index 7e6da2b27..ec1f7b5a0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -41,11 +41,15 @@ import com.google.cloud.ServiceOptions; import com.google.cloud.bigquery.Acl; import com.google.cloud.bigquery.Acl.DatasetAclEntity; +import com.google.cloud.bigquery.Acl.Expr; +import com.google.cloud.bigquery.Acl.User; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; import com.google.cloud.bigquery.BigQuery.DatasetField; import com.google.cloud.bigquery.BigQuery.DatasetListOption; import com.google.cloud.bigquery.BigQuery.DatasetOption; +import com.google.cloud.bigquery.BigQuery.DatasetUpdateMode; +import com.google.cloud.bigquery.BigQuery.DatasetView; import com.google.cloud.bigquery.BigQuery.JobField; import com.google.cloud.bigquery.BigQuery.JobListOption; import com.google.cloud.bigquery.BigQuery.JobOption; @@ -57,6 +61,7 @@ import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; import com.google.cloud.bigquery.BigQueryResult; +import com.google.cloud.bigquery.BigQueryRetryConfig; import com.google.cloud.bigquery.BigQuerySQLException; import com.google.cloud.bigquery.CloneDefinition; import com.google.cloud.bigquery.Clustering; @@ -108,7 +113,10 @@ import com.google.cloud.bigquery.PolicyTags; import com.google.cloud.bigquery.PrimaryKey; import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Range; import com.google.cloud.bigquery.RangePartitioning; import com.google.cloud.bigquery.Routine; import com.google.cloud.bigquery.RoutineArgument; @@ -153,6 +161,17 @@ import com.google.common.io.BaseEncoding; import com.google.common.util.concurrent.ListenableFuture; import com.google.gson.JsonObject; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; @@ -166,6 +185,7 @@ import java.sql.ResultSet; import java.sql.SQLException; import 
java.sql.Time; +import java.time.Duration; import java.time.Instant; import java.time.LocalTime; import java.time.Period; @@ -192,7 +212,6 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.Timeout; -import org.threeten.bp.Duration; import org.threeten.extra.PeriodDuration; public class ITBigQueryTest { @@ -210,6 +229,13 @@ public class ITBigQueryTest { private static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); private static final String RANDOM_ID = UUID.randomUUID().toString().substring(0, 8); private static final String STORAGE_BILLING_MODEL = "LOGICAL"; + private static final Long MAX_TIME_TRAVEL_HOURS = 120L; + private static final Long MAX_TIME_TRAVEL_HOURS_DEFAULT = 168L; + private static final Map, Object>> OTEL_ATTRIBUTES = + new HashMap, Object>>(); + private static final Map OTEL_PARENT_SPAN_IDS = new HashMap<>(); + private static final Map OTEL_SPAN_IDS_TO_NAMES = new HashMap<>(); + private static final String OTEL_PARENT_SPAN_ID = "0000000000000000"; private static final String CLOUD_SAMPLES_DATA = Optional.fromNullable(System.getenv("CLOUD_SAMPLES_DATA_BUCKET")).or("cloud-samples-data"); private static final Map LABELS = @@ -585,6 +611,9 @@ public class ITBigQueryTest { RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); private static final String LOAD_FILE = "load.csv"; private static final String LOAD_FILE_LARGE = "load_large.csv"; + + private static final String LOAD_FILE_FLEXIBLE_COLUMN_NAME = "load_flexible_column_name.csv"; + private static final String LOAD_FILE_NULL = "load_null.csv"; private static final String JSON_LOAD_FILE = "load.json"; private static final String JSON_LOAD_FILE_BQ_RESULTSET = "load_bq_resultset.json"; private static final String JSON_LOAD_FILE_SIMPLE = "load_simple.json"; @@ -600,6 +629,8 @@ public class ITBigQueryTest { private static final TableId TABLE_ID_FASTQUERY_BQ_RESULTSET = TableId.of(DATASET, "fastquery_testing_bq_resultset"); private static final String CSV_CONTENT = "StringValue1\nStringValue2\n"; + private static final String CSV_CONTENT_NULL = "String\0Value1\n"; + private static final String CSV_CONTENT_FLEXIBLE_COLUMN = "name,&ersand\nrow_name,1"; private static final String JSON_CONTENT = "{" @@ -774,6 +805,20 @@ public class ITBigQueryTest { private static final Set PUBLIC_DATASETS = ImmutableSet.of("github_repos", "hacker_news", "noaa_gsod", "samples", "usa_names"); + private static final Map PUBLIC_DATASETS_LOCATION = + ImmutableMap.builder() + .put("github_repos", "US") + .put("hacker_news", "US") + .put("noaa_gsod", "US") + .put("samples", "US") + .put("usa_names", "US") + // Dataset url: + // https://console.cloud.google.com/bigquery?project=bigquery-public-data&ws=!1m4!1m3!3m2!1sbigquery-public-data!2sgnomAD_asiane1 + .put("gnomAD_asiane1", "asia-northeast1") + // Dataset url: + // https://console.cloud.google.com/bigquery?project=bigquery-public-data&ws=!1m4!1m3!3m2!1sbigquery-public-data!2sgnomAD_eu + .put("gnomAD_eu", "EU") + .build(); private static final String PUBLIC_PROJECT = "bigquery-public-data"; private static final String PUBLIC_DATASET = "census_bureau_international"; @@ -837,8 +882,185 @@ public class ITBigQueryTest { + " \"universe_domain\": \"fake.domain\"\n" + "}"; + private static final Schema RANGE_SCHEMA = + Schema.of( + Field.newBuilder("name", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("Name of the row") + .build(), + Field.newBuilder("date", StandardSQLTypeName.RANGE) + 
.setMode(Field.Mode.NULLABLE) + .setDescription("Range field with DATE") + .setRangeElementType(FieldElementType.newBuilder().setType("DATE").build()) + .build(), + Field.newBuilder("datetime", StandardSQLTypeName.RANGE) + .setMode(Field.Mode.NULLABLE) + .setDescription("Range field with DATETIME") + .setRangeElementType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build(), + Field.newBuilder("timestamp", StandardSQLTypeName.RANGE) + .setMode(Field.Mode.NULLABLE) + .setDescription("Range field with TIMESTAMP") + .setRangeElementType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()); + + private static final ImmutableMap RANGE_TEST_VALUES_DATES = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("2020-01-01") + .setEnd("2020-12-31") + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + .setEnd("2020-12-31") + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("2020-01-01") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .build(); + + private static final ImmutableMap RANGE_TEST_VALUES_DATETIME = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("2014-08-19T05:41:35.220000") + .setEnd("2015-09-20T06:41:35.220000") + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + .setEnd("2015-09-20T06:41:35.220000") + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("2014-08-19T05:41:35.220000") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .build(); + + private static final ImmutableMap RANGE_TEST_VALUES_TIMESTAMP = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + .setEnd("2015-09-20 13:41:35.220000+01:00") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .build(); + + // timestamps are returned as seconds since epoch + private static final ImmutableMap RANGE_TEST_VALUES_EXPECTED_TIMESTAMP = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("1408452095.220000") + .setEnd("1442752895.220000") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + 
.setEnd("1442752895.220000") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("1408452095.220000") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .build(); + private static BigQuery bigquery; private static Storage storage; + private static OpenTelemetry otel; + + private static class TestSpanExporter implements io.opentelemetry.sdk.trace.export.SpanExporter { + @Override + public CompletableResultCode export(Collection collection) { + if (collection.isEmpty()) { + return CompletableResultCode.ofFailure(); + } + for (SpanData data : collection) { + OTEL_ATTRIBUTES.put(data.getName(), data.getAttributes().asMap()); + OTEL_PARENT_SPAN_IDS.put(data.getName(), data.getParentSpanId()); + OTEL_SPAN_IDS_TO_NAMES.put(data.getSpanId(), data.getName()); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } @Rule public Timeout globalTimeout = Timeout.seconds(300); @@ -847,12 +1069,27 @@ public static void beforeClass() throws InterruptedException, IOException { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); RemoteStorageHelper storageHelper = RemoteStorageHelper.create(); Map labels = ImmutableMap.of("test-job-name", "test-load-job"); + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(new TestSpanExporter())) + .setSampler(Sampler.alwaysOn()) + .build(); + otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).buildAndRegisterGlobal(); + bigquery = bigqueryHelper.getOptions().getService(); storage = storageHelper.getOptions().getService(); storage.create(BucketInfo.of(BUCKET)); storage.create( BlobInfo.newBuilder(BUCKET, LOAD_FILE).setContentType("text/plain").build(), CSV_CONTENT.getBytes(StandardCharsets.UTF_8)); + storage.create( + BlobInfo.newBuilder(BUCKET, LOAD_FILE_NULL).setContentType("text/plain").build(), + CSV_CONTENT_NULL.getBytes(StandardCharsets.UTF_8)); + storage.create( + BlobInfo.newBuilder(BUCKET, LOAD_FILE_FLEXIBLE_COLUMN_NAME) + .setContentType("text/plain") + .build(), + CSV_CONTENT_FLEXIBLE_COLUMN.getBytes(StandardCharsets.UTF_8)); storage.create( BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE).setContentType("application/json").build(), JSON_CONTENT.getBytes(StandardCharsets.UTF_8)); @@ -961,11 +1198,16 @@ public void testListDatasets() { Page datasets = bigquery.listDatasets("bigquery-public-data"); Iterator iterator = datasets.iterateAll().iterator(); Set datasetNames = new HashSet<>(); + Map datasetLocation = new HashMap<>(); while (iterator.hasNext()) { - datasetNames.add(iterator.next().getDatasetId().getDataset()); + Dataset dataset = iterator.next(); + String name = dataset.getDatasetId().getDataset(); + datasetNames.add(name); + datasetLocation.put(name, dataset.getLocation()); } for (String type : PUBLIC_DATASETS) { assertTrue(datasetNames.contains(type)); + assertEquals(PUBLIC_DATASETS_LOCATION.get(type), datasetLocation.get(type)); } } @@ -1033,6 +1275,51 @@ public void testGetDatasetWithSelectedFields() { assertNull(dataset.getLocation()); 
assertNull(dataset.getSelfLink()); assertNull(dataset.getStorageBillingModel()); + assertNull(dataset.getMaxTimeTravelHours()); + } + + @Test + public void testGetDatasetWithAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); + User user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.WRITER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + DatasetOption accessPolicyOption = DatasetOption.accessPolicyVersion(3); + DatasetOption viewOption = DatasetOption.datasetView(DatasetView.FULL); + + Dataset dataset = + bigquery.create( + DatasetInfo.newBuilder(accessPolicyDataset) + .setDescription("Some Description") + .setAcl(ImmutableList.of(acl)) + .build(), + accessPolicyOption); + assertThat(dataset).isNotNull(); + + Dataset remoteDataset = + bigquery.getDataset(accessPolicyDataset, accessPolicyOption, viewOption); + assertNotNull(remoteDataset); + assertEquals(dataset.getDescription(), remoteDataset.getDescription()); + assertNotNull(remoteDataset.getCreationTime()); + + Acl remoteAclWithCond = null; + for (Acl remoteAcl : remoteDataset.getAcl()) { + if (remoteAcl.getCondition() != null) { + remoteAclWithCond = remoteAcl; + } + } + assertNotNull(remoteAclWithCond); + assertEquals(remoteAclWithCond.getCondition(), condition); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); } @Test @@ -1049,21 +1336,23 @@ public void testUpdateDataset() { assertThat(dataset.getDescription()).isEqualTo("Some Description"); assertThat(dataset.getLabels()).containsExactly("a", "b"); assertThat(dataset.getStorageBillingModel()).isNull(); + assertThat(dataset.getMaxTimeTravelHours()).isNull(); Map updateLabels = new HashMap<>(); updateLabels.put("x", "y"); updateLabels.put("a", null); Dataset updatedDataset = bigquery.update( - dataset - .toBuilder() + dataset.toBuilder() .setDescription("Updated Description") .setLabels(updateLabels) .setStorageBillingModel("LOGICAL") + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) .build()); assertThat(updatedDataset.getDescription()).isEqualTo("Updated Description"); assertThat(updatedDataset.getLabels()).containsExactly("x", "y"); assertThat(updatedDataset.getStorageBillingModel()).isEqualTo("LOGICAL"); + assertThat(updatedDataset.getMaxTimeTravelHours()).isEqualTo(MAX_TIME_TRAVEL_HOURS); updatedDataset = bigquery.update(updatedDataset.toBuilder().setLabels(null).build()); assertThat(updatedDataset.getLabels()).isEmpty(); @@ -1094,9 +1383,63 @@ public void testUpdateDatasetWithSelectedFields() { assertNull(updatedDataset.getLocation()); assertNull(updatedDataset.getSelfLink()); assertNull(updatedDataset.getStorageBillingModel()); + assertNull(updatedDataset.getMaxTimeTravelHours()); assertTrue(dataset.delete()); } + @Test + public void testUpdateDatasetWithAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); + Dataset dataset = + bigquery.create( + DatasetInfo.newBuilder(accessPolicyDataset) + .setDescription("Some Description") + .setLabels(Collections.singletonMap("a", "b")) + .build()); + assertThat(dataset).isNotNull(); + + User 
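Conditional ACL bindings only round-trip when the request asks for IAM policy version 3, hence `DatasetOption.accessPolicyVersion(3)` on both the create and the read. Judging by the test, the `Expr` constructor takes the CEL expression, a title, a description, and a location, in that order:

  Acl.Expr condition =
      new Acl.Expr(
          "request.time > timestamp('2024-01-01T00:00:00Z')", // CEL expression
          "test condition",                                   // title
          "requests after the year 2024",                     // description
          "location");                                        // location hint
  Acl conditionalAcl = Acl.of(new Acl.User("sa@example.com"), Acl.Role.WRITER, condition);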
user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.WRITER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + List<Acl> acls = new ArrayList<>(); + acls.addAll(dataset.getAcl()); + acls.add(acl); + + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); + DatasetOption updateModeOption = DatasetOption.updateMode(DatasetUpdateMode.UPDATE_FULL); + Dataset updatedDataset = + bigquery.update( + dataset.toBuilder() + .setDescription("Updated Description") + .setLabels(null) + .setAcl(acls) + .build(), + datasetOption, + updateModeOption); + assertNotNull(updatedDataset); + assertEquals(updatedDataset.getDescription(), "Updated Description"); + assertThat(updatedDataset.getLabels()).isEmpty(); + + Acl updatedAclWithCond = null; + for (Acl updatedAcl : updatedDataset.getAcl()) { + if (updatedAcl.getCondition() != null) { + updatedAclWithCond = updatedAcl; + } + } + assertNotNull(updatedAclWithCond); + assertEquals(updatedAclWithCond.getCondition(), condition); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); + } + @Test public void testGetNonExistingTable() { assertNull(bigquery.getTable(DATASET, "test_get_non_existing_table")); @@ -1313,18 +1656,76 @@ public void testIntervalType() throws InterruptedException { @Test public void testRangeType() throws InterruptedException { - String tableName = "test_create_table_rangetype"; + String tableName = "test_range_type_table"; TableId tableId = TableId.of(DATASET, tableName); - Schema schema = - Schema.of( - Field.newBuilder("rangeField", StandardSQLTypeName.RANGE) - .setRangeElementType(FieldElementType.newBuilder().setType("DATETIME").build()) - .build()); - StandardTableDefinition standardTableDefinition = StandardTableDefinition.of(schema); + + StandardTableDefinition standardTableDefinition = StandardTableDefinition.of(RANGE_SCHEMA); try { - // Create a table with a RANGE column. + // Create a table with RANGE columns and verify the result. Table createdTable = bigquery.create(TableInfo.of(tableId, standardTableDefinition)); assertNotNull(createdTable); + + Table remoteTable = bigquery.getTable(DATASET, tableName); + Schema remoteSchema = remoteTable.getDefinition().getSchema(); + assertEquals(RANGE_SCHEMA, remoteSchema); + + // Insert range values into the table.
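+ // Each row below is a map keyed by column name; the Range values are passed via + // getValues(), the raw start/end representation accepted by streaming inserts. + // A caller would normally also inspect the returned InsertAllResponse + // (hasErrors(), getInsertErrors()) rather than ignoring it as this test does.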
+ InsertAllRequest.Builder request = InsertAllRequest.newBuilder(tableId); + for (String name : RANGE_TEST_VALUES_DATES.keySet()) { + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("name", name); + builder.put("date", RANGE_TEST_VALUES_DATES.get(name).getValues()); + builder.put("datetime", RANGE_TEST_VALUES_DATETIME.get(name).getValues()); + builder.put("timestamp", RANGE_TEST_VALUES_TIMESTAMP.get(name).getValues()); + request.addRow(builder.build()); + } + bigquery.insertAll(request.build()); + + // Test listTableData + TableResult result = bigquery.listTableData(DATASET, tableName, RANGE_SCHEMA); + assertEquals(RANGE_TEST_VALUES_DATES.size(), Iterables.size(result.getValues())); + for (FieldValueList values : result.iterateAll()) { + String name = values.get("name").getStringValue(); + assertEquals(RANGE_TEST_VALUES_DATES.get(name), values.get("date").getRangeValue()); + assertEquals(RANGE_TEST_VALUES_DATETIME.get(name), values.get("datetime").getRangeValue()); + assertEquals( + RANGE_TEST_VALUES_EXPECTED_TIMESTAMP.get(name), + values.get("timestamp").getRangeValue()); + } + + // Test Query Parameter by selecting for the bounded Range entry only. + String query = + String.format( + "SELECT name, date, datetime, timestamp\n" + + "FROM %s.%s\n" + + "WHERE date = @dateParam\n" + + "AND datetime = @datetimeParam\n" + + "AND timestamp = @timestampParam", + DATASET, tableName); + + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .addNamedParameter( + "dateParam", QueryParameterValue.range(RANGE_TEST_VALUES_DATES.get("bounded"))) + .addNamedParameter( + "datetimeParam", + QueryParameterValue.range(RANGE_TEST_VALUES_DATETIME.get("bounded"))) + .addNamedParameter( + "timestampParam", + QueryParameterValue.range(RANGE_TEST_VALUES_TIMESTAMP.get("bounded"))) + .build(); + result = bigquery.query(config); + + assertEquals(1, Iterables.size(result.getValues())); + for (FieldValueList values : result.iterateAll()) { + String name = values.get("name").getStringValue(); + assertEquals(RANGE_TEST_VALUES_DATES.get(name), values.get("date").getRangeValue()); + assertEquals(RANGE_TEST_VALUES_DATETIME.get(name), values.get("datetime").getRangeValue()); + assertEquals( + RANGE_TEST_VALUES_EXPECTED_TIMESTAMP.get(name), + values.get("timestamp").getRangeValue()); + } } finally { assertTrue(bigquery.delete(tableId)); } @@ -1391,6 +1792,40 @@ public void testCreateDatasetWithSpecifiedStorageBillingModel() { RemoteBigQueryHelper.forceDelete(bigquery, billingModelDataset); } + @Test + public void testCreateDatasetWithSpecificMaxTimeTravelHours() { + String timeTravelDataset = RemoteBigQueryHelper.generateDatasetName(); + DatasetInfo info = + DatasetInfo.newBuilder(timeTravelDataset) + .setDescription(DESCRIPTION) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) + .setLabels(LABELS) + .build(); + bigquery.create(info); + + Dataset dataset = bigquery.getDataset(DatasetId.of(timeTravelDataset)); + assertEquals(MAX_TIME_TRAVEL_HOURS, dataset.getMaxTimeTravelHours()); + + RemoteBigQueryHelper.forceDelete(bigquery, timeTravelDataset); + } + + @Test + public void testCreateDatasetWithDefaultMaxTimeTravelHours() { + String timeTravelDataset = RemoteBigQueryHelper.generateDatasetName(); + DatasetInfo info = + DatasetInfo.newBuilder(timeTravelDataset) + .setDescription(DESCRIPTION) + .setLabels(LABELS) + .build(); + bigquery.create(info); + + Dataset dataset = bigquery.getDataset(DatasetId.of(timeTravelDataset)); + // In the backend, 
BigQuery sets the default Time Travel Window to be 168 hours (7 days). + assertEquals(MAX_TIME_TRAVEL_HOURS_DEFAULT, dataset.getMaxTimeTravelHours()); + + RemoteBigQueryHelper.forceDelete(bigquery, timeTravelDataset); + } + @Test public void testCreateDatasetWithDefaultCollation() { String collationDataset = RemoteBigQueryHelper.generateDatasetName(); @@ -1408,6 +1843,70 @@ public void testCreateDatasetWithDefaultCollation() { RemoteBigQueryHelper.forceDelete(bigquery, collationDataset); } + @Test + public void testCreateDatasetWithAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); + User user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.OWNER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + DatasetInfo info = + DatasetInfo.newBuilder(accessPolicyDataset) + .setDescription(DESCRIPTION) + .setLabels(LABELS) + .setAcl(ImmutableList.of(acl)) + .build(); + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); + Dataset dataset = bigquery.create(info, datasetOption); + assertNotNull(dataset); + assertEquals(dataset.getDescription(), DESCRIPTION); + + Acl remoteAclWithCond = null; + for (Acl remoteAcl : dataset.getAcl()) { + if (remoteAcl.getCondition() != null) { + remoteAclWithCond = remoteAcl; + } + } + assertNotNull(remoteAclWithCond); + assertEquals(remoteAclWithCond.getCondition(), condition); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); + } + + @Test(expected = BigQueryException.class) + public void testCreateDatasetWithInvalidAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); + User user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.READER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + DatasetInfo info = + DatasetInfo.newBuilder(accessPolicyDataset) + .setDescription(DESCRIPTION) + .setLabels(LABELS) + .setAcl(ImmutableList.of(acl)) + .build(); + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(4); + Dataset dataset = bigquery.create(info, datasetOption); + assertNotNull(dataset); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); + } + @Test public void testCreateTableWithDefaultCollation() { String tableName = "test_create_table_with_default_collation"; @@ -1600,8 +2099,7 @@ public void testCreateAndUpdateTableWithPolicyTags() throws IOException { fieldList.add(stringFieldWithPolicy); Schema updatedSchemaWithPolicyTag = Schema.of(fieldList); Table updatedTable = - createdTableForUpdate - .toBuilder() + createdTableForUpdate.toBuilder() .setDefinition(StandardTableDefinition.of(updatedSchemaWithPolicyTag)) .build(); updatedTable.update(); @@ -1666,6 +2164,41 @@ public void testCreateAndGetTable() { assertTrue(remoteTable.delete()); } + @Test + public void testCreateAndListTable() { + String tableName = "test_create_and_list_table"; + TableId tableId = TableId.of(DATASET, tableName); + TimePartitioning 
partitioning = TimePartitioning.of(Type.DAY); + Clustering clustering = + Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setTimePartitioning(partitioning) + .setClustering(clustering) + .build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + + Page<Table>
tables = bigquery.listTables(DATASET); + boolean found = false; + Iterator<Table>
tableIterator = tables.getValues().iterator(); + // Find createdTable and validate the table definition. + while (tableIterator.hasNext() && !found) { + Table table = tableIterator.next(); + if (table.getTableId().equals(createdTable.getTableId())) { + StandardTableDefinition definition = table.getDefinition(); + assertThat(definition.getClustering()).isNotNull(); + assertThat(definition.getTimePartitioning()).isNotNull(); + found = true; + } + } + assertTrue(found); + assertTrue(createdTable.delete()); + } + @Test public void testCreateAndGetTableWithBasicTableMetadataView() { String tableName = "test_create_and_get_table_with_basic_metadata_view"; @@ -1837,9 +2370,13 @@ public void testCreateAndGetTableWithSelectedField() { public void testCreateExternalTable() throws InterruptedException { String tableName = "test_create_external_table"; TableId tableId = TableId.of(DATASET, tableName); + ExternalTableDefinition externalTableDefinition = ExternalTableDefinition.of( - "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()); + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()) + .toBuilder() + .setMaxStaleness("INTERVAL 15 MINUTE") + .build(); TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); Table createdTable = bigquery.create(tableInfo); assertNotNull(createdTable); @@ -1932,14 +2469,10 @@ public void testUpdatePermExternableTableWithAutodetectSchemaUpdatesSchema() { Table updatedTable = bigquery.update( - createdTable - .toBuilder() + createdTable.toBuilder() .setDefinition( ((ExternalTableDefinition) createdTable.getDefinition()) - .toBuilder() - .setSchema(null) - .setAutodetect(true) - .build()) + .toBuilder().setSchema(null).setAutodetect(true).build()) .build(), BigQuery.TableOption.autodetectSchema(true)); // Schema should change. @@ -2039,8 +2572,7 @@ public void testTableIAM() { // get and modify policy Policy policy = bigquery.getIamPolicy(tableId); Policy editedPolicy = - policy - .toBuilder() + policy.toBuilder() .addIdentity(Role.of("roles/bigquery.dataViewer"), Identity.allUsers()) .build(); Policy updatedPolicy = bigquery.setIamPolicy(tableId, editedPolicy); @@ -2176,8 +2708,7 @@ public void testUpdateTable() { updateLabels.put("a", null); Table updatedTable = bigquery.update( - createdTable - .toBuilder() + createdTable.toBuilder() .setDescription("Updated Description") .setLabels(updateLabels) .build()); @@ -2206,11 +2737,9 @@ public void testUpdateTimePartitioning() { .isNull(); table = - table - .toBuilder() + table.toBuilder() .setDefinition( - tableDefinition - .toBuilder() + tableDefinition.toBuilder() .setTimePartitioning(TimePartitioning.of(Type.DAY, 42L)) .build()) .build() @@ -2221,11 +2750,9 @@ public void testUpdateTimePartitioning() { .isEqualTo(42L); table = - table - .toBuilder() + table.toBuilder() .setDefinition( - tableDefinition - .toBuilder() + tableDefinition.toBuilder() .setTimePartitioning(TimePartitioning.of(Type.DAY)) .build()) .build() @@ -2578,7 +3105,7 @@ public void testModelLifecycle() throws InterruptedException { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " @@ -2665,8 +3192,7 @@ public void testRoutineLifecycle() throws InterruptedException { // Mutate metadata. 
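+ // Updating a routine replaces its entire definition, so the unchanged return + // type and arguments are carried over onto the builder alongside the new body.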
RoutineInfo newInfo = - routine - .toBuilder() + routine.toBuilder() .setBody("x * 4") .setReturnType(routine.getReturnType()) .setArguments(routine.getArguments()) @@ -2916,6 +3442,42 @@ public void testTimestamp() throws InterruptedException { } } + @Test + public void testLosslessTimestamp() throws InterruptedException { + String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z'"; + long expectedTimestamp = 1643068465095574L; + + TableResult result = + bigquery.query( + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .build()); + assertNotNull(result.getJobId()); + for (FieldValueList row : result.getValues()) { + FieldValue timeStampCell = row.get(0); + assertFalse(timeStampCell.getUseInt64Timestamps()); + assertEquals(expectedTimestamp, timeStampCell.getTimestampValue()); + } + + // Create new BQ object to toggle lossless timestamps without affecting + // other tests. + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQuery bigqueryLossless = bigqueryHelper.getOptions().getService(); + bigqueryLossless.getOptions().setUseInt64Timestamps(true); + + TableResult resultLossless = + bigqueryLossless.query( + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .build()); + assertNotNull(resultLossless.getJobId()); + for (FieldValueList row : resultLossless.getValues()) { + FieldValue timeStampCellLossless = row.get(0); + assertTrue(timeStampCellLossless.getUseInt64Timestamps()); + assertEquals(expectedTimestamp, timeStampCellLossless.getTimestampValue()); + } + } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test public void testQuery() throws InterruptedException { @@ -2950,6 +3512,23 @@ public void testQuery() throws InterruptedException { assertNotNull(statistics.getQueryPlan()); } + @Test + public void testQueryStatistics() throws InterruptedException { + // Use CURRENT_TIMESTAMP to avoid potential caching. + String query = "SELECT CURRENT_TIMESTAMP() AS ts"; + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseQueryCache(false) + .build(); + Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + job = job.waitFor(); + + JobStatistics.QueryStatistics statistics = job.getStatistics(); + assertNotNull(statistics.getQueryPlan()); + assertThat(statistics.getTotalSlotMs()).isGreaterThan(0L); + } + @Test public void testExecuteSelectDefaultConnectionSettings() throws SQLException { // Use the default connection settings @@ -2957,6 +3536,130 @@ public void testExecuteSelectDefaultConnectionSettings() throws SQLException { String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; BigQueryResult bigQueryResult = connection.executeSelect(query); assertEquals(42, bigQueryResult.getTotalRows()); + assertFalse(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + } + + @Test + public void testExecuteSelectWithReadApi() throws SQLException { + final int rowLimit = 5000; + final String QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; + bigquery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED); + // Job timeout is somewhat arbitrary - just ensures that fast query is not used. + // min result size and page row count ratio ensure that the ReadAPI is used. 
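+ // Thresholds here are deliberately low: a MinResultSize of 500 and a + // TotalToPageRowCountRatio of 1 make the 5000-row result comfortably qualify.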
+ ConnectionSettings connectionSettingsReadAPIEnabledFastQueryDisabled = + ConnectionSettings.newBuilder() + .setUseReadAPI(true) + .setJobTimeoutMs(Long.MAX_VALUE) + .setMinResultSize(500) + .setTotalToPageRowCountRatio(1) + .build(); + + Connection connectionReadAPIEnabled = + bigquery.createConnection(connectionSettingsReadAPIEnabledFastQueryDisabled); + + String selectQuery = String.format(QUERY, rowLimit); + + BigQueryResult bigQueryResultSet = connectionReadAPIEnabled.executeSelect(selectQuery); + ResultSet rs = bigQueryResultSet.getResultSet(); + // Paginate results to avoid an InterruptedException + while (rs.next()) {} + + assertTrue(bigQueryResultSet.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + connectionReadAPIEnabled.close(); + } + + @Test + public void testExecuteSelectWithFastQueryReadApi() throws SQLException { + final int rowLimit = 5000; + final String QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; + // min result size and page row count ratio ensure that the ReadAPI is used. + ConnectionSettings connectionSettingsReadAPIEnabled = + ConnectionSettings.newBuilder() + .setUseReadAPI(true) + .setMinResultSize(500) + .setTotalToPageRowCountRatio(1) + .build(); + + Connection connectionReadAPIEnabled = + bigquery.createConnection(connectionSettingsReadAPIEnabled); + + String selectQuery = String.format(QUERY, rowLimit); + + BigQueryResult bigQueryResultSet = connectionReadAPIEnabled.executeSelect(selectQuery); + ResultSet rs = bigQueryResultSet.getResultSet(); + // Paginate results to avoid an InterruptedException + while (rs.next()) {} + + assertTrue(bigQueryResultSet.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + connectionReadAPIEnabled.close(); + } + + @Test + public void testExecuteSelectReadApiEmptyResultSet() throws SQLException { + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setJobTimeoutMs( + Long.MAX_VALUE) // Force executeSelect to use ReadAPI instead of fast query. + .setUseReadAPI(true) + .setUseQueryCache(false) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z' LIMIT 0"; + BigQueryResult bigQueryResult = connection.executeSelect(query); + + ResultSet rs = bigQueryResult.getResultSet(); + assertThat(rs.next()).isFalse(); + assertThat(bigQueryResult.getTotalRows()).isEqualTo(0); + } + + @Test + public void testExecuteSelectWithCredentials() throws SQLException { + // This test validates that executeSelect uses the same credential provided by the BigQuery + // object used to create the Connection client. + // This is done in the following scenarios: + // 1. Validate that setting a valid credential executes the query. + // 2. Validate that setting an invalid credential causes failure. + + // Scenario 1. + // Create a new bigQuery object but explicitly set the credentials. + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQueryOptions bigQueryOptions = + bigqueryHelper.getOptions().toBuilder() + .setCredentials(bigquery.getOptions().getCredentials()) + .build(); + BigQuery bigQueryGoodCredentials = bigQueryOptions.getService(); + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setPriority(Priority.INTERACTIVE) // Force non-fast query to use BigQueryReadClient.
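+ // An explicit priority is among the settings that bypass the fast jobs.query + // path, so the statement runs as a regular job whose large result can then be + // streamed through the Read API.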
+ .setDefaultDataset(DatasetId.of(DATASET)) + .build(); + Connection connectionGoodCredentials = + bigQueryGoodCredentials.createConnection(connectionSettings); + String query = + "SELECT * FROM " + + TABLE_ID_LARGE.getTable(); // Large query result is needed to use BigQueryReadClient. + BigQueryResult bigQueryResult = connectionGoodCredentials.executeSelect(query); + assertEquals(313348, bigQueryResult.getTotalRows()); + assertTrue(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + + // Scenario 2. + // Create a new bigQuery object but explicitly set an invalid credential. + BigQueryOptions bigQueryOptionsBadCredentials = + bigqueryHelper.getOptions().toBuilder() + .setCredentials(loadCredentials(FAKE_JSON_CRED_WITH_GOOGLE_DOMAIN)) + .build(); + BigQuery bigQueryBadCredentials = bigQueryOptionsBadCredentials.getService(); + Connection connectionBadCredentials = + bigQueryBadCredentials.createConnection(connectionSettings); + try { + connectionBadCredentials.executeSelect(query); + fail(); // this line should not be reached + } catch (BigQuerySQLException e) { + assertNotNull(e); + } } /* TODO(prasmish): replicate the entire test case for executeSelect */ @@ -3211,16 +3914,86 @@ public void testBQResultSetPaginationSlowQuery() throws SQLException { } @Test - public void testExecuteSelectSinglePageTableRow() throws SQLException { + public void testExecuteSelectSinglePageTableRow() throws SQLException { String query = + "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " + + TABLE_ID_FASTQUERY_BQ_RESULTSET.getTable() + + " order by TimestampField"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + Schema sc = bigQueryResult.getSchema(); + + assertEquals(BQ_RESULTSET_EXPECTED_SCHEMA, sc); // match the schema + assertEquals(2, bigQueryResult.getTotalRows()); // Expecting 2 rows + + assertTrue(rs.next()); // first row + // checking for the null or 0 column values + assertNull(rs.getString("StringField")); + assertTrue(rs.getDouble("BigNumericField") == 0.0d); + assertFalse(rs.getBoolean("BooleanField")); + assertNull(rs.getBytes("BytesField")); + assertEquals(rs.getInt("IntegerField"), 0); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getDate("DateField")); + assertTrue(rs.getDouble("FloatField") == 0.0d); + assertTrue(rs.getDouble("NumericField") == 0.0d); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getString("DateTimeField")); + assertNull(rs.getString("GeographyField")); + assertNull(rs.getBytes("BytesField_1")); + assertFalse(rs.getBoolean("BooleanField_1")); + + assertTrue(rs.next()); // second row + // second row is non-null, comparing the values + assertEquals("StringValue1", rs.getString("StringField")); + assertTrue(rs.getDouble("BigNumericField") == 0.3333333333333333d); + assertFalse(rs.getBoolean("BooleanField")); + assertNotNull(rs.getBytes("BytesField")); + assertEquals(1, rs.getInt("IntegerField")); + assertEquals(1534680695123L, rs.getTimestamp("TimestampField").getTime()); + assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField"));
assertTrue(rs.getDouble("FloatField") == 10.1d); + assertTrue(rs.getDouble("NumericField") == 100.0d); + assertEquals(Time.valueOf(LocalTime.of(12, 11, 35, 123456)), rs.getTime("TimeField")); + assertEquals("2018-08-19T12:11:35.123456", rs.getString("DateTimeField")); + assertEquals("POINT(-122.35022 47.649154)", rs.getString("GeographyField")); + assertNotNull(rs.getBytes("BytesField_1")); + assertTrue(rs.getBoolean("BooleanField_1")); + assertTrue( + rs.getObject("IntegerArrayField") instanceof com.google.cloud.bigquery.FieldValueList); + FieldValueList integerArrayFieldValue = + (com.google.cloud.bigquery.FieldValueList) rs.getObject("IntegerArrayField"); + assertEquals(4, integerArrayFieldValue.size()); // Array has 4 elements + assertEquals(3, (integerArrayFieldValue.get(2).getNumericValue()).intValue()); + List integerArrayFieldValueList = + (List) rs.getArray("IntegerArrayField").getArray(); + assertEquals(4, integerArrayFieldValueList.size()); + assertEquals(3, integerArrayFieldValueList.get(2).getNumericValue().intValue()); + + assertFalse(rs.next()); // no 3rd row in the table + } + + @Test + public void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException { String query = "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " + TABLE_ID_FASTQUERY_BQ_RESULTSET.getTable() + " order by TimestampField"; ConnectionSettings connectionSettings = - ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseReadAPI(true) + .setMinResultSize(1) + .setTotalToPageRowCountRatio(1) + .build(); Connection connection = bigquery.createConnection(connectionSettings); BigQueryResult bigQueryResult = connection.executeSelect(query); + assertTrue(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); ResultSet rs = bigQueryResult.getResultSet(); Schema sc = bigQueryResult.getSchema(); @@ -3255,17 +4028,16 @@ public void testExecuteSelectSinglePageTableRow() throws SQLException { assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField")); assertTrue(rs.getDouble("FloatField") == 10.1d); assertTrue(rs.getDouble("NumericField") == 100.0d); - assertEquals(Time.valueOf(LocalTime.of(12, 11, 35, 123456)), rs.getTime("TimeField")); + assertEquals( + Time.valueOf(LocalTime.of(12, 11, 35, 123456)).toString(), + rs.getTime("TimeField").toString()); assertEquals("2018-08-19T12:11:35.123456", rs.getString("DateTimeField")); assertEquals("POINT(-122.35022 47.649154)", rs.getString("GeographyField")); assertNotNull(rs.getBytes("BytesField_1")); assertTrue(rs.getBoolean("BooleanField_1")); - assertTrue( - rs.getObject("IntegerArrayField") instanceof com.google.cloud.bigquery.FieldValueList); - FieldValueList integerArrayFieldValue = - (com.google.cloud.bigquery.FieldValueList) rs.getObject("IntegerArrayField"); - assertEquals(4, integerArrayFieldValue.size()); // Array has 4 elements - assertEquals(3, (integerArrayFieldValue.get(2).getNumericValue()).intValue()); + List integerArray = (List) rs.getArray("IntegerArrayField").getArray(); + assertEquals(4, integerArray.size()); + assertEquals(3, integerArray.get(2).intValue()); assertFalse(rs.next()); // no 3rd row in the table } @@ -3322,8 +4094,7 @@ public void testBQResultSetPagination() throws SQLException 
{ assertEquals(300000, cnt); // total 300000 rows should be read } - // @Test - Temporarily disabling till https://github.com/googleapis/gax-java/issues/1712 or - // b/235591056 are resolved + @Test public void testReadAPIIterationAndOrder() throws SQLException { // use read API to read 300K records and check the order String query = @@ -3362,7 +4133,8 @@ public void testReadAPIIterationAndOrder() @Test public void testReadAPIIterationAndOrderAsync() - throws SQLException, ExecutionException, + throws SQLException, + ExecutionException, InterruptedException { // use read API to read 300K records and check the order String query = "SELECT date, county, state_name, confirmed_cases, deaths / 10 FROM " @@ -3409,7 +4181,8 @@ public void testReadAPIIterationAndOrderAsync() // be uncompleted in 1000ms is nondeterministic! Though very likely it won't be complete in the // specified amount of time public void testExecuteSelectAsyncCancel() - throws SQLException, ExecutionException, + throws SQLException, + ExecutionException, InterruptedException { // use read API to read 300K records and check the order String query = "SELECT date, county, state_name, confirmed_cases, deaths FROM " @@ -3455,7 +4228,8 @@ public void testExecuteSelectAsyncCancel() // be uncompleted in 1000ms is nondeterministic! Though very likely it won't be complete in the // specified amount of time public void testExecuteSelectAsyncTimeout() - throws SQLException, ExecutionException, + throws SQLException, + ExecutionException, InterruptedException { // use read API to read 300K records and check the order String query = "SELECT date, county, state_name, confirmed_cases, deaths FROM " @@ -3519,8 +4293,7 @@ public void testCreateDefaultConnection() throws BigQuerySQLException { assertTrue(connection.close()); } - // @Test - Temporarily disabling till https://github.com/googleapis/gax-java/issues/1712 or - // b/235591056 are resolved + @Test public void testReadAPIConnectionMultiClose() throws SQLException { // use read API to read 300K records, then closes the connection. 
This test @@ -3619,6 +4392,19 @@ public void testExecuteSelectSinglePageTableRowColInd() throws SQLException { (integerArrayFieldValue.get(2).getNumericValue()).intValue(), (integerArrayFieldValueColInd.get(2).getNumericValue()).intValue()); } + + List<FieldValue> integerArrayFieldValueList = + (List<FieldValue>) rs.getArray("IntegerArrayField").getArray(); + List<FieldValue> integerArrayFieldValueListColInd = + (List<FieldValue>) rs.getArray(14).getArray(); + assertEquals( + integerArrayFieldValueList.size(), + integerArrayFieldValueListColInd.size()); // Array has 4 elements + if (integerArrayFieldValueList.size() == 4) { // as we are picking the third index + assertEquals( + (integerArrayFieldValueList.get(2).getNumericValue()).intValue(), + (integerArrayFieldValueListColInd.get(2).getNumericValue()).intValue()); + } } } @@ -3968,26 +4754,11 @@ public void testFastDMLQuery() throws InterruptedException { TableResult result = bigquery.query(dmlConfig); assertNotNull(result.getJobId()); assertEquals(TABLE_SCHEMA, result.getSchema()); - assertEquals(2, result.getTotalRows()); - // Verify correctness of table content - String sqlQuery = String.format("SELECT * FROM %s.%s", DATASET, tableName); - QueryJobConfiguration sqlConfig = QueryJobConfiguration.newBuilder(sqlQuery).build(); - TableResult resultAfterDML = bigquery.query(sqlConfig); - assertNotNull(resultAfterDML.getJobId()); - for (FieldValueList row : resultAfterDML.getValues()) { - FieldValue timestampCell = row.get(0); - assertEquals(timestampCell, row.get("TimestampField")); - FieldValue stringCell = row.get(1); - assertEquals(stringCell, row.get("StringField")); - FieldValue booleanCell = row.get(3); - assertEquals(booleanCell, row.get("BooleanField")); - assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.getAttribute()); - assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.getAttribute()); - assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); - assertEquals(1408452095220000L, timestampCell.getTimestampValue()); - assertEquals("hello", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); - } + // Using the job reference on the TableResult, look up and verify DML statistics.
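+ // getNumDmlAffectedRows() is the total row count touched by the statement, while + // DmlStats breaks that total down into inserted, updated, and deleted rows.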
+ Job queryJob = bigquery.getJob(result.getJobId()); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertEquals(2L, statistics.getNumDmlAffectedRows().longValue()); + assertEquals(2L, statistics.getDmlStats().getUpdatedRowCount().longValue()); } @Test @@ -4219,6 +4990,7 @@ public void testLoadSessionSupport() throws InterruptedException { Job loadJob = bigquery.getJob(job.getJobId()); JobStatistics.LoadStatistics statistics = loadJob.getStatistics(); + assertThat(statistics.getTotalSlotMs()).isGreaterThan(0L); String sessionId = statistics.getSessionInfo().getSessionId(); assertNotNull(sessionId); @@ -4973,15 +5745,34 @@ public void testCreateAndGetJob() throws InterruptedException, TimeoutException assertNotNull(remoteJob.getStatus()); assertEquals(createdJob.getSelfLink(), remoteJob.getSelfLink()); assertEquals(createdJob.getUserEmail(), remoteJob.getUserEmail()); - assertTrue(createdTable.delete()); - - Job completedJob = remoteJob.waitFor(RetryOption.totalTimeout(Duration.ofMinutes(1))); + Job completedJob = remoteJob.waitFor(RetryOption.totalTimeoutDuration(Duration.ofMinutes(1))); assertNotNull(completedJob); assertNull(completedJob.getStatus().getError()); + assertTrue(createdTable.delete()); assertTrue(bigquery.delete(destinationTable)); } + @Test + public void testCreateJobAndWaitForWithRetryOptions() + throws InterruptedException, TimeoutException { + // Note: This only tests the non failure/retry case. For retry cases, see unit tests with mocked + // RPC calls. + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder("SELECT CURRENT_TIMESTAMP() as ts") + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .build(); + + BigQueryRetryConfig bigQueryRetryConfig = BigQueryRetryConfig.newBuilder().build(); + JobOption bigQueryRetryConfigOption = JobOption.bigQueryRetryConfig(bigQueryRetryConfig); + JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(1)); + + Job job = bigquery.create(JobInfo.of(config), bigQueryRetryConfigOption, retryOptions); + job = job.waitFor(bigQueryRetryConfig); + assertEquals(DONE, job.getStatus().getState()); + } + @Test public void testCreateAndGetJobWithSelectedFields() throws InterruptedException, TimeoutException { @@ -5023,8 +5814,8 @@ public void testCreateAndGetJobWithSelectedFields() assertNull(remoteJob.getUserEmail()); Job completedJob = remoteJob.waitFor( - RetryOption.initialRetryDelay(Duration.ofSeconds(1)), - RetryOption.totalTimeout(Duration.ofMinutes(1))); + RetryOption.initialRetryDelayDuration(Duration.ofSeconds(1)), + RetryOption.totalTimeoutDuration(Duration.ofMinutes(1))); assertNotNull(completedJob); assertTrue(createdTable.delete()); assertNull(completedJob.getStatus().getError()); @@ -5469,6 +6260,7 @@ public void testExtractJob() throws InterruptedException, TimeoutException { assertEquals(1L, extractStatistics.getDestinationUriFileCounts().size()); assertEquals( loadStatistics.getOutputBytes().longValue(), extractStatistics.getInputBytes().longValue()); + assertThat(extractStatistics.getTotalSlotMs()).isGreaterThan(0L); String extractedCsv = new String(storage.readAllBytes(BUCKET, EXTRACT_FILE), StandardCharsets.UTF_8); @@ -5488,7 +6280,7 @@ public void testExtractJobWithModel() throws InterruptedException { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " @@ -5702,6 +6494,14 @@ public void testLocation() throws Exception { 
assertThat(location).isNotEqualTo(wrongLocation); + Tracer tracer = otel.getTracer("Test Tracer"); + bigquery = + bigquery.getOptions().toBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build() + .getService(); + Dataset dataset = bigquery.create( DatasetInfo.newBuilder("locationset_" + UUID.randomUUID().toString().replace("-", "_")) @@ -5778,6 +6578,11 @@ public void testLocation() throws Exception { bigquery.writer( JobId.newBuilder().setLocation(location).build(), writeChannelConfiguration)) { writer.write(ByteBuffer.wrap("foo".getBytes())); + assertEquals( + OTEL_ATTRIBUTES + .get("com.google.cloud.bigquery.TableDataWriteChannel.open") + .get(AttributeKey.stringKey("bq.job.location")), + location); } try { @@ -5790,13 +6595,19 @@ public void testLocation() throws Exception { } } finally { bigquery.delete(dataset.getDatasetId(), DatasetDeleteOption.deleteContents()); + bigquery = + bigquery.getOptions().toBuilder() + .setEnableOpenTelemetryTracing(false) + .setOpenTelemetryTracer(null) + .build() + .getService(); } } @Test - public void testPreserveAsciiControlCharacters() + public void testWriteChannelPreserveAsciiControlCharacters() throws InterruptedException, IOException, TimeoutException { - String destinationTableName = "test_preserve_ascii_control_characters"; + String destinationTableName = "test_write_channel_preserve_ascii_control_characters"; TableId tableId = TableId.of(DATASET, destinationTableName); WriteChannelConfiguration configuration = WriteChannelConfiguration.newBuilder(tableId) @@ -5819,6 +6630,26 @@ public void testPreserveAsciiControlCharacters() assertTrue(bigquery.delete(tableId)); } + @Test + public void testLoadJobPreserveAsciiControlCharacters() throws InterruptedException { + String destinationTableName = "test_load_job_preserve_ascii_control_characters"; + TableId destinationTable = TableId.of(DATASET, destinationTableName); + + try { + LoadJobConfiguration configuration = + LoadJobConfiguration.newBuilder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE_NULL) + .setFormatOptions( + CsvOptions.newBuilder().setPreserveAsciiControlCharacters(true).build()) + .setSchema(SIMPLE_SCHEMA) + .build(); + Job remoteLoadJob = bigquery.create(JobInfo.of(configuration)); + remoteLoadJob = remoteLoadJob.waitFor(); + assertNull(remoteLoadJob.getStatus().getError()); + } finally { + assertTrue(bigquery.delete(destinationTable)); + } + } + @Test public void testReferenceFileSchemaUriForAvro() { try { @@ -6352,33 +7183,26 @@ public void testStatelessQueries() throws InterruptedException { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQuery bigQuery = bigqueryHelper.getOptions().getService(); - // Simulate setting the QUERY_PREVIEW_ENABLED environment variable. - bigQuery.getOptions().setQueryPreviewEnabled("TRUE"); + // Stateless query should have no job id. + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); TableResult tableResult = executeSimpleQuery(bigQuery); assertNotNull(tableResult.getQueryId()); assertNull(tableResult.getJobId()); - // The flag should be case-insensitive. - bigQuery.getOptions().setQueryPreviewEnabled("tRuE"); - tableResult = executeSimpleQuery(bigQuery); - assertNotNull(tableResult.getQueryId()); - assertNull(tableResult.getJobId()); - - // Any other values won't enable optional job creation mode. - bigQuery.getOptions().setQueryPreviewEnabled("test_value"); + // Job creation takes over, no query id is created. 
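+ // JOB_CREATION_REQUIRED always materializes a Job resource, so the result now + // carries a job id and no query id, the inverse of the stateless case above.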
+ bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED); tableResult = executeSimpleQuery(bigQuery); - assertNotNull(tableResult.getQueryId()); + assertNull(tableResult.getQueryId()); assertNotNull(tableResult.getJobId()); - // Reset the flag. - bigQuery.getOptions().setQueryPreviewEnabled(null); + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_MODE_UNSPECIFIED); tableResult = executeSimpleQuery(bigQuery); assertNotNull(tableResult.getQueryId()); assertNotNull(tableResult.getJobId()); } private TableResult executeSimpleQuery(BigQuery bigQuery) throws InterruptedException { - String query = "SELECT 1 as one"; + String query = "SELECT CURRENT_TIMESTAMP() as ts"; QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); TableResult result = bigQuery.query(config); return result; @@ -6391,13 +7215,14 @@ public void testTableResultJobIdAndQueryId() throws InterruptedException { // 2. For queries that fail the requirements to be stateless, jobId is populated and // queryId is not. // 3. For explicitly created jobs, jobId is populated and queryId is not populated. + // 4. If QueryJobConfiguration explicitly sets Job Creation Mode to Required. // Test scenario 1. // Create local BigQuery for test scenario 1 to not contaminate global test parameters. RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQuery bigQuery = bigqueryHelper.getOptions().getService(); - // Simulate setting the QUERY_PREVIEW_ENABLED environment variable. + // Allow queries to be stateless. + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); String query = "SELECT 1 as one"; QueryJobConfiguration configStateless = QueryJobConfiguration.newBuilder(query).build(); TableResult result = bigQuery.query(configStateless); @@ -6417,6 +7242,16 @@ result = job.getQueryResults(); assertNotNull(result.getJobId()); assertNull(result.getQueryId()); + + // Test scenario 4. + configWithJob = + QueryJobConfiguration.newBuilder(query) + .setJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED) + .build(); + result = bigQuery.query(configWithJob); + assertNotNull(result.getJobId()); + assertNull(result.getQueryId()); } @Test @@ -6449,7 +7284,7 @@ public void testStatelessQueriesWithLocation() throws Exception { table.getTableId().getTable()); // Test stateless query when BigQueryOption location matches dataset location.
- bigQuery.getOptions().setQueryPreviewEnabled("TRUE"); + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); TableResult tb = bigQuery.query(QueryJobConfiguration.of(query)); assertNull(tb.getJobId()); @@ -6457,7 +7292,9 @@ public void testStatelessQueriesWithLocation() throws Exception { try { BigQuery bigQueryWrongLocation = bigqueryHelper.getOptions().toBuilder().setLocation(wrongLocation).build().getService(); - bigQueryWrongLocation.getOptions().setQueryPreviewEnabled("TRUE"); + bigQueryWrongLocation + .getOptions() + .setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); bigQueryWrongLocation.query(QueryJobConfiguration.of(query)); fail("querying a table with wrong location shouldn't work"); } catch (BigQueryException e) { @@ -6468,13 +7305,55 @@ } } + @Test + public void testQueryWithTimeout() throws InterruptedException { + // Validate that queryWithTimeout returns either a TableResult or a Job object + + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQuery bigQuery = bigqueryHelper.getOptions().getService(); + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + String largeQuery = + "SELECT * FROM UNNEST(GENERATE_ARRAY(1, 20000)) CROSS JOIN UNNEST(GENERATE_ARRAY(1, 20000))"; + String query = "SELECT 1 as one"; + // Test scenario 1. + // Stateless query returns TableResult + QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); + Object result = bigQuery.queryWithTimeout(config, null, null); + assertTrue(result instanceof TableResult); + assertNull(((TableResult) result).getJobId()); + assertNotNull(((TableResult) result).getQueryId()); + + // Stateful query creates a job but still returns a TableResult + // Test scenario 2 to ensure job is created if JobCreationMode is set, but for a small query + // it still returns results. + config = + QueryJobConfiguration.newBuilder(query) + .setJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED) + .build(); + result = bigQuery.queryWithTimeout(config, null, null); + assertTrue(result instanceof TableResult); + assertNotNull(((TableResult) result).getJobId()); + assertNull(((TableResult) result).getQueryId()); + + // Stateful query returns Job + // Test scenario 3 to ensure job is created if Query is long running. + // Explicitly disable the cache to ensure it is a long-running query. + config = QueryJobConfiguration.newBuilder(largeQuery).setUseQueryCache(false).build(); + long millis = System.currentTimeMillis(); + result = bigQuery.queryWithTimeout(config, null, 1000L); + millis = System.currentTimeMillis() - millis; + assertTrue(result instanceof Job); + // Cancel the job as we don't need results.
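+ // cancel() is best-effort; the timing assertion below only checks that + // queryWithTimeout returned promptly rather than waiting for the query to finish.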
+ ((Job) result).cancel(); + // Allow 2 seconds of timeout value to account for random delays + assertTrue(millis < 1_000 * 2); + } + @Test public void testUniverseDomainWithInvalidUniverseDomain() { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQueryOptions bigQueryOptions = - bigqueryHelper - .getOptions() - .toBuilder() + bigqueryHelper.getOptions().toBuilder() .setCredentials(loadCredentials(FAKE_JSON_CRED_WITH_GOOGLE_DOMAIN)) .setUniverseDomain("invalid.domain") .build(); @@ -6498,9 +7377,7 @@ public void testUniverseDomainWithInvalidUniverseDomain() { public void testInvalidUniverseDomainWithMismatchCredentials() { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); BigQueryOptions bigQueryOptions = - bigqueryHelper - .getOptions() - .toBuilder() + bigqueryHelper.getOptions().toBuilder() .setCredentials(loadCredentials(FAKE_JSON_CRED_WITH_INVALID_DOMAIN)) .build(); BigQuery bigQuery = bigQueryOptions.getService(); @@ -6531,11 +7408,16 @@ public void testUniverseDomainWithMatchingDomain() { Page<Dataset> datasets = bigQuery.listDatasets("bigquery-public-data"); Iterator<Dataset> iterator = datasets.iterateAll().iterator(); Set<String> datasetNames = new HashSet<>(); + Map<String, String> datasetLocation = new HashMap<>(); while (iterator.hasNext()) { - datasetNames.add(iterator.next().getDatasetId().getDataset()); + Dataset dataset = iterator.next(); + String name = dataset.getDatasetId().getDataset(); + datasetNames.add(name); + datasetLocation.put(name, dataset.getLocation()); } for (String type : PUBLIC_DATASETS) { assertTrue(datasetNames.contains(type)); + assertEquals(PUBLIC_DATASETS_LOCATION.get(type), datasetLocation.get(type)); } } @@ -6580,6 +7462,75 @@ public void testExternalTableMetadataCachingNotEnable() throws InterruptedExcept assertTrue(remoteTable.delete()); } + @Test + public void testExternalMetadataCacheModeFailForNonBiglake() { + // Validate that MetadataCacheMode is passed to the backend. + // TODO: Enhance this test after BigLake testing infrastructure is in place.
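+ // A plain GCS-backed external table has no BigLake connection attached, so the + // backend is expected to reject AUTOMATIC cache mode with reason "invalid".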
+ String tableName = "test_metadata_cache_mode_fail_for_non_biglake"; + TableId tableId = TableId.of(DATASET, tableName); + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()) + .setMetadataCacheMode("AUTOMATIC") + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + + try { + bigquery.create(tableInfo); + fail("BigQueryException was expected"); + } catch (BigQueryException e) { + BigQueryError error = e.getError(); + assertNotNull(error); + assertEquals("invalid", error.getReason()); + assertThat( + e.getMessage().contains("metadataCacheMode provided for non BigLake external table")) + .isTrue(); + } + } + + @Test + public void testObjectTable() throws InterruptedException { + String tableName = "test_object_table"; + TableId tableId = TableId.of(DATASET, tableName); + + String sourceUri = "gs://" + BUCKET + "/" + JSON_LOAD_FILE; + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder(sourceUri) + .setConnectionId( + "projects/java-docs-samples-testing/locations/us/connections/DEVREL_TEST_CONNECTION") + .setObjectMetadata("SIMPLE") + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + + try { + assertTrue(remoteTable.getDefinition() instanceof ExternalTableDefinition); + assertEquals(createdTable.getTableId(), remoteTable.getTableId()); + assertEquals( + "SIMPLE", ((ExternalTableDefinition) remoteTable.getDefinition()).getObjectMetadata()); + assertNotNull(remoteTable.getDefinition().getSchema().getFields().get("uri")); + + String query = String.format("SELECT * FROM %s.%s", DATASET, tableName); + QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); + + Job remoteJob = bigquery.create(JobInfo.of(config)); + remoteJob = remoteJob.waitFor(); + assertNull(remoteJob.getStatus().getError()); + + Job queryJob = bigquery.getJob(remoteJob.getJobId()); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertNotNull(statistics); + assertThat(statistics.getTotalBytesProcessed()).isGreaterThan(0); + } finally { + assertTrue(remoteTable.delete()); + } + } + static GoogleCredentials loadCredentials(String credentialFile) { try { InputStream keyStream = new ByteArrayInputStream(credentialFile.getBytes()); @@ -6589,4 +7540,284 @@ static GoogleCredentials loadCredentials(String credentialFile) { } return null; } + + @Test + public void testQueryExportStatistics() throws InterruptedException { + String query = + String.format( + "EXPORT DATA OPTIONS(\n" + + " uri='gs://%s/*.csv',\n" + + " format='CSV',\n" + + " overwrite=true,\n" + + " header=true,\n" + + " field_delimiter=';') AS\n" + + "SELECT num FROM UNNEST([1,2,3]) AS num", + BUCKET); + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + job = job.waitFor(); + + QueryStatistics queryStatistics = job.getStatistics(); + assertNotNull(queryStatistics); + assertNotNull(queryStatistics.getExportDataStats()); + assertEquals(1L, 
queryStatistics.getExportDataStats().getFileCount().longValue()); + assertEquals(3L, queryStatistics.getExportDataStats().getRowCount().longValue()); + } + + @Test + public void testLoadConfigurationFlexibleColumnName() throws InterruptedException { + // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#columnnamecharactermap for + // mapping. + + // Test v1 mapping. + String v1TableName = "flexible_column_name_data_testing_table_v1"; + TableId v1TableId = TableId.of(DATASET, v1TableName); + try { + LoadJobConfiguration loadJobConfigurationV1 = + LoadJobConfiguration.newBuilder( + v1TableId, + "gs://" + BUCKET + "/" + LOAD_FILE_FLEXIBLE_COLUMN_NAME, + FormatOptions.csv()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setAutodetect(true) + .setColumnNameCharacterMap("V1") + .build(); + Job jobV1 = bigquery.create(JobInfo.of(loadJobConfigurationV1)); + jobV1 = jobV1.waitFor(); + assertNull(jobV1.getStatus().getError()); + + Table remoteTableV1 = bigquery.getTable(DATASET, v1TableName); + assertNotNull(remoteTableV1); + assertEquals( + "_ampersand", remoteTableV1.getDefinition().getSchema().getFields().get(1).getName()); + } finally { + bigquery.delete(v1TableId); + } + + // Test v2 mapping. + String v2TableName = "flexible_column_name_data_testing_table_v2"; + TableId v2TableId = TableId.of(DATASET, v2TableName); + try { + LoadJobConfiguration loadJobConfigurationV2 = + LoadJobConfiguration.newBuilder( + v2TableId, + "gs://" + BUCKET + "/" + LOAD_FILE_FLEXIBLE_COLUMN_NAME, + FormatOptions.csv()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setAutodetect(true) + .setColumnNameCharacterMap("V2") + .build(); + Job jobV2 = bigquery.create(JobInfo.of(loadJobConfigurationV2)); + jobV2 = jobV2.waitFor(); + assertNull(jobV2.getStatus().getError()); + + Table remoteTableV2 = bigquery.getTable(DATASET, v2TableName); + assertNotNull(remoteTableV2); + assertEquals( + "&ersand", remoteTableV2.getDefinition().getSchema().getFields().get(1).getName()); + } finally { + bigquery.delete(v2TableId); + } + } + + @Test + public void testStatementType() throws InterruptedException { + String tableName = "test_materialized_view_table_statemnt_type"; + String createQuery = + String.format( + "CREATE MATERIALIZED VIEW %s.%s.%s " + + "AS (SELECT MAX(TimestampField) AS TimestampField,StringField, MAX(BooleanField) AS BooleanField FROM %s.%s.%s GROUP BY StringField)", + PROJECT_ID, DATASET, tableName, PROJECT_ID, DATASET, TABLE_ID.getTable()); + TableResult result = bigquery.query(QueryJobConfiguration.of(createQuery)); + assertNotNull(result); + Job job = bigquery.getJob(result.getJobId()); + JobStatistics.QueryStatistics stats = job.getStatistics(); + assertEquals(StatementType.CREATE_MATERIALIZED_VIEW, stats.getStatementType()); + + // cleanup + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.getDefinition() instanceof MaterializedViewDefinition); + assertTrue(remoteTable.delete()); + } + + @Test + public void testOpenTelemetryTracingDatasets() { + Tracer tracer = otel.getTracer("Test Tracer"); + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + Span parentSpan = + tracer + .spanBuilder("Test Parent Span") + .setNoParent() + .setAttribute("test-attribute", "test-value") + .startSpan(); + String billingModelDataset = 
RemoteBigQueryHelper.generateDatasetName(); + + try (Scope parentScope = parentSpan.makeCurrent()) { + DatasetInfo info = + DatasetInfo.newBuilder(billingModelDataset) + .setDescription(DESCRIPTION) + .setMaxTimeTravelHours(72L) + .setLabels(LABELS) + .build(); + + Dataset dataset = bigquery.create(info); + assertNotNull(dataset); + dataset = bigquery.getDataset(dataset.getDatasetId().getDataset()); + assertNotNull(dataset); + + DatasetInfo updatedInfo = + DatasetInfo.newBuilder(billingModelDataset) + .setDescription("Updated Description") + .setMaxTimeTravelHours(96L) + .setLabels(LABELS) + .build(); + + dataset = bigquery.update(updatedInfo, DatasetOption.accessPolicyVersion(2)); + assertEquals(dataset.getDescription(), "Updated Description"); + assertTrue(bigquery.delete(dataset.getDatasetId())); + } finally { + parentSpan.end(); + Map<AttributeKey<String>, Object> createMap = + OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.createDataset"); + assertEquals(createMap.get(AttributeKey.stringKey("bq.dataset.location")), "null"); + assertEquals( + OTEL_ATTRIBUTES + .get("com.google.cloud.bigquery.BigQueryRpc.createDataset") + .get(AttributeKey.stringKey("bq.rpc.service")), + "DatasetService"); + + Map<AttributeKey<String>, Object> getMap = + OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.getDataset"); + assertEquals(getMap.get(AttributeKey.stringKey("bq.dataset.id")), billingModelDataset); + + Map<AttributeKey<String>, Object> updateMap = + OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateDataset"); + assertEquals(updateMap.get(AttributeKey.stringKey("bq.option.ACCESS_POLICY_VERSION")), "2"); + + Map<AttributeKey<String>, Object> deleteMap = + OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.deleteDataset"); + assertEquals(deleteMap.get(AttributeKey.stringKey("bq.dataset.id")), billingModelDataset); + + // All should be child spans of parentSpan + assertEquals( + OTEL_SPAN_IDS_TO_NAMES.get( + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.getDataset")), + "Test Parent Span"); + assertEquals( + OTEL_SPAN_IDS_TO_NAMES.get( + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createDataset")), + "Test Parent Span"); + assertEquals( + OTEL_SPAN_IDS_TO_NAMES.get( + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.deleteDataset")), + "Test Parent Span"); + assertEquals( + OTEL_SPAN_IDS_TO_NAMES.get( + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQueryRpc.createDataset")), + "com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries"); + assertEquals(OTEL_PARENT_SPAN_IDS.get("Test Parent Span"), OTEL_PARENT_SPAN_ID); + RemoteBigQueryHelper.forceDelete(bigquery, billingModelDataset); + } + } + + @Test + public void testOpenTelemetryTracingTables() { + Tracer tracer = otel.getTracer("Test Tracer"); + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + String tableName = "test_otel_table"; + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + TableInfo tableInfo = + TableInfo.newBuilder(TableId.of(DATASET, tableName), tableDefinition) + .setDescription("Some Description") + .build(); + Table createdTable = bigquery.create(tableInfo); + assertThat(createdTable.getDescription()).isEqualTo("Some Description"); + + assertEquals( + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createTable"), + OTEL_PARENT_SPAN_ID); + assertEquals( + OTEL_ATTRIBUTES + .get("com.google.cloud.bigquery.BigQuery.createTable")
.get(AttributeKey.stringKey("bq.table.id")), + tableName); + assertEquals( + OTEL_ATTRIBUTES + .get("com.google.cloud.bigquery.BigQuery.createTable") + .get(AttributeKey.stringKey("bq.table.creation_time")), + "null"); + assertEquals( + OTEL_ATTRIBUTES + .get("com.google.cloud.bigquery.BigQueryRpc.createTable") + .get(AttributeKey.stringKey("bq.rpc.method")), + "InsertTable"); + + Table updatedTable = + bigquery.update(createdTable.toBuilder().setDescription("Updated Description").build()); + assertThat(updatedTable.getDescription()).isEqualTo("Updated Description"); + + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateTable")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.patchTable")); + assertEquals( + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.updateTable"), + OTEL_PARENT_SPAN_ID); + assertTrue(bigquery.delete(updatedTable.getTableId())); + } + + @Test + public void testOpenTelemetryTracingQuery() throws InterruptedException { + Tracer tracer = otel.getTracer("Test Tracer"); + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + // Stateless query + bigquery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + TableResult tableResult = executeSimpleQuery(bigquery); + assertNotNull(tableResult.getQueryId()); + assertNull(tableResult.getJobId()); + + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.queryRpc")); + assertNotNull( + OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.queryRpc")); + assertTrue(OTEL_ATTRIBUTES.containsKey("com.google.cloud.bigquery.BigQuery.queryWithTimeout")); + + // Query job + String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + + TableResult result = job.getQueryResults(); + assertNotNull(result.getJobId()); + assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); + + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.getQueryResults")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.listTableData")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.listTableData")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.createJob")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.createJob")); + // Key exists, but value is null because no options were supplied in the request. 
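+    // Illustrative only (the option and attribute key below are hypothetical, not part of this
+    // test): had an option such as BigQuery.QueryResultsOption.maxResults(100L) been supplied,
+    // the map would be expected to carry an entry keyed along the lines of
+    // "bq.option.MAX_RESULTS", mirroring the "bq.option.ACCESS_POLICY_VERSION" attribute
+    // asserted in the dataset test above.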
+ assertTrue(OTEL_ATTRIBUTES.containsKey("com.google.cloud.bigquery.Job.getQueryResults")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.getQueryResults")); + assertTrue(OTEL_ATTRIBUTES.containsKey("com.google.cloud.bigquery.Job.waitForQueryResults")); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java index 576e5c4e3..eec39f633 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java @@ -35,7 +35,8 @@ public void testListToDataset() { .setId("project-id:dataset-id") .setFriendlyName("friendly") .setKind("bigquery#dataset") - .setLabels(Collections.singletonMap("foo", "bar")); + .setLabels(Collections.singletonMap("foo", "bar")) + .setLocation("test-region-1"); Dataset dataset = HttpBigQueryRpc.LIST_TO_DATASET.apply(listDataSet); assertThat(dataset.getKind()).isEqualTo("bigquery#dataset"); @@ -43,5 +44,6 @@ public void testListToDataset() { assertThat(dataset.getFriendlyName()).isEqualTo("friendly"); assertThat(dataset.getDatasetReference()).isEqualTo(datasetRef); assertThat(dataset.getLabels()).containsExactly("foo", "bar"); + assertThat(dataset.getLocation()).isEqualTo("test-region-1"); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java index 9b2ef4804..5aadd11e3 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java @@ -25,12 +25,12 @@ import com.google.cloud.http.HttpTransportOptions; import java.io.ByteArrayInputStream; import java.io.InputStream; +import java.time.Duration; import java.util.concurrent.ExecutionException; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.mockito.junit.MockitoJUnitRunner; -import org.threeten.bp.Duration; @RunWith(MockitoJUnitRunner.class) public class RemoteBigQueryHelperTest { @@ -83,8 +83,8 @@ public void testCreateFromStream() { assertEquals(60000, ((HttpTransportOptions) options.getTransportOptions()).getConnectTimeout()); assertEquals(60000, ((HttpTransportOptions) options.getTransportOptions()).getReadTimeout()); assertEquals(10, options.getRetrySettings().getMaxAttempts()); - assertEquals(Duration.ofMillis(30000), options.getRetrySettings().getMaxRetryDelay()); - assertEquals(Duration.ofMillis(120000), options.getRetrySettings().getTotalTimeout()); - assertEquals(Duration.ofMillis(250), options.getRetrySettings().getInitialRetryDelay()); + assertEquals(Duration.ofMillis(30000), options.getRetrySettings().getMaxRetryDelayDuration()); + assertEquals(Duration.ofMillis(120000), options.getRetrySettings().getTotalTimeoutDuration()); + assertEquals(Duration.ofMillis(250), options.getRetrySettings().getInitialRetryDelayDuration()); } } diff --git a/owlbot.py b/owlbot.py deleted file mode 100644 index a2540ce52..000000000 --- a/owlbot.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import synthtool.languages.java as java - -java.common_templates(excludes=[ - '.kokoro/nightly/samples.cfg', - '.kokoro/nightly/integration.cfg', - '.kokoro/presubmit/samples.cfg', - '.kokoro/presubmit/graalvm-native.cfg', - '.kokoro/presubmit/graalvm-native-17.cfg', - 'codecov.yaml', - 'renovate.json', - '.kokoro/build.sh' - '.kokoro/requirements.in', - '.kokoro/requirements.txt' -]) \ No newline at end of file diff --git a/pom.xml b/pom.xml index 30133f49d..2ec753691 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 2.38.4-mi + 2.56.0-mi BigQuery Parent https://github.com/googleapis/java-bigquery @@ -14,7 +14,7 @@ com.google.cloud sdk-platform-java-config - 3.28.1 + 3.54.1 @@ -60,7 +60,7 @@ UTF-8 github google-cloud-bigquery-parent - v2-rev20240229-2.0.0 + v2-rev20251012-2.0.0 repo.metricinsights.com var/www/repo.metricinsights.com @@ -79,7 +79,7 @@ com.google.cloud google-cloud-bigquerystorage-bom - 3.4.0 + 3.18.0 pom import @@ -87,7 +87,7 @@ com.google.cloud google-cloud-datacatalog-bom - 1.45.0 + 1.78.0 pom import @@ -101,7 +101,7 @@ com.google.cloud google-cloud-bigquery - 2.38.3-SNAPSHOT + 2.56.0-mi @@ -114,7 +114,7 @@ org.threeten threeten-extra - 1.7.2 + 1.8.0 @@ -127,7 +127,7 @@ com.google.truth truth - 1.4.2 + 1.4.4 test @@ -145,19 +145,19 @@ com.google.cloud google-cloud-storage - 2.36.1 + 2.53.3 test com.google.cloud google-cloud-bigqueryconnection - 2.41.0 + 2.74.0 test com.google.api.grpc proto-google-cloud-bigqueryconnection-v1 - 2.41.0 + 2.70.0 test @@ -175,7 +175,10 @@ maven-dependency-plugin + io.netty:netty-buffer + io.netty:netty-common org.apache.arrow:arrow-memory-netty + com.google.api:gax @@ -210,7 +213,7 @@ org.apache.maven.plugins maven-project-info-reports-plugin - 3.5.0 + 3.9.0 diff --git a/renovate.json b/renovate.json index 8034b0379..38fdb78eb 100644 --- a/renovate.json +++ b/renovate.json @@ -7,95 +7,111 @@ ":updateNotScheduled", ":automergeDisabled", ":ignoreModulesAndTests", - ":maintainLockFilesDisabled", - ":autodetectPinVersions" + ":maintainLockFilesDisabled" + ], + "ignorePaths": [ + ".kokoro/requirements.txt" ], - "ignorePaths": [".kokoro/requirements.txt"], "customManagers": [ { "customType": "regex", - "fileMatch": [ - "^.kokoro/continuous/graalvm-native.*.cfg$", - "^.kokoro/presubmit/graalvm-native.*.cfg$" + "managerFilePatterns": [ + "/^.kokoro/continuous/graalvm-native.*.cfg$/", + "/^.kokoro/presubmit/graalvm-native.*.cfg$/" + ], + "matchStrings": [ + "value: \"gcr.io/cloud-devrel-public-resources/graalvm.*:(?.*?)\"" ], - "matchStrings": ["value: \"gcr.io/cloud-devrel-public-resources/graalvm.*:(?.*?)\""], "depNameTemplate": "com.google.cloud:sdk-platform-java-config", "datasourceTemplate": "maven" }, { "customType": "regex", - "fileMatch": [ - "^.github/workflows/unmanaged_dependency_check.yaml$" + "managerFilePatterns": [ + "/^.github/workflows/unmanaged_dependency_check.yaml$/" + ], + "matchStrings": [ + "uses: 
googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v(?.+?)\\n" ], - "matchStrings": ["uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v(?.+?)\\n"], "depNameTemplate": "com.google.cloud:sdk-platform-java-config", "datasourceTemplate": "maven" } ], "packageRules": [ { - "packagePatterns": [ - "^com.google.guava:" - ], - "versionScheme": "docker" + "versioning": "docker", + "matchPackageNames": [ + "/^com.google.guava:/" + ] }, { - "packagePatterns": [ - "*", - "^com.google.cloud:google-cloud-bigquerystorage" - ], "semanticCommitType": "deps", - "semanticCommitScope": null + "semanticCommitScope": null, + "matchPackageNames": [ + "*", + "/^com.google.cloud:google-cloud-bigquerystorage/" + ] }, { - "packagePatterns": [ - "^org.apache.maven", - "^org.jacoco:", - "^org.codehaus.mojo:", - "^org.sonatype.plugins:", - "^com.coveo:", - "^com.google.cloud:google-cloud-shared-config" - ], "semanticCommitType": "build", - "semanticCommitScope": "deps" + "semanticCommitScope": "deps", + "matchPackageNames": [ + "/^org.apache.maven/", + "/^org.jacoco:/", + "/^org.codehaus.mojo:/", + "/^org.sonatype.plugins:/", + "/^com.coveo:/", + "/^com.google.cloud:google-cloud-shared-config/" + ] }, { - "packagePatterns": [ - "^com.google.cloud:google-cloud-bigquery", - "^com.google.cloud:google-cloud-bigtable", - "^com.google.cloud:libraries-bom", - "^com.google.cloud.samples:shared-configuration" - ], "semanticCommitType": "chore", - "semanticCommitScope": "deps" + "semanticCommitScope": "deps", + "matchPackageNames": [ + "/^com.google.cloud:google-cloud-bigquery/", + "/^com.google.cloud:google-cloud-bigtable/", + "/^com.google.cloud:libraries-bom/", + "/^com.google.cloud.samples:shared-configuration/" + ] }, { - "packagePatterns": [ - "^junit:junit", - "^com.google.truth:truth", - "^org.mockito:mockito-core", - "^org.objenesis:objenesis", - "^com.google.cloud:google-cloud-storage" - ], "semanticCommitType": "test", - "semanticCommitScope": "deps" + "semanticCommitScope": "deps", + "matchPackageNames": [ + "/^junit:junit/", + "/^com.google.truth:truth/", + "/^org.mockito:mockito-core/", + "/^org.objenesis:objenesis/", + "/^com.google.cloud:google-cloud-storage/" + ] }, { - "packagePatterns": [ - "^com.google.cloud:google-cloud-" - ], - "ignoreUnstable": false + "ignoreUnstable": false, + "matchPackageNames": [ + "/^com.google.cloud:google-cloud-/" + ] }, { - "packagePatterns": [ - "^com.fasterxml.jackson.core" + "groupName": "jackson dependencies", + "matchPackageNames": [ + "/^com.fasterxml.jackson.core/" + ] + }, + { + "matchPackageNames": [ + "com.google.cloud:google-cloud-datacatalog-bom", + "com.google.cloud:google-cloud-bigqueryconnection-v1", + "com.google.cloud:google-cloud-bigqueryconnection" ], - "groupName": "jackson dependencies" + "groupName": "Google Cloud Java Monorepo Updates", + "groupSlug": "google-cloud-java-monorepo-release-updates" } ], - "semanticCommits": true, + "semanticCommits": "enabled", "dependencyDashboard": true, "dependencyDashboardLabels": [ "type: process" - ] + ], + "prConcurrentLimit": 0, + "prHourlyLimit": 0 } diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index d3a9e60a3..14a17e189 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -30,13 +30,14 @@ com.google.cloud.samples shared-configuration - 1.2.0 + 1.2.2 21 21 UTF-8 + 1.52.0 @@ -45,31 +46,62 @@ 
com.google.cloud google-cloud-bigquery - 2.38.2 + 2.53.0 com.google.oauth-client google-oauth-client-java6 - 1.35.0 + 1.39.0 com.google.oauth-client google-oauth-client-jetty - 1.35.0 + 1.39.0 + + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-context + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk-common + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk-trace + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-exporter-logging + ${opentelemetry.version} + + com.google.cloud google-cloud-bigtable - 2.37.0 + 2.62.0 test com.google.cloud google-cloud-bigqueryconnection - 2.41.0 + 2.74.0 test @@ -81,7 +113,7 @@ com.google.truth truth - 1.4.2 + 1.4.4 test @@ -92,7 +124,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.5.0 + 3.6.1 add-snippets-source diff --git a/samples/native-image-sample/README.md b/samples/native-image-sample/README.md deleted file mode 100644 index ed54aa6fa..000000000 --- a/samples/native-image-sample/README.md +++ /dev/null @@ -1,81 +0,0 @@ -# BigQuery Sample Application with Native Image - -The BigQuery sample application demonstrates some common operations with [Google Cloud BigQuery](https://cloud.google.com/bigquery) and is compatible with Native Image compilation. - - -## Setup Instructions - -You will need to follow these prerequisite steps in order to run the samples: - -1. If you have not already, [create a Google Cloud Platform Project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#creating_a_project). - -2. Install the [Google Cloud SDK](https://cloud.google.com/sdk/) which will allow you to run the sample with your project's credentials. - - Once installed, log in with Application Default Credentials using the following command: - - ``` - gcloud auth application-default login - ``` - - **Note:** Authenticating with Application Default Credentials is convenient to use during development, but we recommend [alternate methods of authentication](https://cloud.google.com/docs/authentication/production) during production use. - -3. Install the native image compiler. - - You can follow [the installation instructions](https://www.graalvm.org/docs/getting-started/#install-graalvm). - After following the instructions, ensure that you install the native image extension installed by running: - - ``` - gu install native-image - ``` - - Once you finish following the instructions, verify that the default version of Java is set to the correct version by running `java -version` in a terminal. - - You will see something similar to the below output: - - ``` - $ java -version - - openjdk version "17.0.3" 2022-04-19 - OpenJDK Runtime Environment GraalVM CE 22.1.0 (build 17.0.3+7-jvmci-22.1-b06) - OpenJDK 64-Bit Server VM GraalVM CE 22.1.0 (build 17.0.3+7-jvmci-22.1-b06, mixed mode, sharing) - ``` - -2. [Enable the BigQuery APIs](https://console.cloud.google.com/apis/api/bigquery.googleapis.com). - -### Run with Native Image Support - -Navigate to this directory in a new terminal. - -1. Compile the application using the native image Compiler. This step may take a few minutes. - - ``` - mvn package -P native -DskipTests - ``` - -2. Run the application: - - ``` - ./target/native-image-sample - ``` - -3. 
The application will create a sample BigQuery dataset in your GCP project called `nativeimage_test_dataset` and perform some simple operations like creating a table, inserting data, and running a query. - - If you would like to delete the BigQuery dataset later, you can manage your BigQuery resources through [Google Cloud Console](https://console.cloud.google.com/bigquery) to clean up BigQuery resources under your project. - - When you run the application, you'll see output like this in the terminal: - - ``` - Created new table: nativeimage_test_table_2351b0891d2f48af9309bd289c3bad13 - Successfully inserted test row. - Queried the following records: - User id: TestUser-2f39e3ec-d81a-483f-9ec0-b9bd54155710 | age: 40 - Deleted table: nativeimage_test_table_2351b0891d2f48af9309bd289c3bad13 - ``` - -### Sample Integration test with Native Image Support - -In order to run the sample integration test, call the following command: - -``` -mvn test -Pnative -``` diff --git a/samples/native-image-sample/pom.xml b/samples/native-image-sample/pom.xml index 83d088085..e69de29bb 100644 --- a/samples/native-image-sample/pom.xml +++ b/samples/native-image-sample/pom.xml @@ -1,156 +0,0 @@ - - - - 4.0.0 - com.example.bigquery - native-image-sample - Native Image Sample - - - com.google.cloud.samples - shared-configuration - 1.2.0 - - - - 21 - 21 - UTF-8 - - - - - - com.google.cloud - libraries-bom - 26.20.0 - pom - import - - - - - - - com.google.cloud - google-cloud-bigquery - - - - - junit - junit - 4.13.2 - test - - - com.google.truth - truth - 1.4.2 - test - - - - - - - org.apache.maven.plugins - maven-jar-plugin - - - - com.example.bigquery.NativeImageBigquerySample - - - - - - - - - - - - native - - - - org.junit.vintage - junit-vintage-engine - 5.10.2 - test - - - org.graalvm.buildtools - junit-platform-native - 0.10.1 - test - - - - - - - - org.apache.maven.plugins - maven-surefire-plugin - - 3.2.5 - - - **/*IT - - - - - org.graalvm.buildtools - native-maven-plugin - 0.10.1 - true - - com.example.bigquery.NativeImageBigquerySample - - - --no-fallback - --no-server - - - - - build-native - - build - test - - package - - - test-native - - test - - test - - - - - - - - diff --git a/samples/native-image-sample/src/main/java/com/example/bigquery/NativeImageBigquerySample.java b/samples/native-image-sample/src/main/java/com/example/bigquery/NativeImageBigquerySample.java deleted file mode 100644 index e7f1b35c5..000000000 --- a/samples/native-image-sample/src/main/java/com/example/bigquery/NativeImageBigquerySample.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright 2022 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.example.bigquery; - -import com.google.api.gax.paging.Page; -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQueryError; -import com.google.cloud.bigquery.BigQueryOptions; -import com.google.cloud.bigquery.Dataset; -import com.google.cloud.bigquery.DatasetInfo; -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.FieldValueList; -import com.google.cloud.bigquery.InsertAllRequest; -import com.google.cloud.bigquery.InsertAllResponse; -import com.google.cloud.bigquery.QueryJobConfiguration; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.StandardSQLTypeName; -import com.google.cloud.bigquery.StandardTableDefinition; -import com.google.cloud.bigquery.Table; -import com.google.cloud.bigquery.TableDefinition; -import com.google.cloud.bigquery.TableId; -import com.google.cloud.bigquery.TableInfo; -import com.google.cloud.bigquery.TableResult; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; - -/** - * Sample application demonstrating BigQuery operations. - * - *
<p>Note: This application will create a BigQuery dataset in your GCP project. You can delete this
- * by viewing BigQuery in Cloud Console https://console.cloud.google.com/bigquery or by uncommenting
- * the call to `deleteDataset(..)` made in main().
- */
-public class NativeImageBigquerySample {
-
-  private static final String DATASET_ID = "nativeimage_test_dataset";
-
-  private static final String TABLE_ID = "nativeimage_test_table";
-
-  private static final Schema TABLE_SCHEMA =
-      Schema.of(
-          Field.of("id", StandardSQLTypeName.STRING), Field.of("age", StandardSQLTypeName.INT64));
-
-  /** Entrypoint to the application. */
-  public static void main(String[] args) throws InterruptedException {
-    BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService();
-
-    if (!hasDataset(bigQuery, DATASET_ID)) {
-      createDataset(bigQuery, DATASET_ID);
-    }
-
-    String tableName = TABLE_ID + "_" + UUID.randomUUID().toString().replace("-", "");
-    createTable(bigQuery, DATASET_ID, tableName, TABLE_SCHEMA);
-    String testId = "TestUser-" + UUID.randomUUID().toString();
-    int testAge = 40;
-    insertTestRecord(bigQuery, DATASET_ID, tableName, testId, testAge);
-    queryTable(bigQuery, DATASET_ID, tableName);
-
-    // Clean up resources.
-    deleteTable(bigQuery, DATASET_ID, tableName);
-
-    // Uncomment this to delete the created dataset.
-    // deleteDataset(bigQuery, DATASET_ID);
-  }
-
-  static String queryTable(BigQuery bigQuery, String datasetName, String tableName)
-      throws InterruptedException {
-    String fullyQualifiedTable = datasetName + "." + tableName;
-    String query = "SELECT * FROM " + fullyQualifiedTable;
-
-    QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
-    TableResult results = bigQuery.query(queryConfig);
-
-    String result = "";
-    System.out.println("Queried the following records: ");
-    for (FieldValueList row : results.iterateAll()) {
-      String rowStatement =
-          String.format(
-              "User id: %s | age: %d\n",
-              row.get("id").getStringValue(), row.get("age").getLongValue());
-      result += rowStatement;
-      System.out.println(row);
-    }
-    return result;
-  }
-
-  static void insertTestRecord(
-      BigQuery bigQuery, String datasetName, String tableName, String id, int age) {
-
-    Map<String, Object> rowContent = new HashMap<>();
-    rowContent.put("id", id);
-    rowContent.put("age", age);
-
-    InsertAllRequest request =
-        InsertAllRequest.newBuilder(datasetName, tableName).addRow(rowContent).build();
-
-    InsertAllResponse response = bigQuery.insertAll(request);
-
-    if (response.hasErrors()) {
-      System.out.println("Insert resulted in errors:");
-      for (Map.Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
-        System.out.println("Response error: \n" + entry.getValue());
-      }
-    } else {
-      System.out.println("Successfully inserted test row.");
-    }
-  }
-
-  static void createTable(BigQuery bigQuery, String datasetName, String tableName, Schema schema) {
-
-    TableId tableId = TableId.of(datasetName, tableName);
-    TableDefinition tableDefinition = StandardTableDefinition.of(schema);
-    TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
-    bigQuery.create(tableInfo);
-    System.out.println("Created new table: " + tableName);
-  }
-
-  static boolean hasTable(BigQuery bigQuery, String datasetName, String tableName) {
-
-    Page<Table> tables = bigQuery.listTables(datasetName);
-    for (Table table : tables.iterateAll()) {
-      if (tableName.equals(table.getTableId().getTable())) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  static void createDataset(BigQuery bigQuery, String datasetName) {
-    DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
-    Dataset newDataset = bigQuery.create(datasetInfo);
-    System.out.println("Created new dataset: " + newDataset.getDatasetId().getDataset());
-  }
-
-  static boolean hasDataset(BigQuery bigQuery, String datasetName) {
-    Page<Dataset> datasets = bigQuery.listDatasets();
-    for (Dataset dataset : datasets.iterateAll()) {
-      if (datasetName.equals(dataset.getDatasetId().getDataset())) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  static void deleteTable(BigQuery bigQuery, String datasetName, String tableName) {
-    bigQuery.getTable(datasetName, tableName).delete();
-    System.out.println("Deleted table: " + tableName);
-  }
-
-  static void deleteDataset(BigQuery bigQuery, String datasetName) {
-    bigQuery.getDataset(datasetName).delete();
-    System.out.println("Deleting dataset " + datasetName);
-  }
-}
diff --git a/samples/native-image-sample/src/test/java/com/example/bigquery/NativeImageBigquerySampleIT.java b/samples/native-image-sample/src/test/java/com/example/bigquery/NativeImageBigquerySampleIT.java
deleted file mode 100644
index 8cdb01ab9..000000000
--- a/samples/native-image-sample/src/test/java/com/example/bigquery/NativeImageBigquerySampleIT.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright 2022 Google LLC
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ - -package com.example.bigquery; - -import static com.google.common.truth.Truth.assertThat; - -import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQueryOptions; -import com.google.cloud.bigquery.Field; -import com.google.cloud.bigquery.Schema; -import com.google.cloud.bigquery.StandardSQLTypeName; -import java.util.UUID; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Test; - -/** Tests for {@link NativeImageBigquerySample} */ -@Ignore -public class NativeImageBigquerySampleIT { - - private static final String DATASET_ID = "nativeimage_it_dataset"; - - private static final String TABLE_ID = "nativeimage_it_table"; - - private static final Schema TABLE_SCHEMA = - Schema.of( - Field.of("id", StandardSQLTypeName.STRING), Field.of("age", StandardSQLTypeName.INT64)); - - private BigQuery bigQuery; - - private String tableName; - - @Before - public void setUp() { - bigQuery = BigQueryOptions.getDefaultInstance().getService(); - tableName = TABLE_ID + "_" + UUID.randomUUID().toString().replace("-", ""); - if (!NativeImageBigquerySample.hasDataset(bigQuery, DATASET_ID)) { - NativeImageBigquerySample.createDataset(bigQuery, DATASET_ID); - } - NativeImageBigquerySample.createTable(bigQuery, DATASET_ID, tableName, TABLE_SCHEMA); - } - - @Test - public void testQueryTable() throws InterruptedException { - String testId = "TestUser-" + UUID.randomUUID(); - NativeImageBigquerySample.insertTestRecord(bigQuery, DATASET_ID, tableName, testId, 40); - - String result = NativeImageBigquerySample.queryTable(bigQuery, DATASET_ID, tableName); - - assertThat(result).isEqualTo("User id: " + testId + " | age: 40\n"); - - // Clean up - NativeImageBigquerySample.deleteTable(bigQuery, DATASET_ID, tableName); - } -} diff --git a/samples/pom.xml b/samples/pom.xml index e91b1c2b6..d18478421 100644 --- a/samples/pom.xml +++ b/samples/pom.xml @@ -34,7 +34,7 @@ com.google.cloud.samples shared-configuration - 1.2.0 + 1.2.2 @@ -47,7 +47,6 @@ install-without-bom snapshot snippets - native-image-sample @@ -55,7 +54,7 @@ org.apache.maven.plugins maven-deploy-plugin - 3.1.1 + 3.1.4 true @@ -63,7 +62,7 @@ org.sonatype.plugins nexus-staging-maven-plugin - 1.6.13 + 1.7.0 true diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index 049c34349..e0dacbd7f 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -30,7 +30,7 @@ com.google.cloud.samples shared-configuration - 1.2.0 + 1.2.2 @@ -39,35 +39,47 @@ UTF-8 + + + + io.opentelemetry + opentelemetry-bom + 1.52.0 + pom + import + + + + com.google.cloud google-cloud-bigquery - 2.38.4-mi + 2.56.0-mi com.google.oauth-client google-oauth-client-java6 - 1.35.0 + 1.39.0 com.google.oauth-client google-oauth-client-jetty - 1.35.0 + 1.39.0 com.google.cloud google-cloud-bigtable - 2.37.0 + 2.62.0 test com.google.cloud google-cloud-bigqueryconnection - 2.41.0 + 2.74.0 test @@ -79,9 +91,33 @@ com.google.truth truth - 1.4.2 + 1.4.4 test + + io.opentelemetry + opentelemetry-api + + + io.opentelemetry + opentelemetry-context + + + io.opentelemetry + opentelemetry-sdk + + + io.opentelemetry + opentelemetry-sdk-common + + + io.opentelemetry + opentelemetry-sdk-trace + + + io.opentelemetry + opentelemetry-exporter-logging + @@ -90,7 +126,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.5.0 + 3.6.1 add-snippets-source diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index c0ffbc785..ff99cb02d 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -29,7 +29,7 @@ 
com.google.cloud.samples shared-configuration - 1.2.0 + 1.2.2 @@ -47,7 +47,14 @@ com.google.cloud libraries-bom - 26.20.0 + 26.64.0 + pom + import + + + io.opentelemetry + opentelemetry-bom + 1.52.0 pom import @@ -59,6 +66,30 @@ com.google.cloud google-cloud-bigquery + + io.opentelemetry + opentelemetry-api + + + io.opentelemetry + opentelemetry-context + + + io.opentelemetry + opentelemetry-sdk + + + io.opentelemetry + opentelemetry-sdk-common + + + io.opentelemetry + opentelemetry-sdk-trace + + + io.opentelemetry + opentelemetry-exporter-logging + @@ -66,12 +97,12 @@ com.google.oauth-client google-oauth-client-java6 - 1.35.0 + 1.39.0 com.google.oauth-client google-oauth-client-jetty - 1.35.0 + 1.39.0 @@ -79,13 +110,13 @@ com.google.cloud google-cloud-bigtable - 2.37.0 + 2.62.0 test com.google.cloud google-cloud-bigqueryconnection - 2.41.0 + 2.74.0 test @@ -97,7 +128,7 @@ com.google.truth truth - 1.4.2 + 1.4.4 test @@ -109,7 +140,7 @@ org.apache.maven.plugins maven-deploy-plugin - 3.1.1 + 3.1.4 true @@ -117,7 +148,7 @@ org.sonatype.plugins nexus-staging-maven-plugin - 1.6.13 + 1.7.0 true diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java b/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java index 960f28071..96f0c3f16 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java @@ -46,7 +46,9 @@ public static void createIamPolicy(String datasetName, String tableName) { Policy policy = bigquery.getIamPolicy(tableId); policy .toBuilder() - .addIdentity(Role.of("roles/bigquery.dataViewer"), Identity.allUsers()) + .addIdentity( + Role.of("roles/bigquery.dataViewer"), + Identity.user("example-analyst-group@google.com")) .build(); bigquery.setIamPolicy(tableId, policy); System.out.println("Iam policy created successfully"); diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java b/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java index 6b45718ce..dfc546712 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java @@ -39,7 +39,7 @@ public static void main(String[] args) { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracing.java b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracing.java new file mode 100644 index 000000000..57ec7eb71 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracing.java @@ -0,0 +1,85 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +// [START bigquery_enable_otel_tracing] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetInfo; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.util.logging.ConsoleHandler; +import java.util.logging.Logger; + +public class EnableOpenTelemetryTracing { + private static final Logger log = Logger.getLogger(EnableOpenTelemetryTracing.class.getName()); + + public static void main(String[] args) { + // Set logging to System.err. + ConsoleHandler ch = new ConsoleHandler(); + log.addHandler(ch); + + // TODO(developer): Replace values before running the sample. + final String tracerName = "Sample Tracer"; + final String datasetId = "sampleDatasetId"; + + // Create TracerProvider that exports to a logger. + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(LoggingSpanExporter.create()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + // Create global OpenTelemetry instance using the TracerProvider. + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + // Create Tracer instance from the OpenTelemetry object. Tracers are used to create + // Spans. There can be multiple Tracers in an OpenTelemetry instance. + Tracer tracer = otel.getTracer(tracerName); + + enableOpenTelemetry(tracer, datasetId); + } + + public static void enableOpenTelemetry(Tracer tracer, String datasetId) { + // Create BigQuery client to trace. EnableOpenTelemetryTracing and OpenTelemetryTracer must + // be set to enable tracing. + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + try { + // Create dataset. + DatasetInfo info = DatasetInfo.newBuilder(datasetId).build(); + Dataset dataset = bigquery.create(info); + } catch (Exception e) { + System.out.println( + String.format("Failed to create dataset: %s: %s", e.toString(), e.getMessage())); + } finally { + bigquery.delete(datasetId); + } + } +} +// [END bigquery_enable_otel_tracing] diff --git a/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpan.java b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpan.java new file mode 100644 index 000000000..af69df10b --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpan.java @@ -0,0 +1,105 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_enable_otel_tracing_with_parent_span] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetInfo; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.time.LocalDate; +import java.util.logging.ConsoleHandler; +import java.util.logging.Logger; + +public class EnableOpenTelemetryTracingWithParentSpan { + private static final Logger log = + Logger.getLogger(EnableOpenTelemetryTracingWithParentSpan.class.getName()); + + public static void main(String[] args) { + // Set logging to System.err. + ConsoleHandler ch = new ConsoleHandler(); + log.addHandler(ch); + + // TODO(developer): Replace values before running the sample. + final String tracerName = "Sample Tracer"; + final String parentSpanName = "Sample Parent Span"; + final String datasetId = "sampleDatasetId"; + + // Create TracerProvider that exports to a logger. + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(LoggingSpanExporter.create()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + // Create OpenTelemetry instance using the TracerProvider. + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + // Create Tracer instance from the global OpenTelemetry object. Tracers are used to create + // Spans. There can be multiple Tracers in a global OpenTelemetry instance. + final Tracer tracer = otel.getTracer(tracerName); + enableOpenTelemetryWithParentSpan(tracer, parentSpanName, datasetId); + } + + public static void enableOpenTelemetryWithParentSpan( + Tracer tracer, String parentSpanName, String datasetId) { + // Create BigQuery client to trace. EnableOpenTelemetryTracing and OpenTelemetryTracer must + // be set to enable tracing. + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + LocalDate currentDate = LocalDate.now(); + + // Create the root parent Span. setNoParent() ensures that it is a parent Span with a Span ID + // of 0. + Span parentSpan = + tracer + .spanBuilder(parentSpanName) + .setNoParent() + .setAttribute("current_date", currentDate.toString()) + .startSpan(); + + // The Span Context is automatically passed on to any functions called within the scope of the + // try block. parentSpan.makeCurrent() sets parentSpan to be the parent of any Spans created in + // this scope, or the scope of any functions called within this scope. 
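+  // As an illustrative sketch (not part of this sample), a span created inside the scope
+  // below automatically becomes a child of parentSpan:
+  //   Span childSpan = tracer.spanBuilder("Sample Child Span").startSpan();
+  //   try (Scope childScope = childSpan.makeCurrent()) {
+  //     // BigQuery calls made here would be traced under childSpan.
+  //   } finally {
+  //     childSpan.end();
+  //   }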
+    try (Scope parentScope = parentSpan.makeCurrent()) {
+      DatasetInfo info = DatasetInfo.newBuilder(datasetId).build();
+      Dataset dataset = bigquery.create(info);
+    } catch (Exception e) {
+      System.out.println(
+          String.format("Failed to create dataset: %s: %s", e.toString(), e.getMessage()));
+    } finally {
+      // finally block ensures that Spans are cleaned up properly.
+      parentSpan.end();
+      bigquery.delete(datasetId);
+    }
+  }
+}
+// [END bigquery_enable_otel_tracing_with_parent_span]
diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryJobOptional.java b/samples/snippets/src/main/java/com/example/bigquery/QueryJobOptional.java
new file mode 100644
index 000000000..1abf58067
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/bigquery/QueryJobOptional.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquery;
+
+// [START bigquery_query_job_optional]
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryException;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.JobId;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;
+import com.google.cloud.bigquery.TableResult;
+
+// Sample demonstrating short mode query execution.
+//
+// This feature is controlled by setting the defaultJobCreationMode
+// field in the BigQueryOptions used for the client. JOB_CREATION_OPTIONAL
+// allows for the execution of queries without creating a job.
+public class QueryJobOptional {
+
+  public static void main(String[] args) {
+    String query =
+        "SELECT name, gender, SUM(number) AS total FROM "
+            + "bigquery-public-data.usa_names.usa_1910_2013 GROUP BY "
+            + "name, gender ORDER BY total DESC LIMIT 10";
+    queryJobOptional(query);
+  }
+
+  public static void queryJobOptional(String query) {
+    try {
+      // Initialize client that will be used to send requests. This client only needs
+      // to be created once, and can be reused for multiple requests.
+      BigQueryOptions options = BigQueryOptions.getDefaultInstance();
+      options.setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL);
+      BigQuery bigquery = options.getService();
+
+      // Execute the query. The returned TableResult provides access to information
+      // about the query execution as well as the query results.
+      TableResult results = bigquery.query(QueryJobConfiguration.of(query));
+
+      JobId jobId = results.getJobId();
+      if (jobId != null) {
+        System.out.println("Query was run with job state. Job ID: " + jobId.toString());
+      } else {
+        System.out.println("Query was run in short mode. Query ID: " + results.getQueryId());
+      }
+
+      // Print the results.
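+      // Note: iterateAll() transparently fetches any additional result pages from the
+      // service as iteration advances, so no manual pagination is needed here.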
+ results + .iterateAll() + .forEach( + row -> { + System.out.print("name:" + row.get("name").getStringValue()); + System.out.print(", gender: " + row.get("gender").getStringValue()); + System.out.print(", total: " + row.get("total").getLongValue()); + System.out.println(); + }); + + } catch (BigQueryException | InterruptedException e) { + System.out.println("Query not performed \n" + e.toString()); + } + } +} +// [END bigquery_query_job_optional] diff --git a/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java b/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java index cc294f7a5..a530885cf 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java +++ b/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java @@ -54,6 +54,7 @@ public static void main(String[] args) { || datasetName.contains("gcloud_test_") || datasetName.contains("SHARED_DATASET_TEST_") || datasetName.contains("WRITE_STREAM_TEST")) + || datasetName.contains("MY_VIEW_DATASET_NAME_TEST_") && dataset.getCreationTime() > sixHourAgo) { System.out.format("\tDeleting Dataset: %s\n", datasetName); bigquery.delete( diff --git a/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java b/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java index 65ebbc241..7b72bb0f9 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java +++ b/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java @@ -20,6 +20,7 @@ // [START bigquery_simple_app_deps] import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; import com.google.cloud.bigquery.FieldValueList; import com.google.cloud.bigquery.Job; @@ -27,56 +28,67 @@ import com.google.cloud.bigquery.JobInfo; import com.google.cloud.bigquery.QueryJobConfiguration; import com.google.cloud.bigquery.TableResult; -import java.util.UUID; // [END bigquery_simple_app_deps] public class SimpleApp { + public static void main(String... args) throws Exception { - // [START bigquery_simple_app_client] - BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - // [END bigquery_simple_app_client] - // [START bigquery_simple_app_query] - QueryJobConfiguration queryConfig = - QueryJobConfiguration.newBuilder( - "SELECT CONCAT('https://stackoverflow.com/questions/', " - + "CAST(id as STRING)) as url, view_count " - + "FROM `bigquery-public-data.stackoverflow.posts_questions` " - + "WHERE tags like '%google-bigquery%' " - + "ORDER BY view_count DESC " - + "LIMIT 10") - // Use standard SQL syntax for queries. - // See: https://cloud.google.com/bigquery/sql-reference/ - .setUseLegacySql(false) - .build(); + // TODO(developer): Replace these variables before running the app. + String projectId = "MY_PROJECT_ID"; + simpleApp(projectId); + } - // Create a job ID so that we can safely retry. 
-    JobId jobId = JobId.of(UUID.randomUUID().toString());
-    Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());
+  public static void simpleApp(String projectId) {
+    try {
+      // [START bigquery_simple_app_client]
+      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+      // [END bigquery_simple_app_client]
+      // [START bigquery_simple_app_query]
+      QueryJobConfiguration queryConfig =
+          QueryJobConfiguration.newBuilder(
+                  "SELECT CONCAT('https://stackoverflow.com/questions/', "
+                      + "CAST(id as STRING)) as url, view_count "
+                      + "FROM `bigquery-public-data.stackoverflow.posts_questions` "
+                      + "WHERE tags like '%google-bigquery%' "
+                      + "ORDER BY view_count DESC "
+                      + "LIMIT 10")
+              // Use standard SQL syntax for queries.
+              // See: https://cloud.google.com/bigquery/sql-reference/
+              .setUseLegacySql(false)
+              .build();
-    // Wait for the query to complete.
-    queryJob = queryJob.waitFor();
+      JobId jobId = JobId.newBuilder().setProject(projectId).build();
+      Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());
-    // Check for errors
-    if (queryJob == null) {
-      throw new RuntimeException("Job no longer exists");
-    } else if (queryJob.getStatus().getError() != null) {
-      // You can also look at queryJob.getStatus().getExecutionErrors() for all
-      // errors, not just the latest one.
-      throw new RuntimeException(queryJob.getStatus().getError().toString());
-    }
-    // [END bigquery_simple_app_query]
+      // Wait for the query to complete.
+      queryJob = queryJob.waitFor();
+
+      // Check for errors
+      if (queryJob == null) {
+        throw new RuntimeException("Job no longer exists");
+      } else if (queryJob.getStatus().getExecutionErrors() != null
+          && queryJob.getStatus().getExecutionErrors().size() > 0) {
+        // TODO(developer): Handle errors here. An error here does not necessarily mean that
+        // the job failed or did not complete, since execution errors can be non-fatal.
+        // For more details: https://cloud.google.com/bigquery/troubleshooting-errors
+        throw new RuntimeException("An unhandled error has occurred");
+      }
+      // [END bigquery_simple_app_query]
-    // [START bigquery_simple_app_print]
-    // Get the results.
-    TableResult result = queryJob.getQueryResults();
+      // [START bigquery_simple_app_print]
+      // Get the results.
+      TableResult result = queryJob.getQueryResults();
-    // Print all pages of the results.
-    for (FieldValueList row : result.iterateAll()) {
-      // String type
-      String url = row.get("url").getStringValue();
-      String viewCount = row.get("view_count").getStringValue();
-      System.out.printf("%s : %s views\n", url, viewCount);
+      // Print all pages of the results.
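+      // Each FieldValueList is keyed by the aliases in the SELECT list; view_count is read
+      // below with getStringValue() because it is only needed as display text.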
+ for (FieldValueList row : result.iterateAll()) { + // String type + String url = row.get("url").getStringValue(); + String viewCount = row.get("view_count").getStringValue(); + System.out.printf("%s : %s views\n", url, viewCount); + } + } catch (BigQueryException | InterruptedException e) { + System.out.println("Simple App failed due to error: \n" + e.toString()); } // [END bigquery_simple_app_print] } diff --git a/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java b/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java new file mode 100644 index 000000000..970c29a2e --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java @@ -0,0 +1,66 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_simple_query_connection_read_api] + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.BigQueryResult; +import com.google.cloud.bigquery.Connection; +import com.google.cloud.bigquery.ConnectionSettings; +import java.sql.ResultSet; +import java.sql.SQLException; + +public class SimpleQueryConnectionReadApi { + + public static void main(String[] args) { + String query = + "SELECT corpus, count(*) as corpus_count " + + "FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; + simpleQueryConnectionReadApi(query); + } + + public static void simpleQueryConnectionReadApi(String query) { + + try { + // Initialize client and create a Connection session. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setRequestTimeout(10L) + .setMaxResults(100L) + .setUseQueryCache(true) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + + // Execute the query using the Connection session. 
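+      // executeSelect() returns a BigQueryResult whose ResultSet offers JDBC-style,
+      // forward-only iteration; for sufficiently large results the Connection API can
+      // stream rows via the BigQuery Storage Read API (hence this sample's name).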
+ BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet resultSet = bigQueryResult.getResultSet(); + + while (resultSet.next()) { + System.out.print("corpus:" + resultSet.getString("corpus")); + System.out.print(", count:" + resultSet.getLong("corpus_count")); + System.out.println(); + } + System.out.println("Query ran successfully"); + } catch (SQLException e) { + System.out.println("Query did not run \n" + e.toString()); + } + } +} +// [END bigquery_simple_query_connection_read_api] diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java index 52af26303..dd18a02f1 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java @@ -81,7 +81,7 @@ public void testCreateModel() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java b/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java index f33ad1d03..9df25a658 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java @@ -69,7 +69,7 @@ public void setUp() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingIT.java b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingIT.java new file mode 100644 index 000000000..0ad565101 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingIT.java @@ -0,0 +1,105 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.Collection; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class EnableOpenTelemetryTracingIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static class ConsoleSpanExporter + implements io.opentelemetry.sdk.trace.export.SpanExporter { + @Override + public CompletableResultCode export(Collection collection) { + if (collection.isEmpty()) { + return CompletableResultCode.ofFailure(); + } + for (SpanData data : collection) { + System.out.println(data); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testEnableOpenTelemetryTracing() { + final String tracerName = "testSampleTracer"; + final String datasetId = RemoteBigQueryHelper.generateDatasetName(); + + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(new ConsoleSpanExporter()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + final Tracer tracer = otel.getTracer(tracerName); + + EnableOpenTelemetryTracing.enableOpenTelemetry(tracer, datasetId); + + assertThat(bout.toString()).contains("com.google.cloud.bigquery.BigQuery.createDataset"); + assertThat(bout.toString()).contains("com.google.cloud.bigquery.BigQuery.deleteDataset"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpanIT.java b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpanIT.java new file mode 100644 index 000000000..482915008 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpanIT.java @@ -0,0 +1,110 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.time.LocalDate; +import java.util.Collection; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class EnableOpenTelemetryTracingWithParentSpanIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static class ConsoleSpanExporter + implements io.opentelemetry.sdk.trace.export.SpanExporter { + @Override + public CompletableResultCode export(Collection<SpanData> collection) { + if (collection.isEmpty()) { + return CompletableResultCode.ofFailure(); + } + for (SpanData data : collection) { + System.out.println(data); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testEnableOpenTelemetryWithParentSpan() { + final String tracerName = "testSampleTracer"; + final String parentSpanName = "testSampleParentSpan"; + final String datasetId = RemoteBigQueryHelper.generateDatasetName(); + final LocalDate currentDate = LocalDate.now(); + + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(new ConsoleSpanExporter()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + final Tracer tracer = otel.getTracer(tracerName); + + EnableOpenTelemetryTracingWithParentSpan.enableOpenTelemetryWithParentSpan( + tracer, parentSpanName, datasetId); + + assertThat(bout.toString()).contains(parentSpanName); + assertThat(bout.toString()) + .contains(String.format("AttributesMap{data={current_date=%s}", currentDate.toString())); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java
b/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java index b7e1820d7..7a1c931ee 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java @@ -69,7 +69,7 @@ public void setUp() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java b/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java index 11b1215ff..d8655a9d5 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java @@ -94,6 +94,7 @@ public void tearDown() { DeleteTable.deleteTable(viewDatasetName, viewName); DeleteTable.deleteTable(viewDatasetName, tableName); DeleteDataset.deleteDataset(PROJECT_ID, datasetName); + DeleteDataset.deleteDataset(PROJECT_ID, viewDatasetName); // restores print statements in the original method System.out.flush(); System.setOut(originalPrintStream); diff --git a/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java b/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java index fd99bc24a..4ddac63bc 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java @@ -69,7 +69,7 @@ public void setUp() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryJobOptionalIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryJobOptionalIT.java new file mode 100644 index 000000000..8e3e97909 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryJobOptionalIT.java @@ -0,0 +1,62 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class QueryJobOptionalIT { + + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testQueryBatch() { + String query = + "SELECT name, gender, SUM(number) AS total FROM " + + "bigquery-public-data.usa_names.usa_1910_2013 GROUP BY " + + "name, gender ORDER BY total DESC LIMIT 10"; + + QueryJobOptional.queryJobOptional(query); + assertThat(bout.toString()).contains("Query was run"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java b/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java index cfb77f107..4c4030c7d 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java @@ -17,6 +17,7 @@ package com.example.bigquery; import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertNotNull; import java.io.ByteArrayOutputStream; import java.io.PrintStream; @@ -24,6 +25,7 @@ import java.util.logging.Logger; import org.junit.After; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -37,6 +39,20 @@ public class SimpleAppIT { private ByteArrayOutputStream bout; private PrintStream out; private PrintStream originalPrintStream; + private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT"); + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("GOOGLE_CLOUD_PROJECT"); + } @Before public void setUp() { @@ -56,7 +72,7 @@ public void tearDown() { @Test public void testQuickstart() throws Exception { - SimpleApp.main(); + SimpleApp.simpleApp(PROJECT_ID); String got = bout.toString(); assertThat(got).contains("https://stackoverflow.com/questions/"); } diff --git a/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryConnectionReadApiIT.java b/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryConnectionReadApiIT.java new file mode 100644 index 000000000..b7cb109c7 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryConnectionReadApiIT.java @@ -0,0 +1,61 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class SimpleQueryConnectionReadApiIT { + + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testSimpleQueryConnectionReadApi() { + String query = + "SELECT corpus, count(*) as corpus_count " + + "FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; + + SimpleQueryConnectionReadApi.simpleQueryConnectionReadApi(query); + assertThat(bout.toString()).contains("Query ran successfully"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java index 156e83d17..478cfec97 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java @@ -68,7 +68,7 @@ public void setUp() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index db25ea1e3..000000000 --- a/synth.metadata +++ /dev/null @@ -1,90 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/java-bigquery.git", - "sha": "611689c0bd72c789e7e1adf9451e8abfcd143983" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "396d9b84a1e93880f5bf88b59ecd38a0a6dffc5e" - } - } - ], - "generatedFiles": [ - ".github/CODEOWNERS", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/blunderbuss.yml", - ".github/generated-files-bot.yml", - ".github/release-please.yml", - ".github/release-trigger.yml", - ".github/snippet-bot.yml", - ".github/sync-repo-settings.yaml", - ".github/trusted-contribution.yml", - ".github/workflows/approve-readme.yaml", - ".github/workflows/auto-release.yaml", - ".github/workflows/ci.yaml", - ".github/workflows/samples.yaml", - ".kokoro/build.bat", - ".kokoro/build.sh", - ".kokoro/coerce_logs.sh", - ".kokoro/common.cfg", - ".kokoro/common.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/java8.cfg", - 
".kokoro/nightly/common.cfg", - ".kokoro/nightly/integration.cfg", - ".kokoro/nightly/java11.cfg", - ".kokoro/nightly/java7.cfg", - ".kokoro/nightly/java8-osx.cfg", - ".kokoro/nightly/java8-win.cfg", - ".kokoro/nightly/java8.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/clirr.cfg", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/dependencies.cfg", - ".kokoro/presubmit/integration.cfg", - ".kokoro/presubmit/java11.cfg", - ".kokoro/presubmit/java7.cfg", - ".kokoro/presubmit/java8-osx.cfg", - ".kokoro/presubmit/java8-win.cfg", - ".kokoro/presubmit/java8.cfg", - ".kokoro/presubmit/linkage-monitor.cfg", - ".kokoro/presubmit/lint.cfg", - ".kokoro/presubmit/samples.cfg", - ".kokoro/readme.sh", - ".kokoro/release/bump_snapshot.cfg", - ".kokoro/release/common.cfg", - ".kokoro/release/common.sh", - ".kokoro/release/drop.cfg", - ".kokoro/release/drop.sh", - ".kokoro/release/promote.cfg", - ".kokoro/release/promote.sh", - ".kokoro/release/publish_javadoc.cfg", - ".kokoro/release/publish_javadoc.sh", - ".kokoro/release/publish_javadoc11.cfg", - ".kokoro/release/publish_javadoc11.sh", - ".kokoro/release/snapshot.cfg", - ".kokoro/release/snapshot.sh", - ".kokoro/release/stage.cfg", - ".kokoro/release/stage.sh", - ".kokoro/trampoline.sh", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.md", - "LICENSE", - "SECURITY.md", - "java.header", - "license-checks.xml", - "samples/install-without-bom/pom.xml", - "samples/pom.xml", - "samples/snapshot/pom.xml", - "samples/snippets/pom.xml" - ] -} \ No newline at end of file diff --git a/versions.txt b/versions.txt index 3d8630e02..46c19475b 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,4 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:2.38.2:2.38.3-SNAPSHOT \ No newline at end of file +google-cloud-bigquery:2.56.0:2.56.0 \ No newline at end of file