*
* @throws BigQueryException upon failure
*/
@@ -923,21 +981,17 @@ public int hashCode() {
/**
* Lists the project's datasets. This method returns partial information on each dataset: ({@link
* Dataset#getDatasetId()}, {@link Dataset#getFriendlyName()} and {@link
- * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset(String,
- * DatasetOption...)} or {@link #getDataset(DatasetId, DatasetOption...)}.
+ * Dataset#getGeneratedId()}). To get complete information use {@link #getDataset}.
*
* <p>Example of listing datasets, specifying the page size.
*
- * <pre>
- * {
- * @code
- * // List datasets in the default project
- * Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
- * for (Dataset dataset : datasets.iterateAll()) {
- * // do something with the dataset
- * }
+ * <pre>{@code
+ * // List datasets in the default project
+ * Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
+ * for (Dataset dataset : datasets.iterateAll()) {
+ * // do something with the dataset
* }
- * </pre>
+ * }</pre>
*
* @throws BigQueryException upon failure
*/
@@ -961,22 +1015,18 @@ public int hashCode() {
/**
* Lists the datasets in the provided project. This method returns partial information on each
* dataset: ({@link Dataset#getDatasetId()}, {@link Dataset#getFriendlyName()} and {@link
- * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset(String,
- * DatasetOption...)} or {@link #getDataset(DatasetId, DatasetOption...)}.
+ * Dataset#getGeneratedId()}). To get complete information use {@link #getDataset}.
*
* <p>Example of listing datasets in a project, specifying the page size.
*
- * <pre>
- * {
- * @code
- * String projectId = "my_project_id";
- * // List datasets in a specified project
- * Page<{@link Dataset}> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
- * for (Dataset dataset : datasets.iterateAll()) {
- * // do something with the dataset
- * }
+ * <pre>{@code
+ * String projectId = "my_project_id";
+ * // List datasets in a specified project
+ * Page<Dataset> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
+ * for (Dataset dataset : datasets.iterateAll()) {
+ * // do something with the dataset
* }
- * </pre>
+ * }</pre>
*
* @throws BigQueryException upon failure
*/
@@ -987,18 +1037,15 @@ public int hashCode() {
*
* <p>Example of deleting a dataset from its id, even if non-empty.
*
- * <pre>
- * {
- * @code
- * String datasetName = "my_dataset_name";
- * boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
- * if (deleted) {
- * // the dataset was deleted
- * } else {
- * // the dataset was not found
- * }
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+ * if (deleted) {
+ * // the dataset was deleted
+ * } else {
+ * // the dataset was not found
* }
- * </pre>
+ * }</pre>
*
* @return {@code true} if dataset was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
@@ -1010,20 +1057,17 @@ public int hashCode() {
*
* <p>Example of deleting a dataset, even if non-empty.
*
- * <pre>
- * {
- * @code
- * String projectId = "my_project_id";
- * String datasetName = "my_dataset_name";
- * DatasetId datasetId = DatasetId.of(projectId, datasetName);
- * boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
- * if (deleted) {
- * // the dataset was deleted
- * } else {
- * // the dataset was not found
- * }
+ * <pre>{@code
+ * String projectId = "my_project_id";
+ * String datasetName = "my_dataset_name";
+ * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+ * boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+ * if (deleted) {
+ * // the dataset was deleted
+ * } else {
+ * // the dataset was not found
* }
- * </pre>
+ * }</pre>
*
* @return {@code true} if dataset was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
@@ -1047,21 +1091,18 @@ public int hashCode() {
*
* <p>Example of deleting a table.
*
- * <pre>
- * {
- * @code
- * String projectId = "my_project_id";
- * String datasetName = "my_dataset_name";
- * String tableName = "my_table_name";
- * TableId tableId = TableId.of(projectId, datasetName, tableName);
- * boolean deleted = bigquery.delete(tableId);
- * if (deleted) {
- * // the table was deleted
- * } else {
- * // the table was not found
- * }
+ * <pre>{@code
+ * String projectId = "my_project_id";
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * TableId tableId = TableId.of(projectId, datasetName, tableName);
+ * boolean deleted = bigquery.delete(tableId);
+ * if (deleted) {
+ * // the table was deleted
+ * } else {
+ * // the table was not found
* }
- * </pre>
+ * }</pre>
*
* @return {@code true} if table was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
@@ -1073,21 +1114,18 @@ public int hashCode() {
*
* <p>Example of deleting a model.
*
- * <pre>
- * {
- * @code
- * String projectId = "my_project_id";
- * String datasetName = "my_dataset_name";
- * String tableName = "my_model_name";
- * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
- * boolean deleted = bigquery.delete(modelId);
- * if (deleted) {
- * // the model was deleted
- * } else {
- * // the model was not found
- * }
+ * <pre>{@code
+ * String projectId = "my_project_id";
+ * String datasetName = "my_dataset_name";
+ * String modelName = "my_model_name";
+ * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
+ * boolean deleted = bigquery.delete(modelId);
+ * if (deleted) {
+ * // the model was deleted
+ * } else {
+ * // the model was not found
* }
- * </pre>
+ * }</pre>
*
* @return {@code true} if model was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
@@ -1132,19 +1170,15 @@ public int hashCode() {
*
*
- *
*
* @throws BigQueryException upon failure
*/
@@ -1322,20 +1335,17 @@ public int hashCode() {
* Lists the tables in the dataset. This method returns partial information on each table: ({@link
* Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type,
* which is part of {@link Table#getDefinition()}). To get complete information use {@link
- * #getTable(TableId, TableOption...)} or {@link #getTable(String, String, TableOption...)}.
+ * #getTable}.
*
* <p>Example of listing the tables in a dataset, specifying the page size.
*
- * <pre>
- * {
- * @code
- * String datasetName = "my_dataset_name";
- * Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
- * for (Table table : tables.iterateAll()) {
- * // do something with the table
- * }
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
+ * for (Table table : tables.iterateAll()) {
+ * // do something with the table
* }
- * </pre>
+ * }</pre>
*
* @throws BigQueryException upon failure
*/
@@ -1345,22 +1355,19 @@ public int hashCode() {
* Lists the tables in the dataset. This method returns partial information on each table: ({@link
* Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type,
* which is part of {@link Table#getDefinition()}). To get complete information use {@link
- * #getTable(TableId, TableOption...)} or {@link #getTable(String, String, TableOption...)}.
+ * #getTable}.
*
* <p>Example of listing the tables in a dataset, specifying the page size.
*
+ * <pre>{@code
+ * String projectId = "my_project_id";
+ * String datasetName = "my_dataset_name";
+ * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+ * Page<Table> tables = bigquery.listTables(datasetId, TableListOption.pageSize(100));
+ * for (Table table : tables.iterateAll()) {
+ * // do something with the table
* }
- * </pre>
+ * }</pre>
*
* @throws BigQueryException upon failure
*/
@@ -1383,33 +1390,30 @@ public int hashCode() {
*
* <p>Example of inserting rows into a table without running a load job.
*
- * <pre>
- * {
- * @code
- * String datasetName = "my_dataset_name";
- * String tableName = "my_table_name";
- * TableId tableId = TableId.of(datasetName, tableName);
- * // Values of the row to insert
- * Map<String, Object> rowContent = new HashMap<>();
- * rowContent.put("booleanField", true);
- * // Bytes are passed in base64
- * rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
- * // Records are passed as a map
- * Map<String, Object> recordsContent = new HashMap<>();
- * recordsContent.put("stringField", "Hello, World!");
- * rowContent.put("recordField", recordsContent);
- * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
- * // More rows can be added in the same RPC by invoking .addRow() on the
- * // builder
- * .build());
- * if (response.hasErrors()) {
- * // If any of the insertions failed, this lets you inspect the errors
- * for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
- * // inspect row error
- * }
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * TableId tableId = TableId.of(datasetName, tableName);
+ * // Values of the row to insert
+ * Map<String, Object> rowContent = new HashMap<>();
+ * rowContent.put("booleanField", true);
+ * // Bytes are passed in base64
+ * rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
+ * // Records are passed as a map
+ * Map<String, Object> recordsContent = new HashMap<>();
+ * recordsContent.put("stringField", "Hello, World!");
+ * rowContent.put("recordField", recordsContent);
+ * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
+ * // More rows can be added in the same RPC by invoking .addRow() on the
+ * // builder
+ * .build());
+ * if (response.hasErrors()) {
+ * // If any of the insertions failed, this lets you inspect the errors
+ * for (Entry<Long, List<BigQueryError>> entry : response.getInsertErrors().entrySet()) {
+ * // inspect row error
* }
* }
- * </pre>
+ * }</pre>
*
* @throws BigQueryException upon failure
*/
@@ -1420,20 +1424,17 @@ public int hashCode() {
*
* <p>Example of listing table rows, specifying the page size.
*
- * <pre>
- * {
- * @code
- * String datasetName = "my_dataset_name";
- * String tableName = "my_table_name";
- * // This example reads the result 100 rows per RPC call. If there's no need
- * // to limit the number,
- * // simply omit the option.
- * TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
- * for (FieldValueList row : tableData.iterateAll()) {
- * // do something with the row
- * }
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * // This example reads the result 100 rows per RPC call. If there's no need
+ * // to limit the number,
+ * // simply omit the option.
+ * TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
+ * for (FieldValueList row : tableData.iterateAll()) {
+ * // do something with the row
* }
- * </pre>
+ * }</pre>
*
* @throws BigQueryException upon failure
*/
@@ -1444,21 +1445,18 @@ public int hashCode() {
*
* <p>Example of listing table rows, specifying the page size.
*
- * <pre>
- * {
- * @code
- * String datasetName = "my_dataset_name";
- * String tableName = "my_table_name";
- * TableId tableIdObject = TableId.of(datasetName, tableName);
- * // This example reads the result 100 rows per RPC call. If there's no need
- * // to limit the number,
- * // simply omit the option.
- * TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
- * for (FieldValueList row : tableData.iterateAll()) {
- * // do something with the row
- * }
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * TableId tableIdObject = TableId.of(datasetName, tableName);
+ * // This example reads the result 100 rows per RPC call. If there's no need
+ * // to limit the number,
+ * // simply omit the option.
+ * TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
+ * for (FieldValueList row : tableData.iterateAll()) {
+ * // do something with the row
* }
- * </pre>
+ * }</pre>
*
* <p>Example of listing jobs, specifying the page size.
*
- * <pre>
- * {
- * @code
- * Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
- * for (Job job : jobs.iterateAll()) {
- * // do something with the job
- * }
+ * <pre>{@code
+ * Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
+ * for (Job job : jobs.iterateAll()) {
+ * // do something with the job
* }
- * </pre>
+ * }</pre>
*
* @throws BigQueryException upon failure
*/
@@ -1573,25 +1559,21 @@ TableResult listTableData(
/**
* Sends a job cancel request. This call will return immediately. The job status can then be
- * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String,
- * JobOption...)}).
+ * checked by using {@link #getJob}.
*
* <p>If the location of the job is not "US" or "EU", {@link #cancel(JobId)} must be used instead.
*
* <p>Example of cancelling a job.
*
- * <pre>
- * {
- * @code
- * String jobName = "my_job_name";
- * boolean success = bigquery.cancel(jobName);
- * if (success) {
- * // job was cancelled
- * } else {
- * // job was not found
- * }
+ * <pre>{@code
+ * String jobName = "my_job_name";
+ * boolean success = bigquery.cancel(jobName);
+ * if (success) {
+ * // job was cancelled
+ * } else {
+ * // job was not found
* }
- * </pre>
+ * }</pre>
*
* @return {@code true} if cancel was requested successfully, {@code false} if the job was not
* found
@@ -1601,27 +1583,23 @@ TableResult listTableData(
/**
* Sends a job cancel request. This call will return immediately. The job status can then be
- * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String,
- * JobOption...)}).
+ * checked using {@link #getJob}.
*
* <p>If the location of the job is not "US" or "EU", the {@code jobId} must specify the job
* location.
*
* <p>Example of cancelling a job.
*
+ * <pre>{@code
+ * String jobName = "my_job_name";
+ * JobId jobId = JobId.of(jobName);
+ * boolean success = bigquery.cancel(jobId);
+ * if (success) {
+ * // job was cancelled
+ * } else {
+ * // job was not found
* }
- * </pre>
+ * }</pre>
*
* @return {@code true} if cancel was requested successfully, {@code false} if the job was not
* found
@@ -1640,22 +1618,19 @@ TableResult listTableData(
*
* <p>Example of running a query.
*
- * <pre>
- * {
- * @code
- * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
- * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
- * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
- *
- * // Print the results.
- * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
- * for (FieldValue val : row) {
- * System.out.printf("%s,", val.toString());
- * }
- * System.out.printf("\n");
+ * <pre>{@code
+ * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+ * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+ * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
+ *
+ * // Print the results.
+ * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
+ * for (FieldValue val : row) {
+ * System.out.printf("%s,", val.toString());
* }
+ * System.out.printf("\n");
* }
- * </pre>
+ * }</pre>
*
* This method supports query-related preview features via environmental variables (enabled by
* setting the {@code QUERY_PREVIEW_ENABLED} environment variable to "TRUE"). Specifically, this
@@ -1681,7 +1656,7 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options)
*
If the location of the job is not "US" or "EU", the {@code jobId} must specify the job
* location.
*
- * <p>This method cannot be used in conjuction with {@link QueryJobConfiguration#dryRun()}
+ * <p>This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()}
* queries. Since dry-run queries are not actually executed, there's no way to retrieve results.
*
* <p>See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link
@@ -1695,6 +1670,28 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options)
TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options)
throws InterruptedException, JobException;
+ /**
+ * Starts the query associated with the request, using the given JobId. It returns either a
+ * {@link TableResult} for quick queries or a {@link Job} for long-running queries.
+ *
+ * <p>If the location of the job is not "US" or "EU", the {@code jobId} must specify the job
+ * location.
+ *
+ * <p>This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()}
+ * queries. Since dry-run queries are not actually executed, there's no way to retrieve results.
+ *
+ * <p>See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link
+ * QueryJobConfiguration}.
+ *
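+ * <p>Example of running a query with a timeout (an illustrative sketch rather than an official
+ * sample; the query string, job id, and timeout value are placeholders):
+ *
+ * <pre>{@code
+ * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+ * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
+ * JobId jobId = JobId.of(UUID.randomUUID().toString());
+ * Object result = bigquery.queryWithTimeout(queryConfig, jobId, 30000L);
+ * if (result instanceof TableResult) {
+ * // fast query path: results are already available
+ * for (FieldValueList row : ((TableResult) result).iterateAll()) {
+ * // do something with the row
+ * }
+ * } else {
+ * // long-running query: wait for the job to finish, then read the results
+ * TableResult tableResult = ((Job) result).getQueryResults();
+ * }
+ * }</pre>
+ *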
+ * @throws BigQueryException upon failure
+ * @throws InterruptedException if the current thread gets interrupted while waiting for the query
+ * to complete
+ * @throws JobException if the job completes unsuccessfully
+ */
+ Object queryWithTimeout(
+ QueryJobConfiguration configuration, JobId jobId, Long timeoutMs, JobOption... options)
+ throws InterruptedException, JobException;
+
/**
* Returns results of the query associated with the provided job.
*
@@ -1710,56 +1707,50 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
*
*
Example of creating a channel with which to write to a table.
*
- *
tables =
Iterables.transform(
@@ -1055,7 +1537,7 @@ public Table apply(com.google.api.services.bigquery.model.Table table) {
});
return new PageImpl<>(
new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, tables);
- } catch (RetryHelper.RetryHelperException e) {
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@@ -1066,20 +1548,24 @@ private static Page<Model> listModels(
final Map<BigQueryRpc.Option, ?> optionsMap) {
try {
Tuple<String, Iterable<com.google.api.services.bigquery.model.Model>> result =
- runWithRetries(
+ BigQueryRetryHelper.runWithRetries(
new Callable<
Tuple<String, Iterable<com.google.api.services.bigquery.model.Model>>>() {
@Override
- public Tuple<String, Iterable<com.google.api.services.bigquery.model.Model>>
- call() {
+ public Tuple<String, Iterable<com.google.api.services.bigquery.model.Model>> call()
+ throws IOException {
return serviceOptions
.getBigQueryRpcV2()
- .listModels(datasetId.getProject(), datasetId.getDataset(), optionsMap);
+ .listModelsSkipExceptionTranslation(
+ datasetId.getProject(), datasetId.getDataset(), optionsMap);
}
},
serviceOptions.getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- serviceOptions.getClock());
+ serviceOptions.getResultRetryAlgorithm(),
+ serviceOptions.getClock(),
+ EMPTY_RETRY_CONFIG,
+ serviceOptions.isOpenTelemetryTracingEnabled(),
+ serviceOptions.getOpenTelemetryTracer());
String cursor = result.x();
Iterable models =
Iterables.transform(
@@ -1092,7 +1578,7 @@ public Model apply(com.google.api.services.bigquery.model.Model model) {
});
return new PageImpl<>(
new ModelPageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, models);
- } catch (RetryHelper.RetryHelperException e) {
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@@ -1103,20 +1589,24 @@ private static Page<Routine> listRoutines(
final Map<BigQueryRpc.Option, ?> optionsMap) {
try {
Tuple<String, Iterable<com.google.api.services.bigquery.model.Routine>> result =
- runWithRetries(
+ BigQueryRetryHelper.runWithRetries(
new Callable<
Tuple<String, Iterable<com.google.api.services.bigquery.model.Routine>>>() {
@Override
public Tuple<String, Iterable<com.google.api.services.bigquery.model.Routine>>
- call() {
+ call() throws IOException {
return serviceOptions
.getBigQueryRpcV2()
- .listRoutines(datasetId.getProject(), datasetId.getDataset(), optionsMap);
+ .listRoutinesSkipExceptionTranslation(
+ datasetId.getProject(), datasetId.getDataset(), optionsMap);
}
},
serviceOptions.getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- serviceOptions.getClock());
+ serviceOptions.getResultRetryAlgorithm(),
+ serviceOptions.getClock(),
+ EMPTY_RETRY_CONFIG,
+ serviceOptions.isOpenTelemetryTracingEnabled(),
+ serviceOptions.getOpenTelemetryTracer());
String cursor = result.x();
Iterable routines =
Iterables.transform(
@@ -1129,7 +1619,7 @@ public Routine apply(com.google.api.services.bigquery.model.Routine routinePb) {
});
return new PageImpl<>(
new RoutinePageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, routines);
- } catch (RetryHelper.RetryHelperException e) {
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@@ -1167,31 +1657,54 @@ public Rows apply(RowToInsert rowToInsert) {
requestPb.setRows(rowsPb);
TableDataInsertAllResponse responsePb;
- if (allInsertIdsSet[0]) {
- // allowing retries only if all row insertIds are set (used for deduplication)
- try {
+ Span insertAll = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ insertAll =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.insertAll")
+ .setAllAttributes(request.getOtelAttributes())
+ .startSpan();
+ }
+ try (Scope insertAllScope = insertAll != null ? insertAll.makeCurrent() : null) {
+ if (allInsertIdsSet[0]) {
+ // allowing retries only if all row insertIds are set (used for deduplication)
+ try {
+ responsePb =
+ BigQueryRetryHelper.runWithRetries(
+ new Callable() {
+ @Override
+ public TableDataInsertAllResponse call() throws Exception {
+ return bigQueryRpc.insertAllSkipExceptionTranslation(
+ tableId.getProject(),
+ tableId.getDataset(),
+ tableId.getTable(),
+ requestPb);
+ }
+ },
+ getOptions().getRetrySettings(),
+ getOptions().getResultRetryAlgorithm(),
+ getOptions().getClock(),
+ EMPTY_RETRY_CONFIG,
+ getOptions().isOpenTelemetryTracingEnabled(),
+ getOptions().getOpenTelemetryTracer());
+ } catch (BigQueryRetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
+ } else {
+ // Use insertAll that translate the exception as we are not retrying.
responsePb =
- runWithRetries(
- new Callable() {
- @Override
- public TableDataInsertAllResponse call() throws Exception {
- return bigQueryRpc.insertAll(
- tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb);
- }
- },
- getOptions().getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- getOptions().getClock());
- } catch (RetryHelperException e) {
- throw BigQueryException.translateAndThrow(e);
+ bigQueryRpc.insertAll(
+ tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb);
}
- } else {
- responsePb =
- bigQueryRpc.insertAll(
- tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb);
- }
- return InsertAllResponse.fromPb(responsePb);
+ return InsertAllResponse.fromPb(responsePb);
+ } finally {
+ if (insertAll != null) {
+ insertAll.end();
+ }
+ }
}
@Override
@@ -1215,11 +1728,28 @@ public TableResult listTableData(
public TableResult listTableData(TableId tableId, Schema schema, TableDataListOption... options) {
Tuple<? extends Page<FieldValueList>, Long> data =
listTableData(tableId, schema, getOptions(), optionMap(options));
- return TableResult.newBuilder()
- .setSchema(schema)
- .setTotalRows(data.y())
- .setPageNoSchema(data.x())
- .build();
+ Span tableDataList = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ tableDataList =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.listTableData")
+ .setAllAttributes(tableId.getOtelAttributes())
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope tableDataListScope = tableDataList != null ? tableDataList.makeCurrent() : null) {
+ return TableResult.newBuilder()
+ .setSchema(schema)
+ .setTotalRows(data.y())
+ .setPageNoSchema(data.x())
+ .build();
+ } finally {
+ if (tableDataList != null) {
+ tableDataList.end();
+ }
+ }
}
private static Tuple<? extends Page<FieldValueList>, Long> listTableData(
@@ -1234,13 +1764,13 @@ private static Tuple extends Page, Long> listTableData(
? serviceOptions.getProjectId()
: tableId.getProject());
TableDataList result =
- runWithRetries(
+ BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public TableDataList call() {
+ public TableDataList call() throws IOException {
return serviceOptions
.getBigQueryRpcV2()
- .listTableData(
+ .listTableDataSkipExceptionTranslation(
completeTableId.getProject(),
completeTableId.getDataset(),
completeTableId.getTable(),
@@ -1248,8 +1778,11 @@ public TableDataList call() {
}
},
serviceOptions.getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- serviceOptions.getClock());
+ serviceOptions.getResultRetryAlgorithm(),
+ serviceOptions.getClock(),
+ EMPTY_RETRY_CONFIG,
+ serviceOptions.isOpenTelemetryTracingEnabled(),
+ serviceOptions.getOpenTelemetryTracer());
String cursor = result.getPageToken();
Map pageOptionMap =
Strings.isNullOrEmpty(cursor) ? optionsMap : optionMap(TableDataListOption.startIndex(0));
@@ -1257,15 +1790,15 @@ public TableDataList call() {
new PageImpl<>(
new TableDataPageFetcher(tableId, schema, serviceOptions, cursor, pageOptionMap),
cursor,
- transformTableData(result.getRows(), schema)),
+ transformTableData(result.getRows(), schema, serviceOptions.getUseInt64Timestamps())),
result.getTotalRows());
- } catch (RetryHelper.RetryHelperException e) {
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
private static Iterable<FieldValueList> transformTableData(
- Iterable<TableRow> tableDataPb, final Schema schema) {
+ Iterable<TableRow> tableDataPb, final Schema schema, boolean useInt64Timestamps) {
return ImmutableList.copyOf(
Iterables.transform(
tableDataPb != null ? tableDataPb : ImmutableList.of(),
@@ -1274,7 +1807,7 @@ private static Iterable transformTableData(
@Override
public FieldValueList apply(TableRow rowPb) {
- return FieldValueList.fromPb(rowPb.getF(), fields);
+ return FieldValueList.fromPb(rowPb.getF(), fields, useInt64Timestamps);
}
}));
}
@@ -1294,13 +1827,24 @@ public Job getJob(JobId jobId, JobOption... options) {
jobId.getLocation() == null && getOptions().getLocation() != null
? getOptions().getLocation()
: jobId.getLocation());
- try {
+ Span jobGet = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ jobGet =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.getJob")
+ .setAllAttributes(completeJobId.getOtelAttributes())
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope jobGetScope = jobGet != null ? jobGet.makeCurrent() : null) {
com.google.api.services.bigquery.model.Job answer =
- runWithRetries(
+ BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public com.google.api.services.bigquery.model.Job call() {
- return bigQueryRpc.getJob(
+ public com.google.api.services.bigquery.model.Job call() throws IOException {
+ return bigQueryRpc.getJobSkipExceptionTranslation(
completeJobId.getProject(),
completeJobId.getJob(),
completeJobId.getLocation(),
@@ -1308,48 +1852,82 @@ public com.google.api.services.bigquery.model.Job call() {
}
},
getOptions().getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- getOptions().getClock());
- if (getOptions().getThrowNotFound() && answer == null) {
- throw new BigQueryException(HTTP_NOT_FOUND, "Job not found");
+ getOptions().getResultRetryAlgorithm(),
+ getOptions().getClock(),
+ EMPTY_RETRY_CONFIG,
+ getOptions().isOpenTelemetryTracingEnabled(),
+ getOptions().getOpenTelemetryTracer());
+ return Job.fromPb(this, answer);
+ } catch (BigQueryRetryHelperException e) {
+ if (isRetryErrorCodeHttpNotFound(e)) {
+ if (getOptions().getThrowNotFound()) {
+ throw new BigQueryException(HTTP_NOT_FOUND, "Job not found");
+ }
+ return null;
}
- return answer == null ? null : Job.fromPb(this, answer);
- } catch (RetryHelper.RetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
+ } finally {
+ if (jobGet != null) {
+ jobGet.end();
+ }
}
}
@Override
public Page listJobs(JobListOption... options) {
- return listJobs(getOptions(), optionMap(options));
+ Span jobsList = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ jobsList =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.listJobs")
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope jobsListScope = jobsList != null ? jobsList.makeCurrent() : null) {
+ return listJobs(getOptions(), optionMap(options));
+ } finally {
+ if (jobsList != null) {
+ jobsList.end();
+ }
+ }
}
private static Page<Job> listJobs(
final BigQueryOptions serviceOptions, final Map<BigQueryRpc.Option, ?> optionsMap) {
- Tuple<String, Iterable<com.google.api.services.bigquery.model.Job>> result =
- runWithRetries(
- new Callable<Tuple<String, Iterable<com.google.api.services.bigquery.model.Job>>>() {
- @Override
- public Tuple<String, Iterable<com.google.api.services.bigquery.model.Job>> call() {
- return serviceOptions
- .getBigQueryRpcV2()
- .listJobs(serviceOptions.getProjectId(), optionsMap);
- }
- },
- serviceOptions.getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- serviceOptions.getClock());
- String cursor = result.x();
- Iterable jobs =
- Iterables.transform(
- result.y(),
- new Function() {
- @Override
- public Job apply(com.google.api.services.bigquery.model.Job job) {
- return Job.fromPb(serviceOptions.getService(), job);
- }
- });
- return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs);
+ try {
+ Tuple<String, Iterable<com.google.api.services.bigquery.model.Job>> result =
+ BigQueryRetryHelper.runWithRetries(
+ new Callable<Tuple<String, Iterable<com.google.api.services.bigquery.model.Job>>>() {
+ @Override
+ public Tuple<String, Iterable<com.google.api.services.bigquery.model.Job>> call()
+ throws IOException {
+ return serviceOptions
+ .getBigQueryRpcV2()
+ .listJobsSkipExceptionTranslation(serviceOptions.getProjectId(), optionsMap);
+ }
+ },
+ serviceOptions.getRetrySettings(),
+ serviceOptions.getResultRetryAlgorithm(),
+ serviceOptions.getClock(),
+ EMPTY_RETRY_CONFIG,
+ serviceOptions.isOpenTelemetryTracingEnabled(),
+ serviceOptions.getOpenTelemetryTracer());
+ String cursor = result.x();
+ Iterable jobs =
+ Iterables.transform(
+ result.y(),
+ new Function() {
+ @Override
+ public Job apply(com.google.api.services.bigquery.model.Job job) {
+ return Job.fromPb(serviceOptions.getService(), job);
+ }
+ });
+ return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs);
+ } catch (BigQueryRetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
}
@Override
@@ -1366,70 +1944,86 @@ public boolean cancel(JobId jobId) {
jobId.getLocation() == null && getOptions().getLocation() != null
? getOptions().getLocation()
: jobId.getLocation());
- try {
- return runWithRetries(
+ Span jobCancel = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ jobCancel =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.cancelJob")
+ .setAllAttributes(completeJobId.getOtelAttributes())
+ .startSpan();
+ }
+ try (Scope jobCancelScope = jobCancel != null ? jobCancel.makeCurrent() : null) {
+ return BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public Boolean call() {
- return bigQueryRpc.cancel(
+ public Boolean call() throws IOException {
+ return bigQueryRpc.cancelSkipExceptionTranslation(
completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation());
}
},
getOptions().getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- getOptions().getClock());
- } catch (RetryHelper.RetryHelperException e) {
+ getOptions().getResultRetryAlgorithm(),
+ getOptions().getClock(),
+ EMPTY_RETRY_CONFIG,
+ getOptions().isOpenTelemetryTracingEnabled(),
+ getOptions().getOpenTelemetryTracer());
+ } catch (BigQueryRetryHelperException e) {
+ if (isRetryErrorCodeHttpNotFound(e)) {
+ return false;
+ }
throw BigQueryException.translateAndThrow(e);
+ } finally {
+ if (jobCancel != null) {
+ jobCancel.end();
+ }
}
}
@Override
public TableResult query(QueryJobConfiguration configuration, JobOption... options)
throws InterruptedException, JobException {
- Job.checkNotDryRun(configuration, "query");
-
- if (getOptions().isQueryPreviewEnabled()) {
- configuration =
- configuration
- .toBuilder()
- .setJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL)
- .build();
- }
-
- // If all parameters passed in configuration are supported by the query() method on the backend,
- // put on fast path
- QueryRequestInfo requestInfo = new QueryRequestInfo(configuration);
- if (requestInfo.isFastQuerySupported(null)) {
- String projectId = getOptions().getProjectId();
- QueryRequest content = requestInfo.toPb();
- if (getOptions().getLocation() != null) {
- content.setLocation(getOptions().getLocation());
- }
- return queryRpc(projectId, content, options);
- }
- // Otherwise, fall back to the existing create query job logic
- return create(JobInfo.of(configuration), options).getQueryResults();
+ return query(configuration, null, options);
}
- private TableResult queryRpc(
- final String projectId, final QueryRequest content, JobOption... options)
+ private Object queryRpc(final String projectId, final QueryRequest content, JobOption... options)
throws InterruptedException {
com.google.api.services.bigquery.model.QueryResponse results;
- try {
+ Span queryRpc = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ queryRpc =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.queryRpc")
+ .setAttribute("bq.query.project_id", projectId)
+ .setAllAttributes(otelAttributesFromQueryRequest(content))
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope queryRpcScope = queryRpc != null ? queryRpc.makeCurrent() : null) {
results =
BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public com.google.api.services.bigquery.model.QueryResponse call() {
- return bigQueryRpc.queryRpc(projectId, content);
+ public com.google.api.services.bigquery.model.QueryResponse call()
+ throws IOException {
+ return bigQueryRpc.queryRpcSkipExceptionTranslation(projectId, content);
}
},
getOptions().getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
+ getOptions().getResultRetryAlgorithm(),
getOptions().getClock(),
- DEFAULT_RETRY_CONFIG);
+ DEFAULT_RETRY_CONFIG,
+ getOptions().isOpenTelemetryTracingEnabled(),
+ getOptions().getOpenTelemetryTracer());
} catch (BigQueryRetryHelper.BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
+ } finally {
+ if (queryRpc != null) {
+ queryRpc.end();
+ }
}
if (results.getErrors() != null) {
@@ -1457,7 +2051,7 @@ public com.google.api.services.bigquery.model.QueryResponse call() {
// here, but this is left as future work.
JobId jobId = JobId.fromPb(results.getJobReference());
Job job = getJob(jobId, options);
- return job.getQueryResults();
+ return job;
}
if (results.getPageToken() != null) {
@@ -1471,7 +2065,8 @@ public com.google.api.services.bigquery.model.QueryResponse call() {
// fetch next pages of results
new QueryPageFetcher(jobId, schema, getOptions(), cursor, optionMap(options)),
cursor,
- transformTableData(results.getRows(), schema)))
+ transformTableData(
+ results.getRows(), schema, getOptions().getUseInt64Timestamps())))
.setJobId(jobId)
.setQueryId(results.getQueryId())
.build();
@@ -1484,7 +2079,8 @@ public com.google.api.services.bigquery.model.QueryResponse call() {
new PageImpl<>(
new TableDataPageFetcher(null, schema, getOptions(), null, optionMap(options)),
null,
- transformTableData(results.getRows(), schema)))
+ transformTableData(
+ results.getRows(), schema, getOptions().getUseInt64Timestamps())))
// Return the JobID of the successful job
.setJobId(
results.getJobReference() != null ? JobId.fromPb(results.getJobReference()) : null)
@@ -1495,38 +2091,100 @@ public com.google.api.services.bigquery.model.QueryResponse call() {
@Override
public TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options)
throws InterruptedException, JobException {
+ Object result = queryWithTimeout(configuration, jobId, null, options);
+ if (result instanceof Job) {
+ return ((Job) result).getQueryResults();
+ }
+ return (TableResult) result;
+ }
+
+ @Override
+ public Object queryWithTimeout(
+ QueryJobConfiguration configuration, JobId jobId, Long timeoutMs, JobOption... options)
+ throws InterruptedException, JobException {
Job.checkNotDryRun(configuration, "query");
- // If all parameters passed in configuration are supported by the query() method on the backend,
- // put on fast path
- QueryRequestInfo requestInfo = new QueryRequestInfo(configuration);
- if (requestInfo.isFastQuerySupported(jobId)) {
- // Be careful when setting the projectID in JobId, if a projectID is specified in the JobId,
- // the job created by the query method will use that project. This may cause the query to
- // fail with "Access denied" if the project do not have enough permissions to run the job.
-
- String projectId =
- jobId.getProject() != null ? jobId.getProject() : getOptions().getProjectId();
- QueryRequest content = requestInfo.toPb();
- // Be careful when setting the location, if a location is specified in the BigQueryOption or
- // JobId the job created by the query method will be in that location, even if the table to be
- // queried is in a different location. This may cause the query to fail with
- // "BigQueryException: Not found"
- if (jobId.getLocation() != null) {
- content.setLocation(jobId.getLocation());
- } else if (getOptions().getLocation() != null) {
- content.setLocation(getOptions().getLocation());
- }
+ // If JobCreationMode is not explicitly set, update it with the default value.
+ if (configuration.getJobCreationMode() == null) {
+ configuration =
+ configuration.toBuilder()
+ .setJobCreationMode(getOptions().getDefaultJobCreationMode())
+ .build();
+ }
- return queryRpc(projectId, content, options);
+ Span querySpan = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ querySpan =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.queryWithTimeout")
+ .setAllAttributes(jobId != null ? jobId.getOtelAttributes() : null)
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope queryScope = querySpan != null ? querySpan.makeCurrent() : null) {
+ // If all parameters passed in configuration are supported by the query() method on the
+ // backend, put on fast path.
+ QueryRequestInfo requestInfo =
+ new QueryRequestInfo(configuration, getOptions().getUseInt64Timestamps());
+ if (requestInfo.isFastQuerySupported(jobId)) {
+ // Be careful when setting the projectID in JobId, if a projectID is specified in the JobId,
+ // the job created by the query method will use that project. This may cause the query to
+ // fail with "Access denied" if the project does not have enough permissions to run the job.
+
+ String projectId =
+ jobId != null && jobId.getProject() != null
+ ? jobId.getProject()
+ : getOptions().getProjectId();
+ QueryRequest content = requestInfo.toPb();
+ // Be careful when setting the location, if a location is specified in the BigQueryOption or
+ // JobId the job created by the query method will be in that location, even if the table to be
+ // queried is in a different location. This may cause the query to fail with
+ // "BigQueryException: Not found"
+ if (jobId != null && jobId.getLocation() != null) {
+ content.setLocation(jobId.getLocation());
+ } else if (getOptions().getLocation() != null) {
+ content.setLocation(getOptions().getLocation());
+ }
+ if (timeoutMs != null) {
+ content.setTimeoutMs(timeoutMs);
+ }
+
+ return queryRpc(projectId, content, options);
+ }
+ return create(JobInfo.of(jobId, configuration), options).getQueryResults();
+ } finally {
+ if (querySpan != null) {
+ querySpan.end();
+ }
}
- return create(JobInfo.of(jobId, configuration), options).getQueryResults();
}
@Override
public QueryResponse getQueryResults(JobId jobId, QueryResultsOption... options) {
Map optionsMap = optionMap(options);
- return getQueryResults(jobId, getOptions(), optionsMap);
+ Span getQueryResults = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ getQueryResults =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.getQueryResults")
+ .setAllAttributes(jobId.getOtelAttributes())
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope getQueryResultsScope =
+ getQueryResults != null ? getQueryResults.makeCurrent() : null) {
+ return getQueryResults(jobId, getOptions(), optionsMap);
+ } finally {
+ if (getQueryResults != null) {
+ getQueryResults.end();
+ }
+ }
}
private static QueryResponse getQueryResults(
@@ -1545,10 +2203,10 @@ private static QueryResponse getQueryResults(
BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public GetQueryResultsResponse call() {
+ public GetQueryResultsResponse call() throws IOException {
return serviceOptions
.getBigQueryRpcV2()
- .getQueryResults(
+ .getQueryResultsSkipExceptionTranslation(
completeJobId.getProject(),
completeJobId.getJob(),
completeJobId.getLocation(),
@@ -1556,9 +2214,11 @@ public GetQueryResultsResponse call() {
}
},
serviceOptions.getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
+ serviceOptions.getResultRetryAlgorithm(),
serviceOptions.getClock(),
- DEFAULT_RETRY_CONFIG);
+ DEFAULT_RETRY_CONFIG,
+ serviceOptions.isOpenTelemetryTracingEnabled(),
+ serviceOptions.getOpenTelemetryTracer());
TableSchema schemaPb = results.getSchema();
@@ -1575,7 +2235,7 @@ public GetQueryResultsResponse call() {
.setTotalRows(results.getTotalRows() == null ? 0 : results.getTotalRows().longValue())
.setErrors(errors.build())
.build();
- } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) {
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
}
}
@@ -1602,21 +2262,40 @@ public Policy getIamPolicy(TableId tableId, IAMOption... options) {
? getOptions().getProjectId()
: tableId.getProject());
- try {
- final Map optionsMap = optionMap(options);
+ final Map optionsMap = optionMap(options);
+ Span iamPolicyGet = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ iamPolicyGet =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.getIamPolicy")
+ .setAllAttributes(tableId.getOtelAttributes())
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope iamPolicyGetScope = iamPolicyGet != null ? iamPolicyGet.makeCurrent() : null) {
return convertFromApiPolicy(
- runWithRetries(
+ BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public com.google.api.services.bigquery.model.Policy call() {
- return bigQueryRpc.getIamPolicy(completeTableId.getIAMResourceName(), optionsMap);
+ public com.google.api.services.bigquery.model.Policy call() throws IOException {
+ return bigQueryRpc.getIamPolicySkipExceptionTranslation(
+ completeTableId.getIAMResourceName(), optionsMap);
}
},
getOptions().getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- getOptions().getClock()));
- } catch (RetryHelper.RetryHelperException e) {
+ getOptions().getResultRetryAlgorithm(),
+ getOptions().getClock(),
+ EMPTY_RETRY_CONFIG,
+ getOptions().isOpenTelemetryTracingEnabled(),
+ getOptions().getOpenTelemetryTracer()));
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
+ } finally {
+ if (iamPolicyGet != null) {
+ iamPolicyGet.end();
+ }
}
}
@@ -1627,22 +2306,42 @@ public Policy setIamPolicy(TableId tableId, final Policy policy, IAMOption... op
Strings.isNullOrEmpty(tableId.getProject())
? getOptions().getProjectId()
: tableId.getProject());
- try {
- final Map optionsMap = optionMap(options);
+
+ final Map optionsMap = optionMap(options);
+ Span iamPolicySet = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ iamPolicySet =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.setIamPolicy")
+ .setAllAttributes(tableId.getOtelAttributes())
+ .setAllAttributes(otelAttributesFromPolicy(policy))
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope iamPolicySetScope = iamPolicySet != null ? iamPolicySet.makeCurrent() : null) {
return convertFromApiPolicy(
- runWithRetries(
+ BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public com.google.api.services.bigquery.model.Policy call() {
- return bigQueryRpc.setIamPolicy(
+ public com.google.api.services.bigquery.model.Policy call() throws IOException {
+ return bigQueryRpc.setIamPolicySkipExceptionTranslation(
completeTableId.getIAMResourceName(), convertToApiPolicy(policy), optionsMap);
}
},
getOptions().getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- getOptions().getClock()));
- } catch (RetryHelperException e) {
+ getOptions().getResultRetryAlgorithm(),
+ getOptions().getClock(),
+ EMPTY_RETRY_CONFIG,
+ getOptions().isOpenTelemetryTracingEnabled(),
+ getOptions().getOpenTelemetryTracer()));
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
+ } finally {
+ if (iamPolicySet != null) {
+ iamPolicySet.end();
+ }
}
}
@@ -1654,25 +2353,46 @@ public List testIamPermissions(
Strings.isNullOrEmpty(tableId.getProject())
? getOptions().getProjectId()
: tableId.getProject());
- try {
- final Map optionsMap = optionMap(options);
+ final Map optionsMap = optionMap(options);
+ Span testIamPermissions = null;
+ if (getOptions().isOpenTelemetryTracingEnabled()
+ && getOptions().getOpenTelemetryTracer() != null) {
+ testIamPermissions =
+ getOptions()
+ .getOpenTelemetryTracer()
+ .spanBuilder("com.google.cloud.bigquery.BigQuery.testIamPermissions")
+ .setAllAttributes(tableId.getOtelAttributes())
+ .setAttribute("bq.iam.permissions", permissions.toString())
+ .setAllAttributes(otelAttributesFromOptions(options))
+ .startSpan();
+ }
+ try (Scope testIamPermissionsScope =
+ testIamPermissions != null ? testIamPermissions.makeCurrent() : null) {
com.google.api.services.bigquery.model.TestIamPermissionsResponse response =
- runWithRetries(
+ BigQueryRetryHelper.runWithRetries(
new Callable() {
@Override
- public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() {
- return bigQueryRpc.testIamPermissions(
+ public com.google.api.services.bigquery.model.TestIamPermissionsResponse call()
+ throws IOException {
+ return bigQueryRpc.testIamPermissionsSkipExceptionTranslation(
completeTableId.getIAMResourceName(), permissions, optionsMap);
}
},
getOptions().getRetrySettings(),
- BigQueryBaseService.BIGQUERY_EXCEPTION_HANDLER,
- getOptions().getClock());
+ getOptions().getResultRetryAlgorithm(),
+ getOptions().getClock(),
+ EMPTY_RETRY_CONFIG,
+ getOptions().isOpenTelemetryTracingEnabled(),
+ getOptions().getOpenTelemetryTracer());
return response.getPermissions() == null
? ImmutableList.of()
: ImmutableList.copyOf(response.getPermissions());
- } catch (RetryHelperException e) {
+ } catch (BigQueryRetryHelperException e) {
throw BigQueryException.translateAndThrow(e);
+ } finally {
+ if (testIamPermissions != null) {
+ testIamPermissions.end();
+ }
}
}
@@ -1685,4 +2405,55 @@ public com.google.api.services.bigquery.model.TestIamPermissionsResponse call()
}
return optionMap;
}
+
+ private static String getFieldAsString(Object field) {
+ return field == null ? "null" : field.toString();
+ }
+
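+  // Flattens the given request options into OpenTelemetry attributes keyed as "bq.option." plus
+  // the RPC option name, so they can be attached to the spans created in this class.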
+ private static Attributes otelAttributesFromOptions(Option... options) {
+ Attributes attributes = Attributes.builder().build();
+ for (Option option : options) {
+ attributes =
+ attributes.toBuilder()
+ .put("bq.option." + option.getRpcOption().toString(), option.getValue().toString())
+ .build();
+ }
+ return attributes;
+ }
+
+ private static Attributes otelAttributesFromQueryRequest(QueryRequest request) {
+ return Attributes.builder()
+ .put("bq.query.dry_run", getFieldAsString(request.getDryRun()))
+ .put("bq.query.job_creation_mode", getFieldAsString(request.getJobCreationMode()))
+ .put("bq.query.kind", getFieldAsString(request.getKind()))
+ .put("bq.query.location", getFieldAsString(request.getLocation()))
+ .put("bq.query.request_id", getFieldAsString(request.getRequestId()))
+ .put("bq.query.use_query_cache", getFieldAsString(request.getUseQueryCache()))
+ .build();
+ }
+
+ private static Attributes otelAttributesFromPolicy(Policy policy) {
+ return Attributes.builder()
+ .put("bq.policy.version", getFieldAsString(policy.getVersion()))
+ .put("bq.policy.bindings", getFieldAsString(policy.getBindings()))
+ .build();
+ }
+
+ static BigQueryRetryConfig getBigQueryRetryConfig(Map options) {
+ return (BigQueryRetryConfig)
+ options.getOrDefault(BigQueryRpc.Option.BIGQUERY_RETRY_CONFIG, null);
+ }
+
+ static RetryOption[] getRetryOptions(Map options) {
+ return (RetryOption[]) options.getOrDefault(BigQueryRpc.Option.RETRY_OPTIONS, null);
+ }
+
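+  // Returns true when the failure wrapped by the retry helper is a BigQueryException with an
+  // HTTP 404 status, so callers such as getJob and cancel can treat "not found" as null/false.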
+ private static boolean isRetryErrorCodeHttpNotFound(BigQueryRetryHelperException e) {
+ if (e.getCause() instanceof BigQueryException) {
+ if (((BigQueryException) e.getCause()).getCode() == HTTP_NOT_FOUND) {
+ return true;
+ }
+ }
+ return false;
+ }
}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java
index e53439f02..7adb58d3a 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java
@@ -16,16 +16,18 @@
package com.google.cloud.bigquery;
+import com.google.api.core.BetaApi;
+import com.google.api.gax.retrying.ResultRetryAlgorithm;
import com.google.cloud.ServiceDefaults;
import com.google.cloud.ServiceOptions;
import com.google.cloud.ServiceRpc;
import com.google.cloud.TransportOptions;
+import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;
import com.google.cloud.bigquery.spi.BigQueryRpcFactory;
-import com.google.cloud.bigquery.spi.v2.BigQueryRpc;
import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc;
import com.google.cloud.http.HttpTransportOptions;
-import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
+import io.opentelemetry.api.trace.Tracer;
import java.util.Set;
public class BigQueryOptions extends ServiceOptions {
@@ -34,11 +36,15 @@ public class BigQueryOptions extends ServiceOptions {
private static final int DEFAULT_READ_API_TIME_OUT = 60000;
private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/bigquery";
private static final Set SCOPES = ImmutableSet.of(BIGQUERY_SCOPE);
- private static final long serialVersionUID = -2437598817433266049L;
+ private static final long serialVersionUID = -2437598817433266048L;
private final String location;
// set the option ThrowNotFound when you want to throw the exception when the value not found
private boolean setThrowNotFound;
- private String queryPreviewEnabled = System.getenv("QUERY_PREVIEW_ENABLED");
+ private boolean useInt64Timestamps;
+ private JobCreationMode defaultJobCreationMode = JobCreationMode.JOB_CREATION_MODE_UNSPECIFIED;
+ private boolean enableOpenTelemetryTracing;
+ private Tracer openTelemetryTracer;
+ private ResultRetryAlgorithm<?> resultRetryAlgorithm;
public static class DefaultBigQueryFactory implements BigQueryFactory {
@@ -63,6 +69,10 @@ public ServiceRpc create(BigQueryOptions options) {
public static class Builder extends ServiceOptions.Builder {
private String location;
+ private boolean useInt64Timestamps;
+ private boolean enableOpenTelemetryTracing;
+ private Tracer openTelemetryTracer;
+ private ResultRetryAlgorithm<?> resultRetryAlgorithm;
private Builder() {}
@@ -84,6 +94,38 @@ public Builder setLocation(String location) {
return this;
}
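+ /**
+  * Sets whether TIMESTAMP values are surfaced as int64 values; the flag is forwarded to row
+  * decoding ({@code FieldValueList#fromPb}) and to query requests ({@code QueryRequestInfo}).
+  */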
+ public Builder setUseInt64Timestamps(boolean useInt64Timestamps) {
+ this.useInt64Timestamps = useInt64Timestamps;
+ return this;
+ }
+
+ /**
+ * Enables OpenTelemetry tracing functionality for this BigQuery instance
+ *
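+ * <p>Example of enabling OpenTelemetry tracing on the options builder (an illustrative sketch;
+ * it assumes the caller has already created an OpenTelemetry {@code Tracer} named {@code tracer}):
+ *
+ * <pre>{@code
+ * BigQueryOptions options = BigQueryOptions.newBuilder()
+ *     .setEnableOpenTelemetryTracing(true)
+ *     .setOpenTelemetryTracer(tracer)
+ *     .build();
+ * BigQuery bigquery = options.getService();
+ * }</pre>
+ *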
+ * @param enableOpenTelemetryTracing enables OpenTelemetry tracing if true
+ */
+ @BetaApi
+ public Builder setEnableOpenTelemetryTracing(boolean enableOpenTelemetryTracing) {
+ this.enableOpenTelemetryTracing = enableOpenTelemetryTracing;
+ return this;
+ }
+
+ /**
+ * Sets the OpenTelemetry tracer to be used by this BigQuery instance.
+ *
+ * @param tracer OpenTelemetry tracer to be used
+ */
+ @BetaApi
+ public Builder setOpenTelemetryTracer(Tracer tracer) {
+ this.openTelemetryTracer = tracer;
+ return this;
+ }
+
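+ /**
+  * Sets the retry algorithm used to decide whether a failed call should be retried. If not set,
+  * {@code BigQueryBaseService.DEFAULT_BIGQUERY_EXCEPTION_HANDLER} is used.
+  */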
+ public Builder setResultRetryAlgorithm(ResultRetryAlgorithm<?> resultRetryAlgorithm) {
+ this.resultRetryAlgorithm = resultRetryAlgorithm;
+ return this;
+ }
+
@Override
public BigQueryOptions build() {
return new BigQueryOptions(this);
@@ -93,6 +135,14 @@ public BigQueryOptions build() {
private BigQueryOptions(Builder builder) {
super(BigQueryFactory.class, BigQueryRpcFactory.class, builder, new BigQueryDefaults());
this.location = builder.location;
+ this.useInt64Timestamps = builder.useInt64Timestamps;
+ this.enableOpenTelemetryTracing = builder.enableOpenTelemetryTracing;
+ this.openTelemetryTracer = builder.openTelemetryTracer;
+ if (builder.resultRetryAlgorithm != null) {
+ this.resultRetryAlgorithm = builder.resultRetryAlgorithm;
+ } else {
+ this.resultRetryAlgorithm = BigQueryBaseService.DEFAULT_BIGQUERY_EXCEPTION_HANDLER;
+ }
}
private static class BigQueryDefaults implements ServiceDefaults {
@@ -124,31 +174,70 @@ protected Set getScopes() {
return SCOPES;
}
- protected BigQueryRpc getBigQueryRpcV2() {
- return (BigQueryRpc) getRpc();
+ protected HttpBigQueryRpc getBigQueryRpcV2() {
+ return (HttpBigQueryRpc) getRpc();
}
public String getLocation() {
return location;
}
+ @Deprecated
public boolean isQueryPreviewEnabled() {
- return queryPreviewEnabled != null && queryPreviewEnabled.equalsIgnoreCase("TRUE");
+ return false;
}
public void setThrowNotFound(boolean setThrowNotFound) {
this.setThrowNotFound = setThrowNotFound;
}
- @VisibleForTesting
- public void setQueryPreviewEnabled(String queryPreviewEnabled) {
- this.queryPreviewEnabled = queryPreviewEnabled;
+ public void setUseInt64Timestamps(boolean useInt64Timestamps) {
+ this.useInt64Timestamps = useInt64Timestamps;
+ }
+
+ @Deprecated
+ public void setQueryPreviewEnabled(String queryPreviewEnabled) {}
+
+ public void setDefaultJobCreationMode(JobCreationMode jobCreationMode) {
+ this.defaultJobCreationMode = jobCreationMode;
}
public boolean getThrowNotFound() {
return setThrowNotFound;
}
+ public boolean getUseInt64Timestamps() {
+ return useInt64Timestamps;
+ }
+
+ public JobCreationMode getDefaultJobCreationMode() {
+ return defaultJobCreationMode;
+ }
+
+ /**
+ * Returns whether this BigQuery instance has OpenTelemetry tracing enabled
+ *
+ * @return true if tracing is enabled, false if not
+ */
+ @BetaApi("Span names and attributes are subject to change without notice")
+ public boolean isOpenTelemetryTracingEnabled() {
+ return enableOpenTelemetryTracing;
+ }
+
+ /**
+ * Returns the OpenTelemetry tracer used by this BigQuery instance
+ *
+ * @return OpenTelemetry tracer object or {@code null} if not set
+ */
+ @BetaApi("Span names and attributes are subject to change without notice")
+ public Tracer getOpenTelemetryTracer() {
+ return openTelemetryTracer;
+ }
+
+ public ResultRetryAlgorithm<?> getResultRetryAlgorithm() {
+ return resultRetryAlgorithm;
+ }
+
@SuppressWarnings("unchecked")
@Override
public Builder toBuilder() {
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java
index e944efceb..b705e77c1 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java
@@ -16,22 +16,30 @@
package com.google.cloud.bigquery;
+import com.google.cloud.bigquery.FieldValue.Attribute;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.time.LocalTime;
-import java.time.ZoneId;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
import java.util.Map;
-import java.util.TimeZone;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import org.apache.arrow.vector.util.JsonStringArrayList;
import org.apache.arrow.vector.util.Text;
+/**
+ * An implementation of BigQueryResult.
+ *
+ * <p>This class and the ResultSet it returns are not thread-safe.
+ */
public class BigQueryResultImpl implements BigQueryResult {
private static final String NULL_CURSOR_MSG =
@@ -109,10 +117,85 @@ private class BigQueryResultSet extends AbstractJdbcResultSet {
private boolean hasReachedEnd =
false; // flag which will be set to true when we have encountered an EndOfStream or when
// curTup.isLast(). Ref: https://github.com/googleapis/java-bigquery/issues/2033
+ private boolean wasNull = false;
+
+ private class BigQueryArrayResult implements java.sql.Array {
+ List