Add snippets to BigQuery Table class and tests #1272

Merged 2 commits on Sep 21, 2016

@@ -139,6 +139,16 @@ public Table build() {
/**
* Checks if this table exists.
*
* <p>Example of checking if the table exists.
* <pre> {@code
* boolean exists = table.exists();
* if (exists) {
* // the table exists
* } else {
* // the table was not found
* }
* }</pre>
*
* @return {@code true} if this table exists, {@code false} otherwise
* @throws BigQueryException upon failure
*/
@@ -149,6 +159,17 @@ public boolean exists() {
/**
 * Fetches the current table's latest information. Returns {@code null} if the table does not exist.
*
* <p>Example of fetching the table's latest information, specifying particular table fields to
* get.
* <pre> {@code
* TableField field1 = TableField.LAST_MODIFIED_TIME;
* TableField field2 = TableField.NUM_ROWS;
* Table latestTable = table.reload(TableOption.fields(field1, field2));
* if (latestTable == null) {
* // the table was not found
* }
* }</pre>
*
* @param options table options
* @return a {@code Table} object with latest information or {@code null} if not found
* @throws BigQueryException upon failure
@@ -161,6 +182,11 @@ public Table reload(TableOption... options) {
* Updates the table's information with this table's information. Dataset's and table's
* user-defined ids cannot be changed. A new {@code Table} object is returned.
*
* <p>Example of updating the table's information.
* <pre> {@code
* Table updatedTable = table.toBuilder().description("new description").build().update();
* }</pre>
*
 * @param options table options
* @return a {@code Table} object with updated information
* @throws BigQueryException upon failure
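The reload example above shows how TableOption.fields(...) narrows the metadata returned by the service; the same option can be passed to update. A minimal sketch under that assumption (the field choice is illustrative):

    // Sketch: request only selected fields of the updated table's metadata back.
    // TableField.DESCRIPTION is illustrative; any TableField values can be listed.
    Table updatedTable = table.toBuilder()
        .description("new description")
        .build()
        .update(TableOption.fields(TableField.DESCRIPTION));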
@@ -172,6 +198,16 @@ public Table update(TableOption... options) {
/**
* Deletes this table.
*
* <p>Example of deleting the table.
* <pre> {@code
* boolean deleted = table.delete();
* if (deleted) {
* // the table was deleted
* } else {
* // the table was not found
* }
* }</pre>
*
* @return {@code true} if table was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
@@ -182,6 +218,23 @@ public boolean delete() {
/**
 * Inserts rows into the table.
*
* <p>Example of inserting rows into the table.
* <pre> {@code
* String rowId1 = "rowId1";
* String rowId2 = "rowId2";
* List<RowToInsert> rows = new ArrayList<>();
* Map<String, Object> row1 = new HashMap<>();
* row1.put("stringField", "value1");
* row1.put("booleanField", true);
* Map<String, Object> row2 = new HashMap<>();
* row2.put("stringField", "value2");
* row2.put("booleanField", false);
* rows.add(RowToInsert.of(rowId1, row1));
* rows.add(RowToInsert.of(rowId2, row2));
* InsertAllResponse response = table.insert(rows);
* // do something with response
* }</pre>
*
* @param rows rows to be inserted
* @throws BigQueryException upon failure
*/
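The "do something with response" placeholder above can be filled in by checking the response for per-row failures. A minimal sketch, assuming the InsertAllResponse accessors of this library version (hasErrors() and insertErrors(), the latter keyed by the index of the failed row):

    InsertAllResponse response = table.insert(rows);
    if (response.hasErrors()) {
      // insertErrors() maps the index of each failed row to the errors reported for it
      for (Map.Entry<Long, List<BigQueryError>> entry : response.insertErrors().entrySet()) {
        long failedRowIndex = entry.getKey();
        List<BigQueryError> rowErrors = entry.getValue();
        // log the errors and decide whether to retry the failed row
      }
    }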
@@ -193,6 +246,23 @@ public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows)
/**
 * Inserts rows into the table.
*
* <p>Example of inserting rows into the table, ignoring invalid rows.
* <pre> {@code
* String rowId1 = "rowId1";
* String rowId2 = "rowId2";
* List<RowToInsert> rows = new ArrayList<>();
* Map<String, Object> row1 = new HashMap<>();
* row1.put("stringField", 1);
* row1.put("booleanField", true);
* Map<String, Object> row2 = new HashMap<>();
* row2.put("stringField", "value2");
* row2.put("booleanField", false);
* rows.add(RowToInsert.of(rowId1, row1));
* rows.add(RowToInsert.of(rowId2, row2));
* InsertAllResponse response = table.insert(rows, true, true);
* // do something with response
* }</pre>
*
* @param rows rows to be inserted
 * @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set,
 *     the entire insert operation will fail if the rows to be inserted contain an invalid row
@@ -202,7 +272,7 @@ public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows)
* @throws BigQueryException upon failure
*/
public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows,
boolean skipInvalidRows, boolean ignoreUnknownValues) throws BigQueryException {
InsertAllRequest request = InsertAllRequest.builder(tableId(), rows)
.skipInvalidRows(skipInvalidRows)
.ignoreUnknownValues(ignoreUnknownValues)
@@ -213,6 +283,16 @@ public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows,
/**
 * Returns the paginated list of rows in this table.
*
* <p>Example of listing rows in the table.
* <pre> {@code
* Page<List<FieldValue>> page = table.list(TableDataListOption.pageSize(100));
* Iterator<List<FieldValue>> rowIterator = page.iterateAll();
* while (rowIterator.hasNext()) {
* List<FieldValue> row = rowIterator.next();
* // do something with the row
* }
* }</pre>
*
* @param options table data list options
* @throws BigQueryException upon failure
*/
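Each listed row is a List<FieldValue>, so individual cells can be decoded with the FieldValue accessors. A minimal sketch, assuming a table whose first two columns are a STRING and a BOOLEAN (the column layout is illustrative):

    Page<List<FieldValue>> page = table.list(TableDataListOption.pageSize(100));
    Iterator<List<FieldValue>> rowIterator = page.iterateAll();
    while (rowIterator.hasNext()) {
      List<FieldValue> row = rowIterator.next();
      String stringField = row.get(0).stringValue();     // assumes column 0 is a STRING
      boolean booleanField = row.get(1).booleanValue();  // assumes column 1 is a BOOLEAN
      // use the decoded values
    }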
@@ -225,6 +305,25 @@ public Page<List<FieldValue>> list(TableDataListOption... options)
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
* <p>Example of copying the table to a destination table.
* <pre> {@code
* String datasetName = "my_dataset";
* String tableName = "my_destination_table";
* Job job = table.copy(datasetName, tableName);
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(3, TimeUnit.MINUTES));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully
* } else {
* // Handle error case
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait
* }
* }</pre>
*
* @param destinationDataset the user-defined id of the destination dataset
* @param destinationTable the user-defined id of the destination table
* @param options job options
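The "Handle error case" branch above can surface why the copy failed. A minimal sketch, assuming the BigQueryError accessors of this library version (reason() and message()), continuing from the completedJob variable in the example:

    if (completedJob != null && completedJob.status().error() != null) {
      BigQueryError error = completedJob.status().error();
      String reason = error.reason();    // e.g. "notFound" or "accessDenied"
      String message = error.message();
      // report reason and message to the caller or to logs
    }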
@@ -239,6 +338,27 @@ public Job copy(String destinationDataset, String destinationTable, JobOption...
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
 * <p>Example of copying the table to a destination table.
* <pre> {@code
* String dataset = "my_dataset";
* String tableName = "my_destination_table";
* TableId destinationId = TableId.of(dataset, tableName);
* JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
* Job job = table.copy(destinationId, options);
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(3, TimeUnit.MINUTES));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully.
* } else {
* // Handle error case.
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait
* }
* }</pre>
*
* @param destinationTable the destination table of the copy job
* @param options job options
* @throws BigQueryException upon failure
@@ -253,6 +373,25 @@ public Job copy(TableId destinationTable, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the
* started {@link Job} object.
*
 * <p>Example of extracting data to a single Google Cloud Storage file.
* <pre> {@code
* String format = "CSV";
* String gcsUrl = "gs://my_bucket/filename.csv";
* Job job = table.extract(format, gcsUrl);
* // Wait for the job to complete
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(3, TimeUnit.MINUTES));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully
* } else {
* // Handle error case
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait
* }
* }</pre>
*
* @param format the format of the extracted data
* @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path)
* where the extracted table should be written
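The format argument is the BigQuery destination format string, so the same call can produce other outputs. A minimal sketch, assuming the standard format names accepted by BigQuery extract jobs ("CSV", "NEWLINE_DELIMITED_JSON", "AVRO"); the bucket and file name are placeholders:

    // Extract the table as newline-delimited JSON instead of CSV.
    Job jsonExtractJob = table.extract("NEWLINE_DELIMITED_JSON", "gs://my_bucket/filename.json");
    // Wait for the job as in the example above.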
@@ -268,6 +407,29 @@ public Job extract(String format, String destinationUri, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns
* the started {@link Job} object.
*
* <p>Example of partitioning data to a list of Google Cloud Storage files.
* <pre> {@code
* String format = "CSV";
* String gcsUrl1 = "gs://my_bucket/PartitionA_*.csv";
* String gcsUrl2 = "gs://my_bucket/PartitionB_*.csv";
* List<String> destinationUris = new ArrayList<>();
* destinationUris.add(gcsUrl1);
* destinationUris.add(gcsUrl2);
* Job job = table.extract(format, destinationUris);
* // Wait for the job to complete
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(3, TimeUnit.MINUTES));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully
* } else {
* // Handle error case
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait
* }
* }</pre>
*
* @param format the format of the exported data
* @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path)
* where the extracted table should be written
@@ -285,6 +447,24 @@ public Job extract(String format, List<String> destinationUris, JobOption... opt
* Starts a BigQuery Job to load data into the current table from the provided source URI. Returns
* the started {@link Job} object.
*
 * <p>Example of loading data from a single Google Cloud Storage file.
* <pre> {@code
* String sourceUri = "gs://my_bucket/filename.csv";
* Job job = table.load(FormatOptions.csv(), sourceUri);
* // Wait for the job to complete
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(3, TimeUnit.MINUTES));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully
* } else {
* // Handle error case
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait
* }
* }</pre>
*
* @param format the format of the data to load
* @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from
* which to load the data
@@ -300,6 +480,28 @@ public Job load(FormatOptions format, String sourceUri, JobOption... options)
* Starts a BigQuery Job to load data into the current table from the provided source URIs.
* Returns the started {@link Job} object.
*
 * <p>Example of loading data from a list of Google Cloud Storage files.
* <pre> {@code
* String gcsUrl1 = "gs://my_bucket/filename1.csv";
* String gcsUrl2 = "gs://my_bucket/filename2.csv";
* List<String> sourceUris = new ArrayList<>();
* sourceUris.add(gcsUrl1);
* sourceUris.add(gcsUrl2);
* Job job = table.load(FormatOptions.csv(), sourceUris);
* // Wait for the job to complete
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(3, TimeUnit.MINUTES));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully
* } else {
* // Handle error case
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait
* }
* }</pre>
*
 * @param format the format of the data to load
* @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from
* which to load the data
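All of the snippets above assume an existing Table instance named table. A minimal sketch of obtaining one, assuming the service entry points of this library version (BigQueryOptions.defaultInstance().service() and BigQuery.getTable); the dataset and table names are placeholders:

    BigQuery bigquery = BigQueryOptions.defaultInstance().service();
    // "my_dataset" and "my_table" are placeholder names.
    Table table = bigquery.getTable(TableId.of("my_dataset", "my_table"));
    if (table == null) {
      // the table was not found
    }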