Skip to content

Commit

Permalink
refactor(samples): add column load append (#644)
Browse files Browse the repository at this point in the history
  • Loading branch information
Praful Makani committed Aug 5, 2020
1 parent 1655f51 commit a14bc8b
Show file tree
Hide file tree
Showing 2 changed files with 40 additions and 36 deletions.
Expand Up @@ -36,9 +36,10 @@
import java.util.List;
import java.util.UUID;

// Sample to append a column to an existing table.
public class AddColumnLoadAppend {

public static void runAddColumnLoadAppend() throws Exception {
public static void runAddColumnLoadAppend() {
// TODO(developer): Replace these variables before running the sample.
String datasetName = "MY_DATASET_NAME";
String tableName = "MY_TABLE_NAME";
Expand All @@ -65,7 +66,7 @@ public static void runAddColumnLoadAppend() throws Exception {
}

public static void addColumnLoadAppend(
String datasetName, String tableName, String sourceUri, Schema newSchema) throws Exception {
String datasetName, String tableName, String sourceUri, Schema newSchema) {
try {
// Initialize client that will be used to send requests. This client only needs to be created
// once, and can be reused for multiple requests.
Expand All @@ -87,19 +88,16 @@ public static void addColumnLoadAppend(

// Load data from a GCS parquet file into the table
// Blocks until this load table job completes its execution, either failing or succeeding.
Job completedJob = loadJob.waitFor();
Job job = loadJob.waitFor();

// Check for errors
if (completedJob == null) {
throw new Exception("Job not executed since it no longer exists.");
} else if (completedJob.getStatus().getError() != null) {
// You can also look at queryJob.getStatus().getExecutionErrors() for all
// errors, not just the latest one.
throw new Exception(
"BigQuery was unable to load into the table due to an error: \n"
+ loadJob.getStatus().getError());
if (job.isDone() && job.getStatus().getError() == null) {
System.out.println("Column successfully added during load append job");
} else {
System.out.println(
"BigQuery was unable to load into the table due to an error:"
+ job.getStatus().getError());
}
System.out.println("Column successfully added during load append job");
} catch (BigQueryException | InterruptedException e) {
System.out.println("Column not added during load append \n" + e.toString());
}
Expand Down
Expand Up @@ -17,7 +17,6 @@
package com.example.bigquery;

import static com.google.common.truth.Truth.assertThat;
import static junit.framework.TestCase.assertNotNull;

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.LegacySQLTypeName;
Expand All @@ -26,21 +25,28 @@
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

public class AddColumnLoadAppendIT {

private String tableName;
private Schema schema;
private ByteArrayOutputStream bout;
private PrintStream out;

private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME");
private static final String BIGQUERY_DATASET_NAME = requireEnvVar("BIGQUERY_DATASET_NAME");

private static void requireEnvVar(String varName) {
assertNotNull(
/**
 * Reads a required environment variable, failing the test run with a clear
 * message when it is unset.
 *
 * @param varName name of the environment variable to read
 * @return the variable's value (never null)
 */
private static String requireEnvVar(String varName) {
  String value = System.getenv(varName);
  // Assert on the captured value rather than re-reading the environment:
  // the original called System.getenv(varName) a second time, which is
  // redundant and could diverge from the value actually returned.
  Assert.assertNotNull(
      "Environment variable " + varName + " is required to perform these tests.",
      value);
  return value;
}

@BeforeClass
Expand All @@ -53,41 +59,41 @@ public void setUp() {
bout = new ByteArrayOutputStream();
out = new PrintStream(bout);
System.setOut(out);

// create a test table.
tableName = "ADD_COLUMN_LOAD_APPEND_TEST_" + UUID.randomUUID().toString().substring(0, 8);
schema =
Schema.of(
Field.newBuilder("name", LegacySQLTypeName.STRING)
.setMode(Field.Mode.REQUIRED)
.build());

CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema);

bout = new ByteArrayOutputStream();
out = new PrintStream(bout);
System.setOut(out);
}

@After
public void tearDown() {
  // Clean up: drop the table created for this test so repeated runs don't collide.
  DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName);
  // NOTE(review): setting System.out to null means any later println in this JVM
  // would throw NPE; the usual pattern is to restore the original PrintStream
  // captured in setUp — confirm against the full test class.
  System.setOut(null);
}

@Test
public void testAddColumnLoadAppend() throws Exception {
public void testAddColumnLoadAppend() {
String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.csv";

String tableName = "ADD_COLUMN_LOAD_APPEND_TEST";
Schema originalSchema =
Schema.of(
Field.newBuilder("name", LegacySQLTypeName.STRING)
.setMode(Field.Mode.REQUIRED)
.build());

CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, originalSchema);

List<Field> fields = originalSchema.getFields();
// Add the additional column below during the load job
Field newField =
Field.newBuilder("post_abbr", LegacySQLTypeName.STRING)
.setMode(Field.Mode.NULLABLE)
.build();
List<Field> newFields = new ArrayList<>(fields);
List<Field> newFields = new ArrayList<>(schema.getFields());
newFields.add(newField);
Schema newSchema = Schema.of(newFields);

AddColumnLoadAppend.addColumnLoadAppend(BIGQUERY_DATASET_NAME, tableName, sourceUri, newSchema);

AddColumnLoadAppend.addColumnLoadAppend(
BIGQUERY_DATASET_NAME, tableName, sourceUri, Schema.of(newFields));
assertThat(bout.toString()).contains("Column successfully added during load append job");

// Clean up
DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName);
}
}

0 comments on commit a14bc8b

Please sign in to comment.