diff --git a/v1/src/main/java/com/google/cloud/teleport/bigtable/AvroToBigtable.java b/v1/src/main/java/com/google/cloud/teleport/bigtable/AvroToBigtable.java index bf4b754350..77aa88891e 100644 --- a/v1/src/main/java/com/google/cloud/teleport/bigtable/AvroToBigtable.java +++ b/v1/src/main/java/com/google/cloud/teleport/bigtable/AvroToBigtable.java @@ -78,6 +78,7 @@ public final class AvroToBigtable { public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Target", description = "Project ID", helpText = "The ID of the Google Cloud project that contains the Bigtable instance that you want to write data to.") @@ -88,6 +89,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Target", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Bigtable instance that contains the table.") @@ -98,6 +100,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 4, + groupName = "Target", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Bigtable table to import.") @@ -108,6 +111,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsReadFile( order = 5, + groupName = "Source", description = "Input Cloud Storage File(s)", helpText = "The Cloud Storage path pattern where data is located.", example = "gs:////*") diff --git a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToAvro.java b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToAvro.java index cc7db434ad..644645ec32 100644 --- a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToAvro.java +++ b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToAvro.java @@ -76,6 +76,7 @@ public class BigtableToAvro { public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Source", description = "Project ID", helpText = "The ID of the Google Cloud project that contains the Bigtable instance that you want to read data from.") @@ -86,6 +87,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Source", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Bigtable instance that contains the table.") @@ -96,6 +98,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Source", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Bigtable table to export.") @@ -106,6 +109,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 4, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The Cloud Storage path where data is written.", example = "gs://mybucket/somefolder") @@ -116,6 +120,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Target", description = "Avro file prefix", helpText = "The prefix of the Avro filename. 
For example, `output-`.") @Default.String("part") diff --git a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToJson.java b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToJson.java index 7c787ce4ff..47bdf73483 100644 --- a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToJson.java +++ b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToJson.java @@ -74,6 +74,7 @@ public class BigtableToJson { public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Source", description = "Project ID", helpText = "The ID for the Google Cloud project that contains the Bigtable instance that you want to read data from.") @@ -84,6 +85,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Source", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Bigtable instance that contains the table.") @@ -94,6 +96,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Source", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Bigtable table to read from.") @@ -104,6 +107,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 4, + groupName = "Target", optional = true, description = "Cloud Storage directory for storing JSON files", helpText = "The Cloud Storage path where the output JSON files are stored.", @@ -115,6 +119,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Target", description = "JSON file prefix", helpText = "The prefix of the JSON file name. For example, \"table1-\". If no value is provided, defaults to `part`.") @@ -126,6 +131,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Enum( order = 6, + groupName = "Target", optional = true, enumOptions = {@TemplateEnumOption("FLATTEN"), @TemplateEnumOption("NONE")}, description = "User option", @@ -139,6 +145,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 7, + groupName = "Target", optional = true, parentName = "userOption", parentTriggerValues = {"FLATTEN"}, diff --git a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToParquet.java b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToParquet.java index 7c27c101e7..1336207754 100644 --- a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToParquet.java +++ b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToParquet.java @@ -75,6 +75,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Source", description = "Project ID", helpText = "The ID of the Google Cloud project that contains the Cloud Bigtable instance that you want to read data from.") @@ -85,6 +86,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Source", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Cloud Bigtable instance that contains the table.") @@ -95,6 +97,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Source", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Cloud Bigtable table to export.") @@ -105,6 +108,7 @@ public interface Options extends PipelineOptions { 
@TemplateParameter.GcsWriteFolder( order = 4, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The path and filename prefix for writing output files. Must end with a slash. DateTime formatting is used to parse the directory path for date and time formatters. For example: gs://your-bucket/your-path.") @@ -115,6 +119,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Target", description = "Parquet file prefix", helpText = "The prefix of the Parquet file name. For example, \"table1-\". Defaults to: part.") @@ -126,6 +131,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Integer( order = 6, + groupName = "Target", optional = true, description = "Maximum output shards", helpText = diff --git a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToVectorEmbeddings.java b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToVectorEmbeddings.java index 757eae5b99..6e2396a05f 100644 --- a/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToVectorEmbeddings.java +++ b/v1/src/main/java/com/google/cloud/teleport/bigtable/BigtableToVectorEmbeddings.java @@ -80,6 +80,7 @@ public class BigtableToVectorEmbeddings { public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Source", description = "Project ID", helpText = "The ID for the Google Cloud project that contains the Bigtable instance that you want to read data from.") @@ -90,6 +91,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Source", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Bigtable instance that contains the table.") @@ -100,6 +102,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Source", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Bigtable table to read from.") @@ -110,6 +113,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 4, + groupName = "Target", optional = true, description = "Cloud Storage directory for storing JSON files", helpText = "The Cloud Storage path where the output JSON files are stored.", @@ -121,6 +125,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Target", description = "JSON file prefix", helpText = "The prefix of the JSON filename. For example: \"table1-\". 
If no value is provided, defaults to \"part\".") diff --git a/v1/src/main/java/com/google/cloud/teleport/bigtable/CassandraToBigtable.java b/v1/src/main/java/com/google/cloud/teleport/bigtable/CassandraToBigtable.java index e72d47cb34..370478c540 100644 --- a/v1/src/main/java/com/google/cloud/teleport/bigtable/CassandraToBigtable.java +++ b/v1/src/main/java/com/google/cloud/teleport/bigtable/CassandraToBigtable.java @@ -70,6 +70,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 1, + groupName = "Source", regexes = {"^[a-zA-Z0-9\\.\\-,]*$"}, description = "Cassandra Hosts", helpText = "The hosts of the Apache Cassandra nodes in a comma-separated list.") @@ -80,6 +81,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Integer( order = 2, + groupName = "Source", optional = true, description = "Cassandra Port", helpText = @@ -92,6 +94,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Source", regexes = {"^[a-zA-Z0-9][a-zA-Z0-9_]{0,47}$"}, description = "Cassandra Keyspace", helpText = "The Apache Cassandra keyspace where the table is located.") @@ -102,6 +105,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 4, + groupName = "Source", regexes = {"^[a-zA-Z][a-zA-Z0-9_]*$"}, description = "Cassandra Table", helpText = "The Apache Cassandra table to copy.") @@ -112,6 +116,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 5, + groupName = "Target", description = "Bigtable Project ID", helpText = "The Google Cloud project ID associated with the Bigtable instance.") ValueProvider getBigtableProjectId(); @@ -121,6 +126,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 6, + groupName = "Target", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Target Bigtable Instance", helpText = "The ID of the Bigtable instance that the Apache Cassandra table is copied to.") @@ -131,6 +137,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 7, + groupName = "Target", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Target Bigtable Table", helpText = "The name of the Bigtable table that the Apache Cassandra table is copied to.") @@ -141,6 +148,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 8, + groupName = "Target", optional = true, regexes = {"[-_.a-zA-Z0-9]+"}, description = "The Default Bigtable Column Family", @@ -154,6 +162,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 9, + groupName = "Target", optional = true, description = "The Row Key Separator", helpText = "The separator used to build row-keys. 
The default value is '#'.") @@ -165,6 +174,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Boolean( order = 10, + groupName = "Target", optional = true, description = "If true, large rows will be split into multiple MutateRows requests", helpText = diff --git a/v1/src/main/java/com/google/cloud/teleport/bigtable/ParquetToBigtable.java b/v1/src/main/java/com/google/cloud/teleport/bigtable/ParquetToBigtable.java index 57c3fe9a76..ddd71186ac 100644 --- a/v1/src/main/java/com/google/cloud/teleport/bigtable/ParquetToBigtable.java +++ b/v1/src/main/java/com/google/cloud/teleport/bigtable/ParquetToBigtable.java @@ -82,6 +82,7 @@ public class ParquetToBigtable { public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Target", description = "Project ID", helpText = "The Google Cloud project ID associated with the Bigtable instance.") ValueProvider getBigtableProjectId(); @@ -91,6 +92,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Target", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Cloud Bigtable instance that contains the table") @@ -101,6 +103,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Target", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Bigtable table to import.") @@ -111,6 +114,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsReadFile( order = 4, + groupName = "Source", description = "Input Cloud Storage File(s)", helpText = "The Cloud Storage path with the files that contain the data.", example = "gs://your-bucket/your-files/*.parquet") @@ -121,6 +125,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Boolean( order = 5, + groupName = "Target", optional = true, description = "If true, large rows will be split into multiple MutateRows requests", helpText = diff --git a/v1/src/main/java/com/google/cloud/teleport/spanner/ExportPipeline.java b/v1/src/main/java/com/google/cloud/teleport/spanner/ExportPipeline.java index 54c675b794..215664eb5d 100644 --- a/v1/src/main/java/com/google/cloud/teleport/spanner/ExportPipeline.java +++ b/v1/src/main/java/com/google/cloud/teleport/spanner/ExportPipeline.java @@ -77,6 +77,7 @@ public class ExportPipeline { public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.Text( order = 1, + groupName = "Source", regexes = {"[a-z][a-z0-9\\-]*[a-z0-9]"}, description = "Cloud Spanner instance ID", helpText = "The instance ID of the Spanner database that you want to export.") @@ -86,6 +87,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Source", regexes = {"[a-z][a-z0-9_\\-]*[a-z0-9]"}, description = "Cloud Spanner database ID", helpText = "The database ID of the Spanner database that you want to export.") @@ -95,6 +97,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 3, + groupName = "Target", description = "Cloud Storage output directory", helpText = "The Cloud Storage path to export Avro files to. 
The export job creates a new directory under this path that contains the exported files.", @@ -121,6 +124,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.Text( order = 6, + groupName = "Source", optional = true, description = "Cloud Spanner Endpoint to call", helpText = "The Cloud Spanner endpoint to call in the template. Only used for testing.", @@ -154,6 +158,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.ProjectId( order = 8, + groupName = "Source", optional = true, description = "Cloud Spanner Project Id", helpText = @@ -175,6 +180,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.Text( order = 10, + groupName = "Source", optional = true, regexes = {"^[a-zA-Z0-9_\\.]+(,[a-zA-Z0-9_\\.]+)*$"}, description = "Cloud Spanner table name(s).", @@ -188,6 +194,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.Boolean( order = 11, + groupName = "Source", optional = true, description = "Export necessary Related Spanner tables.", helpText = @@ -199,6 +206,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.Enum( order = 12, + groupName = "Source", enumOptions = { @TemplateEnumOption("LOW"), @TemplateEnumOption("MEDIUM"), @@ -214,6 +222,7 @@ public interface ExportPipelineOptions extends PipelineOptions { @TemplateParameter.Boolean( order = 13, + groupName = "Source", optional = true, description = "Use independent compute resource (Spanner DataBoost).", helpText = diff --git a/v1/src/main/java/com/google/cloud/teleport/spanner/ImportPipeline.java b/v1/src/main/java/com/google/cloud/teleport/spanner/ImportPipeline.java index d640484310..e9e834c43c 100644 --- a/v1/src/main/java/com/google/cloud/teleport/spanner/ImportPipeline.java +++ b/v1/src/main/java/com/google/cloud/teleport/spanner/ImportPipeline.java @@ -65,6 +65,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 1, + groupName = "Target", regexes = {"^[a-z0-9\\-]+$"}, description = "Cloud Spanner instance ID", helpText = "The instance ID of the Spanner database.") @@ -74,6 +75,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Target", regexes = {"^[a-z_0-9\\-]+$"}, description = "Cloud Spanner database ID", helpText = "The database ID of the Spanner database.") @@ -83,6 +85,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsReadFolder( order = 3, + groupName = "Source", description = "Cloud storage input directory", helpText = "The Cloud Storage path where the Avro files are imported from.") ValueProvider getInputDir(); @@ -91,6 +94,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 4, + groupName = "Target", optional = true, description = "Cloud Spanner Endpoint to call", helpText = "The Cloud Spanner endpoint to call in the template. 
Only used for testing.", @@ -163,6 +167,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 9, + groupName = "Target", optional = true, description = "Cloud Spanner Project Id", helpText = @@ -186,6 +191,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Enum( order = 11, + groupName = "Target", enumOptions = { @TemplateEnumOption("LOW"), @TemplateEnumOption("MEDIUM"), diff --git a/v1/src/main/java/com/google/cloud/teleport/spanner/TextImportPipeline.java b/v1/src/main/java/com/google/cloud/teleport/spanner/TextImportPipeline.java index a882900e52..b5c9618160 100644 --- a/v1/src/main/java/com/google/cloud/teleport/spanner/TextImportPipeline.java +++ b/v1/src/main/java/com/google/cloud/teleport/spanner/TextImportPipeline.java @@ -107,6 +107,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 1, + groupName = "Target", regexes = {"^[a-z0-9\\-]+$"}, description = "Cloud Spanner instance ID", helpText = "The instance ID of the Spanner database.") @@ -116,6 +117,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Target", regexes = {"^[a-z_0-9\\-]+$"}, description = "Cloud Spanner database ID", helpText = "The database ID of the Spanner database.") @@ -125,6 +127,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Target", optional = true, description = "Cloud Spanner Endpoint to call", helpText = "The Cloud Spanner endpoint to call in the template. Only used for testing.", @@ -136,6 +139,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsReadFile( order = 4, + groupName = "Source", description = "Text Import Manifest file", helpText = "The path in Cloud Storage to use when importing manifest files.", example = "gs://your-bucket/your-folder/your-manifest.json") @@ -145,6 +149,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Source", optional = true, description = "Column delimiter of the data files", helpText = "The column delimiter that the source file uses. 
The default value is ','.", @@ -156,6 +161,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 6, + groupName = "Source", optional = true, description = "Field qualifier used by the source file", helpText = @@ -168,6 +174,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Boolean( order = 7, + groupName = "Source", optional = true, description = "If true, the lines has trailing delimiters", helpText = @@ -181,6 +188,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 8, + groupName = "Source", optional = true, description = "Escape character", helpText = @@ -192,6 +200,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 9, + groupName = "Source", optional = true, description = "Null String", helpText = @@ -204,6 +213,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 10, + groupName = "Source", optional = true, description = "Date format", helpText = @@ -217,6 +227,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 11, + groupName = "Source", optional = true, description = "Timestamp format", helpText = @@ -239,6 +250,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.ProjectId( order = 13, + groupName = "Target", optional = true, description = "Cloud Spanner Project Id", helpText = @@ -249,6 +261,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Enum( order = 14, + groupName = "Target", enumOptions = { @TemplateEnumOption("LOW"), @TemplateEnumOption("MEDIUM"), @@ -265,6 +278,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Boolean( order = 15, + groupName = "Source", optional = true, description = "Handle new line", helpText = diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/BigQueryToTFRecord.java b/v1/src/main/java/com/google/cloud/teleport/templates/BigQueryToTFRecord.java index e2b5dd65b5..fd6cde8cbe 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/BigQueryToTFRecord.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/BigQueryToTFRecord.java @@ -330,6 +330,7 @@ public interface Options extends BigQueryReadOptions { @TemplateParameter.GcsWriteFolder( order = 1, + groupName = "Target", description = "Output Cloud Storage directory.", helpText = "The top-level Cloud Storage path prefix to use when writing the training, testing, and validation TFRecord files. Subdirectories for resulting training, testing, and validation TFRecord files are automatically generated from `outputDirectory`. 
For example, `gs://mybucket/output/train`", @@ -340,6 +341,7 @@ public interface Options extends BigQueryReadOptions { @TemplateParameter.Text( order = 2, + groupName = "Target", optional = true, regexes = {"^[A-Za-z_0-9.]*"}, description = "The output suffix for TFRecord files", diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/BulkCompressor.java b/v1/src/main/java/com/google/cloud/teleport/templates/BulkCompressor.java index 625b59c66d..7ba93c4d80 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/BulkCompressor.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/BulkCompressor.java @@ -113,6 +113,7 @@ public class BulkCompressor { public interface Options extends PipelineOptions { @TemplateParameter.GcsReadFile( order = 1, + groupName = "Source", description = "Input Cloud Storage File(s)", helpText = "The Cloud Storage location of the files you'd like to process.", example = "gs://your-bucket/your-files/*.txt") @@ -123,6 +124,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 2, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The path and filename prefix for writing output files. Must end with a slash. DateTime formatting is used to parse directory path for date & time formatters.", @@ -134,6 +136,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsWriteFile( order = 3, + groupName = "Target", description = "Output failure file", helpText = "The error log output file to use for write failures that occur during compression. The contents will be one line for " @@ -162,6 +165,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Target", optional = true, regexes = {"^[A-Za-z_0-9.]*"}, description = "Output filename suffix", diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/BulkDecompressor.java b/v1/src/main/java/com/google/cloud/teleport/templates/BulkDecompressor.java index 9c1929e828..508c464733 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/BulkDecompressor.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/BulkDecompressor.java @@ -161,6 +161,7 @@ public class BulkDecompressor { public interface Options extends PipelineOptions { @TemplateParameter.GcsReadFile( order = 1, + groupName = "Source", description = "Input Cloud Storage File(s)", helpText = "The Cloud Storage location of the files you'd like to process.", example = "gs://your-bucket/your-files/*.gz") @@ -171,6 +172,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 2, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The path and filename prefix for writing output files. Must end with a slash. 
DateTime formatting is used to parse directory path for date & time formatters.", diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/CSVToBigQuery.java b/v1/src/main/java/com/google/cloud/teleport/templates/CSVToBigQuery.java index cd3d809acc..9d459dcba6 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/CSVToBigQuery.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/CSVToBigQuery.java @@ -101,6 +101,7 @@ public interface Options extends DataflowPipelineOptions, CsvPipelineOptions { @TemplateParameter.Text( order = 1, + groupName = "Source", description = "Cloud Storage Input File(s)", helpText = "The Cloud Storage path to the CSV file that contains the text to process.", regexes = {"^gs:\\/\\/[^\\n\\r]+$"}, @@ -111,6 +112,7 @@ public interface Options extends DataflowPipelineOptions, CsvPipelineOptions { @TemplateParameter.GcsReadFile( order = 2, + groupName = "Target", description = "Cloud Storage location of your BigQuery schema file, described as a JSON", helpText = "The Cloud Storage path to the JSON file that defines your BigQuery schema.") ValueProvider getSchemaJSONPath(); @@ -119,6 +121,7 @@ public interface Options extends DataflowPipelineOptions, CsvPipelineOptions { @TemplateParameter.BigQueryTable( order = 3, + groupName = "Target", description = "BigQuery output table", helpText = "The name of the BigQuery table that stores your processed data. If you reuse an existing " diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/DLPTextToBigQueryStreaming.java b/v1/src/main/java/com/google/cloud/teleport/templates/DLPTextToBigQueryStreaming.java index 29b9bc26e7..20c7257f88 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/DLPTextToBigQueryStreaming.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/DLPTextToBigQueryStreaming.java @@ -288,6 +288,7 @@ public interface TokenizePipelineOptions extends DataflowPipelineOptions { @TemplateParameter.GcsReadFile( order = 1, + groupName = "Source", description = "Input Cloud Storage File(s)", helpText = "The CSV files to read input data records from. Wildcards are also accepted.", example = "gs://mybucket/my_csv_filename.csv or gs://mybucket/file-*.csv") @@ -297,6 +298,7 @@ public interface TokenizePipelineOptions extends DataflowPipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Source", regexes = { "^projects\\/[^\\n\\r\\/]+(\\/locations\\/[^\\n\\r\\/]+)?\\/deidentifyTemplates\\/[^\\n\\r\\/]+$" }, @@ -312,6 +314,7 @@ public interface TokenizePipelineOptions extends DataflowPipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "DLP Configuration", optional = true, regexes = { "^projects\\/[^\\n\\r\\/]+(\\/locations\\/[^\\n\\r\\/]+)?\\/inspectTemplates\\/[^\\n\\r\\/]+$" @@ -328,6 +331,7 @@ public interface TokenizePipelineOptions extends DataflowPipelineOptions { @TemplateParameter.Integer( order = 4, + groupName = "DLP Configuration", optional = true, description = "Batch size", helpText = @@ -341,6 +345,7 @@ public interface TokenizePipelineOptions extends DataflowPipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Target", regexes = {"^[^.]*$"}, description = "BigQuery Dataset", helpText = @@ -351,6 +356,7 @@ public interface TokenizePipelineOptions extends DataflowPipelineOptions { @TemplateParameter.ProjectId( order = 6, + groupName = "DLP Configuration", description = "Cloud DLP project ID", helpText = "The ID for the Google Cloud project that owns the DLP API resource. 
This project" diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/DatastoreToBigQuery.java b/v1/src/main/java/com/google/cloud/teleport/templates/DatastoreToBigQuery.java index 3afda8be70..2b6aaf776c 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/DatastoreToBigQuery.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/DatastoreToBigQuery.java @@ -36,6 +36,7 @@ public interface DatastoreToBigQueryOptions extends PipelineOptions, DatastoreReadOptions, JavascriptTextTransformerOptions { @TemplateParameter.BigQueryTable( order = 1, + groupName = "Target", description = "BigQuery output table", helpText = "BigQuery table location to write the output to. The name should be in the format " @@ -46,6 +47,7 @@ public interface DatastoreToBigQueryOptions @TemplateParameter.GcsWriteFolder( order = 2, + groupName = "Target", description = "Temporary directory for BigQuery loading process", helpText = "Temporary directory for BigQuery loading process", example = "gs://your-bucket/your-files/temp_dir") diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/ExportJobPlaceholder.java b/v1/src/main/java/com/google/cloud/teleport/templates/ExportJobPlaceholder.java index 54ed81d6b8..c60806ba9f 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/ExportJobPlaceholder.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/ExportJobPlaceholder.java @@ -57,6 +57,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Source", description = "Project ID", helpText = "The ID of the Google Cloud project that contains the Bigtable instance that you want to read data from.") @@ -64,6 +65,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Text( order = 2, + groupName = "Source", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Bigtable instance that contains the table.") @@ -71,6 +73,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Text( order = 3, + groupName = "Source", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Bigtable table to export.") @@ -78,6 +81,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Text( order = 4, + groupName = "Source", optional = true, regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Application profile ID", @@ -87,6 +91,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Text( order = 5, + groupName = "Source", optional = true, description = "Bigtable Start Row", helpText = "The row where to start the export from, defaults to the first row.") @@ -95,6 +100,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Text( order = 6, + groupName = "Source", optional = true, description = "Bigtable Stop Row", helpText = "The row where to stop the export, defaults to the last row.") @@ -103,6 +109,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Integer( order = 7, + groupName = "Source", optional = true, description = "Bigtable Max Versions", helpText = "Maximum number of cell versions.") @@ -111,6 +118,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Text( order = 8, + groupName = "Source", optional = true, description = "Bigtable Filter", helpText = "Filter string. 
See: http://hbase.apache.org/book.html#thrift.") @@ -119,6 +127,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.GcsWriteFolder( order = 9, + groupName = "Target", description = "Destination path", helpText = "The Cloud Storage path where data is written.", example = "gs://your-bucket/your-path/") @@ -126,6 +135,7 @@ protected interface ExportJobPlaceholderOptions { @TemplateParameter.Text( order = 10, + groupName = "Target", description = "SequenceFile prefix", helpText = "The prefix of the SequenceFile filename.", example = "output-") diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/ImportJobPlaceholder.java b/v1/src/main/java/com/google/cloud/teleport/templates/ImportJobPlaceholder.java index de24f11b01..a350a6e376 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/ImportJobPlaceholder.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/ImportJobPlaceholder.java @@ -58,6 +58,7 @@ protected interface ImportJobPlaceholderOptions { @TemplateParameter.ProjectId( order = 1, + groupName = "Target", description = "Project ID", helpText = "The ID of the Google Cloud project that contains the Bigtable instance that you want to write data to.") @@ -65,6 +66,7 @@ protected interface ImportJobPlaceholderOptions { @TemplateParameter.Text( order = 2, + groupName = "Target", regexes = {"[a-z][a-z0-9\\-]+[a-z0-9]"}, description = "Instance ID", helpText = "The ID of the Bigtable instance that contains the table.") @@ -72,6 +74,7 @@ protected interface ImportJobPlaceholderOptions { @TemplateParameter.Text( order = 3, + groupName = "Target", regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Table ID", helpText = "The ID of the Bigtable table to import.") @@ -79,6 +82,7 @@ protected interface ImportJobPlaceholderOptions { @TemplateParameter.Text( order = 4, + groupName = "Target", optional = true, regexes = {"[_a-zA-Z0-9][-_.a-zA-Z0-9]*"}, description = "Application profile ID", @@ -88,6 +92,7 @@ protected interface ImportJobPlaceholderOptions { @TemplateParameter.GcsReadFile( order = 5, + groupName = "Source", description = "Source path pattern", helpText = "The Cloud Storage path pattern to the location of the data.", example = "gs://your-bucket/your-path/prefix*") diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToAvro.java b/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToAvro.java index 78f94d1dce..2e710bad8d 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToAvro.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToAvro.java @@ -95,6 +95,7 @@ public interface Options extends PipelineOptions, StreamingOptions, WindowedFilenamePolicyOptions { @TemplateParameter.PubsubSubscription( order = 1, + groupName = "Source", description = "Pub/Sub input subscription", helpText = "Pub/Sub subscription to read the input from, in the format of" @@ -106,6 +107,7 @@ public interface Options @TemplateParameter.PubsubTopic( order = 2, + groupName = "Source", description = "Pub/Sub input topic", helpText = "The Pub/Sub topic to subscribe to for message consumption. The topic name must be in the format projects//topics/.") @@ -123,6 +125,7 @@ public interface Options @TemplateParameter.GcsWriteFolder( order = 4, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The output directory where output Avro files are archived. Must contain / at the end. 
For example: gs://example-bucket/example-directory/") @@ -133,6 +136,7 @@ public interface Options @TemplateParameter.Text( order = 5, + groupName = "Target", optional = true, description = "Output filename prefix of the files to write", helpText = "The output filename prefix for the Avro files.", @@ -144,6 +148,7 @@ public interface Options @TemplateParameter.Text( order = 6, + groupName = "Target", optional = true, description = "Output filename suffix of the files to write", helpText = "The output filename suffix for the Avro files.") diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToPubsub.java b/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToPubsub.java index 2bd1f94de5..e6ec1eda3d 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToPubsub.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToPubsub.java @@ -130,6 +130,7 @@ public static PipelineResult run(Options options) { public interface Options extends PipelineOptions, StreamingOptions { @TemplateParameter.PubsubSubscription( order = 1, + groupName = "Source", description = "Pub/Sub input subscription", helpText = "The Pub/Sub subscription to read the input from.", example = "projects/your-project-id/subscriptions/your-subscription-name") @@ -140,6 +141,7 @@ public interface Options extends PipelineOptions, StreamingOptions { @TemplateParameter.PubsubTopic( order = 2, + groupName = "Target", description = "Output Pub/Sub topic", helpText = "The Pub/Sub topic to write the output to.", example = "projects/your-project-id/topics/your-topic-name") diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToText.java b/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToText.java index 1c3095f31e..ba8faa07b5 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToText.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/PubsubToText.java @@ -86,6 +86,7 @@ public interface Options @TemplateParameter.PubsubSubscription( order = 1, + groupName = "Source", optional = true, description = "Pub/Sub input subscription", helpText = @@ -98,6 +99,7 @@ public interface Options @TemplateParameter.PubsubTopic( order = 2, + groupName = "Source", optional = true, description = "Pub/Sub input topic", helpText = @@ -117,6 +119,7 @@ public interface Options @TemplateParameter.GcsWriteFolder( order = 3, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The path and filename prefix for writing output files. For example, `gs://bucket-name/path/`. This value must end in a slash.") @@ -137,6 +140,7 @@ public interface Options @TemplateParameter.Text( order = 5, + groupName = "Target", description = "Output filename prefix of the files to write", helpText = "The prefix to place on each windowed file. 
For example, `output-`.") @Default.String("output") @@ -147,6 +151,7 @@ public interface Options @TemplateParameter.Text( order = 6, + groupName = "Target", optional = true, description = "Output filename suffix of the files to write", helpText = diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/SpannerToText.java b/v1/src/main/java/com/google/cloud/teleport/templates/SpannerToText.java index 98c7642651..c29957474c 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/SpannerToText.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/SpannerToText.java @@ -86,6 +86,7 @@ public interface SpannerToTextOptions @TemplateParameter.GcsWriteFolder( order = 1, + groupName = "Target", optional = true, description = "Cloud Storage temp directory for storing CSV files", helpText = "The Cloud Storage path where temporary CSV files are written.", @@ -97,6 +98,7 @@ public interface SpannerToTextOptions @TemplateParameter.Enum( order = 2, + groupName = "Source", enumOptions = { @TemplateEnumOption("LOW"), @TemplateEnumOption("MEDIUM"), diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/SpannerVectorEmbeddingExport.java b/v1/src/main/java/com/google/cloud/teleport/templates/SpannerVectorEmbeddingExport.java index 2e639dd397..1c6fed5028 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/SpannerVectorEmbeddingExport.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/SpannerVectorEmbeddingExport.java @@ -87,6 +87,7 @@ public class SpannerVectorEmbeddingExport { public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.ProjectId( order = 10, + groupName = "Source", description = "Cloud Spanner Project Id", helpText = "The project ID of the Spanner instance.") ValueProvider getSpannerProjectId(); @@ -95,6 +96,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Text( order = 20, + groupName = "Source", regexes = {"[a-z][a-z0-9\\-]*[a-z0-9]"}, description = "Cloud Spanner instance ID", helpText = "The ID of the Spanner instance to export the vector embeddings from.") @@ -104,6 +106,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Text( order = 30, + groupName = "Source", regexes = {"[a-z][a-z0-9_\\-]*[a-z0-9]"}, description = "Cloud Spanner database ID", helpText = "The ID of the Spanner database to export the vector embeddings from.") @@ -113,6 +116,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Text( order = 40, + groupName = "Source", regexes = {"^.+$"}, description = "Spanner Table", helpText = "The Spanner table to read from.") @@ -122,6 +126,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Text( order = 50, + groupName = "Source", description = "Columns to Export from Spanner Table", helpText = "A comma-separated list of required columns for the Vertex AI Vector Search index. The ID and embedding columns are required by Vector Search. If your column names don't match the Vertex AI Vector Search index input structure, create column mappings by using aliases. If the column names don't match the format expected by Vertex AI, use the notation from:to. 
For example, if you have columns named id and my_embedding, specify id, my_embedding:embedding.") @@ -131,6 +136,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 60, + groupName = "Target", description = "Output files folder in Cloud Storage", helpText = "The Cloud Storage folder to write output files to. The path must end with a slash.", @@ -141,6 +147,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Text( order = 70, + groupName = "Target", description = "Output files prefix in Cloud Storage", helpText = "The filename prefix for writing output files.", example = "vector-embeddings") @@ -150,6 +157,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Text( order = 80, + groupName = "Source", optional = true, description = "Cloud Spanner Endpoint to call", helpText = @@ -162,6 +170,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Text( order = 90, + groupName = "Source", optional = true, regexes = { "^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):(([0-9]{2})(\\.[0-9]+)?)Z$" @@ -177,6 +186,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Boolean( order = 100, + groupName = "Source", optional = true, description = "Use independent compute resource (Spanner DataBoost).", helpText = @@ -188,6 +198,7 @@ public interface SpannerToVectorEmbeddingJsonOptions extends PipelineOptions { @TemplateParameter.Enum( order = 110, + groupName = "Source", enumOptions = { @TemplateEnumOption("LOW"), @TemplateEnumOption("MEDIUM"), diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/TextIOToBigQuery.java b/v1/src/main/java/com/google/cloud/teleport/templates/TextIOToBigQuery.java index 2e6ee80916..5b4ed1cb11 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/TextIOToBigQuery.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/TextIOToBigQuery.java @@ -117,6 +117,7 @@ public interface Options extends DataflowPipelineOptions, JavascriptTextTransfor @TemplateParameter.GcsReadFile( order = 1, + groupName = "Source", description = "Cloud Storage Input File(s)", helpText = "Path of the file pattern glob to read from.", example = "gs://your-bucket/path/*.csv") @@ -126,6 +127,7 @@ public interface Options extends DataflowPipelineOptions, JavascriptTextTransfor @TemplateParameter.GcsReadFile( order = 2, + groupName = "Target", description = "Cloud Storage location of your BigQuery schema file, described as a JSON", helpText = "JSON file with BigQuery Schema description. JSON Example: {\n" @@ -158,6 +160,7 @@ public interface Options extends DataflowPipelineOptions, JavascriptTextTransfor @TemplateParameter.BigQueryTable( order = 3, + groupName = "Target", description = "BigQuery output table", helpText = "BigQuery table location to write the output to. 
The table's schema must match the " @@ -168,6 +171,7 @@ public interface Options extends DataflowPipelineOptions, JavascriptTextTransfor @TemplateParameter.GcsWriteFolder( order = 6, + groupName = "Target", description = "Temporary directory for BigQuery loading process", helpText = "Temporary directory for BigQuery loading process", example = "gs://your-bucket/your-files/temp_dir") diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/TextToPubsub.java b/v1/src/main/java/com/google/cloud/teleport/templates/TextToPubsub.java index c1e7a9d2ee..10bf00644e 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/TextToPubsub.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/TextToPubsub.java @@ -62,6 +62,7 @@ public class TextToPubsub { public interface Options extends PipelineOptions { @TemplateParameter.GcsReadFile( order = 1, + groupName = "Source", description = "Cloud Storage Input File(s)", helpText = "The input file pattern to read from.", example = "gs://bucket-name/files/*.json") @@ -72,6 +73,7 @@ public interface Options extends PipelineOptions { @TemplateParameter.PubsubTopic( order = 2, + groupName = "Target", description = "Output Pub/Sub topic", helpText = "The Pub/Sub input topic to write to. The name must be in the format `projects//topics/`.", diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/common/SpannerConverters.java b/v1/src/main/java/com/google/cloud/teleport/templates/common/SpannerConverters.java index d6eb649bde..e219b4d0cf 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/common/SpannerConverters.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/common/SpannerConverters.java @@ -87,6 +87,7 @@ public class SpannerConverters { public interface SpannerReadOptions extends PipelineOptions { @TemplateParameter.Text( order = 1, + groupName = "Source", regexes = {"^.+$"}, description = "Spanner Table", helpText = "The Spanner table to read the data from.") @@ -97,6 +98,7 @@ public interface SpannerReadOptions extends PipelineOptions { @TemplateParameter.ProjectId( order = 2, + groupName = "Source", description = "Read data from Cloud Spanner Project Id", helpText = "The ID of the Google Cloud project that contains the Spanner database to read data from.") @@ -107,6 +109,7 @@ public interface SpannerReadOptions extends PipelineOptions { @TemplateParameter.Text( order = 3, + groupName = "Source", regexes = {".+"}, description = "Read data from Cloud Spanner Instance", helpText = "The instance ID of the requested table.") @@ -117,6 +120,7 @@ public interface SpannerReadOptions extends PipelineOptions { @TemplateParameter.Text( order = 4, + groupName = "Source", regexes = {".+"}, description = "Read data from Cloud Spanner Database ", helpText = "The database ID of the requested table.") @@ -127,6 +131,7 @@ public interface SpannerReadOptions extends PipelineOptions { @TemplateParameter.Text( order = 5, + groupName = "Source", optional = true, description = "Cloud Spanner Endpoint to call", helpText = "The Cloud Spanner endpoint to call in the template. 
Only used for testing.", @@ -139,6 +144,7 @@ public interface SpannerReadOptions extends PipelineOptions { @TemplateParameter.Text( order = 6, + groupName = "Source", optional = true, regexes = { "^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):(([0-9]{2})(\\.[0-9]+)?)Z$" @@ -158,6 +164,7 @@ public interface SpannerReadOptions extends PipelineOptions { @TemplateParameter.Boolean( order = 7, + groupName = "Source", optional = true, description = "Use independent compute resource (Spanner DataBoost).", helpText = diff --git a/v1/src/main/java/com/google/cloud/teleport/templates/common/TextConverters.java b/v1/src/main/java/com/google/cloud/teleport/templates/common/TextConverters.java index 986969cb41..16ece3faf6 100644 --- a/v1/src/main/java/com/google/cloud/teleport/templates/common/TextConverters.java +++ b/v1/src/main/java/com/google/cloud/teleport/templates/common/TextConverters.java @@ -43,6 +43,7 @@ public interface FilesystemReadOptions extends PipelineOptions { public interface FilesystemWriteOptions extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 2, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The Cloud Storage path prefix that specifies where the data is written.", example = "gs://mybucket/somefolder/") @@ -56,6 +57,7 @@ public interface FilesystemWindowedWriteOptions extends PipelineOptions { @TemplateParameter.GcsWriteFolder( order = 1, + groupName = "Target", description = "Output file directory in Cloud Storage", helpText = "The path and filename prefix for writing output files. Must end with a slash. DateTime formatting is used to parse directory path for date & time formatters.", @@ -67,6 +69,7 @@ public interface FilesystemWindowedWriteOptions extends PipelineOptions { @TemplateParameter.Text( order = 2, + groupName = "Target", description = "Output filename prefix of the files to write", helpText = "The prefix to place on each windowed file.", example = "output-") @@ -92,6 +95,7 @@ public interface FilesystemWindowedWriteOptions extends PipelineOptions { @TemplateParameter.Integer( order = 4, + groupName = "Target", optional = true, description = "Maximum output shards", helpText = @@ -105,6 +109,7 @@ public interface FilesystemWindowedWriteOptions extends PipelineOptions { @TemplateParameter.Duration( order = 5, + groupName = "Target", optional = true, description = "Window duration", helpText =