diff --git a/clients/bigquery/bigquery.go b/clients/bigquery/bigquery.go
index 658c343e..c6d992ab 100644
--- a/clients/bigquery/bigquery.go
+++ b/clients/bigquery/bigquery.go
@@ -34,13 +34,12 @@ const (
 	describeNameCol    = "column_name"
 	describeTypeCol    = "data_type"
 	describeCommentCol = "description"
-	// Storage Write API is limited to 10 MB, let's start out conservative and use 80% of that.
-	maxRequestByteSize = 10_000_000 * .8
+	// Storage Write API is limited to 10 MiB, subtract 50 KiB to account for request overhead.
+	maxRequestByteSize = (10 * 1024 * 1024) - (50 * 1024)
 )
 
 type Store struct {
 	configMap *types.DwhToTablesConfigMap
-	batchSize int
 	config    config.Config
 
 	db.Store
@@ -223,7 +222,6 @@ func (s *Store) Dedupe(tableID sql.TableIdentifier, primaryKeys []string, includ
 }
 
 func LoadBigQuery(cfg config.Config, _store *db.Store) (*Store, error) {
-	cfg.BigQuery.LoadDefaultValues()
 	if _store != nil {
 		// Used for tests.
 		return &Store{
@@ -250,7 +248,6 @@ func LoadBigQuery(cfg config.Config, _store *db.Store) (*Store, error) {
 	return &Store{
 		Store:     store,
 		configMap: &types.DwhToTablesConfigMap{},
-		batchSize: cfg.BigQuery.BatchSize,
 		config:    cfg,
 	}, nil
 }
diff --git a/lib/config/bigquery.go b/lib/config/bigquery.go
index 6e8250da..5226fa95 100644
--- a/lib/config/bigquery.go
+++ b/lib/config/bigquery.go
@@ -9,13 +9,6 @@ type BigQuery struct {
 	DefaultDataset string `yaml:"defaultDataset"`
 	ProjectID      string `yaml:"projectID"`
 	Location       string `yaml:"location"`
-	BatchSize      int    `yaml:"batchSize"`
-}
-
-func (b *BigQuery) LoadDefaultValues() {
-	if b.BatchSize == 0 {
-		b.BatchSize = 1000
-	}
 }
 
 // DSN - returns the notation for BigQuery following this format: bigquery://projectID/[location/]datasetID?queryString
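
Note (not part of the diff above): a minimal Go sketch of how a byte budget like the new maxRequestByteSize constant could be used to split encoded rows into requests that stay under the Storage Write API limit. The chunkByByteSize helper and the row sizes in main are hypothetical and only illustrate the arithmetic in the new constant; they are not code from this repository.

// Hypothetical illustration: group pre-encoded rows so that each request's
// payload stays under a byte budget such as maxRequestByteSize.
package main

import "fmt"

// 10 MiB minus 50 KiB of assumed per-request overhead, mirroring the constant in the diff.
const maxRequestByteSize = (10 * 1024 * 1024) - (50 * 1024)

// chunkByByteSize splits rows into chunks whose combined size stays under maxBytes.
// A single oversized row still gets its own chunk rather than being dropped.
func chunkByByteSize(rows [][]byte, maxBytes int) [][][]byte {
	var chunks [][][]byte
	var current [][]byte
	currentSize := 0
	for _, row := range rows {
		if len(current) > 0 && currentSize+len(row) > maxBytes {
			chunks = append(chunks, current)
			current, currentSize = nil, 0
		}
		current = append(current, row)
		currentSize += len(row)
	}
	if len(current) > 0 {
		chunks = append(chunks, current)
	}
	return chunks
}

func main() {
	// Three fake rows: the two ~6 MB rows cannot share a request under the ~10.4 MB cap.
	rows := [][]byte{make([]byte, 6_000_000), make([]byte, 6_000_000), make([]byte, 1_000)}
	for i, chunk := range chunkByByteSize(rows, maxRequestByteSize) {
		fmt.Printf("request %d: %d rows\n", i, len(chunk))
	}
}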