feat(bigquery): Update minimum Ruby version to 2.6
dazuma committed Jul 26, 2022
1 parent aad96c6 commit fd7a010
Showing 58 changed files with 630 additions and 637 deletions.
12 changes: 6 additions & 6 deletions google-cloud-bigquery/README.md
@@ -70,14 +70,14 @@ Google::Apis.logger = my_logger

## Supported Ruby Versions

-This library is supported on Ruby 2.5+.
+This library is supported on Ruby 2.6+.

Google provides official support for Ruby versions that are actively supported
-by Ruby Core—that is, Ruby versions that are either in normal maintenance or in
-security maintenance, and not end of life. Currently, this means Ruby 2.5 and
-later. Older versions of Ruby _may_ still work, but are unsupported and not
-recommended. See https://www.ruby-lang.org/en/downloads/branches/ for details
-about the Ruby support schedule.
+by Ruby Core—that is, Ruby versions that are either in normal maintenance or
+in security maintenance, and not end of life. Older versions of Ruby _may_
+still work, but are unsupported and not recommended. See
+https://www.ruby-lang.org/en/downloads/branches/ for details about the Ruby
+support schedule.

## Versioning

6 changes: 3 additions & 3 deletions google-cloud-bigquery/google-cloud-bigquery.gemspec
@@ -16,7 +16,7 @@ Gem::Specification.new do |gem|
"CHANGELOG.md", "CODE_OF_CONDUCT.md", "LICENSE", ".yardopts"]
gem.require_paths = ["lib"]

-gem.required_ruby_version = ">= 2.5"
+gem.required_ruby_version = ">= 2.6"

gem.add_dependency "concurrent-ruby", "~> 1.0"
gem.add_dependency "google-apis-bigquery_v2", "~> 0.1"
@@ -26,8 +26,8 @@ Gem::Specification.new do |gem|

gem.add_development_dependency "autotest-suffix", "~> 1.1"
gem.add_development_dependency "google-cloud-data_catalog", "~> 1.2"
gem.add_development_dependency "google-style", "~> 1.25.1"
gem.add_development_dependency "minitest", "~> 5.14"
gem.add_development_dependency "google-style", "~> 1.26.1"
gem.add_development_dependency "minitest", "~> 5.16"
gem.add_development_dependency "minitest-autotest", "~> 1.0"
gem.add_development_dependency "minitest-focus", "~> 1.1"
gem.add_development_dependency "minitest-rg", "~> 5.2"
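Context for the gemspec change above: `required_ruby_version` is the line that actually enforces the new floor, since RubyGems refuses to install a gem whose constraint the running interpreter does not satisfy. A minimal sketch of the pattern, using a hypothetical gem name:

    # example.gemspec (hypothetical gem); only the version constraint
    # mirrors this commit.
    Gem::Specification.new do |gem|
      gem.name    = "example-gem"
      gem.version = "1.0.0"
      gem.summary = "Illustrates required_ruby_version"
      gem.authors = ["Example Author"]
      gem.files   = ["lib/example.rb"]

      # `gem install` fails on Ruby < 2.6 with this constraint in place.
      gem.required_ruby_version = ">= 2.6"
    end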
10 changes: 4 additions & 6 deletions google-cloud-bigquery/lib/google/cloud/bigquery/convert.rb
@@ -54,10 +54,9 @@ def self.format_rows rows, fields
end

def self.format_row row, fields
-row_pairs = fields.zip(row[:f]).map do |f, v|
+fields.zip(row[:f]).to_h do |f, v|
[f.name.to_sym, format_value(v, f)]
end
-Hash[row_pairs]
end

# rubocop:disable all
@@ -123,10 +122,9 @@ def self.to_query_param_value value, type = nil
array_values = json_value.map { |v| to_query_param_value v, type }
Google::Apis::BigqueryV2::QueryParameterValue.new array_values: array_values
when Hash
-struct_pairs = json_value.map do |k, v|
+struct_values = json_value.to_h do |k, v|
[String(k), to_query_param_value(v, type)]
end
-struct_values = Hash[struct_pairs]
Google::Apis::BigqueryV2::QueryParameterValue.new struct_values: struct_values
else
# Everything else is converted to a string, per the API expectations.
@@ -239,7 +237,7 @@ def self.to_json_value value, type = nil
type = extract_array_type type
value.map { |x| to_json_value x, type }
elsif Hash === value
-Hash[value.map { |k, v| [k.to_s, to_json_value(v, type)] }]
+value.to_h { |k, v| [k.to_s, to_json_value(v, type)] }
else
value
end
@@ -266,7 +264,7 @@ def self.extract_array_type type
end

def self.to_json_row row
-Hash[row.map { |k, v| [k.to_s, to_json_value(v)] }]
+row.to_h { |k, v| [k.to_s, to_json_value(v)] }
end

def self.resolve_legacy_sql standard_sql, legacy_sql
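The recurring refactor in the hunks above is enabled by the version bump itself: Ruby 2.6 introduced the block form of `to_h`, so the intermediate array of `[key, value]` pairs wrapped in `Hash[...]` is no longer needed. A standalone comparison with illustrative values:

    fields = [:name, :age]
    values = ["Alice", 30]

    # Ruby 2.5 and earlier: build an array of pairs, then convert.
    pairs = fields.zip(values).map { |f, v| [f, v.to_s] }
    old_style = Hash[pairs]

    # Ruby 2.6+: to_h takes a block that returns a [key, value] pair.
    new_style = fields.zip(values).to_h { |f, v| [f, v.to_s] }

    old_style == new_style # => true; both are { name: "Alice", age: "30" }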
2 changes: 1 addition & 1 deletion google-cloud-bigquery/lib/google/cloud/bigquery/dataset.rb
@@ -2754,7 +2754,7 @@ def gapi_exists?
def patch_gapi! *attributes
return if attributes.empty?
ensure_service!
-patch_args = Hash[attributes.map { |attr| [attr, @gapi.send(attr)] }]
+patch_args = attributes.to_h { |attr| [attr, @gapi.send(attr)] }
patch_gapi = Google::Apis::BigqueryV2::Dataset.new(**patch_args)
patch_gapi.etag = etag if etag
@gapi = service.patch_dataset dataset_id, patch_gapi
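The `patch_gapi!` change (here and again in table.rb below) uses the same Ruby 2.6 block form of `to_h` to turn a list of attribute names into keyword arguments. A minimal sketch, with a hypothetical `Resource` struct standing in for the gapi object:

    Resource = Struct.new(:name, :description, keyword_init: true)
    gapi = Resource.new(name: "logs", description: "Log dataset")

    attributes = [:name, :description]
    patch_args = attributes.to_h { |attr| [attr, gapi.send(attr)] }
    # => { name: "logs", description: "Log dataset" }

    # The double splat then forwards the pairs as keyword arguments,
    # as in Google::Apis::BigqueryV2::Dataset.new(**patch_args).
    patched = Resource.new(**patch_args)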
@@ -221,7 +221,7 @@ def destinations_file_counts
# and the counts as values.
#
def destinations_counts
-Hash[destinations.zip destinations_file_counts]
+destinations.zip(destinations_file_counts).to_h
end

##
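Unlike the block-form calls above, `destinations.zip(destinations_file_counts).to_h` relies only on the blockless `to_h` over an array of pairs, available since Ruby 2.1; the rewrite simply retires the older `Hash[...]` construction. With illustrative values:

    destinations = ["gs://my-bucket/a.csv", "gs://my-bucket/b.csv"]
    file_counts  = [3, 1]

    destinations.zip(file_counts).to_h
    # => { "gs://my-bucket/a.csv" => 3, "gs://my-bucket/b.csv" => 1 }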
2 changes: 1 addition & 1 deletion google-cloud-bigquery/lib/google/cloud/bigquery/job.rb
@@ -489,7 +489,7 @@ def reload!
#
def wait_until_done!
backoff = lambda do |retries|
-delay = [retries**2 + 5, 60].min # Maximum delay is 60
+delay = [(retries**2) + 5, 60].min # Maximum delay is 60
sleep delay
end
retries = 0
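The parentheses added around `retries**2` only make the existing precedence explicit (`**` binds tighter than `+`), so behavior is unchanged: the lambda sleeps on a quadratic schedule capped at 60 seconds. A sketch with the `sleep` dropped so the schedule can be inspected:

    backoff_delay = lambda do |retries|
      [(retries**2) + 5, 60].min # quadratic growth, capped at 60 seconds
    end

    (0..8).map { |r| backoff_delay.call(r) }
    # => [5, 6, 9, 14, 21, 30, 41, 54, 60]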
@@ -1193,7 +1193,7 @@ def standard_sql= value
#
def external= value
external_table_pairs = value.map { |name, obj| [String(name), obj.to_gapi] }
-external_table_hash = Hash[external_table_pairs]
+external_table_hash = external_table_pairs.to_h
@gapi.configuration.query.table_definitions = external_table_hash
end

2 changes: 1 addition & 1 deletion google-cloud-bigquery/lib/google/cloud/bigquery/schema.rb
@@ -172,7 +172,7 @@ def headers
# schema.param_types
#
def param_types
-Hash[fields.map { |field| [field.name.to_sym, field.param_type] }]
+fields.to_h { |field| [field.name.to_sym, field.param_type] }
end

##
@@ -471,7 +471,7 @@ def headers
#
def param_type
param_type = type.to_sym
-param_type = Hash[fields.map { |field| [field.name.to_sym, field.param_type] }] if record?
+param_type = fields.to_h { |field| [field.name.to_sym, field.param_type] } if record?
param_type = [param_type] if repeated?
param_type
end
2 changes: 1 addition & 1 deletion google-cloud-bigquery/lib/google/cloud/bigquery/table.rb
@@ -2766,7 +2766,7 @@ def gapi_exists?
def patch_gapi! *attributes
return if attributes.empty?
ensure_service!
-patch_args = Hash[attributes.map { |attr| [attr, @gapi.send(attr)] }]
+patch_args = attributes.to_h { |attr| [attr, @gapi.send(attr)] }
patch_gapi = Google::Apis::BigqueryV2::Table.new(**patch_args)
patch_gapi.etag = etag if etag
@gapi = service.patch_table dataset_id, table_id, patch_gapi
3 changes: 1 addition & 2 deletions google-cloud-bigquery/samples/simple_app/stackoverflow.rb
@@ -27,8 +27,7 @@ def stackoverflow

# [START bigquery_simple_app_query]
sql = "SELECT " \
"CONCAT('https://stackoverflow.com/questions/', " \
" CAST(id as STRING)) as url, view_count " \
"CONCAT('https://stackoverflow.com/questions/', CAST(id as STRING)) as url, view_count " \
"FROM `bigquery-public-data.stackoverflow.posts_questions` " \
"WHERE tags like '%google-bigquery%' " \
"ORDER BY view_count DESC LIMIT 10"
40 changes: 20 additions & 20 deletions google-cloud-bigquery/test/google/cloud/bigquery/data_test.rb
@@ -28,7 +28,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}

data = table.data
mock.verify
@@ -79,7 +79,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}

data = table.data
mock.verify
@@ -104,7 +104,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}

data = table.data
mock.verify
@@ -121,7 +121,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
nil_table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}

nil_data = table.data
mock.verify
@@ -161,7 +161,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
nested_table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}

nested_data = nested_table.data
mock.verify
@@ -206,7 +206,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
nested_table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}

nested_data = nested_table.data
mock.verify
@@ -222,10 +222,10 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true}

data1 = table.data

@@ -242,10 +242,10 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}
mock.expect :list_table_data,
table_data_gapi(token: nil).to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true}

data1 = table.data

@@ -264,10 +264,10 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}
mock.expect :list_table_data,
table_data_gapi(token: nil).to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true}

data = table.data.all.to_a

@@ -281,10 +281,10 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}
mock.expect :list_table_data,
table_data_gapi(token: nil).to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true}

data = table.data

@@ -297,10 +297,10 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true}

data = table.data.all.take(5)

@@ -314,10 +314,10 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: nil, options: {skip_deserialization: true}
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: "token1234567890", start_index: nil, options: {skip_deserialization: true}

data = table.data.all(request_limit: 1).to_a

@@ -331,7 +331,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: 3, page_token: nil, start_index: nil, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: 3, page_token: nil, start_index: nil, options: {skip_deserialization: true}

data = table.data max: 3
_(data.class).must_equal Google::Cloud::Bigquery::Data
@@ -342,7 +342,7 @@
bigquery.service.mocked_service = mock
mock.expect :list_table_data,
table_data_gapi.to_json,
-[project, dataset_id, table_id, { max_results: nil, page_token: nil, start_index: 25, options: {skip_deserialization: true} }]
+[project, dataset_id, table_id], max_results: nil, page_token: nil, start_index: 25, options: {skip_deserialization: true}

data = table.data start: 25
mock.verify
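Every test hunk above follows one shape: keyword arguments move out of the positional-args array and into `Minitest::Mock#expect`'s own keyword parameters, matching the minitest ~> 5.16 bump in the gemspec and Ruby 3 keyword-argument separation. A self-contained sketch with a hypothetical method name:

    require "minitest/mock"

    mock = Minitest::Mock.new

    # Old style bundled keywords into the args array as a trailing hash:
    #   mock.expect :fetch, "rows", ["my_table", { max_results: 10 }]
    # New style declares them as real keyword arguments:
    mock.expect :fetch, "rows", ["my_table"], max_results: 10

    mock.fetch("my_table", max_results: 10) # => "rows"
    mock.verify                             # => true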
@@ -23,7 +23,7 @@
found_model_id = "found_model"

mock = Minitest::Mock.new
-mock.expect :get_model, random_model_full_hash(dataset.dataset_id, found_model_id).to_json, [project, dataset.dataset_id, found_model_id, options: { skip_deserialization: true }]
+mock.expect :get_model, random_model_full_hash(dataset.dataset_id, found_model_id).to_json, [project, dataset.dataset_id, found_model_id], options: { skip_deserialization: true }
dataset.service.mocked_service = mock

model = dataset.model found_model_id