This repository has been archived by the owner on Oct 16, 2023. It is now read-only.

chore: use gapic-generator-python 0.62.1 (#36)
- [ ] Regenerate this pull request now.

fix: resolve DuplicateCredentialArgs error when using credentials_file

committer: parthea
PiperOrigin-RevId: 425964861

Source-Link: googleapis/googleapis@84b1a5a

Source-Link: googleapis/googleapis-gen@4fb761b
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGZiNzYxYmJkODUwNmFjMTU2ZjQ5YmFjNWYxODMwNmFhOGViM2FhOCJ9
gcf-owl-bot[bot] committed Feb 3, 2022
1 parent 124de81 commit b3b1ee2
Showing 6 changed files with 100 additions and 21 deletions.
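
For context on the fix: the generated gRPC transports load credentials in __init__ and then open the channel. Before this regeneration they forwarded both the already-loaded credentials object and the original credentials_file path to create_channel, and google-api-core treats those two arguments as mutually exclusive, which is what raised DuplicateCredentialArgs. The sketch below is illustrative only, not the generated code; AnonymousCredentials stands in for whatever credentials were loaded, and the endpoint matches the one exercised in the tests.

# Sketch only (not the generated transport code): why passing both credential
# arguments fails, and the shape of the corrected call.
from google.api_core import grpc_helpers
from google.auth import credentials as ga_credentials

# Stand-in for the credentials the transport base class already loaded
# (e.g. from the credentials_file passed via ClientOptions).
creds = ga_credentials.AnonymousCredentials()

# Before this change the transport effectively did the following, which
# raises google.api_core.exceptions.DuplicateCredentialArgs because
# 'credentials' and 'credentials_file' are mutually exclusive:
#
#     grpc_helpers.create_channel(
#         "bigquerymigration.googleapis.com:443",
#         credentials=creds,
#         credentials_file="credentials.json",
#     )

# After this change only the saved credentials object is forwarded and
# credentials_file is pinned to None, so channel creation succeeds.
channel = grpc_helpers.create_channel(
    "bigquerymigration.googleapis.com:443",
    credentials=creds,
    credentials_file=None,
    scopes=None,
)
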
@@ -260,7 +260,7 @@ async def create_migration_workflow(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, migration_workflow])
if request is not None and has_flattened_params:
@@ -335,7 +335,7 @@ async def get_migration_workflow(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -418,7 +418,7 @@ async def list_migration_workflows(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
@@ -499,7 +499,7 @@ async def delete_migration_workflow(
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -568,7 +568,7 @@ async def start_migration_workflow(
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -652,7 +652,7 @@ async def get_migration_subtask(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -734,7 +734,7 @@ async def list_migration_subtasks(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
@@ -467,7 +467,7 @@ def create_migration_workflow(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, migration_workflow])
if request is not None and has_flattened_params:
@@ -544,7 +544,7 @@ def get_migration_workflow(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -618,7 +618,7 @@ def list_migration_workflows(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
@@ -690,7 +690,7 @@ def delete_migration_workflow(
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -761,7 +761,7 @@ def start_migration_workflow(
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -836,7 +836,7 @@ def get_migration_subtask(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
@@ -909,7 +909,7 @@ def list_migration_subtasks(
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent])
if request is not None and has_flattened_params:
@@ -160,8 +160,11 @@ def __init__(
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
credentials_file=credentials_file,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
@@ -205,8 +205,11 @@ def __init__(
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
credentials_file=credentials_file,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
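
The synchronous and asyncio gRPC transports above receive the same change: the saved credentials are reused and credentials_file is forced to None when the channel is created. In practice, a client configured through ClientOptions(credentials_file=...) should now construct cleanly instead of raising DuplicateCredentialArgs. A minimal usage sketch follows, with an assumed import path (bigquery_migration_v2alpha) and an illustrative key file path and resource name.

from google.api_core.client_options import ClientOptions
from google.cloud import bigquery_migration_v2alpha  # import path assumed

# The transport loads credentials.json once, keeps the result on
# self._credentials, and then calls create_channel(..., credentials=self._credentials,
# credentials_file=None), so only a single credential source reaches google-api-core.
options = ClientOptions(credentials_file="credentials.json")
client = bigquery_migration_v2alpha.MigrationServiceClient(client_options=options)

# Illustrative call; the parent resource name is a placeholder.
for workflow in client.list_migration_workflows(
    parent="projects/my-project/locations/us"
):
    print(workflow.name)
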
@@ -105,8 +105,8 @@ class MigrationTask(proto.Message):
This field is a member of `oneof`_ ``task_details``.
id (str):
Output only. Immutable. The unique identifier
for the migration task. The ID is server-
generated.
for the migration task. The ID is
server-generated.
type_ (str):
The type of the task. This must be a
supported task type.
@@ -534,21 +534,28 @@ def test_migration_service_client_client_options_scopes(


@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
"client_class,transport_class,transport_name,grpc_helpers",
[
(MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"),
(
MigrationServiceClient,
transports.MigrationServiceGrpcTransport,
"grpc",
grpc_helpers,
),
(
MigrationServiceAsyncClient,
transports.MigrationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
grpc_helpers_async,
),
],
)
def test_migration_service_client_client_options_credentials_file(
client_class, transport_class, transport_name
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")

with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
@@ -584,6 +591,72 @@ def test_migration_service_client_client_options_from_dict():
)


@pytest.mark.parametrize(
"client_class,transport_class,transport_name,grpc_helpers",
[
(
MigrationServiceClient,
transports.MigrationServiceGrpcTransport,
"grpc",
grpc_helpers,
),
(
MigrationServiceAsyncClient,
transports.MigrationServiceGrpcAsyncIOTransport,
"grpc_asyncio",
grpc_helpers_async,
),
],
)
def test_migration_service_client_create_channel_credentials_file(
client_class, transport_class, transport_name, grpc_helpers
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")

with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
always_use_jwt_access=True,
)

# test that the credentials from file are saved and used as the credentials.
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel"
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
file_creds = ga_credentials.AnonymousCredentials()
load_creds.return_value = (file_creds, None)
adc.return_value = (creds, None)
client = client_class(client_options=options, transport=transport_name)
create_channel.assert_called_with(
"bigquerymigration.googleapis.com:443",
credentials=file_creds,
credentials_file=None,
quota_project_id=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
scopes=None,
default_host="bigquerymigration.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)


@pytest.mark.parametrize(
"request_type", [migration_service.CreateMigrationWorkflowRequest, dict,]
)
