Skip to content
Permalink
Browse files
fix!: avoid collision with built-in functions by renaming type property to type_ (#53)

BREAKING CHANGE: type is renamed to type_ to avoid conflict with built-in functions (introduced in googleapis/gapic-generator-python#595)

* changes without context

        autosynth cannot find the source of changes triggered by earlier changes in this
        repository, or by version upgrades to tools such as linters.

* chore(python): use BUILD_SPECIFIC_GCLOUD_PROJECT for samples

https://github.com/googleapis/python-talent/blob/ef045e8eb348db36d7a2a611e6f26b11530d273b/samples/snippets/noxfile_config.py#L27-L32

`BUILD_SPECIFIC_GCLOUD_PROJECT` is an alternate project used for sample tests that do poorly with concurrent runs on the same project.

Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com>
Source-Date: Wed Sep 30 13:06:03 2020 -0600
Source-Repo: googleapis/synthtool
Source-Sha: 9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4
Source-Link: googleapis/synthtool@9b0da52

Co-authored-by: Takashi Matsuo <tmatsuo@google.com>
  • Loading branch information
yoshi-automation and tmatsuo committed Oct 13, 2020
1 parent 629a088 commit b95441140f7c86dd3e833aef0532badd6280ef48
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.6"
}

# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
value: "python-docs-samples-tests-py36"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}

# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
value: "python-docs-samples-tests-py37"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}

# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
value: "python-docs-samples-tests-py38"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-bigquery-datatransfer/.kokoro/test-samples.sh"
@@ -19,10 +19,10 @@
from distutils import util
import os
import re
from typing import Callable, Dict, Sequence, Tuple, Type, Union
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources

import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -161,9 +161,9 @@ def parse_transfer_config_path(path: str) -> Dict[str, str]:
def __init__(
self,
*,
credentials: credentials.Credentials = None,
transport: Union[str, DataTransferServiceTransport] = None,
client_options: ClientOptions = None,
credentials: Optional[credentials.Credentials] = None,
transport: Union[str, DataTransferServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the data transfer service client.
@@ -177,8 +177,8 @@ def __init__(
transport (Union[str, ~.DataTransferServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
client_options (client_options_lib.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
@@ -204,9 +204,9 @@ def __init__(
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = ClientOptions.from_dict(client_options)
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = ClientOptions.ClientOptions()
client_options = client_options_lib.ClientOptions()

# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
@@ -70,7 +70,7 @@ class DataSourceParameter(proto.Message):
Parameter display name in the user interface.
description (str):
Parameter description.
type (~.datatransfer.DataSourceParameter.Type):
type_ (~.datatransfer.DataSourceParameter.Type):
Parameter type.
required (bool):
Is parameter required.
@@ -122,7 +122,7 @@ class Type(proto.Enum):

description = proto.Field(proto.STRING, number=3)

type = proto.Field(proto.ENUM, number=4, enum=Type,)
type_ = proto.Field(proto.ENUM, number=4, enum=Type,)

required = proto.Field(proto.BOOL, number=5)

@@ -0,0 +1 @@
See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md
@@ -0,0 +1 @@
See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md
@@ -3,22 +3,30 @@
{
"git": {
"name": ".",
"remote": "git@github.com:plamut/python-bigquery-datatransfer.git",
"sha": "41256eec1994fbff48894c7055e6440b4e636628"
"remote": "https://github.com/googleapis/python-bigquery-datatransfer.git",
"sha": "cc2b9ff311dfa6ec9d181d4c2a4c952a609f5dec"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
"sha": "eabe7c0fde64b1451df6ea171b2009238b0df07c",
"internalRef": "335110052"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
"sha": "da29da32b3a988457b49ae290112b74f14b713cc"
"sha": "9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
"sha": "da29da32b3a988457b49ae290112b74f14b713cc"
"sha": "9b0da5204ab90bcc36f8cd4e5689eff1a54cc3e4"
}
}
],
@@ -32,5 +40,98 @@
"generator": "bazel"
}
}
],
"generatedFiles": [
".flake8",
".github/CONTRIBUTING.md",
".github/ISSUE_TEMPLATE/bug_report.md",
".github/ISSUE_TEMPLATE/feature_request.md",
".github/ISSUE_TEMPLATE/support_request.md",
".github/PULL_REQUEST_TEMPLATE.md",
".github/release-please.yml",
".github/snippet-bot.yml",
".gitignore",
".kokoro/build.sh",
".kokoro/continuous/common.cfg",
".kokoro/continuous/continuous.cfg",
".kokoro/docker/docs/Dockerfile",
".kokoro/docker/docs/fetch_gpg_keys.sh",
".kokoro/docs/common.cfg",
".kokoro/docs/docs-presubmit.cfg",
".kokoro/docs/docs.cfg",
".kokoro/populate-secrets.sh",
".kokoro/presubmit/common.cfg",
".kokoro/presubmit/presubmit.cfg",
".kokoro/publish-docs.sh",
".kokoro/release.sh",
".kokoro/release/common.cfg",
".kokoro/release/release.cfg",
".kokoro/samples/lint/common.cfg",
".kokoro/samples/lint/continuous.cfg",
".kokoro/samples/lint/periodic.cfg",
".kokoro/samples/lint/presubmit.cfg",
".kokoro/samples/python3.6/common.cfg",
".kokoro/samples/python3.6/continuous.cfg",
".kokoro/samples/python3.6/periodic.cfg",
".kokoro/samples/python3.6/presubmit.cfg",
".kokoro/samples/python3.7/common.cfg",
".kokoro/samples/python3.7/continuous.cfg",
".kokoro/samples/python3.7/periodic.cfg",
".kokoro/samples/python3.7/presubmit.cfg",
".kokoro/samples/python3.8/common.cfg",
".kokoro/samples/python3.8/continuous.cfg",
".kokoro/samples/python3.8/periodic.cfg",
".kokoro/samples/python3.8/presubmit.cfg",
".kokoro/test-samples.sh",
".kokoro/trampoline.sh",
".kokoro/trampoline_v2.sh",
".trampolinerc",
"CODE_OF_CONDUCT.md",
"CONTRIBUTING.rst",
"LICENSE",
"MANIFEST.in",
"docs/_static/custom.css",
"docs/_templates/layout.html",
"docs/bigquery_datatransfer_v1/services.rst",
"docs/bigquery_datatransfer_v1/types.rst",
"docs/conf.py",
"docs/multiprocessing.rst",
"google/cloud/bigquery_datatransfer/__init__.py",
"google/cloud/bigquery_datatransfer/py.typed",
"google/cloud/bigquery_datatransfer_v1/__init__.py",
"google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto",
"google/cloud/bigquery_datatransfer_v1/proto/transfer.proto",
"google/cloud/bigquery_datatransfer_v1/py.typed",
"google/cloud/bigquery_datatransfer_v1/services/__init__.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/__init__.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/pagers.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py",
"google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py",
"google/cloud/bigquery_datatransfer_v1/types/__init__.py",
"google/cloud/bigquery_datatransfer_v1/types/datatransfer.py",
"google/cloud/bigquery_datatransfer_v1/types/transfer.py",
"mypy.ini",
"noxfile.py",
"renovate.json",
"samples/AUTHORING_GUIDE.md",
"samples/CONTRIBUTING.md",
"samples/noxfile.py",
"samples/snippets/noxfile.py",
"scripts/decrypt-secrets.sh",
"scripts/fixup_bigquery_datatransfer_v1_keywords.py",
"scripts/readme-gen/readme_gen.py",
"scripts/readme-gen/templates/README.tmpl.rst",
"scripts/readme-gen/templates/auth.tmpl.rst",
"scripts/readme-gen/templates/auth_api_key.tmpl.rst",
"scripts/readme-gen/templates/install_deps.tmpl.rst",
"scripts/readme-gen/templates/install_portaudio.tmpl.rst",
"setup.cfg",
"testing/.gitignore",
"tests/unit/gapic/bigquery_datatransfer_v1/__init__.py",
"tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py"
]
}

0 comments on commit b954411

Please sign in to comment.