Skip to content
Permalink
Browse files
feat: accept DatasetListItem where DatasetReference is accepted (#597)
* split out and pytestify list_tables tests.

Also, exercise dataset polymorphism in some of the tests.

* list_tables now accepts DatasetListItem objects

* Get coverage to 100%

But why do we run coverage on test code?

* lint

* Update exception text for DatasetListItem

* Bypass opentelemetry tracing in unit tests.

* Got rid of opentelemetry tracing checks. They aren't needed.

* abstracted dataset-argument handling

And applied it to `list_tables` and `list_models`.

* Converted list_model tests to pytest and included check for dataset polymorphism

* removed unneeded blanl lines.

* Made list_routines accept DatasetListItem

and converted list_routines tests to pytest.

* create_dataset accepts DatasetListItem

Also converted create_dataset tests to pytest.

(And fixed some long lines.)

* Converted list_routine tests to pytest

* include string dataset representation in dataset polymorphism.

* removed some unused imports

* Updated delete_dataset tests

- Polymorphic on dataset
- pytest

* black

* lint

* We don't actually need to avoid opentelemetry

And a 3.6 test depended on it.

* fixed docstrings to include DatasetListItem in dataset polymorphic APIs.
  • Loading branch information
jimfulton committed Apr 12, 2021
1 parent 8f4c0b8 commit c8b5581ea3c94005d69755c4a3b5a0d8900f3fe2
@@ -449,6 +449,22 @@ def _create_bqstorage_client(self):

return bigquery_storage.BigQueryReadClient(credentials=self._credentials)

def _dataset_from_arg(self, dataset):
    """Coerce a dataset argument to a ``Dataset`` or ``DatasetReference``.

    Accepts a :class:`Dataset`, :class:`DatasetReference`,
    :class:`DatasetListItem`, or a string dataset ID (optionally
    project-qualified; ``self.project`` is the default project).

    Raises:
        TypeError: if *dataset* is none of the accepted types.
    """
    # A string is parsed into a reference, which trivially satisfies
    # the accepted-type contract.
    if isinstance(dataset, str):
        return DatasetReference.from_string(
            dataset, default_project=self.project
        )
    if isinstance(dataset, (Dataset, DatasetReference)):
        return dataset
    # A list item carries a reference we can unwrap.
    if isinstance(dataset, DatasetListItem):
        return dataset.reference
    raise TypeError(
        "dataset must be a Dataset, DatasetReference, DatasetListItem,"
        " or string"
    )

def create_dataset(
self, dataset, exists_ok=False, retry=DEFAULT_RETRY, timeout=None
):
@@ -461,6 +477,7 @@ def create_dataset(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A :class:`~google.cloud.bigquery.dataset.Dataset` to create.
@@ -491,10 +508,7 @@ def create_dataset(
>>> dataset = client.create_dataset(dataset)
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)
dataset = self._dataset_from_arg(dataset)
if isinstance(dataset, DatasetReference):
dataset = Dataset(dataset)

@@ -1133,6 +1147,7 @@ def list_models(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset whose models to list from the
@@ -1160,13 +1175,7 @@ def list_models(
:class:`~google.cloud.bigquery.model.Model` contained
within the requested dataset.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset, DatasetReference, or string")
dataset = self._dataset_from_arg(dataset)

path = "%s/models" % dataset.path
span_attributes = {"path": path}
@@ -1210,6 +1219,7 @@ def list_routines(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset whose routines to list from the
@@ -1237,14 +1247,7 @@ def list_routines(
:class:`~google.cloud.bigquery.routine.Routine`s contained
within the requested dataset, limited by ``max_results``.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset, DatasetReference, or string")

dataset = self._dataset_from_arg(dataset)
path = "{}/routines".format(dataset.path)

span_attributes = {"path": path}
@@ -1288,6 +1291,7 @@ def list_tables(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset whose tables to list from the
@@ -1315,14 +1319,7 @@ def list_tables(
:class:`~google.cloud.bigquery.table.TableListItem` contained
within the requested dataset.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset, DatasetReference, or string")

dataset = self._dataset_from_arg(dataset)
path = "%s/tables" % dataset.path
span_attributes = {"path": path}

@@ -1365,6 +1362,7 @@ def delete_dataset(
dataset (Union[ \
google.cloud.bigquery.dataset.Dataset, \
google.cloud.bigquery.dataset.DatasetReference, \
google.cloud.bigquery.dataset.DatasetListItem, \
str, \
]):
A reference to the dataset to delete. If a string is passed
@@ -1384,14 +1382,7 @@ def delete_dataset(
Defaults to ``False``. If ``True``, ignore "not found" errors
when deleting the dataset.
"""
if isinstance(dataset, str):
dataset = DatasetReference.from_string(
dataset, default_project=self.project
)

if not isinstance(dataset, (Dataset, DatasetReference)):
raise TypeError("dataset must be a Dataset or a DatasetReference")

dataset = self._dataset_from_arg(dataset)
params = {}
path = dataset.path
if delete_contents:
@@ -0,0 +1,23 @@
import pytest

from .helpers import make_client


@pytest.fixture
def client():
    """A BigQuery client backed by mock credentials."""
    return make_client()


@pytest.fixture
def PROJECT():
    """The project ID used throughout these tests."""
    return "PROJECT"


@pytest.fixture
def DS_ID():
    """The dataset ID used throughout these tests."""
    return "DATASET_ID"


@pytest.fixture
def LOCATION():
    """The location used throughout these tests."""
    return "us-central"
@@ -12,6 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import google.auth.credentials
import google.cloud.bigquery.client
import google.cloud.bigquery.dataset
import mock
import pytest


def make_connection(*responses):
import google.cloud.bigquery._http
@@ -31,3 +36,47 @@ def _to_pyarrow(value):
import pyarrow

return pyarrow.array([value])[0]


def make_client(project="PROJECT", **kw):
    """Return a BigQuery ``Client`` whose credentials are a spec'd mock.

    Args:
        project: project ID for the client (default ``"PROECT"`` stand-in
            value ``"PROJECT"``).
        **kw: forwarded to the ``Client`` constructor.

    Note: ``google.auth.credentials`` must be imported explicitly
    (see the module imports) rather than relied on as a transitive
    side effect of importing ``google.cloud.bigquery.client``.
    """
    credentials = mock.Mock(spec=google.auth.credentials.Credentials)
    return google.cloud.bigquery.client.Client(project, credentials, **kw)


def make_dataset_reference_string(project, ds_id):
    """Return the ``project.dataset_id`` string form of a dataset reference."""
    return "{}.{}".format(project, ds_id)


def make_dataset(project, ds_id):
    """Build a ``Dataset`` for the given project and dataset ID."""
    reference = google.cloud.bigquery.dataset.DatasetReference(project, ds_id)
    return google.cloud.bigquery.dataset.Dataset(reference)


def make_dataset_list_item(project, ds_id):
    """Build a ``DatasetListItem`` from a minimal API resource mapping."""
    resource = {"datasetReference": {"projectId": project, "datasetId": ds_id}}
    return google.cloud.bigquery.dataset.DatasetListItem(resource)


def identity(x):
    """Return *x* unchanged (no-op reference converter for the table below)."""
    return x


def get_reference(x):
    """Return the ``reference`` attribute of *x* (e.g. of a list item)."""
    return x.reference


# (factory, to_reference) pairs covering every accepted "dataset" argument
# form: a DatasetReference, a Dataset, a DatasetListItem, and a
# "project.dataset_id" string. The second element converts the factory's
# product back into a DatasetReference for use in assertions.
dataset_like = [
    (google.cloud.bigquery.dataset.DatasetReference, identity),
    (make_dataset, identity),
    (make_dataset_list_item, get_reference),
    (
        make_dataset_reference_string,
        google.cloud.bigquery.dataset.DatasetReference.from_string,
    ),
]

# Decorator that parametrizes a test over all dataset-argument forms above.
dataset_polymorphic = pytest.mark.parametrize(
    "make_dataset,get_reference", dataset_like
)
Loading

0 comments on commit c8b5581

Please sign in to comment.