Merge pull request #1425 from tseaver/1424-bigquery-missing_datasets_key
#1424: Defend against missing 'datasets' key in datasets/list response.
tseaver committed Jan 28, 2016
2 parents 2911bd9 + 0758aa4 commit 7b08a59
Showing 2 changed files with 4 additions and 24 deletions.
2 changes: 1 addition & 1 deletion gcloud/bigquery/client.py
@@ -88,7 +88,7 @@ def list_datasets(self, include_all=False, max_results=None,
         resp = self.connection.api_request(method='GET', path=path,
                                            query_params=params)
         datasets = [Dataset.from_api_repr(resource, self)
-                    for resource in resp['datasets']]
+                    for resource in resp.get('datasets', ())]
         return datasets, resp.get('nextPageToken')
 
     def dataset(self, dataset_name):
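A minimal standalone sketch, not part of the commit, of what the one-line change above buys: when the datasets/list response comes back without a 'datasets' key (the situation reported in #1424), indexing it directly raises KeyError, while the .get('datasets', ()) form falls through to an empty result. The parse_old/parse_new helpers are hypothetical stand-ins for the list comprehension in Client.list_datasets.

# Hypothetical stand-ins for the list comprehension in Client.list_datasets;
# only the resp['datasets'] vs. resp.get('datasets', ()) access differs.

def parse_old(resp):
    # Pre-fix: assumes the 'datasets' key is always present.
    return [resource for resource in resp['datasets']]

def parse_new(resp):
    # Post-fix: a response without 'datasets' yields an empty list.
    return [resource for resource in resp.get('datasets', ())]

empty_resp = {}  # mirrors the DATA = {} payload used in the regression test below

print(parse_new(empty_resp))       # []
try:
    parse_old(empty_resp)
except KeyError as exc:
    print('pre-fix code raises', exc)   # pre-fix code raises 'datasets'

The regression test below drives the same path through the real client against a stubbed connection.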
26 changes: 3 additions & 23 deletions gcloud/bigquery/test_client.py
@@ -74,39 +74,19 @@ def test_list_datasets_defaults(self):
         self.assertEqual(req['method'], 'GET')
         self.assertEqual(req['path'], '/%s' % PATH)
 
-    def test_list_datasets_explicit(self):
-        from gcloud.bigquery.dataset import Dataset
+    def test_list_datasets_explicit_response_missing_datasets_key(self):
         PROJECT = 'PROJECT'
-        DATASET_1 = 'dataset_one'
-        DATASET_2 = 'dataset_two'
         PATH = 'projects/%s/datasets' % PROJECT
         TOKEN = 'TOKEN'
-        DATA = {
-            'datasets': [
-                {'kind': 'bigquery#dataset',
-                 'id': '%s:%s' % (PROJECT, DATASET_1),
-                 'datasetReference': {'datasetId': DATASET_1,
-                                      'projectId': PROJECT},
-                 'friendlyName': None},
-                {'kind': 'bigquery#dataset',
-                 'id': '%s:%s' % (PROJECT, DATASET_2),
-                 'datasetReference': {'datasetId': DATASET_2,
-                                      'projectId': PROJECT},
-                 'friendlyName': 'Two'},
-            ]
-        }
+        DATA = {}
         creds = _Credentials()
         client = self._makeOne(PROJECT, creds)
         conn = client.connection = _Connection(DATA)
 
         datasets, token = client.list_datasets(
             include_all=True, max_results=3, page_token=TOKEN)
 
-        self.assertEqual(len(datasets), len(DATA['datasets']))
-        for found, expected in zip(datasets, DATA['datasets']):
-            self.assertTrue(isinstance(found, Dataset))
-            self.assertEqual(found.dataset_id, expected['id'])
-            self.assertEqual(found.friendly_name, expected['friendlyName'])
+        self.assertEqual(len(datasets), 0)
         self.assertEqual(token, None)
 
         self.assertEqual(len(conn._requested), 1)
