
Commit

Merge 7f83b62 into 4806f04
esloho committed Oct 10, 2019
2 parents 4806f04 + 7f83b62 commit bedd332
Showing 11 changed files with 78 additions and 23 deletions.
15 changes: 11 additions & 4 deletions cartoframes/data/observatory/catalog.py
@@ -1,5 +1,6 @@
from __future__ import absolute_import

from .entity import is_slug_value
from .category import Category
from .country import Country
from .geography import Geography
@@ -62,7 +63,7 @@ def country(self, country_id):
Args:
country_id (str):
Value for the column 'country_id' to be used when querying the Catalog.
Id value of the country to be used for filtering the Catalog.
Returns:
:py:class:`Catalog <cartoframes.data.observatory.catalog.Catalog>`
@@ -77,7 +78,7 @@ def category(self, category_id):
Args:
category_id (str):
Value for the column 'category_id' to be used when querying the Catalog.
Id value of the category to be used for filtering the Catalog.
Returns:
:py:class:`Catalog <cartoframes.data.observatory.catalog.Catalog>`
@@ -92,14 +93,20 @@ def geography(self, geography_id):
Args:
geography_id (str):
Value for the column 'geography_id' to be used when querying the Catalog
Id or slug value of the geography to be used for filtering the Catalog
Returns:
:py:class:`Catalog <cartoframes.data.observatory.catalog.Catalog>`
"""

self.filters[GEOGRAPHY_FILTER] = geography_id
filter_value = geography_id

if is_slug_value(geography_id):
geography = Geography.get(geography_id)
filter_value = geography.id

self.filters[GEOGRAPHY_FILTER] = filter_value
return self

def clear_filters(self):
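With this change, Catalog.geography() accepts either a full geography id or its shorter slug; a slug is resolved to the id through Geography.get() before the filter is stored. A minimal usage sketch (the id and slug values are the fixtures used by this commit's tests, and running it assumes a working cartoframes setup):

from cartoframes.data.observatory.catalog import Catalog

catalog = Catalog()

# Filtering by the full id keeps working as before.
datasets = catalog.geography('carto-do-public.tiger.geography_esp_census_2019').datasets

# After this change, the slug resolves to the same geography.
datasets = catalog.geography('esp_census_2019_4567890d').datasets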
4 changes: 4 additions & 0 deletions cartoframes/data/observatory/entity.py
@@ -88,6 +88,10 @@ def _get_bigquery_client(project, credentials):
return BigQueryClient(project, credentials)


def is_slug_value(id_value):
return len(id_value.split('.')) == 1


class CatalogList(list):

def __init__(self, data):
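The new helper treats any value without dots as a slug, since full ids are dot-separated BigQuery-style paths. A quick demonstration (values taken from the fixtures in this commit):

from cartoframes.data.observatory.entity import is_slug_value

print(is_slug_value('esp_census_2019_4567890d'))                         # True  -> treated as a slug
print(is_slug_value('carto-do-public.tiger.geography_esp_census_2019'))  # False -> treated as a full id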
4 changes: 2 additions & 2 deletions cartoframes/data/observatory/repository/entity_repo.py
@@ -1,5 +1,5 @@
from cartoframes.exceptions import DiscoveryException
from cartoframes.data.observatory.entity import CatalogList
from cartoframes.data.observatory.entity import CatalogList, is_slug_value
from .repo_client import RepoClient

try:
@@ -47,7 +47,7 @@ def _get_filters(self, filters):
return cleaned_filters

def _get_id_filter(self, id_):
if self.slug_field is not None and len(id_.split('.')) == 1:
if self.slug_field is not None and is_slug_value(id_):
return {self.slug_field: id_}

return {self.id_field: id_}
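For repositories that define a slug field, get_by_id() can now be called with a slug and the query filters on that field instead of the id. A standalone sketch of the routing done by _get_id_filter (build_id_filter is an illustrative stand-in, not library code; the field names match the variable repositories below):

def build_id_filter(value, id_field='id', slug_field='slug'):
    # Same heuristic as is_slug_value: no dots means the value is a slug.
    if slug_field is not None and '.' not in value:
        return {slug_field: value}
    return {id_field: value}

print(build_id_filter('basicstats_census_1234567a'))                 # {'slug': 'basicstats_census_1234567a'}
print(build_id_filter('carto-do-public.project.basicstats-census'))  # {'id': 'carto-do-public.project.basicstats-census'}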
cartoframes/data/observatory/repository/variable_group_repo.py
@@ -5,6 +5,7 @@


_VARIABLE_GROUP_ID_FIELD = 'id'
_VARIABLE_GROUP_SLUG_FIELD = 'slug'
_ALLOWED_FILTERS = [DATASET_FILTER]


@@ -15,7 +16,8 @@ def get_variable_group_repo():
class VariableGroupRepository(EntityRepository):

def __init__(self):
super(VariableGroupRepository, self).__init__(_VARIABLE_GROUP_ID_FIELD, _ALLOWED_FILTERS)
super(VariableGroupRepository, self).__init__(_VARIABLE_GROUP_ID_FIELD, _ALLOWED_FILTERS,
_VARIABLE_GROUP_SLUG_FIELD)

def get_by_dataset(self, dataset_id):
return self._get_filtered_entities({DATASET_FILTER: dataset_id})
@@ -31,6 +33,7 @@ def _get_rows(self, filters=None):
def _map_row(self, row):
return {
'id': self._normalize_field(row, self.id_field),
'slug': self._normalize_field(row, 'slug'),
'name': self._normalize_field(row, 'name'),
'dataset_id': self._normalize_field(row, 'dataset_id'),
'starred': self._normalize_field(row, 'starred')
4 changes: 3 additions & 1 deletion cartoframes/data/observatory/repository/variable_repo.py
@@ -5,6 +5,7 @@


_VARIABLE_ID_FIELD = 'id'
_VARIABLE_SLUG_FIELD = 'slug'
_ALLOWED_DATASETS = [DATASET_FILTER, VARIABLE_GROUP_FILTER]


@@ -15,7 +16,7 @@ def get_variable_repo():
class VariableRepository(EntityRepository):

def __init__(self):
super(VariableRepository, self).__init__(_VARIABLE_ID_FIELD, _ALLOWED_DATASETS)
super(VariableRepository, self).__init__(_VARIABLE_ID_FIELD, _ALLOWED_DATASETS, _VARIABLE_SLUG_FIELD)

def get_by_dataset(self, dataset_id):
return self._get_filtered_entities({DATASET_FILTER: dataset_id})
@@ -34,6 +35,7 @@ def _map_row(self, row):
def _map_row(self, row):
return {
'id': self._normalize_field(row, self.id_field),
'slug': self._normalize_field(row, 'slug'),
'name': self._normalize_field(row, 'name'),
'description': self._normalize_field(row, 'description'),
'column_name': self._normalize_field(row, 'column_name'),
20 changes: 12 additions & 8 deletions test/data/observatory/examples.py
@@ -27,7 +27,7 @@

db_geography1 = {
'id': 'carto-do-public.tiger.geography_esp_census_2019',
'slug': 'geography_esp_census_2019',
'slug': 'esp_census_2019_4567890d',
'name': 'ESP - Census',
'description': 'Geography data for Spanish census',
'provider_id': 'bbva',
@@ -41,7 +41,7 @@
}
db_geography2 = {
'id': 'carto-do-public.tiger.geography_esp_municipalities_2019',
'slug': 'carto-do-public-esp-municipalities_2019',
'slug': 'esp_municipalities_2019_3456789c',
'name': 'ESP - Municipalities',
'description': 'Geography data for Spanish municipalities',
'provider_id': 'bbva',
@@ -59,7 +59,7 @@

db_dataset1 = {
'id': 'carto-do-public.project.basicstats-census',
'slug': 'carto-do-public-basicstats-census',
'slug': 'basicstats_census_1234567a',
'name': 'Basic Stats - Census',
'description': 'Basic stats on 2019 Spanish census',
'provider_id': 'bbva',
@@ -77,7 +77,7 @@
}
db_dataset2 = {
'id': 'carto-do-public.project.basicstats-municipalities',
'slug': 'carto-do-public-basicstats-municipalities',
'slug': 'basicstats_municipalities_2345678b',
'name': 'Basic Stats - Municipalities',
'description': 'Basic stats on 2019 Spanish municipalities',
'provider_id': 'bbva',
@@ -98,7 +98,8 @@
test_datasets = CatalogList([test_dataset1, test_dataset2])

db_variable1 = {
'id': 'var1',
'id': 'carto-do.variable.var1',
'slug': 'var1',
'name': 'Population',
'description': 'The number of people within each geography',
'column_name': 'pop',
@@ -110,7 +111,8 @@
'summary_jsonb': {}
}
db_variable2 = {
'id': 'var2',
'id': 'carto-do.variable.var2',
'slug': 'var2',
'name': 'Date',
'description': 'The date the data refers to (YYYY-MM format for month and YYYY-MM-DD for day).',
'column_name': 'Date',
@@ -138,13 +140,15 @@
test_providers = CatalogList([test_provider1, test_provider2])

db_variable_group1 = {
'id': 'vargroup1',
'id': 'carto-do.variable_group.vargroup1',
'slug': 'vargroup1',
'name': 'Population',
'dataset_id': 'dataset1',
'starred': True
}
db_variable_group2 = {
'id': 'vargroup2',
'id': 'carto-do.variable_group.vargroup2',
'slug': 'vargroup2',
'name': 'Date',
'dataset_id': 'dataset1',
'starred': False
test/data/observatory/repository/test_variable_group_repo.py
@@ -91,6 +91,7 @@ def test_missing_fields_are_mapped_as_None(self, mocked_repo):

expected_variables_groups = CatalogList([VariableGroup({
'id': 'variable_group1',
'slug': None,
'name': None,
'dataset_id': None,
'starred': None
15 changes: 15 additions & 0 deletions test/data/observatory/repository/test_variable_repo.py
@@ -68,6 +68,20 @@ def test_get_by_id_unknown_fails(self, mocked_repo):
with self.assertRaises(DiscoveryException):
repo.get_by_id(requested_id)

@patch.object(RepoClient, 'get_variables')
def test_get_by_slug(self, mocked_repo):
# Given
mocked_repo.return_value = [db_variable1]
requested_slug = db_variable1['slug']
repo = VariableRepository()

# When
variable = repo.get_by_id(requested_slug)

# Then
mocked_repo.assert_called_once_with({'slug': requested_slug})
assert variable == test_variable1

@patch.object(RepoClient, 'get_variables')
def test_get_by_dataset(self, mocked_repo):
# Given
@@ -106,6 +120,7 @@ def test_missing_fields_are_mapped_as_None(self, mocked_repo):

expected_variables = CatalogList([Variable({
'id': 'variable1',
'slug': None,
'name': None,
'description': None,
'column_name': None,
21 changes: 20 additions & 1 deletion test/data/observatory/test_catalog.py
@@ -1,13 +1,15 @@
import unittest


from cartoframes.auth import Credentials
from cartoframes.data.observatory.geography import Geography
from cartoframes.data.observatory.country import Country
from cartoframes.data.observatory.category import Category
from cartoframes.data.observatory.dataset import Dataset
from cartoframes.data.observatory.catalog import Catalog
from cartoframes.data.observatory.repository.geography_repo import GeographyRepository
from .examples import test_country2, test_country1, test_category1, test_category2, test_dataset1, test_dataset2, \
test_geographies, test_datasets, test_categories, test_countries
test_geographies, test_datasets, test_categories, test_countries, test_geography1

try:
from unittest.mock import Mock, patch
@@ -126,6 +128,23 @@ def test_all_filters(self, mocked_datasets):

assert datasets == test_datasets

@patch.object(Dataset, 'get_all')
@patch.object(GeographyRepository, 'get_by_id')
def test_geography_filter_by_slug(self, mocked_repo, mocked_datasets):
# Given
mocked_repo.return_value = test_geography1
mocked_datasets.return_value = test_datasets
slug = 'esp_census_2019_4567890d'
catalog = Catalog()

# When
datasets = catalog.geography(slug).datasets

# Then
mocked_repo.assert_called_once_with(slug)
mocked_datasets.assert_called_once_with({'geography_id': test_geography1.id})
assert datasets == test_datasets

@patch.object(Dataset, 'get_all')
def test_purchased_datasets(self, mocked_purchased_datasets):
# Given
6 changes: 3 additions & 3 deletions test/data/observatory/test_variable.py
@@ -99,7 +99,7 @@ def test_variable_is_represented_with_id(self):
variable_repr = repr(variable)

# Then
assert variable_repr == "<Variable('{id}')>".format(id=db_variable1['id'])
assert variable_repr == "<Variable('{id}')>".format(id=db_variable1['slug'])

def test_variable_is_printed_with_classname(self):
# Given
@@ -133,7 +133,7 @@ def test_variable_list_is_printed_with_classname(self):

# Then
assert variables_str == "[<Variable('{id1}')>, <Variable('{id2}')>]" \
.format(id1=db_variable1['id'], id2=db_variable2['id'])
.format(id1=db_variable1['slug'], id2=db_variable2['slug'])

def test_variable_list_is_represented_with_ids(self):
# Given
@@ -144,7 +144,7 @@ def test_variable_list_is_represented_with_ids(self):

# Then
assert variables_repr == "[<Variable('{id1}')>, <Variable('{id2}')>]"\
.format(id1=db_variable1['id'], id2=db_variable2['id'])
.format(id1=db_variable1['slug'], id2=db_variable2['slug'])

@patch.object(VariableRepository, 'get_by_id')
def test_get_variable_by_id(self, mocked_repo):
6 changes: 3 additions & 3 deletions test/data/observatory/test_variable_group.py
@@ -88,7 +88,7 @@ def test_variable_group_is_represented_with_id(self):
variable_group_repr = repr(variable_group)

# Then
assert variable_group_repr == "<VariableGroup('{id}')>".format(id=db_variable_group1['id'])
assert variable_group_repr == "<VariableGroup('{id}')>".format(id=db_variable_group1['slug'])

def test_variable_group_is_printed_with_classname(self):
# Given
@@ -122,7 +122,7 @@ def test_variable_group_list_is_printed_with_classname(self):

# Then
assert variables_groups_str == "[<VariableGroup('{id1}')>, <VariableGroup('{id2}')>]" \
.format(id1=db_variable_group1['id'], id2=db_variable_group2['id'])
.format(id1=db_variable_group1['slug'], id2=db_variable_group2['slug'])

def test_variable_group_list_is_represented_with_ids(self):
# Given
@@ -133,7 +133,7 @@ def test_variable_group_list_is_represented_with_ids(self):

# Then
assert variables_groups_repr == "[<VariableGroup('{id1}')>, <VariableGroup('{id2}')>]"\
.format(id1=db_variable_group1['id'], id2=db_variable_group2['id'])
.format(id1=db_variable_group1['slug'], id2=db_variable_group2['slug'])

@patch.object(VariableGroupRepository, 'get_by_id')
def test_get_variable_group_by_id(self, mocked_repo):
