diff --git a/Dockerfile b/Dockerfile
index be3f8bb..7774578 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,7 +1,7 @@
 FROM python:3.6-alpine
 
 RUN apk add --update --no-cache libpq postgresql-dev libffi libffi-dev bash curl libstdc++ nodejs
-RUN apk add --update --no-cache --virtual=build-dependencies build-base
+RUN apk add --update --no-cache --virtual=build-dependencies build-base
 RUN apk --repository http://dl-3.alpinelinux.org/alpine/edge/testing/ --update add leveldb leveldb-dev
 
 WORKDIR /app
@@ -9,7 +9,7 @@ ADD requirements.txt .
 RUN pip install -r requirements.txt
 
 # Install os-types, used in the loading process for fiscal modelling the datapackage
-RUN npm install -g os-types
+RUN npm install -g os-types@1.15.1-alpha.1
 
 RUN apk del build-dependencies
 RUN rm -rf /var/cache/apk/*
diff --git a/conductor/blueprints/package/controllers.py b/conductor/blueprints/package/controllers.py
index 27415ad..879f2f2 100644
--- a/conductor/blueprints/package/controllers.py
+++ b/conductor/blueprints/package/controllers.py
@@ -55,12 +55,12 @@ def prepare_field(field, slugs):
     ret = {
         'header': slug,
         'aliases': aliases,
-        'osType': field['osType'],
+        'columnType': field['columnType'],
     }
     if 'title' in field:
         ret['title'] = field['title']
     ret['options'] = copy_except(field,
-                                 ('name', 'title', 'osType', 'type',
+                                 ('name', 'title', 'columnType', 'type',
                                   'slug', 'conceptType', 'format'))
     return ret
 
@@ -138,7 +138,7 @@ def upload(datapackage, token, cache_get, cache_set):
                     prepare_field(f, slugs)
                     for f in
                     r.descriptor['schema']['fields']
-                    if 'osType' in f
+                    if 'columnType' in f
                 ]
             }
             package_id = \
diff --git a/tests/module/blueprints/package/test_controllers.py b/tests/module/blueprints/package/test_controllers.py
index 6182da2..bd36633 100644
--- a/tests/module/blueprints/package/test_controllers.py
+++ b/tests/module/blueprints/package/test_controllers.py
@@ -1,4 +1,3 @@
-from collections import namedtuple
 import unittest
 import time
 
@@ -16,6 +15,8 @@
 
 module = import_module('conductor.blueprints.package.controllers')
 dpp_module = import_module('datapackage.helpers')
+
+
 class Response:
     def __init__(self, status_code, _json):
         self.status_code = status_code
@@ -28,6 +29,7 @@ def raise_for_status(self):
         if self.status_code != 200:
             raise AssertionError('HTTP {}'.format(self.status_code))
 
+
 datapackage = {
     'name': 'my-dataset',
     'resources': [
@@ -36,7 +38,9 @@ def raise_for_status(self):
             'path': 'data.csv',
             'schema': {
                 'fields': [
-                    {'name': 'year', 'type': 'integer', 'osType': 'date:fiscal-year'}
+                    {'name': 'year',
+                     'type': 'integer',
+                     'columnType': 'date:fiscal-year'}
                 ]
             }
         }
@@ -47,6 +51,7 @@ def raise_for_status(self):
 callback = 'http://conductor/callback'
 token = None
 
+
 def cache_get(key):
     global _cache
     return _cache.get(key)
@@ -66,7 +71,8 @@ def setUp(self):
         global token
 
         self.private_key = PRIVATE_KEY
-        token = jwt.encode({'userid': 'owner'}, PRIVATE_KEY, algorithm='RS256').decode('ascii')
+        token = jwt.encode({'userid': 'owner'},
+                           PRIVATE_KEY, algorithm='RS256').decode('ascii')
 
         # Cleanup
         self.addCleanup(patch.stopall)
@@ -94,7 +100,8 @@ def assertResponse(self, ret, status=None, progress=None, error=None):
     def test___load___good_request(self):
         api_load = module.upload
         self.requests.get = Mock(return_value=Response(200, datapackage))
-        self.assertResponse(api_load('http://bla', token, cache_get, cache_set), 'queued', 0)
+        self.assertResponse(api_load('http://bla', token,
+                                     cache_get, cache_set), 'queued', 0)
 
     # def test___load___bad_request(self):
     #     api_load = module.upload
@@ -109,7 +116,10 @@ def test___load___good_request(self):
     def test___callback___server_down(self):
         api_load = module.upload
         self.requests.get = Mock(return_value=Response(499, datapackage))
-        self.assertResponse(api_load('http://bla', token, cache_get, cache_set), 'fail', error='HTTP 499')
+        self.assertResponse(api_load('http://bla',
+                                     token,
+                                     cache_get,
+                                     cache_set), 'fail', error='HTTP 499')
 
     def test___poll___good_request(self):
         api_load = module.upload