From 76ecdda7a1cccab6d722b6db010d127ed2dffe05 Mon Sep 17 00:00:00 2001 From: Nicholas Long Date: Thu, 19 Mar 2020 17:04:18 -0600 Subject: [PATCH 1/5] fix postgres port, patch portfolio manager --- .travis.yml | 32 +++++++--- config/settings/travis.py | 62 +++++++++++++------- seed/tests/test_api.py | 2 +- seed/views/portfoliomanager.py | 103 +++++++++++++++++++++------------ 4 files changed, 131 insertions(+), 68 deletions(-) diff --git a/.travis.yml b/.travis.yml index e69eee88f0..1a1e3f47f0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,3 @@ -sudo: required dist: xenial cache: directories: @@ -7,6 +6,7 @@ cache: - "$HOME/.pip-cache/" - "$HOME/.nvm" - ".tox" +os: linux language: python python: - "3.6" @@ -19,7 +19,7 @@ addons: - gdal-bin services: - docker - - redis-server + - redis - postgresql - xvfb before_install: @@ -41,19 +41,20 @@ install: - npm --version - nvm install stable before_script: - - psql -c "DROP DATABASE IF EXISTS seeddb;" -U postgres - - psql -c "DROP DATABASE IF EXISTS test_seeddb;" -U postgres - - mv config/settings/test_local_untracked.py config/settings/local_untracked.py + - psql -p 5433 -c "DROP DATABASE IF EXISTS seeddb;" -U postgres + - psql -p 5433 -c "DROP DATABASE IF EXISTS test_seeddb;" -U postgres + - cp config/settings/test_local_untracked.py config/settings/local_untracked.py - sudo add-apt-repository ppa:timescale/timescaledb-ppa -y - sudo apt-get update -q - sudo apt-get install -y timescaledb-postgresql-11 timescaledb-tools - sudo timescaledb-tune -yes - sudo service postgresql restart - - psql -c "CREATE DATABASE seeddb;" -U postgres - - psql -d seeddb -c "CREATE EXTENSION postgis;" -U postgres + - psql -p 5433 -c "CREATE DATABASE seeddb;" -U postgres + - psql -p 5433 -d seeddb -c "CREATE EXTENSION postgis;" -U postgres + - free -tm env: global: - - DOCKER_COMPOSE_VERSION=1.16.0 + - DOCKER_COMPOSE_VERSION=1.23.1 - DJANGO_SETTINGS_MODULE=config.settings.travis - DISPLAY=:99.0 - COVERALLS_REPO_TOKEN=y8UqJm8Bri5ZP8hr3YZM3guBaUKpsfoCv @@ -62,7 +63,7 @@ env: - TOX_ENV=python - TOX_ENV=flake8 - TOX_ENV=docs -# - TOX_ENV=functional + - TOX_ENV=functional - TOX_ENV=apitest script: - tox -e $TOX_ENV @@ -73,3 +74,16 @@ jobs: before_script: skip script: travis_wait 30 docker/travis_build_docker.sh env: DJANGO_SETTINGS_MODULE=config.settings.docker +after_failure: + - echo "Job Failed... Maybe these logs will help?" 
+ - free -tm + - ls -alt ~ + - ls -alt /home/travis/build/SEED-platform/seed/ + - echo "============================================ celery log ============================================" + - cat /home/travis/build/SEED-platform/seed/celery.log + - echo "============================================ celery console log ============================================" + - cat /home/travis/build/SEED-platform/seed/celery_console.log + - echo "============================================ server log ============================================" + - cat /home/travis/build/SEED-platform/seed/runserver.log + - echo "============================================ syslog ============================================" + - sudo cat /var/log/syslog diff --git a/config/settings/travis.py b/config/settings/travis.py index 426f7e4e4b..f1f21cefd4 100644 --- a/config/settings/travis.py +++ b/config/settings/travis.py @@ -8,6 +8,7 @@ from config.settings.test import * # noqa +# Travis uses a passwordless database DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', @@ -15,31 +16,48 @@ 'USER': 'postgres', 'PASSWORD': '', 'HOST': 'localhost', - 'PORT': '5432', + 'PORT': '5433', } } -# if 'test' in sys.argv: -# # Skip migrations to make testing faster -# MIGRATION_MODULES = { -# 'auth': None, -# 'contenttypes': None, -# 'default': None, -# 'sessions': None, -# -# 'core': None, -# 'profiles': None, -# 'snippets': None, -# 'scaffold_templates': None, -# } - TESTING_MAPQUEST_API_KEY = os.environ.get("TESTING_MAPQUEST_API_KEY") -CACHES = { - 'default': { - 'BACKEND': 'redis_cache.cache.RedisCache', - 'LOCATION': "127.0.0.1:6379", - 'OPTIONS': {'DB': 1}, - 'TIMEOUT': 300 - } +# Setup the logging specific to Travis +LOGGING = { + 'version': 1, + 'disable_existing_loggers': True, + 'handlers': { + 'console': { + 'level': 'INFO', + 'class': 'logging.StreamHandler' + }, + 'console-debug': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler' + }, + 'file': { + 'level': 'DEBUG', + 'class': 'logging.FileHandler', + 'filename': 'runserver.log', + }, + 'celery': { + 'level': 'DEBUG', + 'class': 'logging.FileHandler', + 'filename': 'celery.log', + }, + }, + 'loggers': { + 'django': { + 'handlers': ['file'], + 'level': os.getenv('DJANGO_LOG_LEVEL', 'DEBUG'), + }, + 'django.db.backends': { + 'level': 'INFO', + 'handlers': ['file'] + }, + 'celery': { + 'handlers': ['celery'], + 'level': 'DEBUG', + } + }, } diff --git a/seed/tests/test_api.py b/seed/tests/test_api.py index b8d2140329..980d15b88b 100644 --- a/seed/tests/test_api.py +++ b/seed/tests/test_api.py @@ -169,7 +169,7 @@ def test_organization(self): self.assertEqual(r['organizations'][0]['owners'][0]['first_name'], 'Jaqen') self.assertEqual(r['organizations'][0]['cycles'], [ { - 'name': '2018 Calendar Year', + 'name': '2019 Calendar Year', 'num_properties': 0, 'num_taxlots': 0, 'cycle_id': self.default_cycle.pk, diff --git a/seed/views/portfoliomanager.py b/seed/views/portfoliomanager.py index 20a3a59f08..ecc72e0600 100644 --- a/seed/views/portfoliomanager.py +++ b/seed/views/portfoliomanager.py @@ -125,10 +125,7 @@ def report(self, request): else: content = pm.generate_and_download_template_report(template) except PMExcept as pme: - return JsonResponse( - {'status': 'error', 'message': str(pme)}, - status=status.HTTP_400_BAD_REQUEST - ) + return JsonResponse({'status': 'error', 'message': str(pme)}, status=status.HTTP_400_BAD_REQUEST) try: content_object = xmltodict.parse(content, dict_constructor=dict) except Exception: # catch all because 
xmltodict doesn't specify a class of Exceptions @@ -158,8 +155,7 @@ def report(self, request): return JsonResponse({'status': 'success', 'properties': properties}) except Exception as e: - return JsonResponse({'status': 'error', 'message': e}, - status=status.HTTP_400_BAD_REQUEST) + return JsonResponse({'status': 'error', 'message': e}, status=status.HTTP_400_BAD_REQUEST) class PortfolioManagerImport(object): @@ -199,7 +195,8 @@ def login_and_set_cookie_header(self): raise PMExcept("SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.") # This returns a 200 even if the credentials are bad, so I'm having to check some text in the response - if 'The username and/or password you entered is not correct. Please try again.' in response.content.decode('utf-8'): + if 'The username and/or password you entered is not correct. Please try again.' in response.content.decode( + 'utf-8'): raise PMExcept('Unsuccessful response from login attempt; aborting. Check credentials.') # Upon successful logging in, we should have received a cookie header that we can reuse later @@ -218,36 +215,31 @@ def login_and_set_cookie_header(self): def get_list_of_report_templates(self): """ - This method calls out to the ESPM API to get the full list of template rows. For each row, it checks to see if - it has children rows, and if so, it calls out to the API for the child rows and retrieves IDs and names for - those as well. + New method to support update to ESPM - :return: Returns a list of template objects. All rows will have a z_seed_child_row key that is False for main + :return: Returns a list of template objects. All rows will have a z_seed_child_row key that is False for main rows and True for child rows """ - # login if needed if not self.authenticated_headers: self.login_and_set_cookie_header() - # Get the report templates - url = 'https://portfoliomanager.energystar.gov/pm/reports/templateTableRows' + # get the report data + url = 'https://portfoliomanager.energystar.gov/pm/reports/reportData' try: response = requests.get(url, headers=self.authenticated_headers) + except requests.exceptions.SSLError: raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') if not response.status_code == status.HTTP_200_OK: raise PMExcept('Unsuccessful response from report template rows query; aborting.') - try: - template_object = json.loads(response.text) - except ValueError: - raise PMExcept('Malformed JSON response from report template rows query; aborting.') - _log.debug('Received the following JSON return: ' + json.dumps(template_object, indent=2)) + + template_object = self.parse_template_response(response.text) # We need to parse the list of report templates - if 'rows' not in template_object: - raise PMExcept('Could not find rows key in template response; aborting.') - templates = template_object['rows'] + if 'customReportsData' not in template_object: + raise PMExcept('Could not find customReportsData key in template response; aborting.') + templates = template_object['customReportsData'] template_response = [] sorted_templates = sorted(templates, key=lambda x: x['name']) for t in sorted_templates: @@ -260,16 +252,24 @@ def get_list_of_report_templates(self): _log.debug('Found template,\n id=' + str(t['id']) + '\n name=' + str(t['name'])) if 'hasChildrenRows' in t and t['hasChildrenRows']: _log.debug('Template row has children data request rows, trying to get them now') - children_url = \ - 
'https://portfoliomanager.energystar.gov/pm/reports/templateTableChildrenRows/TEMPLATE/{0}'.format(
-                        t['id']
-                    )
+                children_url = f'https://portfoliomanager.energystar.gov/pm/reports/templateChildrenRows/TEMPLATE/{t["id"]}'
+
                 # SSL errors would have been caught earlier in this function and raised, so this should be ok
                 children_response = requests.get(children_url, headers=self.authenticated_headers)
                 if not children_response.status_code == status.HTTP_200_OK:
                     raise PMExcept('Unsuccessful response from child row template lookup; aborting.')
                 try:
-                    child_object = json.loads(children_response.text)
+                    # the data are now in the string under the 'data' key of the returned dictionary, with an excessive amount of
+                    # escaped double quotes.
+                    # e.g., response = {"data": "{"customReportsData":"..."}"}
+                    decoded = json.loads(children_response.text)  # .encode('utf-8').decode('unicode_escape')
+
+                    # strip the stray double quotes around the embedded JSON and un-escape the remaining double quotes
+                    data_to_parse = decoded['data'].replace('"{', '{').replace('}"', '}').replace('"[{', '[{').replace(
+                        '}]"', '}]').replace('\\"', '"')
+
+                    # print(f'data to parse: {data_to_parse}')
+                    child_object = json.loads(data_to_parse)['childrenRows']
                 except ValueError:
                     raise PMExcept('Malformed JSON response from report template child row query; aborting.')
                 _log.debug('Received the following child JSON return: ' + json.dumps(child_object, indent=2))
@@ -296,6 +296,29 @@ def get_template_by_name(templates, template_name):
         _log.debug("Desired report name found, template info: " + json.dumps(matched_template, indent=2))
         return matched_template
 
+    def parse_template_response(self, response_text):
+        """
+        This method is for the updated ESPM where the response is an escaped JSON string embedded in a JSON response.
+
+        :param response_text: str, response to parse
+        :return: dict
+        """
+        try:
+            # the data are now in the string under the 'data' key of the returned dictionary, with an excessive amount of
+            # escaped double quotes.
+            # e.g., response = {"data": "{"customReportsData":"..."}"}
+            decoded = json.loads(response_text)  # .encode('utf-8').decode('unicode_escape')
+
+            # strip the stray double quotes around the embedded JSON and un-escape the remaining double quotes
+            data_to_parse = decoded['data'].replace('"[{', '[{').replace('}]"', '}]').replace('\\"', '"')
+
+            # print(f'data to parse: {data_to_parse}')
+            template_object = json.loads(data_to_parse)
+            _log.debug('Received the following JSON return: ' + json.dumps(template_object, indent=2))
+            return template_object
+        except ValueError:
+            raise PMExcept('Malformed JSON response from report template rows query; aborting.')
+
     def generate_and_download_template_report(self, matched_template):
         """
         This method calls out to ESPM to trigger generation of a report for the supplied template. 
The process requires @@ -329,18 +352,21 @@ def generate_and_download_template_report(self, matched_template): response.headers)) # Now we need to wait while the report is being generated - url = 'https://portfoliomanager.energystar.gov/pm/reports/templateTableRows' + url = 'https://portfoliomanager.energystar.gov/pm/reports/reportData' attempt_count = 0 report_generation_complete = False while attempt_count < 10: attempt_count += 1 + + # get the report data try: response = requests.get(url, headers=self.authenticated_headers) except requests.exceptions.SSLError: raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') if not response.status_code == status.HTTP_200_OK: - raise PMExcept('Unsuccessful response from GET trying to check status on generated report; aborting.') - template_objects = json.loads(response.text)['rows'] + raise PMExcept('Unsuccessful response from report template rows query; aborting.') + + template_objects = self.parse_template_response(response.text)['customReportsData'] for t in template_objects: if 'id' in t and t['id'] == matched_template['id']: this_matched_template = t @@ -355,6 +381,7 @@ def generate_and_download_template_report(self, matched_template): else: report_generation_complete = True break + if report_generation_complete: _log.debug('Report appears to have been generated successfully (attempt_count=' + str(attempt_count) + ')') else: @@ -362,24 +389,26 @@ def generate_and_download_template_report(self, matched_template): # Finally we can download the generated report template_report_name = quote(matched_template['name']) + '.xml' - sanitized_template_report_name = template_report_name.replace('/', '_') - d_url = 'https://portfoliomanager.energystar.gov/pm/reports/template/download/%s/XML/false/%s?testEnv=false' % ( - str(template_report_id), sanitized_template_report_name + url = 'https://portfoliomanager.energystar.gov/pm/reports/template/download/{0}/XML/false/{1}?testEnv=false' + download_url = url.format( + str(template_report_id), template_report_name ) try: - response = requests.get(d_url, headers=self.authenticated_headers) + response = requests.get(download_url, headers=self.authenticated_headers) except requests.exceptions.SSLError: raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') if not response.status_code == status.HTTP_200_OK: error_message = 'Unsuccessful response from GET trying to download generated report;' error_message += ' Generated report name: ' + template_report_name + ';' - error_message += ' Tried to download report from URL: ' + d_url + ';' + error_message += ' Tried to download report from URL: ' + download_url + ';' error_message += ' Returned with a status code = ' + response.status_code + ';' raise PMExcept(error_message) return response.content def generate_and_download_child_data_request_report(self, matched_data_request): """ + Updated for recent update of ESPM + This method calls out to ESPM to get the report data for a child template (a data request). For child templates, the process simply requires calling out the download URL and getting the data in XML format. 
@@ -408,9 +437,11 @@ def generate_and_download_child_data_request_report(self, matched_data_request): str(template_report_id), sanitized_template_report_name ) try: - response = requests.get(download_url, headers=self.authenticated_headers) + response = requests.get(download_url, headers=self.authenticated_headers, allow_redirects=True) except requests.exceptions.SSLError: raise PMExcept('SSL Error in Portfolio Manager Query; check VPN/Network/Proxy.') + if not response.status_code == status.HTTP_200_OK: raise PMExcept('Unsuccessful response from GET trying to download generated report; aborting.') + return response.content From 0a4668669e3cde0203b5fdb389827fdd9e30f101 Mon Sep 17 00:00:00 2001 From: Nicholas Long Date: Thu, 19 Mar 2020 19:13:04 -0600 Subject: [PATCH 2/5] fix base images for docker --- Dockerfile | 70 ++++++++++++++++++++++---------------- docker/postgres/Dockerfile | 37 ++++++++++---------- 2 files changed, 60 insertions(+), 47 deletions(-) diff --git a/Dockerfile b/Dockerfile index 332bd697a4..8f1c8f87f8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,36 +3,44 @@ # DESCRIPTION: Image with seed platform and dependencies running in development mode # TO_BUILD_AND_RUN: docker-compose build && docker-compose up -FROM alpine:3.8 +# This Dockerfile has been updated to pull from our last known good build of SEED (v2.6.1). +# Version 3.7.2-r2 of geos has introduced and incompatible library: +# https://pkgs.alpinelinux.org/package/edge/testing/x86_64/geos +#FROM alpine:3.8 -RUN apk add --no-cache python \ - python3-dev \ - postgresql-dev \ - alpine-sdk \ - pcre \ - pcre-dev \ - libxslt-dev \ - linux-headers \ - libffi-dev \ - bash \ - bash-completion \ - npm \ - nginx && \ - apk add --no-cache --repository http://dl-cdn.alpinelinux.org/alpine/edge/main openssl && \ - apk add --no-cache --repository http://dl-3.alpinelinux.org/alpine/edge/testing/ geos gdal && \ - ln -sf /usr/bin/python3 /usr/bin/python && \ - python -m ensurepip && \ - rm -r /usr/lib/python*/ensurepip && \ - ln -sf /usr/bin/pip3 /usr/bin/pip && \ - pip install --upgrade pip setuptools && \ - pip install git+https://github.com/Supervisor/supervisor@837c159ae51f3 && \ - mkdir -p /var/log/supervisord/ && \ - rm -r /root/.cache && \ - addgroup -g 1000 uwsgi && \ - adduser -G uwsgi -H -u 1000 -S uwsgi && \ - mkdir -p /run/nginx && \ - echo "daemon off;" >> /etc/nginx/nginx.conf && \ - rm -f /etc/nginx/conf.d/default.conf +# Start with 2.6.0. note that the source code will be removed and re-copied to this container. The +# version of SEED here is used to load in the core system packages and dependencies. +FROM seedplatform/seed:2.6.0 + +# DO NOT UPGRADE until libgeos and shapely fix the connection. 
+#RUN apk add --no-cache python \ +# python3-dev \ +# postgresql-dev \ +# alpine-sdk \ +# pcre \ +# pcre-dev \ +# libxslt-dev \ +# linux-headers \ +# libffi-dev \ +# bash \ +# bash-completion \ +# npm \ +# nginx && \ +# apk add --no-cache --repository http://dl-cdn.alpinelinux.org/alpine/edge/main openssl && \ +# apk add --no-cache --repository http://dl-3.alpinelinux.org/alpine/edge/testing/ geos gdal && \ +# ln -sf /usr/bin/python3 /usr/bin/python && \ +# python -m ensurepip && \ +# rm -r /usr/lib/python*/ensurepip && \ +# ln -sf /usr/bin/pip3 /usr/bin/pip && \ +# pip install --upgrade pip setuptools && \ +# pip install git+https://github.com/Supervisor/supervisor@837c159ae51f3 && \ +# mkdir -p /var/log/supervisord/ && \ +# rm -r /root/.cache && \ +# addgroup -g 1000 uwsgi && \ +# adduser -G uwsgi -H -u 1000 -S uwsgi && \ +# mkdir -p /run/nginx && \ +# echo "daemon off;" >> /etc/nginx/nginx.conf && \ +# rm -f /etc/nginx/conf.d/default.conf ## Note on some of the commands above: ## - create the uwsgi user and group to have id of 1000 @@ -41,6 +49,10 @@ RUN apk add --no-cache python \ ## - install supervisor that works with Python3. ## - enchant, python-gdbm, libssl-dev, libxml2-dev are no longer explicitly installed +## Remove this line after updating the base image to support the new dependency versions. The line ensures that the +# code is only this branch, not any remnants from the tagged container. +RUN rm -rf /seed/ + ### Install python requirements WORKDIR /seed COPY ./requirements.txt /seed/requirements.txt diff --git a/docker/postgres/Dockerfile b/docker/postgres/Dockerfile index 34ad38a09a..99a4cad9a9 100644 --- a/docker/postgres/Dockerfile +++ b/docker/postgres/Dockerfile @@ -1,22 +1,23 @@ -FROM postgres:11.2 +FROM seedplatform/postgres-seed:11.2 -ENV POSTGIS_MAJOR 2.5 -ENV POSTGIS_VERSION 2.5.2+dfsg-1~exp1.pgdg90+1 - -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - postgresql-$PG_MAJOR-postgis-$POSTGIS_MAJOR=$POSTGIS_VERSION \ - postgresql-$PG_MAJOR-postgis-$POSTGIS_MAJOR-scripts=$POSTGIS_VERSION \ - postgis=$POSTGIS_VERSION \ - apt-transport-https ca-certificates wget && \ - rm -rf /var/lib/apt/lists/* - -RUN sh -c "echo 'deb https://packagecloud.io/timescale/timescaledb/debian/ `lsb_release -c -s` main' > /etc/apt/sources.list.d/timescaledb.list" && \ - wget --quiet -O - https://packagecloud.io/timescale/timescaledb/gpgkey | apt-key add - && \ - apt-get update && \ - apt-get install -y timescaledb-postgresql-$PG_MAJOR && \ - apt-get purge -y --auto-remove apt-transport-https ca-certificates wget && \ - rm -rf /var/lib/apt/lists/* +# Use the old image that has been pushed until we update dependencies! 
+#ENV POSTGIS_MAJOR 2.5 +#ENV POSTGIS_VERSION 2.5.2+dfsg-1~exp1.pgdg90+1 +# +#RUN apt-get update && \ +# apt-get install -y --no-install-recommends \ +# postgresql-$PG_MAJOR-postgis-$POSTGIS_MAJOR=$POSTGIS_VERSION \ +# postgresql-$PG_MAJOR-postgis-$POSTGIS_MAJOR-scripts=$POSTGIS_VERSION \ +# postgis=$POSTGIS_VERSION \ +# apt-transport-https ca-certificates wget && \ +# rm -rf /var/lib/apt/lists/* +# +#RUN sh -c "echo 'deb https://packagecloud.io/timescale/timescaledb/debian/ `lsb_release -c -s` main' > /etc/apt/sources.list.d/timescaledb.list" && \ +# wget --quiet -O - https://packagecloud.io/timescale/timescaledb/gpgkey | apt-key add - && \ +# apt-get update && \ +# apt-get install -y timescaledb-postgresql-$PG_MAJOR && \ +# apt-get purge -y --auto-remove apt-transport-https ca-certificates wget && \ +# rm -rf /var/lib/apt/lists/* RUN mkdir -p /docker-entrypoint-initdb.d COPY ./initdb-postgis.sh /docker-entrypoint-initdb.d/postgis.sh From b1aa4c38e3bc3bc4261d6186b148ee533465db50 Mon Sep 17 00:00:00 2001 From: Nicholas Long Date: Thu, 19 Mar 2020 19:38:24 -0600 Subject: [PATCH 3/5] remove oep --- docker-compose.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index faf94cf8ab..408d46aa3b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -73,17 +73,17 @@ services: options: max-size: 50m max-file: '5' - oep-city-1: + #oep-city-1: # This is a placeholder. If needed, follow the instructions to enable: https://cloud.docker.com/u/seedplatform/repository/docker/seedplatform/oep - image: seedplatform/oep:1.2 - depends_on: - - web - environment: - - OEP_DISABLED=true - logging: - options: - max-size: 50m - max-file: '5' + # image: seedplatform/oep:1.4 + # depends_on: + # - web + # environment: + # - OEP_DISABLED=true + # logging: + # options: + # max-size: 50m + # max-file: '5' volumes: seed_pgdata: external: true From 878a3e10775bed302dbf5dcf65bd9a12cb2e2dc3 Mon Sep 17 00:00:00 2001 From: Ted Summer Date: Fri, 20 Mar 2020 12:42:14 -0600 Subject: [PATCH 4/5] chore(test): remove unused import --- seed/tests/test_api.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/seed/tests/test_api.py b/seed/tests/test_api.py index 7942ebe596..980d15b88b 100644 --- a/seed/tests/test_api.py +++ b/seed/tests/test_api.py @@ -11,8 +11,6 @@ import time from unittest import skip -from datetime import date - from django.core.urlresolvers import reverse_lazy, reverse from django.test import TestCase from django.utils import timezone From 38079742626a3af9dcf314777eb84cdaa13dc486 Mon Sep 17 00:00:00 2001 From: Nicholas Long Date: Tue, 24 Mar 2020 18:11:02 -0600 Subject: [PATCH 5/5] remove functional test --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index af4d9f685e..5007df6acf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -64,7 +64,7 @@ env: - TOX_ENV=python - TOX_ENV=flake8 - TOX_ENV=docs - - TOX_ENV=functional +# - TOX_ENV=functional # chrome is out of date, fix this eventually. - TOX_ENV=apitest script: - tox -e $TOX_ENV
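Note (editorial illustration; not part of the patch series above): PATCH 1/5 moves template lookups to the reportData endpoint, whose useful JSON arrives as an escaped string under the "data" key, and parse_template_response() and the child-row handling decode it in two passes. A minimal sketch of that idea follows; the sample payload, field values, and template name are made up, and it assumes a cleanly escaped payload (the live ESPM response is messier, which is why the patch also strips stray double quotes around the embedded objects/arrays before the second json.loads).

    import json

    # Hypothetical stand-in for response.text from the reportData endpoint.
    # The inner JSON is delivered as an escaped string under the "data" key.
    sample = '{"data": "{\\"customReportsData\\": [{\\"id\\": 1, \\"name\\": \\"My Template\\", \\"hasChildrenRows\\": false}]}"}'

    decoded = json.loads(sample)                    # first pass: outer document; decoded['data'] is still a string
    template_object = json.loads(decoded['data'])   # second pass: parse the embedded JSON
    print(template_object['customReportsData'][0]['name'])  # -> My Template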