
Replace print statements with logging

To prevent unnecessary output during testing
schbetsy committed Sep 13, 2018
1 parent ba39020 commit 35a9ca6202475043b6878dbdc0ca50c1a0ac99c6
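The change follows the standard module-level logger pattern: each module creates its own logger via logging.getLogger(__name__), and output is then routed or silenced centrally through the LOGGING block added to the settings at the end of this diff, instead of escaping to stdout via print. A minimal sketch of the pattern, with illustrative names only (not from this repo):

    import logging

    # One logger per module, named after the module so configuration can
    # target it (or its parent packages) by name.
    logger = logging.getLogger(__name__)


    def do_work():
        # Replaces a bare print(); nothing is written unless a configured
        # handler accepts INFO-level records.
        logger.info("starting work")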
@@ -1,6 +1,11 @@
from datetime import datetime
import logging
import sys


logger = logging.getLogger(__name__)


def handle_error(fn):
    """
    Will save a screenshot of the current page if the method fails
@@ -13,11 +18,11 @@ def wrapper(*args, **kwargs):
            return fn(*args, **kwargs)
        except Exception:
            filename = datetime.now().isoformat()
            print("kwargs: %s" % kwargs)
            logger.exception("kwargs: %s" % kwargs)
            try:
                args[0].base.get_screenshot(filename)
            except Exception:
                print ("HANDLER FAILURE:", sys.exc_info())
                logger.exception("HANDLER FAILURE: %s", sys.exc_info())

            raise

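For context on the decorator above: it is meant to wrap methods whose first positional argument (self) exposes a base object with a get_screenshot(filename) method, as the args[0].base.get_screenshot call implies. A hypothetical usage sketch, assuming a Selenium-style page object (class and method names are illustrative, not from this repo):

    class AccountPage(object):
        def __init__(self, base):
            # `base` is assumed to provide get_screenshot(filename)
            self.base = base

        @handle_error
        def submit_form(self, **kwargs):
            # If this raises, handle_error logs the kwargs, attempts a
            # timestamped screenshot via self.base.get_screenshot, and re-raises.
            pass  # form-filling logic would go here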
@@ -1,5 +1,3 @@
import datetime

from django.core.management.base import BaseCommand, CommandError
from retirement_api.utils import check_api

@@ -2,20 +2,27 @@
import unittest

from django.core.management.base import CommandError
from six import StringIO

from django.core.management import call_command

from retirement_api.management.commands import check_ssa_values, check_ssa
from retirement_api.utils.check_api import collector


out = StringIO()


class CommandTests(unittest.TestCase):
    @mock.patch('retirement_api.management.commands.check_ssa_values.ssa_check.run_tests')
    def test_check_ssa_values(self, mock_run_tests):
        mock_run_tests.return_value = 'OK'
        test_run = call_command('check_ssa_values')
        call_command('check_ssa_values', stdout=out)
        self.assertTrue(mock_run_tests.call_count == 1)
        test_run2 = call_command('check_ssa_values',
                                 '--recalibrate')

        call_command('check_ssa_values',
                     '--recalibrate',
                     stdout=out)
        self.assertTrue(mock_run_tests.call_count == 2)
        # mock_run_tests.return_value = 'Mismatches'
        # with self.assertRaises(CommandError):
@@ -24,5 +31,5 @@ def test_check_ssa_values(self, mock_run_tests):
    @mock.patch('retirement_api.management.commands.check_ssa.check_api.run')
    def test_check_ssa(self, mock_run):
        mock_run.return_value = collector
        test_run = call_command('check_ssa')
        call_command('check_ssa', stdout=out)
        self.assertTrue(mock_run.call_count == 1)
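The stdout=out arguments added above are what keep these tests quiet: call_command redirects everything the command writes through self.stdout into the StringIO buffer instead of the test runner's console. The same redirection works with any management command, and the buffer can be inspected afterwards; for example, with Django's built-in check command:

    from six import StringIO
    from django.core.management import call_command

    buf = StringIO()
    call_command('check', stdout=buf)  # output lands in the buffer, not on the console
    captured = buf.getvalue()          # e.g. a "System check identified no issues" message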
@@ -7,6 +7,7 @@
import datetime
import json
import time
import logging
import random
import signal
from urlparse import urlparse
@@ -142,5 +143,5 @@ def run(base):
if BASE in BASES:
    run(BASE)
else:
    print helpmsg
    logging.getLogger(__name__).info(helpmsg)
    sys.exit()
@@ -3,6 +3,7 @@
import datetime
import json
import csv
import logging

"""
terms:
@@ -45,6 +46,9 @@
}


log = logging.getLogger(__name__)


def output_csv(filepath, headings, bs_rows):
    with open(filepath, 'w') as f:
        writer = csv.writer(f)
@@ -76,9 +80,9 @@ def output_json(filepath, headings, bs_rows):
def make_soup(url):
    req = requests.get(url)
    if req.reason != 'OK':
        print "request to %s failed: %s %s" % (url,
                                               req.status_code,
                                               req.reason)
        log.warn("request to %s failed: %s %s" % (url,
                                                  req.status_code,
                                                  req.reason))
        return ''
    else:
        soup = bs(req.text, 'html.parser')
@@ -105,9 +109,9 @@ def update_example_reduction():
    table = soup.findAll('table')[5].find('table')
    rows = [row for row in table.findAll('tr') if row.findAll('td')]
    output_csv(outcsv, headings, rows)
    print "updated %s with %s rows" % (outcsv, len(rows))
    log.info("updated %s with %s rows" % (outcsv, len(rows)))
    output_json(outjson, headings, rows)
    print "updated %s with %s entries" % (outjson, len(rows))
    log.info("updated %s with %s entries" % (outjson, len(rows)))


def update_awi_series():
@@ -119,14 +123,14 @@ def update_awi_series():
    if soup:
        tables = soup.findAll('table')[1].findAll('table')
        rows = []
        print "found %s tables" % len(tables)
        log.info("found %s tables" % len(tables))
        for table in tables:
            rows.extend([row for row in table.findAll('tr')
                         if row.findAll('td')])
        output_csv(outcsv, headings, rows)
        print "updated %s with %s rows" % (outcsv, len(rows))
        log.info("updated %s with %s rows" % (outcsv, len(rows)))
        output_json(outjson, headings, rows)
        print "updated %s with %s entries" % (outjson, len(rows))
        log.info("updated %s with %s entries" % (outjson, len(rows)))


def update_cola():
@@ -139,13 +143,13 @@ def update_cola():
    [s.extract() for s in soup('small')]
    tables = soup.findAll('table')[-3:]
    rows = []
    print "found %s tables" % len(tables)
    log.info("found %s tables" % len(tables))
    for table in tables:
        rows.extend([row for row in table.findAll('tr') if row.findAll('td')])
    output_csv(outcsv, headings, rows)
    print "updated %s with %s rows" % (outcsv, len(rows))
    log.info("updated %s with %s rows" % (outcsv, len(rows)))
    output_json(outjson, headings, rows)
    print "updated %s with %s entries" % (outjson, len(rows))
    log.info("updated %s with %s entries" % (outjson, len(rows)))


def update_life():
@@ -167,7 +171,7 @@ def update_life():
    if soup:
        table = soup.find('table').find('table')
        if not table:
            print "couldn't find table at %s" % url
            log.info("couldn't find table at %s" % url)
        else:
            rows = table.findAll('tr')[2:]
            if len(rows) > 100:
@@ -177,7 +181,7 @@ def update_life():
msg += "updated {0} with {1} entries".format(outjson, len(rows))
else:
msg += "didn't find more than 100 rows at {0}".format(url)
print msg
log.info(msg)
return msg


@@ -190,4 +194,4 @@ def harvest_all():
if __name__ == "__main__":
starter = datetime.datetime.now()
harvest_all()
print("update took {0} to update four data stores".format((datetime.datetime.now()-starter)))
log.info("update took {0} to update four data stores".format((datetime.datetime.now()-starter)))
@@ -3,6 +3,7 @@
import json
import datetime
from dateutil import parser
import logging

TODAY = datetime.datetime.now().date()
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
@@ -123,21 +124,22 @@ def yob_test(yob=None):
    tests to make sure supplied birth year is valid;
    returns valid birth year as a string or None
    """
    log = logging.getLogger(__name__)
    today = datetime.datetime.now().date()
    if not yob:
        return None
    try:
        birth_year = int(yob)
    except:
        print "birth year should be a number"
        log.warn("birth year should be a number")
        return None
    else:
        b_string = str(birth_year)
        if birth_year > today.year:
            print "can't work with birth dates in the future"
            log.warn("can't work with birth dates in the future")
            return None
        elif len(b_string) != 4:
            print "please supply a 4-digit birth year"
            log.warn("please supply a 4-digit birth year")
            return None
        else:
            return b_string
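For reference, the behaviour yob_test implements, with the new log.warn calls firing exactly where the prints used to, shown as a hypothetical session:

    >>> yob_test(1980)
    '1980'
    >>> yob_test('abc')   # warns: birth year should be a number; returns None
    >>> yob_test(3000)    # warns: can't work with birth dates in the future; returns None
    >>> yob_test(80)      # warns: please supply a 4-digit birth year; returns None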
@@ -3,6 +3,7 @@
import datetime
from copy import copy
import json
import logging

from django.http import HttpRequest
from .ss_calculator import get_retire_data
@@ -21,6 +22,8 @@
    'prgf': 2
}

logger = logging.getLogger(__name__)


def get_test_params(age, dob_day, dob_year=None, income=40000):
    """return test params for one edge case, based on today's date"""
@@ -107,7 +110,7 @@ def check_results(test_data, TESTS):
return ("All tests pass on {0}; "
"last recalibrated on {1}".format(today, calibration.created.date()))
else:
print error_msg
logger.warn(error_msg)
return error_msg


@@ -1,12 +1,10 @@
from __future__ import print_function
import os
import sys
import json
import datetime
import shutil
import tempfile
import csv
import datetime

from bs4 import BeautifulSoup as bs
import requests
@@ -156,8 +154,7 @@ def test_update_life(self, mock_soup, mock_output_json, mock_output_csv):
        mock_soup.return_value = bs(mockpage, 'html.parser')

        # action
        msg = utils.ss_update_stats.update_life()
        print(msg)
        utils.ss_update_stats.update_life()

        # assert

@@ -53,6 +53,22 @@

LANGUAGE_CODE = 'en-us'

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'level': 'CRITICAL',
            'class': 'logging.StreamHandler',
        }
    },
    'loggers': {
        '': {
            'handlers': ['console'],
        }
    }
}

TIME_ZONE = 'America/New_York'

USE_I18N = True
