Skip to content

Commit

Permalink
updated tests and util.robust_fetch expectations and behaviour, respectively
Browse files Browse the repository at this point in the history
  • Loading branch information
staffanm committed Dec 22, 2017
1 parent 74fb10b commit f53c07f
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 19 deletions.
16 changes: 6 additions & 10 deletions ferenda/documentrepository.py
Original file line number Diff line number Diff line change
Expand Up @@ -958,18 +958,14 @@ def download_if_needed(self, url, basefile, archive=True, filename=None, sleep=1
fp = os.fdopen(fileno)
fp.close()

# Since this part, containing the actual HTTP request call, is
# called repeatedly, we take extra precautions in the event of
# temporary network failures etc. Try 5 times with 1 second
# pause inbetween before giving up.
# Take extra precautions in the event of temporary network
# failures etc -- try 5 times with 1 second pause inbetween
# before giving up.
response = util.robust_fetch(self.session.get, url, self.log,
sleep=sleep, headers=headers, timeout=10)

# FIXME: replace with util.robust_fetch
try:
response = util.robust_fetch(self.session.get, url, self.log, headers=headers, timeout=10)
except Exception:
# robust_fetch has already logged this error, we simply quit
if response is False: # not modified
return False

with open(tmpfile, "wb") as fp:
fp.write(response.content)

Expand Down
10 changes: 6 additions & 4 deletions ferenda/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -818,7 +818,8 @@ def base27decode(num):
return ((num == 0) and base27alphabet[0] ) or (base27decode(num // b ).lstrip(base27alphabet[0]) + base27alphabet[num % b])


def robust_fetch(method, url, logger, attempts=5, pause=1, raise_for_status=True, *args, **kwargs):
def robust_fetch(method, url, logger, attempts=5, sleep=1, raise_for_status=True,
*args, **kwargs):
fetched = False
lastexception = None
try:
Expand All @@ -833,12 +834,13 @@ def robust_fetch(method, url, logger, attempts=5, pause=1, raise_for_status=True
"Failed to fetch %s: err %s (%s remaining attempts)" %
(url, e, attempts))
attempts -= 1
time.sleep(pause)
time.sleep(sleep)
lastexception = e
if not fetched:
logger.error("Failed to fetch %s, giving up" % url)
if lastexception:
raise lastexception
# if lastexception:
# raise lastexception
return False
except requests.exceptions.RequestException as e:
logger.error("Failed to fetch %s: error %s" % (url, e))
raise e
Expand Down
9 changes: 4 additions & 5 deletions test/testDocRepo.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,9 +161,8 @@ def test_download(self):

# test2: create 2 out of 3 files. make sure download_single is
# hit only for the remaining file.
util.ensure_dir(self.datadir+"/base/downloaded/123/a.html")
open(self.datadir+"/base/downloaded/123/a.html","w").close()
open(self.datadir+"/base/downloaded/123/b.html","w").close()
util.writefile(self.datadir+"/base/downloaded/123/a.html", "dummy")
util.writefile(self.datadir+"/base/downloaded/123/b.html", "dummy")

with open("%s/files/base/downloaded/index.htm" % os.path.dirname(__file__)) as fp:
mockresponse.text = fp.read()
Expand All @@ -186,8 +185,7 @@ def test_download(self):

# test4: set refresh = False, create the 3rd file, make sure
# download returns false as nothing changed
util.ensure_dir(self.datadir+"/base/downloaded/124/a.html")
open(self.datadir+"/base/downloaded/124/a.html","w").close()
util.writefile(self.datadir+"/base/downloaded/123/a.html", "dummy")
d.download_single.return_value = False
d.config.refresh = False
with patch.object(d.session, 'get', return_value=mockresponse):
Expand Down Expand Up @@ -825,6 +823,7 @@ def test_relate_all_teardown(self, mock_store):
'storelocation': 'b',
'storerepository': 'c',
'bulktripleload': False}))

self.assertTrue(self.repoclass.relate_all_teardown(config))
self.assertTrue(mock_store.connect.called)
self.assertTrue(mock_store.connect.return_value.get_serialized_file.called)
Expand Down

0 comments on commit f53c07f

Please sign in to comment.