Skip to content
This repository has been archived by the owner on Jul 23, 2024. It is now read-only.

Commit

Permalink
Handle request timeouts by sleeping for 1 second and then retrying
Browse files Browse the repository at this point in the history
  • Loading branch information
uberj committed Mar 6, 2014
1 parent a3e9d4f commit 981f78f
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 4 deletions.
3 changes: 2 additions & 1 deletion slurpee/management/commands/scrape.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,8 @@ def handle(self, *sources, **opts):
)
except Exception, e:
write_alert(
"Exception type: {0}. Error: ".format(type(e), str(e))
"Exception type: {0}. Error: {1} ".format(
type(e), str(e))
)
logging.error(
"Halting external data import due to: "
Expand Down
12 changes: 9 additions & 3 deletions slurpee/puppet_slurp.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,15 @@ def slurp(self):
success = False
# We have tried retry times
while retry < MAX_RETRY:
self.resp = self.session.get(
self.fact_url, params=self.params, verify=self.ssl_verify
)
try:
self.resp = self.session.get(
self.fact_url, params=self.params, verify=self.ssl_verify
)
except requests.exceptions.Timeout:
time.sleep(1)
retry += 1
continue

if self.resp.status_code == 200:
self.data = json.loads(self.resp.content)
success = True
Expand Down

0 comments on commit 981f78f

Please sign in to comment.