
Commit

Think I solved the paging issue and detecting when all entries are retrieved.
danstoner committed Jan 30, 2015
1 parent bd72a06 commit ce2b592
Showing 1 changed file with 8 additions and 5 deletions.
dailymile_export_to_tsv.py (13 changes: 8 additions & 5 deletions)
@@ -86,9 +86,9 @@ def writerows(self, rows):
 logging.info("First API Request: " + api_url_entries)
 
 r = s.get(api_url_entries)
+r_json=r.json()
 
-while r.status_code == 200:
-    r_json=r.json()
+while (r.status_code == 200) and (r_json["entries"]):
     for entry in r_json["entries"]:
         # Every JSON record seems to include "id", "url", and "at"
         id = entry["id"]
@@ -122,13 +122,15 @@ def writerows(self, rows):
                 entry_dict[id].append("")
         except: entry_dict[id].append("")
     page+=1
-    if page > 20: # cut down number of page requests for testing
-        break
+#    if page > 2: # cut down number of page requests for testing
+#        break
     api_url_entries="https://api.dailymile.com/people/" + dm_user + "/entries.json?page=" + str(page)
     # give the API a break
     time.sleep(0.1)
     logging.info("Fetching: " + api_url_entries)
-    try: r = s.get(api_url_entries)
+    try:
+        r = s.get(api_url_entries)
+        r_json=r.json()
     except:
         if r.status_code == 503:
             # probably hit the API requests per hour cap, check Retry-After header (future work)
@@ -143,6 +145,7 @@ def writerows(self, rows):
         logging.error("Received unexpected HTTP status code " + str(r.status_code) + " on " + api_url_entries)
         break
 
+
 # The ids look like sequential numbers, sorting by id may go a long way towards getting the entries in chronological order
 sorted_keys = sorted(entry_dict.keys())

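The heart of this change is the loop condition: keep requesting pages while the response is 200 and the "entries" list is non-empty. Distilled out of the script, the pattern looks roughly like the sketch below; fetch_all_entries is a hypothetical helper, while the URL shape and the "entries" field come from the script itself.

# Sketch of the paging pattern this commit lands on: request successive
# pages and stop as soon as a page comes back empty or non-200.
import requests

def fetch_all_entries(dm_user):
    entries = []
    page = 1
    s = requests.Session()
    while True:
        url = ("https://api.dailymile.com/people/" + dm_user
               + "/entries.json?page=" + str(page))
        r = s.get(url)
        if r.status_code != 200:
            break
        batch = r.json()["entries"]
        if not batch:  # empty page: every entry has been retrieved
            break
        entries.extend(batch)
        page += 1
    return entries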
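The 503 branch in the diff marks honoring the Retry-After header as future work. A minimal sketch of that idea, assuming the header carries a number of seconds (the HTTP-date form of the header is not handled); wait_if_throttled is a hypothetical name:

# Sketch: on a 503, sleep for however long the server asks via Retry-After.
# Assumes the header value is a number of seconds; falls back to a fixed
# delay when the header is missing or not numeric.
import time

def wait_if_throttled(r):
    if r.status_code == 503:
        retry_after = r.headers.get("Retry-After")
        try:
            delay = int(retry_after)
        except (TypeError, ValueError):
            delay = 60  # assumed fallback, not specified by the API docs
        time.sleep(delay)
        return True
    return False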

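One caveat on the final sort: sorted(entry_dict.keys()) only approximates chronological order if the ids compare numerically. If the ids come out of the JSON as strings, a plain sort is lexicographic ("10" sorts before "9"), so a numeric key is the safer spelling:

# Coerce ids to int so the sort is numeric rather than lexicographic.
sorted_keys = sorted(entry_dict.keys(), key=int)

If the API already delivers the ids as integers, this behaves the same as the plain sorted() call in the script.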