
Commit

Update scraper.py
TheRealCreamCheesus committed Jan 5, 2017
1 parent 40376b3 commit 2f11a7f
Showing 1 changed file with 4 additions and 4 deletions.
scraper.py: 8 changes (4 additions & 4 deletions)
@@ -143,7 +143,7 @@
 data = {}
 data['donType'] = donType
 data['submissionID'] = submissionID
-data['clientID'] = clientID
+#data['clientID'] = clientID
 data['donName'] = donName
 data['address'] = address
 data['state'] = state
@@ -163,7 +163,7 @@
 
 
 
-scraperwiki.sqlite.save(unique_keys=["rowCount","page","period","entityID"], data=data)
+scraperwiki.sqlite.save(unique_keys=["rowCount","page","period"], data=data)
 
 
 except Exception, e:
@@ -198,7 +198,7 @@
 #print donType
 submissionID = lxml.html.tostring(tds[0]).split('SubmissionId=')[1].split('&ClientId=')[0]
 #print submissionID
-clientID = lxml.html.tostring(tds[0]).split('ClientId=')[1].split('">')[0]
+#clientID = lxml.html.tostring(tds[0]).split('ClientId=')[1].split('">')[0]
 #print clientID
 donName = lxml.html.tostring(tds[0]).split('">')[2].split('</a')[0]
 print donName
@@ -232,7 +232,7 @@
 data = {}
 data['donType'] = donType
 data['submissionID'] = submissionID
-data['clientID'] = clientID
+#data['clientID'] = clientID
 data['donName'] = donName
 data['address'] = address
 data['state'] = state
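Note on the unique_keys change above: scraperwiki.sqlite.save upserts rows keyed on the listed columns, so once "entityID" is dropped from unique_keys, two scraped rows that share rowCount, page and period but differ only in entityID will overwrite one another instead of being stored as separate records. Below is a minimal standalone sketch of that behaviour using plain sqlite3 rather than the scraperwiki library (table and column names are taken from the diff; the real library manages its own table and schema):

import sqlite3

conn = sqlite3.connect(":memory:")
# The UNIQUE constraint plays the role of unique_keys after this commit.
conn.execute("""CREATE TABLE data (
    rowCount INTEGER, page INTEGER, period TEXT, entityID TEXT, donName TEXT,
    UNIQUE (rowCount, page, period)
)""")

def save(row):
    # INSERT OR REPLACE mirrors how save() overwrites a row whose
    # unique-key columns already exist in the table.
    conn.execute(
        "INSERT OR REPLACE INTO data VALUES (:rowCount, :page, :period, :entityID, :donName)",
        row)

save({"rowCount": 1, "page": 1, "period": "2016", "entityID": "A", "donName": "Foo"})
save({"rowCount": 1, "page": 1, "period": "2016", "entityID": "B", "donName": "Bar"})

print(conn.execute("SELECT COUNT(*) FROM data").fetchone()[0])  # prints 1: the second save replaced the first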

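The clientID extraction that this commit comments out pulled ClientId out of the serialised anchor tag with chained str.split calls, which raises an IndexError whenever the expected substring is missing. If that field is ever reinstated, parsing the link's query string is a less brittle route; the sketch below is not part of this commit, and the example href is made up:

try:
    from urllib.parse import urlparse, parse_qs   # Python 3
except ImportError:
    from urlparse import urlparse, parse_qs       # Python 2, which this scraper targets

# Hypothetical href as it might appear inside tds[0]
href = "ViewSubmission.aspx?SubmissionId=123&ClientId=456"
params = parse_qs(urlparse(href).query)

submissionID = params["SubmissionId"][0]
clientID = params["ClientId"][0]
print(clientID)  # '456'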