
Commit e516ecc
Add lock code, using flock to make sure only one at once
frabcus committed May 16, 2014
1 parent 2c06232 commit e516ecc
Showing 1 changed file with 11 additions and 3 deletions.
twsearch.py: 14 changes (11 additions, 3 deletions)
@@ -254,6 +254,17 @@ def command_diagnostics():
     sys.exit()
 
 def command_scrape():
+    # Make sure this scrape mode only runs once at once
+    f = open("query.txt")
+    try:
+        fcntl.flock(f, fcntl.LOCK_EX | fcntl.LOCK_NB)
+    except IOError:
+        log("already running scrape according to flock, exiting")
+        sys.exit()
+
+    # Get query we're working on from file we store it in
+    query_terms = codecs.open("query.txt", "r", "utf-8").read().strip()
+
     # Read mode from database
     try:
         mode = scraperwiki.sql.select('mode from __mode')[0]['mode']
@@ -294,9 +305,6 @@ def command_scrape():
     # so it appears before the status one in the list
     scraperwiki.sql.dt.create_table({'id_str': '1'}, 'tweets')
 
-    # Get query we're working on from file we store it in
-    query_terms = codecs.open("query.txt", "r", "utf-8").read().strip()
-
     # Connect to Twitter
     tw = do_tool_oauth()
 
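The locking pattern added here relies on fcntl.flock with LOCK_EX | LOCK_NB: the first process to acquire the exclusive lock holds it until its file descriptor is closed (normally at process exit), and any later process gets an immediate error instead of blocking, so only one scrape can run at a time. Below is a minimal standalone sketch of the same idea, separate from the commit itself; the lock file name and messages are illustrative (the commit reuses query.txt as the lock file).

    import fcntl
    import sys

    # Hypothetical lock file used only for illustration.
    LOCK_PATH = "scrape.lock"

    # The file object must stay referenced for as long as the lock is needed;
    # the lock is released when the descriptor is closed (e.g. at process exit).
    lock_file = open(LOCK_PATH, "w")

    try:
        # LOCK_EX requests an exclusive lock; LOCK_NB makes the call fail
        # immediately (IOError/OSError) if another process already holds it.
        fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        print("another instance is already running, exiting")
        sys.exit(1)

    # ... do the work that must only run in one process at a time ...

Note that flock is advisory and independent of the open mode, so locking a file opened read-only (as the commit does with query.txt) works; the important detail is that the file object is kept alive for the duration of the scrape.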
