Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Added THREAD_WAIT constant.

  • Loading branch information...
commit 64d73a6ac218b039353b4d9387898a9a807b47d1 1 parent 2526064
@nbprashanth authored
View
BIN  Upstream.pyc
Binary file not shown
View
BIN  Util.pyc
Binary file not shown
View
BIN  WebParse.pyc
Binary file not shown
View
11 main.py
@@ -3,11 +3,15 @@
from WebParse import WebParse
from Upstream import HTTPLS, FTPLS, Google, Launchpad, SVNLS, Trac,\
SubdirHTTPLS, DualHTTPLS, Custom, SF
+import time
-THREAD_LIMIT = 5
+THREAD_LIMIT = 2
QUEUE_LIMIT = 50
URL = 'http://localhost'
PORT = '3000'
+THREAD_WAIT = 5
+
+# On average, one record is processed every THREAD_WAIT seconds
jobs = Queue.Queue(QUEUE_LIMIT)
singlelock = threading.Lock()
@@ -114,13 +118,14 @@ def process(self, pkgname, method, url, id, branch):
wp.updateRecord('processed', 'true', id)
wp.updateRecord('latest_ver', ver, id)
wp.updateRecord('loc', loc, id)
-
+
def run(self):
while 1:
try:
job = jobs.get(True,1)
self.process(job[0],job[1],job[2],job[3], job[5])
- jobs.task_done()
+ jobs.task_done()
+ time.sleep(THREAD_WAIT)
except:
break
View
13 main.py~
@@ -3,11 +3,15 @@ import sys
from WebParse import WebParse
from Upstream import HTTPLS, FTPLS, Google, Launchpad, SVNLS, Trac,\
SubdirHTTPLS, DualHTTPLS, Custom, SF
+import time
-THREAD_LIMIT = 5
+THREAD_LIMIT = 2
QUEUE_LIMIT = 50
URL = 'http://localhost'
-PORT = 3000
+PORT = '3000'
+THREAD_WAIT = 5
+
+# On average, two records are processed every 10 seconds
jobs = Queue.Queue(QUEUE_LIMIT)
singlelock = threading.Lock()
@@ -114,13 +118,14 @@ class workerbee(threading.Thread):
wp.updateRecord('processed', 'true', id)
wp.updateRecord('latest_ver', ver, id)
wp.updateRecord('loc', loc, id)
-
+
def run(self):
while 1:
try:
job = jobs.get(True,1)
self.process(job[0],job[1],job[2],job[3], job[5])
- jobs.task_done()
+ jobs.task_done()
+ time.sleep(THREAD_WAIT)
except:
break
Please sign in to comment.
Something went wrong with that request. Please try again.