New python api #84

Closed
wants to merge 5 commits
22 python/Airbrake.py
@@ -1,17 +1,15 @@
from iron_worker import *
-import airbrakepy
-import shutil
+worker = IronWorker()
-worker = IronWorker(token='XXXXXXXXXX', project_id='xxxxxxxxxxx')
+package = CodePackage()
+package.name = "AirbrakeExample"
+package.merge("airbrake", ignoreRootDir=True)
+package.merge_dependency("airbrakepy")
+package.merge_dependency("xmlbuilder")
+package.executable = "worker.py"
-#here we have to include AirbrakePy library with worker.
-worker_dir = os.path.dirname(__file__) + '/airbrake'
-abrakepy_dir = os.path.dirname(airbrakepy.__file__)
-shutil.copytree(abrakepy_dir, worker_dir + '/airbrakepy') #copy it to worker directory
+worker.upload(package)
-IronWorker.zipDirectory(directory="airbrake/", destination='worker.zip', overwrite=True)
-
-res = worker.postCode(runFilename='worker.py', zipFilename='worker.zip', name='Airbrake.py sample')
-
-task = worker.postTask(name='Airbrake.py sample')
+task = worker.queue(code_name='AirbrakeExample')
+print task
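
Read together, the new Airbrake example collapses the old copy/zip/post dance into a package-upload-queue flow. Assembled below with the old-to-new mapping as comments; note the assumption that the bare IronWorker() constructor now picks up the token and project_id from the client's default configuration, since the explicit arguments are dropped in this diff:

from iron_worker import *

# Credentials: assumed to come from the client's default configuration
# (e.g. an iron.json file or environment), since token/project_id are no
# longer passed to the constructor in this example.
worker = IronWorker()

# CodePackage + merge_dependency replace the manual shutil.copytree of
# airbrakepy into the worker directory.
package = CodePackage()
package.name = "AirbrakeExample"
package.merge("airbrake", ignoreRootDir=True)
package.merge_dependency("airbrakepy")
package.merge_dependency("xmlbuilder")
package.executable = "worker.py"

worker.upload(package)                            # replaces zipDirectory + postCode
task = worker.queue(code_name='AirbrakeExample')  # replaces postTask
print task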
22 python/Loggly.py
@@ -1,18 +1,16 @@
from iron_worker import *
-import shutil
-import hoover
+worker = IronWorker()
-#here we have to include hoover library with worker.
-worker_dir = os.path.dirname(__file__) + '/loggly'
-hoover_dir = os.path.dirname(hoover.__file__)
-shutil.copytree(hoover_dir, worker_dir + '/loggly') #copy it to worker directory
-
-worker = IronWorker(config='config.ini')
-IronWorker.zipDirectory(directory=worker_dir, destination='loggly-py.zip', overwrite=True)
-
-res = worker.postCode(runFilename='loggly.py', zipFilename='loggly-py.zip', name='loggly-py')
+package = CodePackage()
+package.name = "LogglyExample"
+package.merge("loggly", ignoreRootDir=True)
+package.merge_dependency("hoover")
+package.merge_dependency("httplib2")
+package.executable = "loggly.py"
+worker.upload(package)
payload = {'loggly': {'subdomain': 'LOGGLY_SUBDOMAIN', 'username': 'LOGGLY_USERNAME', 'password': 'LOGGLY_PASSWORD'}}
-task = worker.postTask(name='loggly-py')
+task = worker.queue(code_name='LogglyExample', payload=payload)
+print task
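
Worth noting for reviewers: the payload dict handed to queue() here is what the worker-side change in python/loggly/loggly.py (further down) reads back. A hedged sketch of the round trip, assuming the payload is delivered to the worker as a JSON file whose path follows the -payload flag, which is what the new sys.argv loop expects:

from iron_worker import *

worker = IronWorker()

# Client side: placeholders stand in for real Loggly credentials.
payload = {'loggly': {'subdomain': 'LOGGLY_SUBDOMAIN',
                      'username': 'LOGGLY_USERNAME',
                      'password': 'LOGGLY_PASSWORD'}}
task = worker.queue(code_name='LogglyExample', payload=payload)

# Worker side (see loggly/loggly.py below): the dict arrives serialized as
# JSON in a file passed after -payload, and is read back with
# json.loads(open(payload_file).read())['loggly'].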
18 python/PagerDuty.py
@@ -1,18 +0,0 @@
-from iron_worker import *
-
-import pagerduty
-import shutil
-
-#here we have to include pagerduty library with worker.
-worker_dir = os.path.dirname(__file__) + '/pagerduty'
-pd_dir = os.path.dirname(pagerduty.__file__)
-shutil.copytree(pd_dir, worker_dir + '/pagerduty') #copy it to worker directory
-
-payload = {'pagerduty': {'service_key': PAGERDUTY_SERVICE_KEY}}
-
-worker = IronWorker(config='config.ini')
-IronWorker.zipDirectory(directory=worker_dir, destination='pagerduty-py.zip', overwrite=True)
-
-res = worker.postCode(runFilename='pagerduty.py', zipFilename='pagerduty-py.zip', name='pagerduty-py')
-
-task = worker.postTask(name='pagerduty-py', payload=payload)
31 python/loggly/loggly.py
@@ -1,20 +1,25 @@
import hoover
import logging
-
-import argparse
+import sys
import json
-parser = argparse.ArgumentParser(
- description="Some stuff")
-parser.add_argument("-payload", type=str, required=False,
- help="The location of a file containing a JSON payload.")
-args = parser.parse_args()
-if args.payload is not None:
- payload = json.loads(open(args.payload).read())
- if 'loggly' in payload:
- loggly_settings = payload['loggly']
- i = hoover.LogglySession(loggly_settings['subdomain'], loggly_settings['username'], loggly_settings['password'])
- i.config_inputs() #inject loggly handler into logger chain
+payload = None
+payload_file = None
+for i in range(len(sys.argv)):
+ if sys.argv[i] == "-payload" and (i + 1) < len(sys.argv):
+ payload_file = sys.argv[i + 1]
+ break
+
+f = open(payload_file, "r")
+contents = f.read()
+f.close()
+
+payload = json.loads(contents)
+
+if 'loggly' in payload:
+ loggly_settings = payload['loggly']
+ i = hoover.LogglySession(loggly_settings['subdomain'], loggly_settings['username'], loggly_settings['password'])
+ i.config_inputs() #inject loggly handler into logger chain
#and then usual yada-yada is going on
logger = logging.getLogger('worker_log')
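
With argparse gone, the only contract the worker keeps is the bare -payload flag followed by a file path. A minimal local dry run under that assumption; the /tmp path and the direct python invocation are illustrative, since in production IronWorker writes the payload file and launches the script itself:

import json
import subprocess

# Write a stand-in for the payload file IronWorker would normally provide.
payload = {'loggly': {'subdomain': 'LOGGLY_SUBDOMAIN',
                      'username': 'LOGGLY_USERNAME',
                      'password': 'LOGGLY_PASSWORD'}}
with open('/tmp/payload.json', 'w') as f:
    json.dump(payload, f)

# Invoke the worker the same way the platform does: -payload <file>.
subprocess.call(['python', 'loggly.py', '-payload', '/tmp/payload.json'])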
50 python/pagerduty/worker.py
@@ -1,50 +0,0 @@
-import argparse
-import json
-import httlib
-import json
-import logging
-import socket
-import time
-import urllib
-
-SEARCH_HOST="search.twitter.com"
-SEARCH_PATH="/search.json"
-
-parser = argparse.ArgumentParser(
- description="Calculates the Fibonacci sequence up to a maximum number")
-parser.add_argument("-payload", type=str, required=False,
- help="The location of a file containing a JSON payload.")
-args = parser.parse_args()
-
-pd = False
-if args.payload is not None:
- payload = json.loads(open(args.payload).read())
- if 'query' in payload:
- query = payload['query']
-
-def search(query):
- c = httplib.HTTPConnection(SEARCH_HOST)
- params = {'q' : query}
- if self.max_id is not None:
- params['since_id'] = self.max_id
- path = "%s?%s" %(SEARCH_PATH, urllib.urlencode(params))
- try:
- c.request('GET', path)
- r = c.getresponse()
- data = r.read()
- c.close()
- try:
- result = json.loads(data)
- except ValueError:
- return None
- if 'results' not in result:
- return None
- self.max_id = result['max_id']
- return result['results']
- except (httplib.HTTPException, socket.error, socket.timeout), e:
- logging.error("search() error: %s" %(e))
- return None
-#some code here
-
-#Workers code
-twittersearch(query)
4 python/worker101/enqueue.py
@@ -1,7 +1,7 @@
from iron_worker import *
-worker = IronWorker(token='XXXXXXXXXX', project_id='xxxxxxxxxxx')
+worker = IronWorker()
payload = {'pagerduty': {'query':'iron.io'}}
-task = worker.postTask(name='PythonWorker101', payload=payload)
+task = worker.queue(code_name='PythonWorker101', payload=payload)
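
One thing the enqueue script leans on implicitly: queue(code_name=...) refers to a package that must already have been uploaded under that name. A sketch of the companion upload step, with the directory name and entry point guessed from the other examples in this PR (they are not shown in this diff):

from iron_worker import *

worker = IronWorker()

# Assumed layout: a worker101/ directory containing worker.py. Both names are
# guesses based on the other examples here, not something this diff shows.
package = CodePackage()
package.name = "PythonWorker101"
package.merge("worker101", ignoreRootDir=True)
package.executable = "worker.py"
worker.upload(package)

# After this upload, enqueue.py can queue tasks against code_name='PythonWorker101'.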