Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Import code from dwh repo.

  • Loading branch information...
commit 888216f451655133af9764601584141245f34101 1 parent 0e4cfd3
Orne Brocaar brocaar authored
Showing with 1,514 additions and 0 deletions.
  1. +1 −0  MANIFEST.in
  2. +16 −0 Makefile
  3. +1 −0  deploy-requirements.txt
  4. +153 −0 docs/Makefile
  5. +251 −0 docs/conf.py
  6. +77 −0 docs/index.rst
  7. +3 −0  fabfile.py
  8. +1 −0  job_runner_worker/__init__.py
  9. +26 −0 job_runner_worker/auth.py
  10. +35 −0 job_runner_worker/cleanup.py
  11. +38 −0 job_runner_worker/config.py
  12. +1 −0  job_runner_worker/deploy-requirements.txt
  13. +52 −0 job_runner_worker/enqueuer.py
  14. +32 −0 job_runner_worker/events.py
  15. +162 −0 job_runner_worker/models.py
  16. +38 −0 job_runner_worker/runner.py
  17. 0  job_runner_worker/tests/__init__.py
  18. 0  job_runner_worker/tests/unit/__init__.py
  19. +27 −0 job_runner_worker/tests/unit/test_auth.py
  20. +39 −0 job_runner_worker/tests/unit/test_cleanup.py
  21. +41 −0 job_runner_worker/tests/unit/test_config.py
  22. +71 −0 job_runner_worker/tests/unit/test_enqueuer.py
  23. +39 −0 job_runner_worker/tests/unit/test_events.py
  24. +189 −0 job_runner_worker/tests/unit/test_models.py
  25. +52 −0 job_runner_worker/tests/unit/test_runner.py
  26. +44 −0 job_runner_worker/tests/unit/test_worker.py
  27. +55 −0 job_runner_worker/worker.py
  28. +5 −0 requirements.txt
  29. +30 −0 scripts/job_runner_worker
  30. +27 −0 setup.py
  31. +8 −0 test-requirements.txt
1  MANIFEST.in
View
@@ -0,0 +1 @@
+include README.rst
16 Makefile
View
@@ -0,0 +1,16 @@
+clean-pyc:
+ find . -type f -name "*.pyc" -delete
+ find . -type f -name "*.pyo" -delete
+
+documentation:
+ cd docs && SETTINGS_PATH='.' make clean html
+
+pep8:
+ pep8 --show-pep8 -r job_runner_worker && echo "All good!"
+
+unittest: clean-pyc
+ coverage erase
+ SETTINGS_PATH='.' coverage run --include "job_runner_worker*" --omit "*test*" -m unittest2 discover
+ coverage report
+
+test: pep8 unittest documentation
1  deploy-requirements.txt
View
@@ -0,0 +1 @@
+dwh-fabric>=0.7
153 docs/Makefile
View
@@ -0,0 +1,153 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) -n .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+ -rm -rf $(BUILDDIR)/*
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Job-RunnerWorker.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Job-RunnerWorker.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/Job-RunnerWorker"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Job-RunnerWorker"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
251 docs/conf.py
View
@@ -0,0 +1,251 @@
+# -*- coding: utf-8 -*-
+#
+# Job-Runner Worker documentation build configuration file, created by
+# sphinx-quickstart on Tue Sep 25 10:51:20 2012.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+os.environ['SETTINGS_PATH'] = ''
+sys.path.insert(0, os.path.abspath('..'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
+
+intersphinx_mapping = {
+ 'python': (
+ 'http://docs.python.org/release/2.6.8/',
+ None
+ ),
+}
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Job-Runner Worker'
+copyright = u'2012, Spil Games'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+import job_runner_worker
+#
+# The short X.Y version.
+version = job_runner_worker.__version__
+# The full version, including alpha/beta/rc tags.
+release = version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'nature'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Job-RunnerWorkerdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+ ('index', 'Job-RunnerWorker.tex', u'Job-Runner Worker Documentation',
+ u'Spil Games', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'job-runnerworker', u'Job-Runner Worker Documentation',
+ [u'Spil Games'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'Job-RunnerWorker', u'Job-Runner Worker Documentation',
+ u'Spil Games', 'Job-RunnerWorker', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
77 docs/index.rst
View
@@ -0,0 +1,77 @@
+Welcome to Job-Runner Worker's documentation!
+=============================================
+
+Installation
+------------
+
+This package can be installed by executing
+``pip install job-runner-worker``.
+
+
+Configuration
+-------------
+
+A configuration-file is required containing the API url, public and
+private-key, etc... Example::
+
+ [job_runner_worker]
+ api_base_url=https://engportal-stg.priv.spillgroup.org/
+ private_api_key=privatekey
+ public_api_key=publickey
+ run_resource_uri=/api/job_runner/v1/run/
+ concurrent_jobs=4
+ log_level=info
+ ws_server_hostname=websocket.server
+ ws_server_port=5555
+ script_temp_path=/tmp
+
+
+``api_base_url``
+ The base URL which will be used to access the API.
+
+``private_api_key``
+    Private-key to access the API.
+
+``public_api_key``
+ Public-key to access the API.
+
+``run_resource_uri``
+ The URI to the run resources.
+
+``concurrent_jobs``
+ The number of jobs to run concurrently.
+
+``log_level``
+ The log level. Valid options are:
+
+ * ``debug``
+ * ``info``
+ * ``warning``
+ * ``error``
+
+``ws_server_hostname``
+ The hostname of the WebSocket Server.
+
+``ws_server_port``
+ The port of the WebSocket Server.
+
+``script_temp_path``
+ The path where the scripts that are being executed through the Job-Runner
+ are temporarily stored. Note that this should be a location where
+ executable scripts are allowed!
+
+
+Command-line usage
+------------------
+
+For starting the worker, you can use the ``job_runner_worker`` command::
+
+ usage: job_runner_worker [-h] [--config-path CONFIG_PATH]
+
+ Job Runner worker
+
+ optional arguments:
+ -h, --help show this help message and exit
+ --config-path CONFIG_PATH
+ absolute path to config file (default: SETTINGS_PATH
+ env variable)
3  fabfile.py
View
@@ -0,0 +1,3 @@
+from dwh_fabric.deploy import deploy_package
+from dwh_fabric.packaging import publish_package
+from dwh_fabric.sphinx import publish_docs
1  job_runner_worker/__init__.py
View
@@ -0,0 +1 @@
+__version__ = '0.6.1'
26 job_runner_worker/auth.py
View
@@ -0,0 +1,26 @@
+import hashlib
+import hmac
+
+from requests.auth import AuthBase
+
+
+class HmacAuth(AuthBase):
+ """
+ Custom authentication for HMAC.
+ """
+ def __init__(self, public_key, private_key):
+ self.public_key = public_key
+ self.private_key = private_key
+
+ def __call__(self, r):
+ hmac_message = '{method}{full_path}{body}'.format(
+ method=r.method.upper(),
+ full_path=r.path_url,
+ body=r.data or '',
+ )
+ hmac_key = hmac.new(self.private_key, hmac_message, hashlib.sha1)
+
+ r.headers['Authorization'] = 'ApiKey {0}:{1}'.format(
+ self.public_key, hmac_key.hexdigest())
+
+ return r
35 job_runner_worker/cleanup.py
View
@@ -0,0 +1,35 @@
+import logging
+
+from job_runner_worker.config import config
+from job_runner_worker.models import Run
+
+
+logger = logging.getLogger(__name__)
+
+
+def reset_incomplete_runs():
+ """
+ Cleanup incomplete runs.
+
+ A run is left incomplete when a worker dies while the run hasn't been
+    finished (or was marked as enqueued). These runs need to be re-started
+    and are therefore reset to the scheduled state.
+
+ """
+ logger.info('Cleaning up incomplete runs')
+ incomplete_runs = []
+
+ for state in ['in_queue', 'started']:
+ incomplete_runs.extend(Run.get_list(
+ config.get('job_runner_worker', 'run_resource_uri'),
+ params={
+ 'state': state,
+ }
+ ))
+
+ for run in incomplete_runs:
+ logger.warning('Run {0} was left incomplete'.format(run.resource_uri))
+ run.patch({
+ 'enqueue_dts': None,
+ 'start_dts': None,
+ })
38 job_runner_worker/config.py
View
@@ -0,0 +1,38 @@
+import ConfigParser
+import logging
+import os
+
+
+def get_config_parser():
+ """
+ Return ``ConfigParser`` instance and load config file.
+
+ This will load the settings as specified in the ``SETTINGS_PATH``
+ environment variable.
+
+ :return:
+ Instance of :py:class:`ConfigParser.ConfigParser`.
+
+ """
+ config = ConfigParser.ConfigParser({
+ 'log_level': 'info',
+ })
+ config.read(os.environ['SETTINGS_PATH'])
+ return config
+
+
+def setup_log_handler(log_level):
+ """
+ Setup log handling.
+
+ :param log_level:
+ The log level (uppercased ``str``).
+
+ """
+ logging.basicConfig(
+ level=getattr(logging, log_level),
+ format='%(levelname)s - %(asctime)s - %(name)s: %(message)s'
+ )
+
+
+config = get_config_parser()
1  job_runner_worker/deploy-requirements.txt
View
@@ -0,0 +1 @@
+dwh-fabric>=0.7
52 job_runner_worker/enqueuer.py
View
@@ -0,0 +1,52 @@
+import logging
+import json
+import time
+from datetime import datetime
+
+from job_runner_worker.config import config
+from job_runner_worker.models import RestError, Run
+
+
+logger = logging.getLogger(__name__)
+
+
+def enqueue_runs(run_queue, event_queue):
+ """
+ Populate the ``run_queue``.
+
+ :param run_queue:
+ An instance of ``Queue`` for pushing the runs to.
+
+ :param event_queue:
+ An instance of ``Queue`` for pushing events to.
+
+ """
+ while True:
+ if not run_queue.full():
+ try:
+ runs = Run.get_list(
+ config.get('job_runner_worker', 'run_resource_uri'),
+ params={
+ 'limit': 1,
+ 'state': 'scheduled',
+ 'schedule_dts__lte': datetime.utcnow().isoformat(' '),
+ }
+ )
+
+ if len(runs):
+ run = runs[0]
+ run.patch({
+ 'enqueue_dts': datetime.utcnow().isoformat(' ')
+ })
+ run_queue.put(run)
+ event_queue.put(json.dumps(
+ {'event': 'enqueued', 'run_id': run.id}))
+ else:
+ time.sleep(5)
+
+ except RestError:
+ logger.exception(
+ 'An exception was raised while populating the queue')
+ time.sleep(5)
+ else:
+ time.sleep(5)
32 job_runner_worker/events.py
View
@@ -0,0 +1,32 @@
+import json
+import logging
+
+import zmq.green as zmq
+
+from job_runner_worker.config import config
+
+
+logger = logging.getLogger(__name__)
+
+
+def publish(event_queue):
+ """
+ Publish enqueued events to the WebSocket server.
+
+ :param event_queue:
+ A ``Queue`` instance for events to broadcast.
+
+ """
+ context = zmq.Context(1)
+ publisher = context.socket(zmq.PUB)
+ publisher.connect('tcp://{0}:{1}'.format(
+ config.get('job_runner_worker', 'ws_server_hostname'),
+ config.get('job_runner_worker', 'ws_server_port'),
+ ))
+
+ for event in event_queue:
+ logger.debug('Sending event: {0}'.format(event))
+ publisher.send_multipart(['worker.event', event])
+
+ publisher.close()
+ context.term()
162 job_runner_worker/models.py
View
@@ -0,0 +1,162 @@
+import json
+import requests
+import urlparse
+from requests.exceptions import RequestException
+
+from job_runner_worker.auth import HmacAuth
+from job_runner_worker.config import config
+
+
+class RestError(Exception):
+ """
+    Exception raised when a RESTful API call returns an error.
+ """
+
+
+class BaseRestModel(object):
+ """
+ Base model around RESTful resources.
+
+    :param resource_path:
+        The path of the resource.
+
+ :param initial_data:
+ A ``dict`` containing initial data. Optional.
+
+ """
+ def __init__(self, resource_path, initial_data=None):
+ self._resource_path = resource_path
+ self._data = initial_data
+
+ def __getattr__(self, name):
+ if not self._data:
+ self._data = self._get_json_data()
+ return self._data[name]
+
+ def _get_json_data(self):
+ """
+ Return JSON data.
+
+ :raises:
+ :exc:`.RestError` when response code is not 200.
+
+ """
+ try:
+ response = requests.get(
+ urlparse.urljoin(
+ config.get('job_runner_worker', 'api_base_url'),
+ self._resource_path
+ ),
+ auth=HmacAuth(
+ config.get('job_runner_worker', 'public_api_key'),
+ config.get('job_runner_worker', 'private_api_key')
+ ),
+ headers={'content-type': 'application/json'},
+ verify=False,
+ )
+
+ if response.status_code != 200:
+ raise RestError(
+ 'GET request returned {0} - {1}'.format(
+ response.status_code, response.content))
+
+ return response.json
+ except RequestException as e:
+ raise RestError('Exception {0} raised with message {1}'.format(
+ e.__class__.__name__, str(e)))
+
+ def patch(self, attributes={}):
+ """
+ PATCH resource with given keyword arguments.
+
+ :raises:
+ :exc:`.RestError` when response code is not 202.
+
+ """
+ try:
+ response = requests.patch(
+ urlparse.urljoin(
+ config.get('job_runner_worker', 'api_base_url'),
+ self._resource_path
+ ),
+ auth=HmacAuth(
+ config.get('job_runner_worker', 'public_api_key'),
+ config.get('job_runner_worker', 'private_api_key')
+ ),
+ headers={'content-type': 'application/json'},
+ data=json.dumps(attributes),
+ verify=False,
+ )
+
+ if response.status_code != 202:
+ raise RestError(
+ 'PATCH request returned {0} - {1}'.format(
+ response.status_code, response.content))
+ except RequestException as e:
+ raise RestError('Exception {0} raised with message {1}'.format(
+ e.__class__.__name__, str(e)))
+
+ @classmethod
+ def get_list(cls, resource_path, params={}):
+ """
+ Return a list of models for ``resource_path``.
+
+ :param resource_path:
+ The path of the resource.
+
+ :param params:
+ A ``dict`` containing optional request params. Optional.
+
+ :return:
+ A ``list`` of class instances.
+
+ :raises:
+ :exc:`.RestError` when response code is not 200.
+
+ """
+ try:
+ response = requests.get(
+ urlparse.urljoin(
+ config.get('job_runner_worker', 'api_base_url'),
+ resource_path
+ ),
+ auth=HmacAuth(
+ config.get('job_runner_worker', 'public_api_key'),
+ config.get('job_runner_worker', 'private_api_key')
+ ),
+ params=params,
+ headers={'content-type': 'application/json'},
+ verify=False,
+ )
+ except RequestException as e:
+ raise RestError('Exception {0} raised with message {1}'.format(
+ e.__class__.__name__, str(e)))
+
+ if response.status_code != 200:
+ raise RestError(
+ 'GET request returned {0}'.format(response.status_code))
+
+ output = []
+
+ for obj_dict in response.json['objects']:
+ output.append(cls(obj_dict['resource_uri'], obj_dict))
+
+ if 'next' in response.json['meta'] and response.json['meta']['next']:
+ output.extend(cls.get_list(response.json['meta']['next']))
+
+ return output
+
+
+class Run(BaseRestModel):
+ """
+ Model class for run resources.
+ """
+ @property
+ def job(self):
+ return Job(self.__getattr__('job'))
+
+
+class Job(BaseRestModel):
+ """
+ Model class for job resources.
+ """
38 job_runner_worker/runner.py
View
@@ -0,0 +1,38 @@
+import logging
+
+import gevent
+from gevent.queue import Queue
+
+from job_runner_worker.cleanup import reset_incomplete_runs
+from job_runner_worker.config import config
+from job_runner_worker.enqueuer import enqueue_runs
+from job_runner_worker.events import publish
+from job_runner_worker.worker import execute_run
+
+
+logger = logging.getLogger(__name__)
+
+
+def run():
+ """
+ Start consuming runs and executing them.
+ """
+ greenlets = []
+ reset_incomplete_runs()
+ concurrent_jobs = config.getint('job_runner_worker', 'concurrent_jobs')
+
+ run_queue = Queue(concurrent_jobs)
+ event_queue = Queue()
+
+ logger.info('Start enqueue loop')
+ greenlets.append(gevent.spawn(enqueue_runs, run_queue, event_queue))
+
+ logger.info('Starting {0} workers'.format(concurrent_jobs))
+ for x in range(concurrent_jobs):
+ greenlets.append(gevent.spawn(execute_run, run_queue, event_queue))
+
+ logger.info('Starting event publisher')
+ greenlets.append(gevent.spawn(publish, event_queue))
+
+ for greenlet in greenlets:
+ greenlet.join()
0  job_runner_worker/tests/__init__.py
View
No changes.
0  job_runner_worker/tests/unit/__init__.py
View
No changes.
27 job_runner_worker/tests/unit/test_auth.py
View
@@ -0,0 +1,27 @@
+import unittest2 as unittest
+
+from mock import Mock
+
+from job_runner_worker.auth import HmacAuth
+
+
+class HmacAuthTestCase(unittest.TestCase):
+ """
+ Tests for :class:`.HmacAuth`.
+ """
+ def test_hmac_calculation(self):
+ """
+ Test HMAC calculation.
+ """
+ auth = HmacAuth('public', 'key')
+
+ r = Mock()
+ r.method = 'patch'
+ r.path_url = '/path/?foo=bar'
+ r.data = 'data body'
+ r.headers = {}
+
+ self.assertEqual(
+ 'ApiKey public:2b989ffc81712758d070fb46055b55f18a245d15',
+ auth(r).headers['Authorization']
+ )
39 job_runner_worker/tests/unit/test_cleanup.py
View
@@ -0,0 +1,39 @@
+import unittest2 as unittest
+
+from mock import Mock, call, patch
+
+from job_runner_worker.cleanup import reset_incomplete_runs
+
+
+class ModuleTestCase(unittest.TestCase):
+ """
+ Tests for :mod:`job_runner_worker.cleanup`.
+ """
+ @patch('job_runner_worker.cleanup.Run')
+ @patch('job_runner_worker.cleanup.config')
+ def test_reset_incomplete_runs(self, config, Run):
+ """
+ Test :func:`.reset_incomplete_runs`.
+ """
+ def config_side_effect(*args):
+ return {
+ ('job_runner_worker', 'run_resource_uri'): '/api/run/'
+ }[args]
+
+ config.get.side_effect = config_side_effect
+
+ incomplete_run = Mock()
+
+ Run.get_list.side_effect = [[incomplete_run], []]
+
+ reset_incomplete_runs()
+
+ self.assertEqual([
+ call('/api/run/', params={'state': 'in_queue'}),
+ call('/api/run/', params={'state': 'started'}),
+ ], Run.get_list.call_args_list)
+
+ incomplete_run.patch.assert_called_once_with({
+ 'enqueue_dts': None,
+ 'start_dts': None,
+ })
41 job_runner_worker/tests/unit/test_config.py
View
@@ -0,0 +1,41 @@
+import unittest2 as unittest
+
+from mock import Mock, patch
+
+from job_runner_worker.config import get_config_parser, setup_log_handler
+
+
+class ModuleTestCase(unittest.TestCase):
+ """
+ Tests for :py:mod:`job_runner_worker.config`.
+ """
+ @patch('job_runner_worker.config.ConfigParser')
+ @patch('job_runner_worker.config.os')
+ def test_get_config_parser(self, os, ConfigParser):
+ """
+ Test :py:func:`.get_config_parser`.
+ """
+ os.environ = {'SETTINGS_PATH': '/path/to/settings'}
+
+ config_mock = Mock()
+ ConfigParser.ConfigParser.return_value = config_mock
+
+ config = get_config_parser()
+
+ ConfigParser.ConfigParser.assert_called_once_with({
+ 'log_level': 'info',
+ })
+ config_mock.read.assert_called_once_with('/path/to/settings')
+ self.assertEqual(config_mock, config)
+
+ @patch('job_runner_worker.config.logging')
+ def test_setup_log_handler(self, logging):
+ """
+ Test :func:`.setup_log_handler`.
+ """
+ setup_log_handler('INFO')
+
+ logging.basicConfig.assert_called_once_with(
+ level=logging.INFO,
+ format='%(levelname)s - %(asctime)s - %(name)s: %(message)s',
+ )
71 job_runner_worker/tests/unit/test_enqueuer.py
View
@@ -0,0 +1,71 @@
+import unittest2 as unittest
+
+from mock import Mock, patch
+
+from job_runner_worker.enqueuer import enqueue_runs
+from job_runner_worker.models import RestError
+
+
+class ModuleTestCase(unittest.TestCase):
+ """
+ Tests for :mod:`job_runner_worker.enqueuer`.
+ """
+ @patch('job_runner_worker.enqueuer.config')
+ @patch('job_runner_worker.enqueuer.datetime')
+ @patch('job_runner_worker.enqueuer.Run')
+ @patch('job_runner_worker.enqueuer.time')
+ def test_enqueue_runs(self, time, Run, datetime, config):
+ """
+ Test :func:`.enqueue_runs`.
+ """
+ # just to break the never-ending loop :)
+ time.sleep.side_effect = Exception('Boom!')
+
+ queue = Mock()
+ queue.full.side_effect = [False, True]
+ event_queue = Mock()
+
+ run = Mock()
+ run.id = 1234
+
+ Run.get_list.return_value = [run]
+
+ self.assertRaises(Exception, enqueue_runs, queue, event_queue)
+
+ Run.get_list.assert_called_once_with(
+ config.get.return_value,
+ params={
+ 'limit': 1,
+ 'state': 'scheduled',
+ 'schedule_dts__lte': datetime.utcnow.return_value
+ .isoformat.return_value
+ }
+ )
+
+ run.patch.assert_called_once_with({
+ 'enqueue_dts': datetime.utcnow.return_value.isoformat.return_value
+ })
+ queue.put.assert_called_once_with(run)
+ event_queue.put.assert_called_once_with(
+ '{"event": "enqueued", "run_id": 1234}')
+
+ @patch('job_runner_worker.enqueuer.logger')
+ @patch('job_runner_worker.enqueuer.config')
+ @patch('job_runner_worker.enqueuer.datetime')
+ @patch('job_runner_worker.enqueuer.Run')
+ @patch('job_runner_worker.enqueuer.time')
+ def test_enqueue_runs_exception(self, time, Run, datetime, config, logger):
+ """
+ Test :func:`.enqueue_runs` raising exception.
+ """
+ # just to break the never-ending loop :)
+ time.sleep.side_effect = Exception('Boom!')
+
+ queue = Mock()
+ queue.full.side_effect = [False, True]
+ event_queue = Mock()
+
+ Run.get_list.side_effect = RestError('Bam!')
+
+ self.assertRaises(Exception, enqueue_runs, queue, event_queue)
+ self.assertEqual(1, logger.exception.call_count)
39 job_runner_worker/tests/unit/test_events.py
View
@@ -0,0 +1,39 @@
+import unittest2 as unittest
+
+from mock import call, patch
+
+from job_runner_worker.events import publish
+
+
+class ModuleTestCase(unittest.TestCase):
+    """
+    Tests for :mod:`job_runner_worker.events`.
+    """
+    @patch('job_runner_worker.events.config')
+    @patch('job_runner_worker.events.zmq')
+    def test_publish(self, zmq, config):
+        """
+        Test :func:`.publish`.
+        """
+        # map the (section, option) config lookups to fixed test values
+        def config_side_effect(*args):
+            return {
+                ('job_runner_worker', 'ws_server_hostname'): 'localhost',
+                ('job_runner_worker', 'ws_server_port'): 5555,
+            }[args]
+
+        config.get.side_effect = config_side_effect
+
+        context = zmq.Context.return_value
+        publisher = context.socket.return_value
+
+        # a plain list serves as the (iterable) event queue here
+        event_queue = [
+            'foo',
+            'bar',
+        ]
+
+        publish(event_queue)
+
+        # each event is sent as a multipart message on 'worker.event'
+        self.assertEqual([
+            call(['worker.event', 'foo']),
+            call(['worker.event', 'bar']),
+        ], publisher.send_multipart.call_args_list)
189 job_runner_worker/tests/unit/test_models.py
View
@@ -0,0 +1,189 @@
+import unittest2 as unittest
+
+from mock import Mock, patch
+
+from job_runner_worker.models import BaseRestModel, RestError, Run
+
+
+class BaseRestModelTestCase(unittest.TestCase):
+    """
+    Tests for :class:`.BaseRestModel`.
+    """
+    # NOTE: @patch decorators apply bottom-up, so the mock arguments
+    # arrive in reverse order (requests, config, HmacAuth).
+    @patch('job_runner_worker.models.HmacAuth')
+    @patch('job_runner_worker.models.config')
+    @patch('job_runner_worker.models.requests')
+    def test_patch(self, requests, config, HmacAuth):
+        """
+        Test :meth:`.BaseRestModel.patch` issuing a JSON PATCH request.
+        """
+        # map the (section, option) config lookups to fixed test values
+        def config_get_side_effect(*args):
+            return {
+                ('job_runner_worker', 'api_base_url'): 'http://api/',
+                ('job_runner_worker', 'private_api_key'): 'key',
+                ('job_runner_worker', 'public_api_key'): 'public',
+            }[args]
+
+        config.get.side_effect = config_get_side_effect
+        response = requests.patch.return_value
+        # 202 Accepted is treated as success
+        response.status_code = 202
+
+        base_model = BaseRestModel('/path/to/resource')
+        base_model.patch({'field_name': 'field_value', 'published': True})
+
+        requests.patch.assert_called_once_with(
+            'http://api/path/to/resource',
+            auth=HmacAuth.return_value,
+            headers={'content-type': 'application/json'},
+            data='{"field_name": "field_value", "published": true}',
+            verify=False,
+        )
+
+    @patch('job_runner_worker.models.HmacAuth')
+    @patch('job_runner_worker.models.config')
+    @patch('job_runner_worker.models.requests')
+    def test_patch_not_202(self, requests, config, HmacAuth):
+        """
+        Test :meth:`.BaseRestModel.patch` raising :exc:`.RestError` when
+        the response status is not 202.
+        """
+        def config_get_side_effect(*args):
+            return {
+                ('job_runner_worker', 'api_base_url'): 'http://api/',
+                ('job_runner_worker', 'private_api_key'): 'key',
+                ('job_runner_worker', 'public_api_key'): 'public',
+            }[args]
+
+        config.get.side_effect = config_get_side_effect
+        response = requests.patch.return_value
+        response.status_code = 500
+
+        base_model = BaseRestModel('/path/to/resource')
+        self.assertRaises(RestError, base_model.patch, {'foo': 'bar'})
+
+    @patch('job_runner_worker.models.HmacAuth')
+    @patch('job_runner_worker.models.config')
+    @patch('job_runner_worker.models.requests')
+    def test__get_json_data(self, requests, config, HmacAuth):
+        """
+        Tests :meth:`.BaseRestModel._get_json_data`.
+        """
+        def config_get_side_effect(*args):
+            return {
+                ('job_runner_worker', 'api_base_url'): 'http://api/',
+                ('job_runner_worker', 'private_api_key'): 'key',
+                ('job_runner_worker', 'public_api_key'): 'public',
+            }[args]
+
+        config.get.side_effect = config_get_side_effect
+        response = requests.get.return_value
+        response.status_code = 200
+
+        base_model = BaseRestModel('/path/to/resource')
+
+        # NOTE(review): compares against ``response.json`` as an
+        # attribute (requests < 1.0 style) — confirm against the pinned
+        # requests version
+        self.assertEqual(response.json, base_model._get_json_data())
+
+        requests.get.assert_called_once_with(
+            'http://api/path/to/resource',
+            auth=HmacAuth.return_value,
+            headers={'content-type': 'application/json'},
+            verify=False,
+        )
+
+        # HMAC auth is built from the public and private API keys
+        HmacAuth.assert_called_once_with('public', 'key')
+
+    @patch('job_runner_worker.models.HmacAuth')
+    @patch('job_runner_worker.models.config')
+    @patch('job_runner_worker.models.requests')
+    def test__get_json_data_not_200(self, requests, config, HmacAuth):
+        """
+        Tests :meth:`.BaseRestModel._get_json_data` not returning 200.
+        """
+        def config_get_side_effect(*args):
+            return {
+                ('job_runner_worker', 'api_base_url'): 'http://api/',
+                ('job_runner_worker', 'private_api_key'): 'key',
+                ('job_runner_worker', 'public_api_key'): 'public',
+            }[args]
+
+        config.get.side_effect = config_get_side_effect
+        response = requests.get.return_value
+        response.status_code = 401
+
+        base_model = BaseRestModel('/path/to/resource')
+
+        self.assertRaises(RestError, base_model._get_json_data)
+
+    @patch('job_runner_worker.models.HmacAuth')
+    @patch('job_runner_worker.models.config')
+    @patch('job_runner_worker.models.requests')
+    def test_get_list(self, requests, config, HmacAuth):
+        """
+        Test :meth:`.BaseRestModel.get_list` wrapping each returned
+        object in a model instance.
+        """
+        def config_get_side_effect(*args):
+            return {
+                ('job_runner_worker', 'api_base_url'): 'http://api/',
+                ('job_runner_worker', 'private_api_key'): 'key',
+                ('job_runner_worker', 'public_api_key'): 'public',
+            }[args]
+
+        config.get.side_effect = config_get_side_effect
+        response = requests.get.return_value
+        response.status_code = 200
+        # tastypie-style list payload; 'next': None means a single page
+        response.json = {
+            'objects': [
+                {'id': 1, 'resource_uri': 'foo'},
+                {'id': 2, 'resource_uri': 'bar'},
+            ],
+            'meta': {
+                'next': None,
+            }
+        }
+
+        out = BaseRestModel.get_list('/path/to/resource')
+        self.assertEqual(2, len(out))
+        # each object dict ends up as the instance's cached _data
+        self.assertEqual({'id': 1, 'resource_uri': 'foo'}, out[0]._data)
+        self.assertEqual({'id': 2, 'resource_uri': 'bar'}, out[1]._data)
+
+    @patch('job_runner_worker.models.HmacAuth')
+    @patch('job_runner_worker.models.config')
+    @patch('job_runner_worker.models.requests')
+    def test_get_list_not_200(self, requests, config, HmacAuth):
+        """
+        Test :meth:`.BaseRestModel.get_list` raising :exc:`.RestError`
+        when the response status is not 200.
+        """
+        def config_get_side_effect(*args):
+            return {
+                ('job_runner_worker', 'api_base_url'): 'http://api/',
+                ('job_runner_worker', 'private_api_key'): 'key',
+                ('job_runner_worker', 'public_api_key'): 'public',
+            }[args]
+
+        config.get.side_effect = config_get_side_effect
+        response = requests.get.return_value
+        response.status_code = 401
+
+        self.assertRaises(RestError, BaseRestModel.get_list, '/resource')
+
+    def test___getattr__(self):
+        """
+        Test ``__getattr__`` resolving attributes from the JSON data.
+        """
+        base_model = BaseRestModel(Mock())
+        base_model._get_json_data = Mock(return_value={'foo': 'bar'})
+        self.assertEqual('bar', base_model.foo)
+
+
+class RunTestCase(unittest.TestCase):
+    """
+    Tests for :class:`.Run`.
+    """
+    @patch('job_runner_worker.models.Job')
+    def test_job_property(self, JobMock):
+        """
+        Test that the ``job`` property wraps the related resource URI in
+        a :class:`.Job` instance.
+        """
+        run_model = Run(Mock(), {'job': '/job/resource'})
+
+        self.assertEqual(JobMock.return_value, run_model.job)
+
+        # the Job model is constructed from the run's 'job' URI
+        JobMock.assert_called_once_with('/job/resource')
52 job_runner_worker/tests/unit/test_runner.py
View
@@ -0,0 +1,52 @@
+import unittest2 as unittest
+
+from mock import call, patch
+
+from job_runner_worker.runner import run
+
+
+class ModuleTestCase(unittest.TestCase):
+    """
+    Tests for :mod:`job_runner_worker.runner`.
+    """
+    # NOTE: @patch decorators apply bottom-up, so the mock arguments
+    # arrive in reverse order.
+    @patch('job_runner_worker.runner.publish')
+    @patch('job_runner_worker.runner.reset_incomplete_runs')
+    @patch('job_runner_worker.runner.execute_run')
+    @patch('job_runner_worker.runner.enqueue_runs')
+    @patch('job_runner_worker.runner.gevent')
+    @patch('job_runner_worker.runner.Queue')
+    @patch('job_runner_worker.runner.config')
+    def test_run(
+            self,
+            config,
+            Queue,
+            gevent,
+            enqueue_runs,
+            execute_run,
+            reset_incomplete_runs,
+            publish):
+        """
+        Test :func:`.run` spawning the expected greenlets.
+        """
+        def config_side_effect(*args):
+            return {
+                ('job_runner_worker', 'concurrent_jobs'): 4,
+            }[args]
+
+        config.getint.side_effect = config_side_effect
+
+        run()
+
+        # stale runs are reset before any work is started
+        reset_incomplete_runs.assert_called_once_with()
+
+        # concurrent_jobs = 4 -> one enqueuer, four executors and one
+        # event publisher greenlet
+        self.assertEqual([
+            call(enqueue_runs, Queue.return_value, Queue.return_value),
+            call(execute_run, Queue.return_value, Queue.return_value),
+            call(execute_run, Queue.return_value, Queue.return_value),
+            call(execute_run, Queue.return_value, Queue.return_value),
+            call(execute_run, Queue.return_value, Queue.return_value),
+            call(
+                publish,
+                Queue.return_value,
+            )
+        ], gevent.spawn.call_args_list)
44 job_runner_worker/tests/unit/test_worker.py
View
@@ -0,0 +1,44 @@
+import subprocess
+import unittest2 as unittest
+
+from mock import Mock, call, patch
+
+from job_runner_worker.worker import execute_run
+
+
+class ModuleTestCase(unittest.TestCase):
+    """
+    Tests for :mod:`job_runner_worker.worker`.
+    """
+    # swap the worker's gevent_subprocess alias for the real stdlib
+    # subprocess module, so the generated script is actually executed
+    @patch('job_runner_worker.worker.subprocess', subprocess)
+    @patch('job_runner_worker.worker.config')
+    @patch('job_runner_worker.worker.datetime')
+    def test_execute_run(self, datetime, config):
+        """
+        Test :func:`.execute_run` end-to-end with a real bash script.
+        """
+        # script temp files are written to /tmp
+        config.get.return_value = '/tmp'
+
+        run = Mock()
+        run.id = 1234
+        run.job.script_content_rendered = (
+            '#!/usr/bin/env bash\n\necho "Hello World!";\n')
+
+        event_queue = Mock()
+
+        # a plain list serves as the (iterable) run queue
+        execute_run([run], event_queue)
+
+        dts = datetime.utcnow.return_value.isoformat.return_value
+
+        # the run is patched once on start and once on return, with the
+        # script's stdout captured as the log
+        self.assertEqual([
+            call({'start_dts': dts}),
+            call({
+                'return_dts': dts,
+                'return_log': 'Hello World!\n',
+                'return_success': True,
+            })
+        ], run.patch.call_args_list)
+        self.assertEqual([
+            call('{"event": "started", "run_id": 1234}'),
+            call('{"event": "returned", "run_id": 1234}'),
+        ], event_queue.put.call_args_list)
55 job_runner_worker/worker.py
View
@@ -0,0 +1,55 @@
+import json
+import logging
+import os
+import tempfile
+from datetime import datetime
+
+import gevent_subprocess as subprocess
+
+from job_runner_worker.config import config
+
+
+logger = logging.getLogger(__name__)
+
+
+def execute_run(run_queue, event_queue):
+    """
+    Execute runs from the ``run_queue``.
+
+    For each consumed run the rendered job script is written to an
+    executable temp file, executed, and the run is patched with its
+    start/return timestamps, captured output and success flag. A
+    "started" and a "returned" event are pushed to the ``event_queue``.
+
+    :param run_queue:
+        An instance of ``Queue`` to consume run instances from.
+
+    :param event_queue:
+        An instance of ``Queue`` to push events to.
+
+    """
+    logger.info('Started run executer')
+
+    for run in run_queue:
+
+        # write the rendered script to a temp file in the configured dir
+        file_desc, file_path = tempfile.mkstemp(
+            dir=config.get('job_runner_worker', 'script_temp_path')
+        )
+        file_obj = os.fdopen(file_desc, 'w')
+        # owner rwx: the file is executed directly (shebang) below
+        os.chmod(file_path, 0700)
+        # strip carriage-returns (CRLF -> LF) so the shebang line works
+        file_obj.write(run.job.script_content_rendered.replace('\r', ''))
+        file_obj.close()
+
+        # mark the run as started and publish the event
+        run.patch({
+            'start_dts': datetime.utcnow().isoformat(' ')
+        })
+        event_queue.put(json.dumps({'event': 'started', 'run_id': run.id}))
+
+        logger.info('Starting run {0}'.format(run.resource_uri))
+        sub_proc = subprocess.Popen(
+            [file_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        out, err = sub_proc.communicate()
+        logger.info('Run {0} ended'.format(run.resource_uri))
+
+        # record the result: log is stdout followed by stderr; success
+        # means a zero exit code
+        run.patch({
+            'return_dts': datetime.utcnow().isoformat(' '),
+            'return_log': '{0}{1}'.format(out, err),
+            'return_success': False if sub_proc.returncode else True,
+        })
+        event_queue.put(json.dumps({'event': 'returned', 'run_id': run.id}))
+        os.remove(file_path)
5 requirements.txt
View
@@ -0,0 +1,5 @@
+argparse
+gevent
+gevent_subprocess
+requests
+pyzmq
30 scripts/job_runner_worker
View
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+
+# Entry-point script for the Job Runner worker.
+# gevent monkey-patching must run before any other imports.
+from gevent import monkey; monkey.patch_all()
+
+import argparse
+import os
+
+parser = argparse.ArgumentParser(description='Job Runner worker')
+
+parser.add_argument(
+    '--config-path',
+    dest='config_path',
+    type=str,
+    default=None,
+    help='absolute path to config file (default: SETTINGS_PATH env variable)',
+)
+
+if __name__ == '__main__':
+    arg_obj = parser.parse_args()
+
+    # export the config path before importing the config module — the
+    # deferred import below suggests SETTINGS_PATH is read at import
+    # time (TODO confirm against job_runner_worker.config)
+    if arg_obj.config_path:
+        os.environ['SETTINGS_PATH'] = arg_obj.config_path
+
+    from job_runner_worker.config import config, setup_log_handler
+    from job_runner_worker.runner import run
+
+    setup_log_handler(
+        log_level=config.get('job_runner_worker', 'log_level').upper())
+
+    run()
27 setup.py
View
@@ -0,0 +1,27 @@
+from setuptools import setup
+
+# imported only to read the package version from a single location
+import job_runner_worker
+
+
+setup(
+    name='job-runner-worker',
+    # version lives in job_runner_worker/__init__.py
+    version=job_runner_worker.__version__,
+    url='https://github.com/spilgames/dwh/',
+    author='Orne Brocaar',
+    author_email='orne.brocaar@spilgames.com',
+    description='Job-Runner Worker',
+    long_description=open('README.rst').read(),
+    packages=[
+        'job_runner_worker',
+    ],
+    scripts=[
+        'scripts/job_runner_worker',
+    ],
+    # keep in sync with requirements.txt
+    install_requires=[
+        'argparse',
+        'gevent',
+        'gevent_subprocess',
+        'requests',
+        'pyzmq',
+    ]
+)
8 test-requirements.txt
View
@@ -0,0 +1,8 @@
+# Documentation
+sphinx
+
+# Testing
+coverage
+mock
+pep8
+unittest2
Please sign in to comment.
Something went wrong with that request. Please try again.