From ef230605204d293ace08ebd928416eef1f96e972 Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Tue, 9 Jun 2015 15:21:30 -0700 Subject: [PATCH 1/8] renames blueprint to archiver, adds to app.py, adds doc strings, removes hgmo and mozharness refs out of generic methods --- relengapi/app.py | 1 + relengapi/blueprints/archiver/__init__.py | 109 ++++++++++++++++++++++ relengapi/blueprints/archiver/tasks.py | 74 +++++++++++++++ relengapi/blueprints/archiver/types.py | 26 ++++++ 4 files changed, 210 insertions(+) create mode 100644 relengapi/blueprints/archiver/__init__.py create mode 100644 relengapi/blueprints/archiver/tasks.py create mode 100644 relengapi/blueprints/archiver/types.py diff --git a/relengapi/app.py b/relengapi/app.py index 83f1836e..3e7970a3 100644 --- a/relengapi/app.py +++ b/relengapi/app.py @@ -83,6 +83,7 @@ def _load_bp(n): 'slaveloan', 'tokenauth', 'tooltool', + 'archiver', ]] diff --git a/relengapi/blueprints/archiver/__init__.py b/relengapi/blueprints/archiver/__init__.py new file mode 100644 index 00000000..7c8a0347 --- /dev/null +++ b/relengapi/blueprints/archiver/__init__.py @@ -0,0 +1,109 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import logging +import os + +from flask import Blueprint +from flask import current_app +from flask import redirect +from flask import url_for +from relengapi.blueprints.archiver.tasks import create_and_upload_archive +from relengapi.blueprints.archiver.types import MozharnessArchiveTask +from relengapi.lib import api +from werkzeug.exceptions import NotFound + +bp = Blueprint('archiver', __name__) +log = logging.getLogger(__name__) + +GET_EXPIRES_IN = 300 + +@bp.route('/status/') +@api.apimethod(MozharnessArchiveTask, unicode) +def task_status(task_id): + """ + Check and return the current state of the create_and_upload_archive celery task with task id + of . + + If the task is unknown, state will be PENDING. Once the task starts it will be updated to + PROGRESS and finally, if it completes, it will be either SUCCESS (no exceptions), or FAILURE. + + See update_state() within create_and_upload_archive and + http://celery.readthedocs.org/en/latest/reference/celery.states.html for more details. + + If state is SUCCESS, it is safe to check response['s3_urls'] for the archives submitted to s3 + """ + task = create_and_upload_archive.AsyncResult(task_id) + task_info = task.info or {} + response = { + 'state': task.state, + 'src_url': task_info.get('src_url', ''), + 's3_urls': task_info.get('s3_urls', {}) + } + if task.state != 'FAILURE': + response['status'] = task_info.get('status', 'no status available at this point.') + else: + # something went wrong + response['status'] = str(task.info) # this is the exception raised + + return MozharnessArchiveTask(**response) + + +@bp.route('/mozharness/') +@api.apimethod(None, unicode, unicode, unicode, unicode, status_code=302) +def get_mozharness_archive(rev, repo="mozilla-central", region='us-west-2', suffix='tar.gz'): + cfg = current_app.config['SUBREPO_MOZHARNESS_CFG'] + return get_archive_from_repo(cfg, rev, repo, region, suffix) + + +def get_archive_from_repo(cfg, rev, repo, region, suffix): + """ + A generic getter for retrieving an s3 location of an archive where the archive is based off a + given repo name, revision, and possibly sub-dir. + + sub-dir: hg.mozilla.org supports archives of sub directories within a repository. 
This + flexibility allows for creating archives of only a portion of what would normally be an entire + repo archive. + + logic flow: + If their is already a key based on given args, a re-direct link is given for the + s3 location. If the key does not exist, download the archive from src url, upload it to and + return all s3 url locations. + + When the key does not exist, the remaining work will be assigned to a celery background task + with a url location returned immediately for obtaining task state updates. + """ + bucket_region = None + bucket_name = None + for bucket in cfg['S3_BUCKETS']: + if region in bucket['REGION']: + bucket_region = bucket['REGION'] + bucket_name = bucket['NAME'] + + # sanity check + if not bucket_name or not bucket_region: + valid_regions = str([bucket['REGION'] for bucket in cfg['S3_BUCKETS']]) + log.warning('Unsupported region given: "{}" Valid Regions "{}"'.format(region, valid_regions)) + raise NotFound + + s3 = current_app.aws.connect_to('s3', bucket_region) + bucket = s3.get_bucket(bucket_name) + key = '{repo}-{rev}.{suffix}'.format(repo=os.path.basename(repo), rev=rev, suffix=suffix) + + # first, see if the key exists + if not bucket.get_key(key): + task_id = rev + if create_and_upload_archive.AsyncResult(task_id).state != 'PROGRESS': + # task is currently not in progress so start one. + create_and_upload_archive.apply_async(args=[cfg, rev, repo, suffix, key], task_id=task_id) + return {}, 202, {'Location': url_for('archiver.task_status', task_id=task_id)} + + log.info("generating GET URL to {}, expires in {}s".format(rev, GET_EXPIRES_IN)) + # return 302 pointing to s3 url with archive + signed_url = s3.generate_url( + method='GET', expires_in=GET_EXPIRES_IN, + bucket=bucket_name, key=key + ) + return redirect(signed_url) + diff --git a/relengapi/blueprints/archiver/tasks.py b/relengapi/blueprints/archiver/tasks.py new file mode 100644 index 00000000..1887f4ba --- /dev/null +++ b/relengapi/blueprints/archiver/tasks.py @@ -0,0 +1,74 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. +import logging +import os +import tempfile +import requests +import urllib2 +from boto.s3.key import Key + +from flask import current_app +from relengapi.lib import celery + +log = logging.getLogger(__name__) + +GET_EXPIRES_IN = 300 + + +def upload_url_archive_to_s3(key, url, region, bucket, suffix): + """ + Given a src url, upload contents to an s3 bucket by a given key. + """ + s3 = current_app.aws.connect_to('s3', region) + k = Key(s3.get_bucket(bucket)) + k.key = key + + # rather than worrying about pointers and seeking, let's avail of a named temp file that is + # allowed to persist after the file is closed. Finally, when are finished, we can clean up + # the temp file + temp_file = tempfile.NamedTemporaryFile(mode="wb", suffix=".{}".format(suffix), delete=False) + data = urllib2.urlopen(url).read() + with open(temp_file.name, "wb") as tmpf: + tmpf.write(data) + k.set_contents_from_filename(temp_file.name) + os.unlink(temp_file.name) # clean up tmp file + + return s3.generate_url(expires_in=GET_EXPIRES_IN, method='GET', bucket=bucket, key=key) + + +@celery.task(bind=True) +def create_and_upload_archive(self, cfg, rev, repo, suffix, key): + """ + A celery task that downloads an archive if it exists from a src location and attempts to upload + the archive to a supported bucket in each supported region. 
+ + Throughout this process, update the state of the task and finally return the location of the + s3 urls if successful. + """ + return_status = "Task completed! Check 's3_urls' for upload locations." + s3_urls = {} + src_url = cfg['URL_SRC_TEMPLATE'].format(repo=repo, rev=rev, suffix=suffix) + + self.update_state(state='PROGRESS', + meta={'status': 'ensuring archive origin location exists.', 'src_url': src_url}) + resp = requests.get(src_url) + if resp.status_code == 200: + self.update_state(state='PROGRESS', + meta={'status': 'uploading archive to s3 buckets', 'src_url': src_url}) + for bucket in cfg['S3_BUCKETS']: + s3_urls[bucket['REGION']] = upload_url_archive_to_s3(key, src_url, bucket['REGION'], + bucket['NAME'], suffix) + if not any(s3_urls.values()): + return_status = "Could not upload any archives to s3. Check logs for errors." + log.warning(return_status) + else: + return_status = "Can't find archive given branch, rev, and suffix. Does url {} exist? " \ + "Request Response code: {}".format(src_url, resp.status_code) + log.warning(return_status) + + return { + 'status': return_status, + 'src_url': src_url, + 's3_urls': s3_urls, + } diff --git a/relengapi/blueprints/archiver/types.py b/relengapi/blueprints/archiver/types.py new file mode 100644 index 00000000..d1c7ad87 --- /dev/null +++ b/relengapi/blueprints/archiver/types.py @@ -0,0 +1,26 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import wsme.types + + +class MozharnessArchiveTask(wsme.types.Base): + """Represents a running task and its current state + """ + + #: this is the current state of the task + #: e.g. "PENDING", "PROGRESS", "SUCCESS", "FAILURE" + state = unicode + + #: current msg status of task + #: e.g. "Downloading archive from hg.m.o" + status = unicode + + #: archive url origin that s3 item is based off of + src_url = unicode + + #: s3 links for the archives by region + s3_urls = {str: str} + + From 2e7b2a1a184842a4cdcca6bac6cdbbfef2d94e8c Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Tue, 9 Jun 2015 16:01:52 -0700 Subject: [PATCH 2/8] re-order imports of tasks.py --- relengapi/blueprints/archiver/tasks.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/relengapi/blueprints/archiver/tasks.py b/relengapi/blueprints/archiver/tasks.py index 1887f4ba..fa242e61 100644 --- a/relengapi/blueprints/archiver/tasks.py +++ b/relengapi/blueprints/archiver/tasks.py @@ -3,9 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import logging import os -import tempfile import requests +import tempfile import urllib2 + from boto.s3.key import Key from flask import current_app From ca9c4df7f0fd49c3557f0840cdf8800770c747b4 Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Thu, 11 Jun 2015 12:24:34 -0700 Subject: [PATCH 3/8] adds docs, cleans up pep and mixed imports, tests still WIP --- relengapi/blueprints/archiver/__init__.py | 12 +-- relengapi/blueprints/archiver/tasks.py | 28 +++---- .../blueprints/archiver/test_archiver.py | 75 +++++++++++++++++ relengapi/blueprints/archiver/types.py | 2 - relengapi/docs/deployment/archiver.rst | 31 +++++++ relengapi/docs/deployment/index.rst | 1 + relengapi/docs/usage/archiver.rst | 81 +++++++++++++++++++ relengapi/docs/usage/index.rst | 1 + 8 files changed, 207 insertions(+), 24 deletions(-) create mode 100644 relengapi/blueprints/archiver/test_archiver.py create mode 100644 relengapi/docs/deployment/archiver.rst create mode 100644 relengapi/docs/usage/archiver.rst diff --git a/relengapi/blueprints/archiver/__init__.py b/relengapi/blueprints/archiver/__init__.py index 7c8a0347..84a506ea 100644 --- a/relengapi/blueprints/archiver/__init__.py +++ b/relengapi/blueprints/archiver/__init__.py @@ -19,6 +19,7 @@ GET_EXPIRES_IN = 300 + @bp.route('/status/') @api.apimethod(MozharnessArchiveTask, unicode) def task_status(task_id): @@ -27,7 +28,7 @@ def task_status(task_id): of . If the task is unknown, state will be PENDING. Once the task starts it will be updated to - PROGRESS and finally, if it completes, it will be either SUCCESS (no exceptions), or FAILURE. + STARTED and finally, if it completes, it will be either SUCCESS (no exceptions), or FAILURE. See update_state() within create_and_upload_archive and http://celery.readthedocs.org/en/latest/reference/celery.states.html for more details. @@ -84,7 +85,8 @@ def get_archive_from_repo(cfg, rev, repo, region, suffix): # sanity check if not bucket_name or not bucket_region: valid_regions = str([bucket['REGION'] for bucket in cfg['S3_BUCKETS']]) - log.warning('Unsupported region given: "{}" Valid Regions "{}"'.format(region, valid_regions)) + log.warning('Unsupported region given: "{}" Valid Regions "{}"'.format(region, + valid_regions)) raise NotFound s3 = current_app.aws.connect_to('s3', bucket_region) @@ -94,9 +96,10 @@ def get_archive_from_repo(cfg, rev, repo, region, suffix): # first, see if the key exists if not bucket.get_key(key): task_id = rev - if create_and_upload_archive.AsyncResult(task_id).state != 'PROGRESS': + if create_and_upload_archive.AsyncResult(task_id).state != 'STARTED': # task is currently not in progress so start one. 
- create_and_upload_archive.apply_async(args=[cfg, rev, repo, suffix, key], task_id=task_id) + create_and_upload_archive.apply_async(args=[cfg, rev, repo, suffix, key], + task_id=task_id) return {}, 202, {'Location': url_for('archiver.task_status', task_id=task_id)} log.info("generating GET URL to {}, expires in {}s".format(rev, GET_EXPIRES_IN)) @@ -106,4 +109,3 @@ def get_archive_from_repo(cfg, rev, repo, region, suffix): bucket=bucket_name, key=key ) return redirect(signed_url) - diff --git a/relengapi/blueprints/archiver/tasks.py b/relengapi/blueprints/archiver/tasks.py index fa242e61..10f4470e 100644 --- a/relengapi/blueprints/archiver/tasks.py +++ b/relengapi/blueprints/archiver/tasks.py @@ -5,7 +5,6 @@ import os import requests import tempfile -import urllib2 from boto.s3.key import Key @@ -28,17 +27,17 @@ def upload_url_archive_to_s3(key, url, region, bucket, suffix): # rather than worrying about pointers and seeking, let's avail of a named temp file that is # allowed to persist after the file is closed. Finally, when are finished, we can clean up # the temp file + resp = requests.get(url) temp_file = tempfile.NamedTemporaryFile(mode="wb", suffix=".{}".format(suffix), delete=False) - data = urllib2.urlopen(url).read() with open(temp_file.name, "wb") as tmpf: - tmpf.write(data) + tmpf.write(resp.content) k.set_contents_from_filename(temp_file.name) os.unlink(temp_file.name) # clean up tmp file return s3.generate_url(expires_in=GET_EXPIRES_IN, method='GET', bucket=bucket, key=key) -@celery.task(bind=True) +@celery.task(bind=True, track_started=True) def create_and_upload_archive(self, cfg, rev, repo, suffix, key): """ A celery task that downloads an archive if it exists from a src location and attempts to upload @@ -47,29 +46,24 @@ def create_and_upload_archive(self, cfg, rev, repo, suffix, key): Throughout this process, update the state of the task and finally return the location of the s3 urls if successful. """ - return_status = "Task completed! Check 's3_urls' for upload locations." + status = "Task completed! Check 's3_urls' for upload locations." s3_urls = {} src_url = cfg['URL_SRC_TEMPLATE'].format(repo=repo, rev=rev, suffix=suffix) - self.update_state(state='PROGRESS', - meta={'status': 'ensuring archive origin location exists.', 'src_url': src_url}) - resp = requests.get(src_url) + resp = requests.head(src_url) if resp.status_code == 200: - self.update_state(state='PROGRESS', - meta={'status': 'uploading archive to s3 buckets', 'src_url': src_url}) for bucket in cfg['S3_BUCKETS']: s3_urls[bucket['REGION']] = upload_url_archive_to_s3(key, src_url, bucket['REGION'], bucket['NAME'], suffix) if not any(s3_urls.values()): - return_status = "Could not upload any archives to s3. Check logs for errors." - log.warning(return_status) + status = "Could not upload any archives to s3. Check logs for errors." + log.warning(status) else: - return_status = "Can't find archive given branch, rev, and suffix. Does url {} exist? " \ - "Request Response code: {}".format(src_url, resp.status_code) - log.warning(return_status) - + status = "Can't find archive given branch, rev, and suffix. Does url {} exist? 
" \ + "Request Response code: {}".format(src_url, resp.status_code) + log.warning(status) return { - 'status': return_status, + 'status': status, 'src_url': src_url, 's3_urls': s3_urls, } diff --git a/relengapi/blueprints/archiver/test_archiver.py b/relengapi/blueprints/archiver/test_archiver.py new file mode 100644 index 00000000..2c341e9e --- /dev/null +++ b/relengapi/blueprints/archiver/test_archiver.py @@ -0,0 +1,75 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# import json +import moto + +# from relengapi.blueprints.archiver.tasks import create_and_upload_archive +# from time import sleep +# +# from nose.tools import eq_ +from relengapi.lib.testing.context import TestContext + +cfg = { + 'RELENGAPI_CELERY_LOG_LEVEL': 'DEBUG', + 'debug': True, + + 'AWS': { + 'access_key_id': 'aa', + 'secret_access_key': 'ss', + }, + + 'SUBREPO_MOZHARNESS_CFG': { + 'S3_BUCKETS': [ + {'REGION': 'us-east-1', 'NAME': 'archiver-bucket-1'}, + {'REGION': 'us-west-2', 'NAME': 'archiver-bucket-2'} + ], + "URL_SRC_TEMPLATE": "http://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness" + }, + 'CELERY_BROKER_URL': 'memory://', + 'CELERY_BACKEND': 'cache', + "CELERY_CACHE_BACKEND": 'memory', + 'CELERY_ALWAYS_EAGER': True, +} + +test_context = TestContext(config=cfg) +# test_context = TestContext() + + +# @test_context +# def test_hello(client): +# print 'hi there' +# rv = client.get('/archiver/') +# eq_(rv.status_code, 200) +# eq_(json.loads(rv.data)['result'], {'message': 'hello world'}) + + +@moto.mock_s3 +@test_context +def test_foo(app, client): + app.debug = True + s3 = app.aws.connect_to('s3', 'us-east-1') + s3.create_bucket('archiver-bucket-1') + s32 = app.aws.connect_to('s3', 'us-west-2') + s32.create_bucket('archiver-bucket-2') + + # from pprint import pprint + + # with app.app_context(): + # resp = client.get('/archiver/mozharness/9ebd530c5843') + # sleep(3) + # resp2 = client.get('/archiver/status/9ebd530c5843') + # for a in dir(resp): + # pprint(a + ' : ' + str(resp.__getattribute__(a))) + # for a in dir(resp2): + # pprint(a + ' : ' + str(resp2.__getattribute__(a))) + + # rev, repo, suffix, key, = 'foo', 'mozilla-central', 'tar.gz', 'mozilla-central-foo.tar.gz' + # config = cfg['SUBREPO_MOZHARNESS_CFG'] + # with app.app_context(): + # task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], + # task_id='9ebd530c5843') + # for a in dir(task): + # pprint(a + ' : ' + str(task.__getattribute__(a))) + # eq_(task.status_code, 404, task.info) diff --git a/relengapi/blueprints/archiver/types.py b/relengapi/blueprints/archiver/types.py index d1c7ad87..67dcbe57 100644 --- a/relengapi/blueprints/archiver/types.py +++ b/relengapi/blueprints/archiver/types.py @@ -22,5 +22,3 @@ class MozharnessArchiveTask(wsme.types.Base): #: s3 links for the archives by region s3_urls = {str: str} - - diff --git a/relengapi/docs/deployment/archiver.rst b/relengapi/docs/deployment/archiver.rst new file mode 100644 index 00000000..d114ba3f --- /dev/null +++ b/relengapi/docs/deployment/archiver.rst @@ -0,0 +1,31 @@ +Deploying Archiver +================== + +Archiver requires a separate configuration for each endpoint. Within these endpoint configs, you need to state the s3 +buckets, where each bucket represents a region and the template url that will be formatted by query params passed to +the endpoint. 
+ +For AWS credentials, each bucket should be limited to the AWS IAM role corresponding to the AWS credentials. Buckets in +the configuration are required to be pre-existing. + +Finally, Archiver avails of Celery. You will need to provide a broker and back-end. + +Example config:: + + # using rabbitmq locally in a staging setup + CELERY_BROKER_URL='amqp://guest@localhost//' + CELERY_BACKEND='amqp' + + AWS = { + 'access_key_id': 'accessKeyExample', + 'secret_access_key': 'secretAccessKeyExample', + } + + # for the mozharness endpoint + SUBREPO_MOZHARNESS_CFG = { + 'S3_BUCKETS': [ + {'REGION': 'us-east-1', 'NAME': 'example-bucket-name-for-us-east-1'}, + {'REGION': 'us-west-2', 'NAME': 'example-bucket-name-for-us-west-2'} + ], + "URL_SRC_TEMPLATE": "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness" + } diff --git a/relengapi/docs/deployment/index.rst b/relengapi/docs/deployment/index.rst index 10c69a13..497b8649 100644 --- a/relengapi/docs/deployment/index.rst +++ b/relengapi/docs/deployment/index.rst @@ -16,3 +16,4 @@ This is mostly limited to system administrators and those responsible for config sqs tooltool slaveloan + archiver diff --git a/relengapi/docs/usage/archiver.rst b/relengapi/docs/usage/archiver.rst new file mode 100644 index 00000000..7ffe9393 --- /dev/null +++ b/relengapi/docs/usage/archiver.rst @@ -0,0 +1,81 @@ +Archiver +======== + +Archiver simply takes repository archive urls and returns an s3 location for the same archive, while submitting the +archive to s3 if it doesn't already exist. + + +If the archive exists, the response will redirect with a 302 and location for the s3 url equivalent. + +If the archive does not already exist in s3, the response will accept the request (202) and return the task location url +that is monitoring the current state of creating and uploading the archive to s3. + +Currently, only Mozharness is configured: + "URL_SRC_TEMPLATE": "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness" + + + +Examples:: + + # To get an in-tree Mozharness archive based on: http://hg.mozilla.org/mozilla-central/rev/3d11cb4f31b9 + > curl -i http://127.0.0.1:8010/archiver/mozharness/3d11cb4f31b9\?repo\=mozilla-central\®ion\=us-east-1 + HTTP/1.0 202 ACCEPTED + Content-Type: application/json + Content-Length: 18 + Location: http://127.0.0.1:8010/archiver/status/3d11cb4f31b9 + Server: Werkzeug/0.10.4 Python/2.7.6 + Date: Tue, 09 Jun 2015 22:19:15 GMT + + { + "result": {} + } + + + # In the above example, the s3 archive does not exist so Archiver will create it. poll the Location header url in the above response to monitor state + > curl -i http://127.0.0.1:8010/archiver/status/3d11cb4f31b9 + HTTP/1.0 200 OK + Content-Type: application/json + Content-Length: 682 + Server: Werkzeug/0.10.4 Python/2.7.6 + Date: Tue, 09 Jun 2015 22:19:24 GMT + + { + "result": { + "s3_urls": { + "us-east-1": "https://archiver-mozharness-us-east-1.s3.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=GB%2F%2Feye%2Fidj7BrOYEZQNHSFSNyY%3D&Expires=1433888658&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA", + "us-west-2": "https://archiver-mozharness-us-west-2.s3-us-west-2.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=7%2FnVzYSgGAs8lVP9x%2FvkI%2FklDls%3D&Expires=1433888659&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA" + }, + "src_url": "https://hg.mozilla.org/mozilla-central/archive/3d11cb4f31b9.tar.gz/testing/mozharness", + "state": "SUCCESS", + "status": "Task completed! Check 's3_urls' for upload locations." 
+ } + } + + + # We can see above that Archiver has created two s3 archives across two regions. We can use those urls to grab the archive. + # Subsequent requests of the original endpoint also just redirects the s3 location + > curl -i http://127.0.0.1:8010/archiver/mozharness/3d11cb4f31b9\?repo\=mozilla-central\®ion\=us-east-1 + HTTP/1.0 302 FOUND + Content-Type: text/html; charset=utf-8 + Content-Length: 599 + Location: https://archiver-mozharness-us-east-1.s3.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=094S3haXO5LMbFtCObyh8FhN%2FD0%3D&Expires=1433888697&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA + Server: Werkzeug/0.10.4 Python/2.7.6 + Date: Tue, 09 Jun 2015 22:19:57 GMT + + + Redirecting... +

You should be redirected automatically to target URL: https://archiver-mozharness-us-east-1.s3.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=094S3haXO5LMbFtCObyh8FhN%2FD0%3D&Expires=1433888697&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA. If not click the link. + + + +Types +----- + +.. api:autotype:: MozharnessArchiveTask + +Endpoints +--------- + +.. api:autoendpoint:: archiver.* + diff --git a/relengapi/docs/usage/index.rst b/relengapi/docs/usage/index.rst index 35742f1b..33b90b23 100644 --- a/relengapi/docs/usage/index.rst +++ b/relengapi/docs/usage/index.rst @@ -16,3 +16,4 @@ Subsequent sections describe the interfaces provided by the individual component clobberer tooltool slaveloan + archiver From 0e62163ec64afeed7a1b28fd9c926d68b700ef4b Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Fri, 12 Jun 2015 14:18:41 -0700 Subject: [PATCH 4/8] impls tests --- relengapi/blueprints/archiver/tasks.py | 21 +++-- .../blueprints/archiver/test_archiver.py | 79 ++++++++-------- relengapi/blueprints/archiver/test_tasks.py | 90 +++++++++++++++++++ 3 files changed, 144 insertions(+), 46 deletions(-) create mode 100644 relengapi/blueprints/archiver/test_tasks.py diff --git a/relengapi/blueprints/archiver/tasks.py b/relengapi/blueprints/archiver/tasks.py index 10f4470e..8a4a61e1 100644 --- a/relengapi/blueprints/archiver/tasks.py +++ b/relengapi/blueprints/archiver/tasks.py @@ -7,7 +7,9 @@ import tempfile from boto.s3.key import Key +from random import randint +from celery.task import current from flask import current_app from relengapi.lib import celery @@ -37,7 +39,7 @@ def upload_url_archive_to_s3(key, url, region, bucket, suffix): return s3.generate_url(expires_in=GET_EXPIRES_IN, method='GET', bucket=bucket, key=key) -@celery.task(bind=True, track_started=True) +@celery.task(bind=True, track_started=True, max_retries=3) def create_and_upload_archive(self, cfg, rev, repo, suffix, key): """ A celery task that downloads an archive if it exists from a src location and attempts to upload @@ -52,15 +54,22 @@ def create_and_upload_archive(self, cfg, rev, repo, suffix, key): resp = requests.head(src_url) if resp.status_code == 200: - for bucket in cfg['S3_BUCKETS']: - s3_urls[bucket['REGION']] = upload_url_archive_to_s3(key, src_url, bucket['REGION'], - bucket['NAME'], suffix) + try: + for bucket in cfg['S3_BUCKETS']: + s3_urls[bucket['REGION']] = upload_url_archive_to_s3(key, src_url, bucket['REGION'], + bucket['NAME'], suffix) + except Exception as exc: + # set a jitter enabled delay + # where an aggressive delay would result in: 7s, 49s, and 343s + # and a gentle delay would result in: 4s, 16s, and 64s + delay = randint(4, 7) ** (current.request.retries + 1) # retries == 0 on first attempt + current.retry(exc=exc, countdown=delay) if not any(s3_urls.values()): status = "Could not upload any archives to s3. Check logs for errors." log.warning(status) else: - status = "Can't find archive given branch, rev, and suffix. Does url {} exist? " \ - "Request Response code: {}".format(src_url, resp.status_code) + status = "Url not found. Does it exist? url: '{}', response: '{}' ".format(src_url, + resp.status_code) log.warning(status) return { 'status': status, diff --git a/relengapi/blueprints/archiver/test_archiver.py b/relengapi/blueprints/archiver/test_archiver.py index 2c341e9e..9edff985 100644 --- a/relengapi/blueprints/archiver/test_archiver.py +++ b/relengapi/blueprints/archiver/test_archiver.py @@ -1,19 +1,15 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 
2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. - -# import json import moto +from boto.s3.key import Key +from nose.tools import eq_ -# from relengapi.blueprints.archiver.tasks import create_and_upload_archive -# from time import sleep -# -# from nose.tools import eq_ from relengapi.lib.testing.context import TestContext + cfg = { 'RELENGAPI_CELERY_LOG_LEVEL': 'DEBUG', - 'debug': True, 'AWS': { 'access_key_id': 'aa', @@ -34,42 +30,45 @@ } test_context = TestContext(config=cfg) -# test_context = TestContext() -# @test_context -# def test_hello(client): -# print 'hi there' -# rv = client.get('/archiver/') -# eq_(rv.status_code, 200) -# eq_(json.loads(rv.data)['result'], {'message': 'hello world'}) +def setup_buckets(app, cfg): + for bucket in cfg['S3_BUCKETS']: + s3 = app.aws.connect_to('s3', bucket["REGION"]) + s3.create_bucket(bucket["NAME"]) + + +def create_s3_items(app, cfg, key): + for bucket in cfg['S3_BUCKETS']: + s3 = app.aws.connect_to('s3', bucket["REGION"]) + b = s3.get_bucket(bucket["NAME"]) + k = Key(b) + k.key = key + k.set_contents_from_string("Help, I'm trapped in an alternate s3 dimension.") + + +@moto.mock_s3 +@test_context +def test_accepted_response_when_missing_s3_key(app, client): + setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) + resp = client.get('/archiver/mozharness/9ebd530c5843?repo=mozilla-central®ion=us-east-1') + eq_(resp.status_code, 202, resp.status) + + +@moto.mock_s3 +@test_context +def test_redirect_response_when_found_s3_key(app, client): + setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) + create_s3_items(app, cfg['SUBREPO_MOZHARNESS_CFG'], key='mozilla-central-9ebd530c5843.tar.gz') + + resp = client.get('/archiver/mozharness/9ebd530c5843?repo=mozilla-central®ion=us-east-1') + eq_(resp.status_code, 302, resp.status) @moto.mock_s3 @test_context -def test_foo(app, client): - app.debug = True - s3 = app.aws.connect_to('s3', 'us-east-1') - s3.create_bucket('archiver-bucket-1') - s32 = app.aws.connect_to('s3', 'us-west-2') - s32.create_bucket('archiver-bucket-2') - - # from pprint import pprint - - # with app.app_context(): - # resp = client.get('/archiver/mozharness/9ebd530c5843') - # sleep(3) - # resp2 = client.get('/archiver/status/9ebd530c5843') - # for a in dir(resp): - # pprint(a + ' : ' + str(resp.__getattribute__(a))) - # for a in dir(resp2): - # pprint(a + ' : ' + str(resp2.__getattribute__(a))) - - # rev, repo, suffix, key, = 'foo', 'mozilla-central', 'tar.gz', 'mozilla-central-foo.tar.gz' - # config = cfg['SUBREPO_MOZHARNESS_CFG'] - # with app.app_context(): - # task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], - # task_id='9ebd530c5843') - # for a in dir(task): - # pprint(a + ' : ' + str(task.__getattribute__(a))) - # eq_(task.status_code, 404, task.info) +def test_unsupported_region(app, client): + setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) + + resp = client.get('/archiver/mozharness/9ebd530c5843?repo=mozilla-central®ion=us-SXSW-5') + eq_(resp.status_code, 404, resp.status) diff --git a/relengapi/blueprints/archiver/test_tasks.py b/relengapi/blueprints/archiver/test_tasks.py new file mode 100644 index 00000000..916ffc67 --- /dev/null +++ b/relengapi/blueprints/archiver/test_tasks.py @@ -0,0 +1,90 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+import moto +import requests +import tempfile + +from relengapi.blueprints.archiver.tasks import create_and_upload_archive +from relengapi.blueprints.archiver.tasks import upload_url_archive_to_s3 +from relengapi.lib.testing.context import TestContext + + +cfg = { + 'RELENGAPI_CELERY_LOG_LEVEL': 'DEBUG', + + 'AWS': { + 'access_key_id': 'aa', + 'secret_access_key': 'ss', + }, + + 'SUBREPO_MOZHARNESS_CFG': { + 'S3_BUCKETS': [ + {'REGION': 'us-east-1', 'NAME': 'archiver-bucket-1'}, + {'REGION': 'us-west-2', 'NAME': 'archiver-bucket-2'} + ], + "URL_SRC_TEMPLATE": "http://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness" + }, + 'CELERY_BROKER_URL': 'memory://', + 'CELERY_BACKEND': 'cache', + "CELERY_CACHE_BACKEND": 'memory', + 'CELERY_ALWAYS_EAGER': True, +} + +test_context = TestContext(config=cfg) + + +def setup_buckets(app, cfg): + for bucket in cfg['S3_BUCKETS']: + s3 = app.aws.connect_to('s3', bucket["REGION"]) + s3.create_bucket(bucket["NAME"]) + + +@moto.mock_s3 +@test_context +def test_invalid_hg_url(app, client): + setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) + rev, repo, suffix, key, = 'foo', 'mozilla-central', 'tar.gz', 'mozilla-central-foo.tar.gz' + config = cfg['SUBREPO_MOZHARNESS_CFG'] + with app.app_context(): + task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], + task_id='9ebd530c5843') + assert ("Url not found. Does it exist?" in task.info.get('status', {}), + "invalid hg url was not caught!") + + +@moto.mock_s3 +@test_context +def test_s3_urls_exist_for_each_region(app, client): + setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) + rev, repo, suffix, key, = ('203e1025a826', 'mozilla-central', 'tar.gz', + 'mozilla-central-foo.tar.gz') + config = cfg['SUBREPO_MOZHARNESS_CFG'] + with app.app_context(): + task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], + task_id='9ebd530c5843') + expected_regions = [b["REGION"] for b in cfg["SUBREPO_MOZHARNESS_CFG"]["S3_BUCKETS"]] + assert (all([task.info.get("s3_urls", {}).get("region") for region in expected_regions]), + "s3 urls not uploaded for each region!") + + +@moto.mock_s3 +@test_context +def test_hg_and_s3_archives_match(app, client): + setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) + bucket = cfg['SUBREPO_MOZHARNESS_CFG']["S3_BUCKETS"][0] + + src_url = "http://hg.mozilla.org/mozilla-central/archive/203e1025a826.tar.gz/testing/mozharness" + with app.app_context(): + s3_url = upload_url_archive_to_s3(key="203e1025a826", url=src_url, region=bucket["REGION"], + bucket=bucket["NAME"], suffix='tar.gz') + + src_resp = requests.get(src_url) + s3_resp = requests.get(s3_url) + src_file = tempfile.NamedTemporaryFile(mode="wb") + s3_file = tempfile.NamedTemporaryFile(mode="wb") + with open(src_file.name, "wb") as srcf: + srcf.write(src_resp.content) + with open(s3_file.name, "wb") as s3f: + s3f.write(s3_resp.content) + assert cmp(srcf, s3f), "s3 archive based on hg archive does not match!" From d4054d685c513a18dee89a16cd5a8b24adce2de7 Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Fri, 12 Jun 2015 14:20:35 -0700 Subject: [PATCH 5/8] commiting imports that were forced changed by validate --- relengapi/blueprints/archiver/test_archiver.py | 1 + 1 file changed, 1 insertion(+) diff --git a/relengapi/blueprints/archiver/test_archiver.py b/relengapi/blueprints/archiver/test_archiver.py index 9edff985..9df8e5b6 100644 --- a/relengapi/blueprints/archiver/test_archiver.py +++ b/relengapi/blueprints/archiver/test_archiver.py @@ -2,6 +2,7 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import moto + from boto.s3.key import Key from nose.tools import eq_ From a85df9c40353719fa6e1bb9c656e7175abd69c01 Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Tue, 16 Jun 2015 00:08:53 -0700 Subject: [PATCH 6/8] fix always true archiver test asserts --- relengapi/blueprints/archiver/test_tasks.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/relengapi/blueprints/archiver/test_tasks.py b/relengapi/blueprints/archiver/test_tasks.py index 916ffc67..e7cf58e3 100644 --- a/relengapi/blueprints/archiver/test_tasks.py +++ b/relengapi/blueprints/archiver/test_tasks.py @@ -49,8 +49,7 @@ def test_invalid_hg_url(app, client): with app.app_context(): task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], task_id='9ebd530c5843') - assert ("Url not found. Does it exist?" in task.info.get('status', {}), - "invalid hg url was not caught!") + assert "Url not found." in task.info.get('status', {}), "invalid hg url was not caught!" @moto.mock_s3 @@ -64,8 +63,10 @@ def test_s3_urls_exist_for_each_region(app, client): task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], task_id='9ebd530c5843') expected_regions = [b["REGION"] for b in cfg["SUBREPO_MOZHARNESS_CFG"]["S3_BUCKETS"]] - assert (all([task.info.get("s3_urls", {}).get("region") for region in expected_regions]), - "s3 urls not uploaded for each region!") + all_regions_have_s3_urls = [ + task.info.get("s3_urls", {}).get(region) for region in expected_regions + ] + assert all(all_regions_have_s3_urls), "s3 urls not uploaded for each region!" @moto.mock_s3 From 408a48fe0203db4b8cc2fc335644c8caea026879 Mon Sep 17 00:00:00 2001 From: Jordan Lund Date: Wed, 17 Jun 2015 22:15:42 -0700 Subject: [PATCH 7/8] addresses review, makes generic, archiving more efficient supports subdir query arg and is more generic than mozharness. copying raw gzip response not working fixes gzip response, rewrites tests, docs, and addresses pep errors force response decode, update doc usage to reflect changes --- relengapi/blueprints/archiver/__init__.py | 81 +++++++++-------- relengapi/blueprints/archiver/tasks.py | 59 +++++++------ .../blueprints/archiver/test_archiver.py | 84 +++++++++++------- relengapi/blueprints/archiver/test_tasks.py | 87 ++++++++----------- relengapi/blueprints/archiver/test_util.py | 74 ++++++++++++++++ relengapi/docs/deployment/archiver.rst | 14 ++- relengapi/docs/usage/archiver.rst | 43 ++++----- 7 files changed, 262 insertions(+), 180 deletions(-) create mode 100644 relengapi/blueprints/archiver/test_util.py diff --git a/relengapi/blueprints/archiver/__init__.py b/relengapi/blueprints/archiver/__init__.py index 84a506ea..8feb6908 100644 --- a/relengapi/blueprints/archiver/__init__.py +++ b/relengapi/blueprints/archiver/__init__.py @@ -3,7 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
import logging -import os + +from random import randint from flask import Blueprint from flask import current_app @@ -12,7 +13,6 @@ from relengapi.blueprints.archiver.tasks import create_and_upload_archive from relengapi.blueprints.archiver.types import MozharnessArchiveTask from relengapi.lib import api -from werkzeug.exceptions import NotFound bp = Blueprint('archiver', __name__) log = logging.getLogger(__name__) @@ -39,73 +39,80 @@ def task_status(task_id): task_info = task.info or {} response = { 'state': task.state, - 'src_url': task_info.get('src_url', ''), - 's3_urls': task_info.get('s3_urls', {}) } if task.state != 'FAILURE': response['status'] = task_info.get('status', 'no status available at this point.') + response['src_url'] = task_info.get('src_url', '') + response['s3_urls'] = task_info.get('s3_urls', {}) else: # something went wrong response['status'] = str(task.info) # this is the exception raised + response['src_url'] = '' + response['s3_urls'] = {} return MozharnessArchiveTask(**response) -@bp.route('/mozharness/') -@api.apimethod(None, unicode, unicode, unicode, unicode, status_code=302) -def get_mozharness_archive(rev, repo="mozilla-central", region='us-west-2', suffix='tar.gz'): - cfg = current_app.config['SUBREPO_MOZHARNESS_CFG'] - return get_archive_from_repo(cfg, rev, repo, region, suffix) +@bp.route('/hgmo//') +@api.apimethod(None, unicode, unicode, unicode, unicode, unicode, status_code=302) +def get_hgmo_archive(repo, rev, subdir=None, suffix='tar.gz', preferred_region=None): + """ + An archiver for hg.mozilla.org related requests. Uses relengapi.blueprints.archiver.get_archive + + :param repo: the repo location off of hg.mozilla.org/ + :param rev: the rev associated with the repo + :param subdir: optional subdir path to only archive a portion of the repo + :param suffix: the archive extension type. defaulted to tar.gz + :param preferred_region: the preferred s3 region to use + """ + src_url = current_app.config['ARCHIVER_HGMO_URL_TEMPLATE'].format( + repo=repo, rev=rev, suffix=suffix, subdir=subdir or '' + ) + # though slightly odd to append the archive suffix extension with a subdir, this: + # 1) allows us to have archives based on different subdir locations from the same repo and rev + # 2) is aligned with the hg.mozilla.org format + key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix) + if subdir: + key += '/{}'.format(subdir) + return get_archive(src_url, key, preferred_region) -def get_archive_from_repo(cfg, rev, repo, region, suffix): +def get_archive(src_url, key, preferred_region): """ A generic getter for retrieving an s3 location of an archive where the archive is based off a - given repo name, revision, and possibly sub-dir. + src_url. sub-dir: hg.mozilla.org supports archives of sub directories within a repository. This flexibility allows for creating archives of only a portion of what would normally be an entire repo archive. logic flow: - If their is already a key based on given args, a re-direct link is given for the - s3 location. If the key does not exist, download the archive from src url, upload it to and - return all s3 url locations. + If their is already a key within s3, a re-direct link is given for the + s3 location. If the key does not exist, download the archive from src url, upload it to s3 + for each region supported and return all uploaded s3 url locations. 
When the key does not exist, the remaining work will be assigned to a celery background task with a url location returned immediately for obtaining task state updates. """ - bucket_region = None - bucket_name = None - for bucket in cfg['S3_BUCKETS']: - if region in bucket['REGION']: - bucket_region = bucket['REGION'] - bucket_name = bucket['NAME'] - - # sanity check - if not bucket_name or not bucket_region: - valid_regions = str([bucket['REGION'] for bucket in cfg['S3_BUCKETS']]) - log.warning('Unsupported region given: "{}" Valid Regions "{}"'.format(region, - valid_regions)) - raise NotFound - - s3 = current_app.aws.connect_to('s3', bucket_region) - bucket = s3.get_bucket(bucket_name) - key = '{repo}-{rev}.{suffix}'.format(repo=os.path.basename(repo), rev=rev, suffix=suffix) + buckets = current_app.config['ARCHIVER_S3_BUCKETS'] + random_region = buckets.keys()[randint(0, len(buckets.keys()) - 1)] + # use preferred region if available otherwise choose a valid one at random + region = preferred_region if preferred_region and preferred_region in buckets else random_region + bucket = buckets[region] + s3 = current_app.aws.connect_to('s3', region) # first, see if the key exists - if not bucket.get_key(key): - task_id = rev + if not s3.get_bucket(bucket).get_key(key): + task_id = key.replace('/', '_') # keep things simple and avoid slashes in task url if create_and_upload_archive.AsyncResult(task_id).state != 'STARTED': # task is currently not in progress so start one. - create_and_upload_archive.apply_async(args=[cfg, rev, repo, suffix, key], - task_id=task_id) + create_and_upload_archive.apply_async(args=[src_url, key], task_id=task_id) return {}, 202, {'Location': url_for('archiver.task_status', task_id=task_id)} - log.info("generating GET URL to {}, expires in {}s".format(rev, GET_EXPIRES_IN)) + log.info("generating GET URL to {}, expires in {}s".format(key, GET_EXPIRES_IN)) # return 302 pointing to s3 url with archive signed_url = s3.generate_url( method='GET', expires_in=GET_EXPIRES_IN, - bucket=bucket_name, key=key + bucket=bucket, key=key ) return redirect(signed_url) diff --git a/relengapi/blueprints/archiver/tasks.py b/relengapi/blueprints/archiver/tasks.py index 8a4a61e1..227e2715 100644 --- a/relengapi/blueprints/archiver/tasks.py +++ b/relengapi/blueprints/archiver/tasks.py @@ -2,15 +2,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import logging -import os import requests +import shutil import tempfile from boto.s3.key import Key -from random import randint - from celery.task import current from flask import current_app +from random import randint + from relengapi.lib import celery log = logging.getLogger(__name__) @@ -18,29 +18,37 @@ GET_EXPIRES_IN = 300 -def upload_url_archive_to_s3(key, url, region, bucket, suffix): - """ - Given a src url, upload contents to an s3 bucket by a given key. - """ - s3 = current_app.aws.connect_to('s3', region) - k = Key(s3.get_bucket(bucket)) - k.key = key +def upload_url_archive_to_s3(key, url, buckets): + s3_urls = {} + + # make the source request + resp = requests.get(url, stream=True) + + # create a temporary file for it + tempf = tempfile.TemporaryFile() + # copy the data, block-by-block, into that file + resp.raw.decode_content = True + shutil.copyfileobj(resp.raw, tempf) - # rather than worrying about pointers and seeking, let's avail of a named temp file that is - # allowed to persist after the file is closed. 
Finally, when are finished, we can clean up - # the temp file - resp = requests.get(url) - temp_file = tempfile.NamedTemporaryFile(mode="wb", suffix=".{}".format(suffix), delete=False) - with open(temp_file.name, "wb") as tmpf: - tmpf.write(resp.content) - k.set_contents_from_filename(temp_file.name) - os.unlink(temp_file.name) # clean up tmp file + # write it out to S3 + for region in buckets: + s3 = current_app.aws.connect_to('s3', region) + k = Key(s3.get_bucket(buckets[region])) + k.key = key + k.set_metadata('Content-Type', resp.headers['Content-Type']) + # give it the same attachment filename + k.set_metadata('Content-Disposition', resp.headers['Content-Disposition']) + k.set_contents_from_file(tempf, rewind=True) # rewind points tempf back to start for us + s3_urls[region] = s3.generate_url(expires_in=GET_EXPIRES_IN, method='GET', + bucket=buckets[region], key=key) - return s3.generate_url(expires_in=GET_EXPIRES_IN, method='GET', bucket=bucket, key=key) + resp.close() + + return s3_urls @celery.task(bind=True, track_started=True, max_retries=3) -def create_and_upload_archive(self, cfg, rev, repo, suffix, key): +def create_and_upload_archive(self, src_url, key): """ A celery task that downloads an archive if it exists from a src location and attempts to upload the archive to a supported bucket in each supported region. @@ -50,23 +58,18 @@ def create_and_upload_archive(self, cfg, rev, repo, suffix, key): """ status = "Task completed! Check 's3_urls' for upload locations." s3_urls = {} - src_url = cfg['URL_SRC_TEMPLATE'].format(repo=repo, rev=rev, suffix=suffix) + buckets = current_app.config['ARCHIVER_S3_BUCKETS'] resp = requests.head(src_url) if resp.status_code == 200: try: - for bucket in cfg['S3_BUCKETS']: - s3_urls[bucket['REGION']] = upload_url_archive_to_s3(key, src_url, bucket['REGION'], - bucket['NAME'], suffix) + s3_urls = upload_url_archive_to_s3(key, src_url, buckets) except Exception as exc: # set a jitter enabled delay # where an aggressive delay would result in: 7s, 49s, and 343s # and a gentle delay would result in: 4s, 16s, and 64s delay = randint(4, 7) ** (current.request.retries + 1) # retries == 0 on first attempt current.retry(exc=exc, countdown=delay) - if not any(s3_urls.values()): - status = "Could not upload any archives to s3. Check logs for errors." - log.warning(status) else: status = "Url not found. Does it exist? url: '{}', response: '{}' ".format(src_url, resp.status_code) diff --git a/relengapi/blueprints/archiver/test_archiver.py b/relengapi/blueprints/archiver/test_archiver.py index 9df8e5b6..0f56d53c 100644 --- a/relengapi/blueprints/archiver/test_archiver.py +++ b/relengapi/blueprints/archiver/test_archiver.py @@ -1,10 +1,18 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+import json +import mock import moto -from boto.s3.key import Key from nose.tools import eq_ +from relengapi.blueprints.archiver.test_util import EXPECTED_TASK_STATUS_FAILED_RESPONSE +from relengapi.blueprints.archiver.test_util import EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE +from relengapi.blueprints.archiver.test_util import create_s3_items +from relengapi.blueprints.archiver.test_util import fake_200_response +from relengapi.blueprints.archiver.test_util import fake_failed_task_status +from relengapi.blueprints.archiver.test_util import fake_successful_task_status +from relengapi.blueprints.archiver.test_util import setup_buckets from relengapi.lib.testing.context import TestContext @@ -17,13 +25,12 @@ 'secret_access_key': 'ss', }, - 'SUBREPO_MOZHARNESS_CFG': { - 'S3_BUCKETS': [ - {'REGION': 'us-east-1', 'NAME': 'archiver-bucket-1'}, - {'REGION': 'us-west-2', 'NAME': 'archiver-bucket-2'} - ], - "URL_SRC_TEMPLATE": "http://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness" + 'ARCHIVER_S3_BUCKETS': { + 'us-east-1': 'archiver-bucket-1', + 'us-west-2': 'archiver-bucket-2' }, + 'ARCHIVER_HGMO_URL_TEMPLATE': "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}", + 'CELERY_BROKER_URL': 'memory://', 'CELERY_BACKEND': 'cache', "CELERY_CACHE_BACKEND": 'memory', @@ -33,43 +40,56 @@ test_context = TestContext(config=cfg) -def setup_buckets(app, cfg): - for bucket in cfg['S3_BUCKETS']: - s3 = app.aws.connect_to('s3', bucket["REGION"]) - s3.create_bucket(bucket["NAME"]) - - -def create_s3_items(app, cfg, key): - for bucket in cfg['S3_BUCKETS']: - s3 = app.aws.connect_to('s3', bucket["REGION"]) - b = s3.get_bucket(bucket["NAME"]) - k = Key(b) - k.key = key - k.set_contents_from_string("Help, I'm trapped in an alternate s3 dimension.") - - @moto.mock_s3 @test_context def test_accepted_response_when_missing_s3_key(app, client): - setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) - resp = client.get('/archiver/mozharness/9ebd530c5843?repo=mozilla-central®ion=us-east-1') + setup_buckets(app, cfg) + with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as get, \ + mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as head: + # don't actually hit hg.m.o, we just care about starting a subprocess and + # returning a 202 accepted + get.return_value = fake_200_response() + head.return_value = fake_200_response() + resp = client.get('/archiver/hgmo/mozilla-central/9213957d166d?' 
+ 'subdir=testing/mozharness&preferred_region=us-west-2') eq_(resp.status_code, 202, resp.status) @moto.mock_s3 @test_context def test_redirect_response_when_found_s3_key(app, client): - setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) - create_s3_items(app, cfg['SUBREPO_MOZHARNESS_CFG'], key='mozilla-central-9ebd530c5843.tar.gz') - - resp = client.get('/archiver/mozharness/9ebd530c5843?repo=mozilla-central®ion=us-east-1') + setup_buckets(app, cfg) + rev, repo, subdir, suffix = '203e1025a826', 'mozilla-central', 'testing/mozharness', 'tar.gz' + key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix) + if subdir: + key += '/{}'.format(subdir) + create_s3_items(app, cfg, key=key) + + resp = client.get( + '/archiver/hgmo/{repo}/{rev}?subdir={subdir}&suffix={suffix}'.format( + rev=rev, repo=repo, subdir=subdir, suffix=suffix + ) + ) eq_(resp.status_code, 302, resp.status) @moto.mock_s3 @test_context -def test_unsupported_region(app, client): - setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) +def test_task_status_when_failed(app, client): + expected_response = EXPECTED_TASK_STATUS_FAILED_RESPONSE + with mock.patch("relengapi.blueprints.archiver.create_and_upload_archive") as caua: + caua.AsyncResult.side_effect = fake_failed_task_status + response = client.get('/archiver/status/{task_id}'.format(task_id=123)) + eq_(cmp(json.loads(response.data)['result'], expected_response), 0, + "a failed task status check does not equal expected status.") + - resp = client.get('/archiver/mozharness/9ebd530c5843?repo=mozilla-central®ion=us-SXSW-5') - eq_(resp.status_code, 404, resp.status) +@moto.mock_s3 +@test_context +def test_task_status_when_success(app, client): + expected_response = EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE + with mock.patch("relengapi.blueprints.archiver.create_and_upload_archive") as caua: + caua.AsyncResult.return_value = fake_successful_task_status(expected_response) + response = client.get('/archiver/status/{task_id}'.format(task_id=123)) + eq_(cmp(json.loads(response.data)['result'], expected_response), 0, + "A successful task status check does not equal expected status.") diff --git a/relengapi/blueprints/archiver/test_tasks.py b/relengapi/blueprints/archiver/test_tasks.py index e7cf58e3..e7f2c925 100644 --- a/relengapi/blueprints/archiver/test_tasks.py +++ b/relengapi/blueprints/archiver/test_tasks.py @@ -1,12 +1,13 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+import mock import moto -import requests -import tempfile from relengapi.blueprints.archiver.tasks import create_and_upload_archive -from relengapi.blueprints.archiver.tasks import upload_url_archive_to_s3 +from relengapi.blueprints.archiver.test_util import fake_200_response +from relengapi.blueprints.archiver.test_util import fake_404_response +from relengapi.blueprints.archiver.test_util import setup_buckets from relengapi.lib.testing.context import TestContext @@ -18,13 +19,12 @@ 'secret_access_key': 'ss', }, - 'SUBREPO_MOZHARNESS_CFG': { - 'S3_BUCKETS': [ - {'REGION': 'us-east-1', 'NAME': 'archiver-bucket-1'}, - {'REGION': 'us-west-2', 'NAME': 'archiver-bucket-2'} - ], - "URL_SRC_TEMPLATE": "http://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness" + 'ARCHIVER_S3_BUCKETS': { + 'us-east-1': 'archiver-bucket-1', + 'us-west-2': 'archiver-bucket-2' }, + 'ARCHIVER_HGMO_URL_TEMPLATE': "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}", + 'CELERY_BROKER_URL': 'memory://', 'CELERY_BACKEND': 'cache', "CELERY_CACHE_BACKEND": 'memory', @@ -34,58 +34,43 @@ test_context = TestContext(config=cfg) -def setup_buckets(app, cfg): - for bucket in cfg['S3_BUCKETS']: - s3 = app.aws.connect_to('s3', bucket["REGION"]) - s3.create_bucket(bucket["NAME"]) - - @moto.mock_s3 @test_context -def test_invalid_hg_url(app, client): - setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) - rev, repo, suffix, key, = 'foo', 'mozilla-central', 'tar.gz', 'mozilla-central-foo.tar.gz' - config = cfg['SUBREPO_MOZHARNESS_CFG'] +def test_invalid_hg_url(app): + setup_buckets(app, cfg) + rev, repo, suffix = 'fakeRev', 'mozilla-central', 'tar.gz' + key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix) + src_url = cfg['ARCHIVER_HGMO_URL_TEMPLATE'].format(repo=repo, rev=rev, suffix=suffix, + subdir='testing/mozharness') with app.app_context(): - task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], - task_id='9ebd530c5843') + with mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as head: + head.return_value = fake_404_response() + task = create_and_upload_archive.apply_async(args=[src_url, key], + task_id=key.replace('/', '_')) assert "Url not found." in task.info.get('status', {}), "invalid hg url was not caught!" 
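+
+# Illustrative note (a sketch, not part of the patch): these tests can assert on
+# task.info right after apply_async because the test config sets CELERY_ALWAYS_EAGER,
+# so the task runs synchronously in-process and returns an already-finished EagerResult:
+#
+#     task = create_and_upload_archive.apply_async(args=[src_url, key])
+#     assert task.ready()                   # completed synchronously, no worker needed
+#     status_msg = task.info.get('status')  # .info is the dict the task returned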
@moto.mock_s3 @test_context -def test_s3_urls_exist_for_each_region(app, client): - setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) - rev, repo, suffix, key, = ('203e1025a826', 'mozilla-central', 'tar.gz', - 'mozilla-central-foo.tar.gz') - config = cfg['SUBREPO_MOZHARNESS_CFG'] +def test_successful_upload_archive_response(app): + setup_buckets(app, cfg) + rev, repo, subdir, suffix = '203e1025a826', 'mozilla-central', 'testing/mozharness', 'tar.gz' + key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix) + if subdir: + key += '/{}'.format(subdir) + src_url = cfg['ARCHIVER_HGMO_URL_TEMPLATE'].format(repo=repo, rev=rev, suffix=suffix, + subdir='testing/mozharness') with app.app_context(): - task = create_and_upload_archive.apply_async(args=[config, rev, repo, suffix, key], - task_id='9ebd530c5843') - expected_regions = [b["REGION"] for b in cfg["SUBREPO_MOZHARNESS_CFG"]["S3_BUCKETS"]] + with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as get, \ + mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as head: + get.return_value = fake_200_response() + head.return_value = fake_200_response() + task = create_and_upload_archive.apply_async(args=[src_url, key], + task_id=key.replace('/', '_')) + expected_regions = [region for region in cfg['ARCHIVER_S3_BUCKETS']] all_regions_have_s3_urls = [ task.info.get("s3_urls", {}).get(region) for region in expected_regions ] assert all(all_regions_have_s3_urls), "s3 urls not uploaded for each region!" - - -@moto.mock_s3 -@test_context -def test_hg_and_s3_archives_match(app, client): - setup_buckets(app, cfg['SUBREPO_MOZHARNESS_CFG']) - bucket = cfg['SUBREPO_MOZHARNESS_CFG']["S3_BUCKETS"][0] - - src_url = "http://hg.mozilla.org/mozilla-central/archive/203e1025a826.tar.gz/testing/mozharness" - with app.app_context(): - s3_url = upload_url_archive_to_s3(key="203e1025a826", url=src_url, region=bucket["REGION"], - bucket=bucket["NAME"], suffix='tar.gz') - - src_resp = requests.get(src_url) - s3_resp = requests.get(s3_url) - src_file = tempfile.NamedTemporaryFile(mode="wb") - s3_file = tempfile.NamedTemporaryFile(mode="wb") - with open(src_file.name, "wb") as srcf: - srcf.write(src_resp.content) - with open(s3_file.name, "wb") as s3f: - s3f.write(s3_resp.content) - assert cmp(srcf, s3f), "s3 archive based on hg archive does not match!" + assert task.info.get('src_url') == src_url, "src url doesn't match upload response!" + assert task.state == "SUCCESS", "completed task's state isn't SUCCESS!" diff --git a/relengapi/blueprints/archiver/test_util.py b/relengapi/blueprints/archiver/test_util.py new file mode 100644 index 00000000..be2022fd --- /dev/null +++ b/relengapi/blueprints/archiver/test_util.py @@ -0,0 +1,74 @@ +import mock + +from StringIO import StringIO +from boto.s3.key import Key + +EXPECTED_TASK_STATUS_FAILED_RESPONSE = { + "s3_urls": {}, + "src_url": "", + "state": "FAILURE", + "status": "{u'exc_message': u'fp is at EOF. Use rewind option or seek() to data start.'" + ", u'exc_type': u'AttributeError'}" +} + +EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE = { + "s3_urls": { + "us-east-1": "https://archiver-us-east-1.s3.amazonaws.com/mozilla-central-9213957d1.tar.gz", + "us-west-2": "https://archiver-us-west-2.s3.amazonaws.com/mozilla-central-9213957d1.tar.gz", + }, + "src_url": "https://hg.mozilla.org/mozilla-central/archive/9213957d1.tar.gz/testing/mozharness", + "state": "SUCCESS", + "status": "Task completed! Check 's3_urls' for upload locations." 
diff --git a/relengapi/blueprints/archiver/test_util.py b/relengapi/blueprints/archiver/test_util.py
new file mode 100644
index 00000000..be2022fd
--- /dev/null
+++ b/relengapi/blueprints/archiver/test_util.py
@@ -0,0 +1,74 @@
+import mock
+
+from StringIO import StringIO
+from boto.s3.key import Key
+
+EXPECTED_TASK_STATUS_FAILED_RESPONSE = {
+    "s3_urls": {},
+    "src_url": "",
+    "state": "FAILURE",
+    "status": "{u'exc_message': u'fp is at EOF. Use rewind option or seek() to data start.'"
+              ", u'exc_type': u'AttributeError'}"
+}
+
+EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE = {
+    "s3_urls": {
+        "us-east-1": "https://archiver-us-east-1.s3.amazonaws.com/mozilla-central-9213957d1.tar.gz",
+        "us-west-2": "https://archiver-us-west-2.s3.amazonaws.com/mozilla-central-9213957d1.tar.gz",
+    },
+    "src_url": "https://hg.mozilla.org/mozilla-central/archive/9213957d1.tar.gz/testing/mozharness",
+    "state": "SUCCESS",
+    "status": "Task completed! Check 's3_urls' for upload locations."
+}
+
+
+def setup_buckets(app, cfg):
+    for region, bucket in cfg['ARCHIVER_S3_BUCKETS'].iteritems():
+        s3 = app.aws.connect_to('s3', region)
+        s3.create_bucket(bucket)
+
+
+def create_s3_items(app, cfg, key):
+    for region, bucket in cfg['ARCHIVER_S3_BUCKETS'].iteritems():
+        s3 = app.aws.connect_to('s3', region)
+        b = s3.get_bucket(bucket)
+        k = Key(b)
+        k.key = key
+        k.set_contents_from_string("Help, I'm trapped in an alternate s3 dimension.")
+
+
+def fake_200_response():
+    response = mock.Mock()
+    response.status_code = 200
+    response.headers = {
+        'Content-Type': 'application/x-gzip',
+        'Content-Disposition': 'attachment; filename=mozilla-central-9213957d166d.tar.gz'
+    }
+    response.raw = StringIO("Debugging is twice as hard as writing the code in the first place. "
+                            "Therefore, if you write the code as cleverly as possible, you are, "
+                            "by definition, not smart enough to debug it. --Brian W. Kernighan")
+    return response
+
+
+def fake_404_response():
+    response = mock.Mock()
+    response.status_code = 404
+    return response
+
+
+def fake_failed_task_status(task_id):
+    task = mock.Mock()
+    task.state = EXPECTED_TASK_STATUS_FAILED_RESPONSE['state']
+    task.info = EXPECTED_TASK_STATUS_FAILED_RESPONSE['status']
+    return task
+
+
+def fake_successful_task_status(task_id):
+    task = mock.Mock()
+    task.state = EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['state']
+    task.info = {
+        'src_url': EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['src_url'],
+        's3_urls': EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['s3_urls'],
+        'status': EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['status'],
+    }
+    return task
diff --git a/relengapi/docs/deployment/archiver.rst b/relengapi/docs/deployment/archiver.rst
index d114ba3f..0e8385ac 100644
--- a/relengapi/docs/deployment/archiver.rst
+++ b/relengapi/docs/deployment/archiver.rst
@@ -8,7 +8,7 @@
 the endpoint. For AWS credentials, each bucket should be limited to the AWS IAM role
 corresponding to the AWS credentials. Buckets in the configuration are required to be
 pre-existing.
 
-Finally, Archiver avails of Celery. You will need to provide a broker and back-end.
+Finally, Archiver uses Celery. You will need to provide a broker and back-end.
 
 Example config::
@@ -21,11 +21,9 @@
         'secret_access_key': 'secretAccessKeyExample',
     }
 
-    # for the mozharness endpoint
-    SUBREPO_MOZHARNESS_CFG = {
-        'S3_BUCKETS': [
-            {'REGION': 'us-east-1', 'NAME': 'example-bucket-name-for-us-east-1'},
-            {'REGION': 'us-west-2', 'NAME': 'example-bucket-name-for-us-west-2'}
-        ],
-        "URL_SRC_TEMPLATE": "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness"
+    ARCHIVER_S3_BUCKETS = {
+        'us-east-1': 'archiver-us-east-1',
+        'us-west-2': 'archiver-us-west-2'
     }
+
+    ARCHIVER_HGMO_URL_TEMPLATE = "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}"
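Keying buckets by region makes the per-region lookup a plain dict access instead of a scan over a list of {'REGION': ..., 'NAME': ...} entries. A minimal sketch of resolving a preferred region against this shape (the pick_bucket helper is illustrative only, not part of Archiver)::

    ARCHIVER_S3_BUCKETS = {
        'us-east-1': 'archiver-us-east-1',
        'us-west-2': 'archiver-us-west-2',
    }


    def pick_bucket(buckets, preferred_region):
        # honour the preferred region when it is configured, otherwise
        # fall back deterministically to the first region in sorted order
        if preferred_region in buckets:
            return preferred_region, buckets[preferred_region]
        region = sorted(buckets)[0]
        return region, buckets[region]

    print pick_bucket(ARCHIVER_S3_BUCKETS, 'us-west-2')     # ('us-west-2', 'archiver-us-west-2')
    print pick_bucket(ARCHIVER_S3_BUCKETS, 'eu-central-1')  # ('us-east-1', 'archiver-us-east-1')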
diff --git a/relengapi/docs/usage/archiver.rst b/relengapi/docs/usage/archiver.rst
index 7ffe9393..c9bbbaf8 100644
--- a/relengapi/docs/usage/archiver.rst
+++ b/relengapi/docs/usage/archiver.rst
@@ -4,69 +4,64 @@ Archiver
 
 Archiver simply takes repository archive urls and returns an s3 location for the same archive,
 while submitting the archive to s3 if it doesn't already exist.
 
-
 If the archive exists, the response will redirect with a 302 and location for the s3 url
 equivalent. If the archive does not already exist in s3, the response will accept the request
 (202) and return the task location url that is monitoring the current state of creating and
 uploading the archive to s3.
 
-Currently, only Mozharness is configured:
-    "URL_SRC_TEMPLATE": "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/testing/mozharness"
-
+Currently, only hg.mozilla.org support is configured:
+    ARCHIVER_HGMO_URL_TEMPLATE = "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}"
 
 Examples::
 
-    # To get an in-tree Mozharness archive based on: http://hg.mozilla.org/mozilla-central/rev/3d11cb4f31b9
-    > curl -i http://127.0.0.1:8010/archiver/mozharness/3d11cb4f31b9\?repo\=mozilla-central\&region\=us-east-1
+    # To get an in-tree Mozharness archive based on: http://hg.mozilla.org/mozilla-central/rev/3d11cb4f31b9
+    > curl -i http://127.0.0.1:8010/archiver/hgmo/projects/ash/42bf8560b395?subdir=testing/mozharness&preferred_region=us-west-2
     HTTP/1.0 202 ACCEPTED
     Content-Type: application/json
     Content-Length: 18
-    Location: http://127.0.0.1:8010/archiver/status/3d11cb4f31b9
+    Location: http://127.0.0.1:8010/archiver/status/projects_ash-42bf8560b395.tar.gz_testing_mozharness
     Server: Werkzeug/0.10.4 Python/2.7.6
-    Date: Tue, 09 Jun 2015 22:19:15 GMT
+    Date: Fri, 19 Jun 2015 22:41:29 GMT
 
     {
         "result": {}
-    }
-
+    }%
 
    # In the above example, the s3 archive does not exist so Archiver will create it. poll the Location header url in the above response to monitor state
-    > curl -i http://127.0.0.1:8010/archiver/status/3d11cb4f31b9
+    > curl -i http://127.0.0.1:8010/archiver/status/projects_ash-42bf8560b395.tar.gz_testing_mozharness
     HTTP/1.0 200 OK
     Content-Type: application/json
-    Content-Length: 682
+    Content-Length: 683
     Server: Werkzeug/0.10.4 Python/2.7.6
-    Date: Tue, 09 Jun 2015 22:19:24 GMT
+    Date: Fri, 19 Jun 2015 22:41:41 GMT
 
     {
         "result": {
             "s3_urls": {
-                "us-east-1": "https://archiver-mozharness-us-east-1.s3.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=GB%2F%2Feye%2Fidj7BrOYEZQNHSFSNyY%3D&Expires=1433888658&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA",
-                "us-west-2": "https://archiver-mozharness-us-west-2.s3-us-west-2.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=7%2FnVzYSgGAs8lVP9x%2FvkI%2FklDls%3D&Expires=1433888659&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA"
+                "us-east-1": "https://archiver-us-east-1.s3.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=0f%2FvcSqbUylTWgwx8yYYISO6%2FJM%3D&Expires=1434753993&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA",
+                "us-west-2": "https://archiver-us-west-2.s3-us-west-2.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=i6%2B9d4r8u8YuUNTmT4kX9jcaNrA%3D&Expires=1434753992&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA"
             },
-            "src_url": "https://hg.mozilla.org/mozilla-central/archive/3d11cb4f31b9.tar.gz/testing/mozharness",
+            "src_url": "https://hg.mozilla.org/projects/ash/archive/42bf8560b395.tar.gz/testing/mozharness",
             "state": "SUCCESS",
             "status": "Task completed! Check 's3_urls' for upload locations."
         }
-    }
-
+    }%
 
     # We can see above that Archiver has created two s3 archives across two regions. We can use those urls to grab the archive.
 
     # Subsequent requests of the original endpoint also just redirects the s3 location
-    > curl -i http://127.0.0.1:8010/archiver/mozharness/3d11cb4f31b9\?repo\=mozilla-central\&region\=us-east-1
+    > curl -i http://127.0.0.1:8010/archiver/hgmo/projects/ash/42bf8560b395?subdir=testing/mozharness&preferred_region=us-west-2
     HTTP/1.0 302 FOUND
     Content-Type: text/html; charset=utf-8
-    Content-Length: 599
-    Location: https://archiver-mozharness-us-east-1.s3.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=094S3haXO5LMbFtCObyh8FhN%2FD0%3D&Expires=1433888697&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA
+    Content-Length: 625
+    Location: https://archiver-us-west-2.s3-us-west-2.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=oZVrvFhkM6RR8rxKryt9vTWmvTQ%3D&Expires=1434754032&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA
     Server: Werkzeug/0.10.4 Python/2.7.6
-    Date: Tue, 09 Jun 2015 22:19:57 GMT
+    Date: Fri, 19 Jun 2015 22:42:12 GMT
 
     Redirecting...
 
     Redirecting...
 
-    You should be redirected automatically to target URL: https://archiver-mozharness-us-east-1.s3.amazonaws.com/mozilla-central-3d11cb4f31b9.tar.gz?Signature=094S3haXO5LMbFtCObyh8FhN%2FD0%3D&Expires=1433888697&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA. If not click the link.
-
+    You should be redirected automatically to target URL: https://archiver-us-west-2.s3-us-west-2.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=oZVrvFhkM6RR8rxKryt9vTWmvTQ%3D&Expires=1434754032&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA. If not click the link.%
 
 Types
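The 202-then-poll flow in the transcript above maps naturally onto a small client loop: request the archive without following redirects, and if a 202 comes back, poll the returned status URL until the task settles. A sketch under the assumption that the endpoints behave exactly as documented (the helper name, poll interval, and timeout are illustrative only)::

    import time

    import requests


    def get_archive_url(base_url, route, poll_interval=5, max_wait=300):
        # ask Archiver for the archive without following the 302
        resp = requests.get(base_url + route, allow_redirects=False)
        if resp.status_code == 302:
            return resp.headers['Location']  # archive already lives in s3
        assert resp.status_code == 202, 'unexpected status %d' % resp.status_code
        status_url = resp.headers['Location']
        deadline = time.time() + max_wait
        while time.time() < deadline:
            result = requests.get(status_url).json()['result']
            if result['state'] == 'SUCCESS':
                # any region's signed url will do here
                return result['s3_urls'].values()[0]
            if result['state'] == 'FAILURE':
                raise RuntimeError(result['status'])
            time.sleep(poll_interval)
        raise RuntimeError('timed out waiting for the archive task')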
From b0067daafc24df7ad47be82736a57f1f44ea396c Mon Sep 17 00:00:00 2001
From: Jordan Lund
Date: Mon, 29 Jun 2015 12:27:16 -0700
Subject: [PATCH 8/8] fix dict typo for deploy archiver docs

---
 relengapi/docs/deployment/archiver.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/relengapi/docs/deployment/archiver.rst b/relengapi/docs/deployment/archiver.rst
index 0e8385ac..b2c5ab75 100644
--- a/relengapi/docs/deployment/archiver.rst
+++ b/relengapi/docs/deployment/archiver.rst
@@ -22,8 +22,8 @@ Example config::
     }
 
     ARCHIVER_S3_BUCKETS = {
-        'us-east-1', 'archiver-us-east-1',
-        'us-west-2', 'archiver-us-west-2'
+        'us-east-1': 'archiver-us-east-1',
+        'us-west-2': 'archiver-us-west-2'
     }
 
     ARCHIVER_HGMO_URL_TEMPLATE = "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}"
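The typo fixed above is easy to miss: with commas instead of colons, the braces build a four-element set rather than a region-to-bucket mapping, so any later lookup by region key fails. A quick demonstration::

    broken = {
        'us-east-1', 'archiver-us-east-1',
        'us-west-2', 'archiver-us-west-2'
    }
    fixed = {
        'us-east-1': 'archiver-us-east-1',
        'us-west-2': 'archiver-us-west-2'
    }

    print type(broken)        # <type 'set'>
    print type(fixed)         # <type 'dict'>
    print fixed['us-east-1']  # archiver-us-east-1
    # broken['us-east-1'] raises TypeError: sets do not support indexing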