diff --git a/relengapi/app.py b/relengapi/app.py
index 83f1836e..3e7970a3 100644
--- a/relengapi/app.py
+++ b/relengapi/app.py
@@ -83,6 +83,7 @@ def _load_bp(n):
     'slaveloan',
     'tokenauth',
     'tooltool',
+    'archiver',
 ]]

diff --git a/relengapi/blueprints/archiver/__init__.py b/relengapi/blueprints/archiver/__init__.py
new file mode 100644
index 00000000..8feb6908
--- /dev/null
+++ b/relengapi/blueprints/archiver/__init__.py
@@ -0,0 +1,118 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+
+from random import randint
+
+from flask import Blueprint
+from flask import current_app
+from flask import redirect
+from flask import url_for
+from relengapi.blueprints.archiver.tasks import create_and_upload_archive
+from relengapi.blueprints.archiver.types import MozharnessArchiveTask
+from relengapi.lib import api
+
+bp = Blueprint('archiver', __name__)
+log = logging.getLogger(__name__)
+
+GET_EXPIRES_IN = 300
+
+
+@bp.route('/status/<task_id>')
+@api.apimethod(MozharnessArchiveTask, unicode)
+def task_status(task_id):
+    """
+    Check and return the current state of the create_and_upload_archive celery task with task id
+    of <task_id>.
+
+    If the task is unknown, state will be PENDING. Once the task starts, it will be updated to
+    STARTED and, finally, when it completes, it will be either SUCCESS (no exceptions) or FAILURE.
+
+    See update_state() within create_and_upload_archive and
+    http://celery.readthedocs.org/en/latest/reference/celery.states.html for more details.
+
+    If state is SUCCESS, it is safe to check response['s3_urls'] for the archives submitted to s3.
+    """
+    task = create_and_upload_archive.AsyncResult(task_id)
+    task_info = task.info or {}
+    response = {
+        'state': task.state,
+    }
+    if task.state != 'FAILURE':
+        response['status'] = task_info.get('status', 'no status available at this point.')
+        response['src_url'] = task_info.get('src_url', '')
+        response['s3_urls'] = task_info.get('s3_urls', {})
+    else:
+        # something went wrong
+        response['status'] = str(task.info)  # this is the exception raised
+        response['src_url'] = ''
+        response['s3_urls'] = {}
+
+    return MozharnessArchiveTask(**response)
+
+
+@bp.route('/hgmo/<path:repo>/<rev>')
+@api.apimethod(None, unicode, unicode, unicode, unicode, unicode, status_code=302)
+def get_hgmo_archive(repo, rev, subdir=None, suffix='tar.gz', preferred_region=None):
+    """
+    An archiver for hg.mozilla.org related requests. Uses relengapi.blueprints.archiver.get_archive
+
+    :param repo: the repo location off of hg.mozilla.org/
+    :param rev: the rev associated with the repo
+    :param subdir: optional subdir path to only archive a portion of the repo
+    :param suffix: the archive extension type; defaults to tar.gz
+    :param preferred_region: the preferred s3 region to use
+    """
+    src_url = current_app.config['ARCHIVER_HGMO_URL_TEMPLATE'].format(
+        repo=repo, rev=rev, suffix=suffix, subdir=subdir or ''
+    )
+    # though slightly odd to append the archive suffix extension with a subdir, this:
+    # 1) allows us to have archives based on different subdir locations from the same repo and rev
+    # 2) is aligned with the hg.mozilla.org format
+    key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix)
+    if subdir:
+        key += '/{}'.format(subdir)
+    return get_archive(src_url, key, preferred_region)
+
+
+def get_archive(src_url, key, preferred_region):
+    """
+    A generic getter for retrieving an s3 location of an archive where the archive is based off a
+    src_url.
+
+    sub-dir: hg.mozilla.org supports archives of sub directories within a repository. This
+    flexibility allows for creating archives of only a portion of what would normally be an entire
+    repo archive.
+
+    logic flow:
+    If there is already a key within s3, a redirect link is given for the s3 location. If the key
+    does not exist, download the archive from the src url, upload it to s3 for each supported
+    region, and return all uploaded s3 url locations.
+
+    When the key does not exist, the remaining work is handed to a celery background task, and a
+    url for obtaining task state updates is returned immediately.
+    """
+    buckets = current_app.config['ARCHIVER_S3_BUCKETS']
+    random_region = buckets.keys()[randint(0, len(buckets.keys()) - 1)]
+    # use preferred region if available otherwise choose a valid one at random
+    region = preferred_region if preferred_region and preferred_region in buckets else random_region
+    bucket = buckets[region]
+    s3 = current_app.aws.connect_to('s3', region)
+
+    # first, see if the key exists
+    if not s3.get_bucket(bucket).get_key(key):
+        task_id = key.replace('/', '_')  # keep things simple and avoid slashes in task url
+        if create_and_upload_archive.AsyncResult(task_id).state != 'STARTED':
+            # task is currently not in progress so start one.
+            create_and_upload_archive.apply_async(args=[src_url, key], task_id=task_id)
+        return {}, 202, {'Location': url_for('archiver.task_status', task_id=task_id)}
+
+    log.info("generating GET URL to {}, expires in {}s".format(key, GET_EXPIRES_IN))
+    # return 302 pointing to s3 url with archive
+    signed_url = s3.generate_url(
+        method='GET', expires_in=GET_EXPIRES_IN,
+        bucket=bucket, key=key
+    )
+    return redirect(signed_url)
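The flow implemented by get_archive above has two outcomes: a 202 with a task-status url in the
Location header while the celery task runs, or a 302 to a time-limited signed s3 url once the key
exists. A minimal client sketch of the expected poll-then-retry dance (hypothetical, not part of
this change; it assumes only the requests library and a reachable relengapi instance)::

    import time

    import requests


    def fetch_archive_url(base_url, repo, rev, subdir=None, timeout=300):
        """Hypothetical helper: resolve an hgmo archive to a signed s3 url,
        following archiver's 202-then-poll-then-302 flow."""
        url = '{}/archiver/hgmo/{}/{}'.format(base_url, repo, rev)
        params = {'subdir': subdir} if subdir else {}
        resp = requests.get(url, params=params, allow_redirects=False)
        if resp.status_code == 202:
            status_url = resp.headers['Location']  # task status endpoint
            deadline = time.time() + timeout
            while time.time() < deadline:
                result = requests.get(status_url).json()['result']
                if result['state'] in ('SUCCESS', 'FAILURE'):
                    break
                time.sleep(5)
            # re-request; the key should exist now if the task succeeded
            resp = requests.get(url, params=params, allow_redirects=False)
        if resp.status_code != 302:
            raise RuntimeError('no archive available: {}'.format(resp.status_code))
        return resp.headers['Location']  # signed s3 url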
diff --git a/relengapi/blueprints/archiver/tasks.py b/relengapi/blueprints/archiver/tasks.py
new file mode 100644
index 00000000..227e2715
--- /dev/null
+++ b/relengapi/blueprints/archiver/tasks.py
@@ -0,0 +1,81 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import logging
+import requests
+import shutil
+import tempfile
+
+from boto.s3.key import Key
+from celery.task import current
+from flask import current_app
+from random import randint
+
+from relengapi.lib import celery
+
+log = logging.getLogger(__name__)
+
+GET_EXPIRES_IN = 300
+
+
+def upload_url_archive_to_s3(key, url, buckets):
+    s3_urls = {}
+
+    # make the source request
+    resp = requests.get(url, stream=True)
+
+    # create a temporary file for it
+    tempf = tempfile.TemporaryFile()
+    # copy the data, block-by-block, into that file
+    resp.raw.decode_content = True
+    shutil.copyfileobj(resp.raw, tempf)
+
+    # write it out to S3
+    for region in buckets:
+        s3 = current_app.aws.connect_to('s3', region)
+        k = Key(s3.get_bucket(buckets[region]))
+        k.key = key
+        k.set_metadata('Content-Type', resp.headers['Content-Type'])
+        # give it the same attachment filename
+        k.set_metadata('Content-Disposition', resp.headers['Content-Disposition'])
+        k.set_contents_from_file(tempf, rewind=True)  # rewind points tempf back to start for us
+        s3_urls[region] = s3.generate_url(expires_in=GET_EXPIRES_IN, method='GET',
+                                          bucket=buckets[region], key=key)
+
+    resp.close()
+
+    return s3_urls
+
+
+@celery.task(bind=True, track_started=True, max_retries=3)
+def create_and_upload_archive(self, src_url, key):
+    """
+    A celery task that downloads an archive from a src location, if it exists, and attempts to
+    upload it to a supported bucket in each supported region.
+
+    Throughout this process, the state of the task is updated; on success, the s3 urls of the
+    uploaded archives are returned.
+    """
+    status = "Task completed! Check 's3_urls' for upload locations."
+    s3_urls = {}
+    buckets = current_app.config['ARCHIVER_S3_BUCKETS']
+
+    resp = requests.head(src_url)
+    if resp.status_code == 200:
+        try:
+            s3_urls = upload_url_archive_to_s3(key, src_url, buckets)
+        except Exception as exc:
+            # set a jitter enabled delay
+            # where an aggressive delay would result in: 7s, 49s, and 343s
+            # and a gentle delay would result in: 4s, 16s, and 64s
+            delay = randint(4, 7) ** (current.request.retries + 1)  # retries == 0 on first attempt
+            current.retry(exc=exc, countdown=delay)
+    else:
+        status = "Url not found. Does it exist? url: '{}', response: '{}' ".format(src_url,
+                                                                                   resp.status_code)
+        log.warning(status)
+    return {
+        'status': status,
+        'src_url': src_url,
+        's3_urls': s3_urls,
+    }
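The retry arithmetic above is easy to misread: the countdown is randint(4, 7) ** (retries + 1), so
the base is re-rolled on every retry and each attempt lands between the "gentle" and "aggressive"
progressions named in the comment. A quick illustrative sketch of the bounds (not part of the
change)::

    from random import randint

    # bounds on the countdown used by create_and_upload_archive's retries:
    # a retry with `retries` prior failures sleeps randint(4, 7) ** (retries + 1)
    for retries in range(3):  # max_retries=3
        gentle = 4 ** (retries + 1)       # 4s, 16s, 64s
        aggressive = 7 ** (retries + 1)   # 7s, 49s, 343s
        sampled = randint(4, 7) ** (retries + 1)
        print(retries, gentle, sampled, aggressive)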
diff --git a/relengapi/blueprints/archiver/test_archiver.py b/relengapi/blueprints/archiver/test_archiver.py
new file mode 100644
index 00000000..0f56d53c
--- /dev/null
+++ b/relengapi/blueprints/archiver/test_archiver.py
@@ -0,0 +1,95 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import mock
+import moto
+
+from nose.tools import eq_
+from relengapi.blueprints.archiver.test_util import EXPECTED_TASK_STATUS_FAILED_RESPONSE
+from relengapi.blueprints.archiver.test_util import EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE
+from relengapi.blueprints.archiver.test_util import create_s3_items
+from relengapi.blueprints.archiver.test_util import fake_200_response
+from relengapi.blueprints.archiver.test_util import fake_failed_task_status
+from relengapi.blueprints.archiver.test_util import fake_successful_task_status
+from relengapi.blueprints.archiver.test_util import setup_buckets
+
+from relengapi.lib.testing.context import TestContext
+
+
+cfg = {
+    'RELENGAPI_CELERY_LOG_LEVEL': 'DEBUG',
+
+    'AWS': {
+        'access_key_id': 'aa',
+        'secret_access_key': 'ss',
+    },
+
+    'ARCHIVER_S3_BUCKETS': {
+        'us-east-1': 'archiver-bucket-1',
+        'us-west-2': 'archiver-bucket-2'
+    },
+    'ARCHIVER_HGMO_URL_TEMPLATE': "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}",
+
+    'CELERY_BROKER_URL': 'memory://',
+    'CELERY_BACKEND': 'cache',
+    "CELERY_CACHE_BACKEND": 'memory',
+    'CELERY_ALWAYS_EAGER': True,
+}
+
+test_context = TestContext(config=cfg)
+
+
+@moto.mock_s3
+@test_context
+def test_accepted_response_when_missing_s3_key(app, client):
+    setup_buckets(app, cfg)
+    with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as get, \
+            mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as head:
+        # don't actually hit hg.m.o; we just care about kicking off the background task and
+        # returning a 202 accepted
+        get.return_value = fake_200_response()
+        head.return_value = fake_200_response()
+        resp = client.get('/archiver/hgmo/mozilla-central/9213957d166d?'
+                          'subdir=testing/mozharness&preferred_region=us-west-2')
+    eq_(resp.status_code, 202, resp.status)
+
+
+@moto.mock_s3
+@test_context
+def test_redirect_response_when_found_s3_key(app, client):
+    setup_buckets(app, cfg)
+    rev, repo, subdir, suffix = '203e1025a826', 'mozilla-central', 'testing/mozharness', 'tar.gz'
+    key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix)
+    if subdir:
+        key += '/{}'.format(subdir)
+    create_s3_items(app, cfg, key=key)
+
+    resp = client.get(
+        '/archiver/hgmo/{repo}/{rev}?subdir={subdir}&suffix={suffix}'.format(
+            rev=rev, repo=repo, subdir=subdir, suffix=suffix
+        )
+    )
+    eq_(resp.status_code, 302, resp.status)
+
+
+@moto.mock_s3
+@test_context
+def test_task_status_when_failed(app, client):
+    expected_response = EXPECTED_TASK_STATUS_FAILED_RESPONSE
+    with mock.patch("relengapi.blueprints.archiver.create_and_upload_archive") as caua:
+        caua.AsyncResult.side_effect = fake_failed_task_status
+        response = client.get('/archiver/status/{task_id}'.format(task_id=123))
+    eq_(cmp(json.loads(response.data)['result'], expected_response), 0,
+        "a failed task status check does not equal expected status.")
+
+
+@moto.mock_s3
+@test_context
+def test_task_status_when_success(app, client):
+    expected_response = EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE
+    with mock.patch("relengapi.blueprints.archiver.create_and_upload_archive") as caua:
+        caua.AsyncResult.return_value = fake_successful_task_status(expected_response)
+        response = client.get('/archiver/status/{task_id}'.format(task_id=123))
+    eq_(cmp(json.loads(response.data)['result'], expected_response), 0,
+        "A successful task status check does not equal expected status.")
diff --git a/relengapi/blueprints/archiver/test_tasks.py b/relengapi/blueprints/archiver/test_tasks.py
new file mode 100644
index 00000000..e7f2c925
--- /dev/null
+++ b/relengapi/blueprints/archiver/test_tasks.py
@@ -0,0 +1,76 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import mock
+import moto
+
+from relengapi.blueprints.archiver.tasks import create_and_upload_archive
+from relengapi.blueprints.archiver.test_util import fake_200_response
+from relengapi.blueprints.archiver.test_util import fake_404_response
+from relengapi.blueprints.archiver.test_util import setup_buckets
+from relengapi.lib.testing.context import TestContext
+
+
+cfg = {
+    'RELENGAPI_CELERY_LOG_LEVEL': 'DEBUG',
+
+    'AWS': {
+        'access_key_id': 'aa',
+        'secret_access_key': 'ss',
+    },
+
+    'ARCHIVER_S3_BUCKETS': {
+        'us-east-1': 'archiver-bucket-1',
+        'us-west-2': 'archiver-bucket-2'
+    },
+    'ARCHIVER_HGMO_URL_TEMPLATE': "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}",
+
+    'CELERY_BROKER_URL': 'memory://',
+    'CELERY_BACKEND': 'cache',
+    "CELERY_CACHE_BACKEND": 'memory',
+    'CELERY_ALWAYS_EAGER': True,
+}
+
+test_context = TestContext(config=cfg)
+
+
+@moto.mock_s3
+@test_context
+def test_invalid_hg_url(app):
+    setup_buckets(app, cfg)
+    rev, repo, suffix = 'fakeRev', 'mozilla-central', 'tar.gz'
+    key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix)
+    src_url = cfg['ARCHIVER_HGMO_URL_TEMPLATE'].format(repo=repo, rev=rev, suffix=suffix,
+                                                       subdir='testing/mozharness')
+    with app.app_context():
+        with mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as head:
+            head.return_value = fake_404_response()
+            task = create_and_upload_archive.apply_async(args=[src_url, key],
+                                                         task_id=key.replace('/', '_'))
+    assert "Url not found." in task.info.get('status', {}), "invalid hg url was not caught!"
+
+
+@moto.mock_s3
+@test_context
+def test_successful_upload_archive_response(app):
+    setup_buckets(app, cfg)
+    rev, repo, subdir, suffix = '203e1025a826', 'mozilla-central', 'testing/mozharness', 'tar.gz'
+    key = '{repo}-{rev}.{suffix}'.format(repo=repo, rev=rev, suffix=suffix)
+    if subdir:
+        key += '/{}'.format(subdir)
+    src_url = cfg['ARCHIVER_HGMO_URL_TEMPLATE'].format(repo=repo, rev=rev, suffix=suffix,
+                                                       subdir='testing/mozharness')
+    with app.app_context():
+        with mock.patch("relengapi.blueprints.archiver.tasks.requests.get") as get, \
+                mock.patch("relengapi.blueprints.archiver.tasks.requests.head") as head:
+            get.return_value = fake_200_response()
+            head.return_value = fake_200_response()
+            task = create_and_upload_archive.apply_async(args=[src_url, key],
+                                                         task_id=key.replace('/', '_'))
+    expected_regions = [region for region in cfg['ARCHIVER_S3_BUCKETS']]
+    all_regions_have_s3_urls = [
+        task.info.get("s3_urls", {}).get(region) for region in expected_regions
+    ]
+    assert all(all_regions_have_s3_urls), "s3 urls not uploaded for each region!"
+    assert task.info.get('src_url') == src_url, "src url doesn't match upload response!"
+    assert task.state == "SUCCESS", "completed task's state isn't SUCCESS!"
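Both test modules set CELERY_ALWAYS_EAGER, so apply_async above runs the task inline and hands
back an already-finished result whose info and state can be inspected immediately. A standalone
illustration of that behavior (a sketch only; it assumes celery 3.x-era setting names to match the
code in this change)::

    from celery import Celery

    app = Celery(broker='memory://')
    app.conf.CELERY_ALWAYS_EAGER = True


    @app.task
    def add(x, y):
        return x + y

    result = add.apply_async(args=[2, 3])  # runs synchronously; returns an EagerResult
    print(result.state, result.result)     # -> SUCCESS 5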
diff --git a/relengapi/blueprints/archiver/test_util.py b/relengapi/blueprints/archiver/test_util.py
new file mode 100644
index 00000000..be2022fd
--- /dev/null
+++ b/relengapi/blueprints/archiver/test_util.py
@@ -0,0 +1,74 @@
+import mock
+
+from StringIO import StringIO
+from boto.s3.key import Key
+
+EXPECTED_TASK_STATUS_FAILED_RESPONSE = {
+    "s3_urls": {},
+    "src_url": "",
+    "state": "FAILURE",
+    "status": "{u'exc_message': u'fp is at EOF. Use rewind option or seek() to data start.'"
+              ", u'exc_type': u'AttributeError'}"
+}
+
+EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE = {
+    "s3_urls": {
+        "us-east-1": "https://archiver-us-east-1.s3.amazonaws.com/mozilla-central-9213957d1.tar.gz",
+        "us-west-2": "https://archiver-us-west-2.s3.amazonaws.com/mozilla-central-9213957d1.tar.gz",
+    },
+    "src_url": "https://hg.mozilla.org/mozilla-central/archive/9213957d1.tar.gz/testing/mozharness",
+    "state": "SUCCESS",
+    "status": "Task completed! Check 's3_urls' for upload locations."
+}
+
+
+def setup_buckets(app, cfg):
+    for region, bucket in cfg['ARCHIVER_S3_BUCKETS'].iteritems():
+        s3 = app.aws.connect_to('s3', region)
+        s3.create_bucket(bucket)
+
+
+def create_s3_items(app, cfg, key):
+    for region, bucket in cfg['ARCHIVER_S3_BUCKETS'].iteritems():
+        s3 = app.aws.connect_to('s3', region)
+        b = s3.get_bucket(bucket)
+        k = Key(b)
+        k.key = key
+        k.set_contents_from_string("Help, I'm trapped in an alternate s3 dimension.")
+
+
+def fake_200_response():
+    response = mock.Mock()
+    response.status_code = 200
+    response.headers = {
+        'Content-Type': 'application/x-gzip',
+        'Content-Disposition': 'attachment; filename=mozilla-central-9213957d166d.tar.gz'
+    }
+    response.raw = StringIO("Debugging is twice as hard as writing the code in the first place. "
+                            "Therefore, if you write the code as cleverly as possible, you are, "
+                            "by definition, not smart enough to debug it. --Brian W. Kernighan")
+    return response
+
+
+def fake_404_response():
+    response = mock.Mock()
+    response.status_code = 404
+    return response
+
+
+def fake_failed_task_status(task_id):
+    task = mock.Mock()
+    task.state = EXPECTED_TASK_STATUS_FAILED_RESPONSE['state']
+    task.info = EXPECTED_TASK_STATUS_FAILED_RESPONSE['status']
+    return task
+
+
+def fake_successful_task_status(task_id):
+    task = mock.Mock()
+    task.state = EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['state']
+    task.info = {
+        'src_url': EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['src_url'],
+        's3_urls': EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['s3_urls'],
+        'status': EXPECTED_TASK_STATUS_SUCCESSFUL_RESPONSE['status'],
+    }
+    return task

diff --git a/relengapi/blueprints/archiver/types.py b/relengapi/blueprints/archiver/types.py
new file mode 100644
index 00000000..67dcbe57
--- /dev/null
+++ b/relengapi/blueprints/archiver/types.py
@@ -0,0 +1,24 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import wsme.types
+
+
+class MozharnessArchiveTask(wsme.types.Base):
+    """Represents a running task and its current state
+    """
+
+    #: this is the current state of the task
+    #: e.g. "PENDING", "PROGRESS", "SUCCESS", "FAILURE"
+    state = unicode
+
+    #: current msg status of task
+    #: e.g. "Downloading archive from hg.m.o"
+    status = unicode
+
+    #: archive url origin that s3 item is based off of
+    src_url = unicode
+
+    #: s3 links for the archives by region
+    s3_urls = {str: str}
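For reference, task_status builds this type directly from the celery task's info dict. A quick
sketch of the mapping, with invented field values (not part of the change)::

    from relengapi.blueprints.archiver.types import MozharnessArchiveTask

    response = {
        'state': 'PENDING',
        'status': 'no status available at this point.',
        'src_url': '',
        's3_urls': {},
    }
    task = MozharnessArchiveTask(**response)  # wsme.types.Base accepts fields as kwargs
    assert task.state == 'PENDING'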
diff --git a/relengapi/docs/deployment/archiver.rst b/relengapi/docs/deployment/archiver.rst
new file mode 100644
index 00000000..b2c5ab75
--- /dev/null
+++ b/relengapi/docs/deployment/archiver.rst
@@ -0,0 +1,29 @@
+Deploying Archiver
+==================
+
+Archiver requires a separate configuration for each endpoint. Each endpoint config names the s3
+buckets (one per region) and the template url that is filled in from the query params passed to
+the endpoint.
+
+For AWS credentials, access to each bucket should be limited to the IAM role behind the configured
+AWS credentials. The buckets in the configuration must already exist; Archiver does not create
+them.
+
+Finally, Archiver uses Celery, so you will need to provide a broker and a result back-end.
+
+Example config::
+
+    # using rabbitmq locally in a staging setup
+    CELERY_BROKER_URL='amqp://guest@localhost//'
+    CELERY_BACKEND='amqp'
+
+    AWS = {
+        'access_key_id': 'accessKeyExample',
+        'secret_access_key': 'secretAccessKeyExample',
+    }
+
+    ARCHIVER_S3_BUCKETS = {
+        'us-east-1': 'archiver-us-east-1',
+        'us-west-2': 'archiver-us-west-2'
+    }
+
+    ARCHIVER_HGMO_URL_TEMPLATE = "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}"

diff --git a/relengapi/docs/deployment/index.rst b/relengapi/docs/deployment/index.rst
index 10c69a13..497b8649 100644
--- a/relengapi/docs/deployment/index.rst
+++ b/relengapi/docs/deployment/index.rst
@@ -16,3 +16,4 @@ This is mostly limited to system administrators and those responsible for
    config
    sqs
    tooltool
    slaveloan
+   archiver

diff --git a/relengapi/docs/usage/archiver.rst b/relengapi/docs/usage/archiver.rst
new file mode 100644
index 00000000..c9bbbaf8
--- /dev/null
+++ b/relengapi/docs/usage/archiver.rst
@@ -0,0 +1,76 @@
+Archiver
+========
+
+Archiver takes a repository archive url and returns an s3 location for the same archive,
+submitting the archive to s3 first if it doesn't already exist.
+
+If the archive already exists, the response is a 302 redirect whose Location is the equivalent s3
+url.
+
+If the archive does not yet exist in s3, the request is accepted (202) and the response's Location
+header is a task status url for monitoring the creation and upload of the archive.
+
+Currently, only hg.mozilla.org support is configured::
+
+    ARCHIVER_HGMO_URL_TEMPLATE = "https://hg.mozilla.org/{repo}/archive/{rev}.{suffix}/{subdir}"
+
+
+Examples::
+
+    # To get an in-tree Mozharness archive based on: http://hg.mozilla.org/projects/ash/rev/42bf8560b395
+    # (note the quoting: the url contains '&', which the shell would otherwise interpret)
+    > curl -i 'http://127.0.0.1:8010/archiver/hgmo/projects/ash/42bf8560b395?subdir=testing/mozharness&preferred_region=us-west-2'
+    HTTP/1.0 202 ACCEPTED
+    Content-Type: application/json
+    Content-Length: 18
+    Location: http://127.0.0.1:8010/archiver/status/projects_ash-42bf8560b395.tar.gz_testing_mozharness
+    Server: Werkzeug/0.10.4 Python/2.7.6
+    Date: Fri, 19 Jun 2015 22:41:29 GMT
+
+    {
+      "result": {}
+    }
+
+    # In the above example, the s3 archive does not exist, so Archiver will create it. Poll the
+    # Location header url from the response above to monitor state.
+    > curl -i http://127.0.0.1:8010/archiver/status/projects_ash-42bf8560b395.tar.gz_testing_mozharness
+    HTTP/1.0 200 OK
+    Content-Type: application/json
+    Content-Length: 683
+    Server: Werkzeug/0.10.4 Python/2.7.6
+    Date: Fri, 19 Jun 2015 22:41:41 GMT
+
+    {
+      "result": {
+        "s3_urls": {
+          "us-east-1": "https://archiver-us-east-1.s3.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=0f%2FvcSqbUylTWgwx8yYYISO6%2FJM%3D&Expires=1434753993&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA",
+          "us-west-2": "https://archiver-us-west-2.s3-us-west-2.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=i6%2B9d4r8u8YuUNTmT4kX9jcaNrA%3D&Expires=1434753992&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA"
+        },
+        "src_url": "https://hg.mozilla.org/projects/ash/archive/42bf8560b395.tar.gz/testing/mozharness",
+        "state": "SUCCESS",
+        "status": "Task completed! Check 's3_urls' for upload locations."
+      }
+    }
+
+    # We can see above that Archiver has created two s3 archives across two regions. We can use
+    # those urls to grab the archive. Subsequent requests of the original endpoint simply redirect
+    # to the s3 location.
+    > curl -i 'http://127.0.0.1:8010/archiver/hgmo/projects/ash/42bf8560b395?subdir=testing/mozharness&preferred_region=us-west-2'
+    HTTP/1.0 302 FOUND
+    Content-Type: text/html; charset=utf-8
+    Content-Length: 625
+    Location: https://archiver-us-west-2.s3-us-west-2.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=oZVrvFhkM6RR8rxKryt9vTWmvTQ%3D&Expires=1434754032&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA
+    Server: Werkzeug/0.10.4 Python/2.7.6
+    Date: Fri, 19 Jun 2015 22:42:12 GMT
+
+    <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">
+    <title>Redirecting...</title>
+    <h1>Redirecting...</h1>
+    <p>You should be redirected automatically to target URL:
+    <a href="https://archiver-us-west-2.s3-us-west-2.amazonaws.com/projects/ash-42bf8560b395.tar.gz/testing/mozharness?Signature=oZVrvFhkM6RR8rxKryt9vTWmvTQ%3D&Expires=1434754032&AWSAccessKeyId=AKIAIYHUTJ7BG2GMUTXA">link</a>.
+    If not click the link.
+
+
+Types
+-----
+
+.. api:autotype:: MozharnessArchiveTask
+
+Endpoints
+---------
+
+.. api:autoendpoint:: archiver.*
+
diff --git a/relengapi/docs/usage/index.rst b/relengapi/docs/usage/index.rst
index 35742f1b..33b90b23 100644
--- a/relengapi/docs/usage/index.rst
+++ b/relengapi/docs/usage/index.rst
@@ -16,3 +16,4 @@ Subsequent sections describe the interfaces provided by the individual component
    clobberer
    tooltool
    slaveloan
+   archiver
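As the deployment doc above notes, the buckets named in ARCHIVER_S3_BUCKETS must already exist
before Archiver can use them. A hypothetical one-off to pre-create them with boto (the same
library the tasks use; the bucket names and credentials here are the example values from the
deployment config, not real ones)::

    import boto.s3

    from boto.s3.connection import Location

    ARCHIVER_S3_BUCKETS = {
        'us-east-1': 'archiver-us-east-1',
        'us-west-2': 'archiver-us-west-2',
    }

    for region, bucket in ARCHIVER_S3_BUCKETS.items():
        conn = boto.s3.connect_to_region(
            region,
            aws_access_key_id='accessKeyExample',
            aws_secret_access_key='secretAccessKeyExample',
        )
        # us-east-1 is boto's default location (the empty string)
        location = Location.DEFAULT if region == 'us-east-1' else region
        conn.create_bucket(bucket, location=location)
        print('created bucket %s in %s' % (bucket, region))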