Don't retry on 404s. #300

Merged · 1 commit · Jan 15, 2019
4 changes: 2 additions & 2 deletions scriptworker/artifacts.py
@@ -290,8 +290,8 @@ async def download_artifacts(context, file_urls, parent_dir=None, session=None,
list: the full paths to the files downloaded

Raises:
-scriptworker.exceptions.DownloadError: on download failure after
-max retries.
+scriptworker.exceptions.BaseDownloadError: on download failure after
+any applicable retries.

"""
parent_dir = parent_dir or context.config['work_dir']
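With this change, `download_artifacts` documents `BaseDownloadError` rather than `DownloadError` as its failure mode. A minimal sketch of how a calling script might handle that, assuming a `context` object and `file_urls` list supplied by the caller; the `fetch_artifacts` wrapper below is illustrative, not part of this PR:

```python
from scriptworker.artifacts import download_artifacts
from scriptworker.exceptions import BaseDownloadError


async def fetch_artifacts(context, file_urls):
    """Illustrative wrapper: download artifacts and surface the documented error."""
    try:
        # Returns the full paths to the files downloaded.
        return await download_artifacts(context, file_urls)
    except BaseDownloadError as exc:
        # After this PR, this catches both Download404 and DownloadError.
        raise RuntimeError("artifact download failed: {}".format(exc)) from exc
```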
10 changes: 5 additions & 5 deletions scriptworker/cot/verify.py
@@ -28,7 +28,7 @@
from scriptworker.config import read_worker_creds, apply_product_config
from scriptworker.constants import DEFAULT_CONFIG
from scriptworker.context import Context
-from scriptworker.exceptions import CoTError, DownloadError, ScriptWorkerGPGException
+from scriptworker.exceptions import CoTError, BaseDownloadError, ScriptWorkerGPGException
from scriptworker.gpg import get_body, GPG
from scriptworker.log import contextual_log_handler
from scriptworker.task import (
@@ -591,7 +591,7 @@ async def download_cot(chain):
chain (ChainOfTrust): the chain of trust to add to.

Raises:
-DownloadError: on failure.
+BaseDownloadError: on failure.

"""
mandatory_artifact_tasks = []
@@ -689,7 +689,7 @@ async def download_cot_artifacts(chain):

Raises:
CoTError: on chain of trust sha validation error, on a mandatory artifact
-DownloadError: on download error on a mandatory artifact
+BaseDownloadError: on download error on a mandatory artifact

"""
upstream_artifacts = chain.task['payload'].get('upstreamArtifacts', [])
@@ -1489,7 +1489,7 @@ async def verify_parent_task(chain, link):
verify_link_in_task_graph(chain, link, target_link)
try:
await verify_parent_task_definition(chain, link)
-except (DownloadError, KeyError) as e:
+except (BaseDownloadError, KeyError) as e:
raise CoTError(e)


@@ -1955,7 +1955,7 @@ async def verify_chain_of_trust(chain):
# verify the worker_impls, e.g. docker-worker
await verify_worker_impls(chain)
await trace_back_to_tree(chain)
-except (DownloadError, KeyError, AttributeError) as exc:
+except (BaseDownloadError, KeyError, AttributeError) as exc:
log.critical("Chain of Trust verification error!", exc_info=True)
if isinstance(exc, CoTError):
raise
26 changes: 22 additions & 4 deletions scriptworker/exceptions.py
@@ -88,26 +88,44 @@ def __init__(self, msg):
super().__init__(msg, exit_code=STATUSES['malformed-payload'])


-class DownloadError(ScriptWorkerTaskException):
-"""Failure in ``scriptworker.utils.download_file``.
+class BaseDownloadError(ScriptWorkerTaskException):
+"""Base class for DownloadError and Download404.

Attributes:
exit_code (int): this is set to 4 (resource-unavailable).

"""

def __init__(self, msg):
"""Initialize DownloadError.
"""Initialize Download404.

Args:
msg (string): the error message

"""
-super(DownloadError, self).__init__(
+super(BaseDownloadError, self).__init__(
msg, exit_code=STATUSES['resource-unavailable']
)


+class Download404(BaseDownloadError):
+"""404 in ``scriptworker.utils.download_file``.

+Attributes:
+exit_code (int): this is set to 4 (resource-unavailable).

+"""


+class DownloadError(BaseDownloadError):
+"""Failure in ``scriptworker.utils.download_file``.

+Attributes:
+exit_code (int): this is set to 4 (resource-unavailable).

+"""


class CoTError(ScriptWorkerTaskException, KeyError):
"""Failure in Chain of Trust verification.

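The new hierarchy keeps the old `DownloadError` name for retryable failures and adds `Download404` for permanent ones, both sharing the base class and its resource-unavailable exit code. A small sketch (not from this PR) of what the split lets callers do:

```python
from scriptworker.exceptions import BaseDownloadError, Download404, DownloadError

# Both concrete classes inherit the resource-unavailable exit code from the base.
assert issubclass(Download404, BaseDownloadError)
assert issubclass(DownloadError, BaseDownloadError)

try:
    # Illustrative: the message format mirrors what download_file raises on a 404.
    raise Download404("https://example.com/missing status 404!")
except DownloadError:
    # Not reached: a 404 is no longer a DownloadError, so retry logic keyed on
    # DownloadError will not retry it.
    pass
except BaseDownloadError as exc:
    # Reached: code that still wants the old "any download failure" behaviour
    # can catch the base class instead.
    print(exc.exit_code)  # 4 (resource-unavailable)
```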
31 changes: 18 additions & 13 deletions scriptworker/test/__init__.py
@@ -6,6 +6,7 @@
import arrow
import asyncio
from copy import deepcopy
+import functools
import json
import mock
import os
@@ -168,32 +169,36 @@ def unsuccessful_queue():
return UnsuccessfulQueue()


+@asyncio.coroutine
+def _fake_request(resp_status, method, url, *args, **kwargs):
+resp = FakeResponse(method, url, status=resp_status)
+resp._history = (FakeResponse(method, url, status=302),)
+return resp


@pytest.mark.asyncio
@pytest.fixture(scope='function')
async def fake_session():
-@asyncio.coroutine
-def _fake_request(method, url, *args, **kwargs):
-resp = FakeResponse(method, url)
-resp._history = (FakeResponse(method, url, status=302),)
-return resp

session = aiohttp.ClientSession()
-session._request = _fake_request
+session._request = functools.partial(_fake_request, 200)
yield session
await session.close()


@pytest.mark.asyncio
@pytest.fixture(scope='function')
async def fake_session_500():
-@asyncio.coroutine
-def _fake_request(method, url, *args, **kwargs):
-resp = FakeResponse(method, url, status=500)
-resp._history = (FakeResponse(method, url, status=302),)
-return resp
session = aiohttp.ClientSession()
+session._request = functools.partial(_fake_request, 500)
+yield session
+await session.close()


+@pytest.mark.asyncio
+@pytest.fixture(scope='function')
+async def fake_session_404():
+session = aiohttp.ClientSession()
-session._request = _fake_request
+session._request = functools.partial(_fake_request, 404)
yield session
await session.close()

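The fixtures above now share one module-level `_fake_request` whose status code is bound with `functools.partial`, instead of each fixture defining its own copy. A self-contained sketch of that pattern, using a stand-in `FakeResponse` rather than the helper defined in `scriptworker/test/__init__.py`:

```python
import functools


class FakeResponse:
    """Stand-in for the FakeResponse helper in scriptworker's test package."""

    def __init__(self, method, url, status=200):
        self.method = method
        self.url = url
        self.status = status


def _fake_request(resp_status, method, url, *args, **kwargs):
    # The status code is the first positional argument so fixtures can bind it.
    return FakeResponse(method, url, status=resp_status)


# Each fixture only differs by the status it binds.
fake_200 = functools.partial(_fake_request, 200)
fake_404 = functools.partial(_fake_request, 404)

assert fake_200("GET", "https://example.com/ok").status == 200
assert fake_404("GET", "https://example.com/missing").status == 404
```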
11 changes: 10 additions & 1 deletion scriptworker/test/test_utils.py
@@ -9,11 +9,12 @@
import re
import shutil
import tempfile
-from scriptworker.exceptions import DownloadError, ScriptWorkerException, ScriptWorkerRetryException
+from scriptworker.exceptions import DownloadError, Download404, ScriptWorkerException, ScriptWorkerRetryException
import scriptworker.utils as utils
from . import (
FakeResponse,
fake_session,
+fake_session_404,
fake_session_500,
noop_async,
tmpdir,
@@ -23,6 +24,7 @@

assert tmpdir, context # silence flake8
assert fake_session, fake_session_500 # silence flake8
+assert fake_session_404 # silence flake8

# constants helpers and fixtures {{{1
# from https://github.com/SecurityInnovation/PGPy/blob/develop/tests/test_01_types.py
@@ -327,6 +329,13 @@ async def test_download_file_exception(context, fake_session_500, tmpdir):
await utils.download_file(context, "url", path, session=fake_session_500)


+@pytest.mark.asyncio
+async def test_download_file_404(context, fake_session_404, tmpdir):
+path = os.path.join(tmpdir, "foo")
+with pytest.raises(Download404):
+await utils.download_file(context, "url", path, session=fake_session_404)


# format_json {{{1
def test_format_json():
expected = '\n'.join([
16 changes: 12 additions & 4 deletions scriptworker/utils.py
@@ -23,6 +23,7 @@
from taskcluster.client import createTemporaryCredentials
from scriptworker.exceptions import (
DownloadError,
+Download404,
ScriptWorkerException,
ScriptWorkerRetryException,
ScriptWorkerTaskException,
@@ -466,6 +467,12 @@ def load_json_or_yaml(string, is_path=False, file_type='json',


# download_file {{{1
+async def _log_download_error(resp, msg):
+log.debug(msg, {'url': get_loggable_url(str(resp.url)), 'status': resp.status, 'body': (await resp.text())[:1000]})
+for i, h in enumerate(resp.history):
+log.debug("Redirect history %s: %s; body=%s", get_loggable_url(str(h.url)), h.status, (await h.text())[:1000])


async def download_file(context, url, abs_filename, session=None, chunk_size=128):
"""Download a file, async.

@@ -484,10 +491,11 @@ async def download_file(context, url, abs_filename, session=None, chunk_size=128
log.info("Downloading %s", loggable_url)
parent_dir = os.path.dirname(abs_filename)
async with session.get(url) as resp:
-if resp.status != 200:
-log.debug("Failed to download %s: %s; body=%s", get_loggable_url(str(resp.url)), resp.status, (await resp.text())[:1000])
-for i, h in enumerate(resp.history):
-log.debug("Redirect history %s: %s; body=%s", get_loggable_url(str(h.url)), h.status, (await h.text())[:1000])
+if resp.status == 404:
+await _log_download_error(resp, "404 downloading %(url)s: %(status)s; body=%(body)s")
+raise Download404("{} status {}!".format(loggable_url, resp.status))
+elif resp.status != 200:
+await _log_download_error(resp, "Failed to download %(url)s: %(status)s; body=%(body)s")
raise DownloadError("{} status {} is not 200!".format(loggable_url, resp.status))
makedirs(parent_dir)
with open(abs_filename, "wb") as fd:
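The net effect in `download_file` is that a 404 now raises `Download404` immediately instead of the generic `DownloadError`, so callers can give up at once on missing resources. A hedged sketch of a caller-side retry loop that relies on this distinction; the loop itself is illustrative and not code from this PR:

```python
import asyncio

from scriptworker.exceptions import Download404, DownloadError
from scriptworker.utils import download_file


async def download_with_retries(context, url, abs_filename, attempts=5):
    """Illustrative: retry transient failures, give up immediately on a 404."""
    for attempt in range(1, attempts + 1):
        try:
            await download_file(context, url, abs_filename)
            return abs_filename
        except Download404:
            # The resource does not exist; retrying will not change that.
            raise
        except DownloadError:
            # Other non-200 responses may be transient; back off and retry.
            if attempt == attempts:
                raise
            await asyncio.sleep(attempt)
```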