Skip to content
This repository has been archived by the owner on Dec 13, 2020. It is now read-only.

Add "token" argument for private repos #8

Open
wants to merge 11 commits into
base: master
Choose a base branch
from
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ nosetests.xml
coverage.xml
*,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
Expand Down
18 changes: 16 additions & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,11 +1,25 @@
# Configure.
language: python
python: 3.5
matrix:
include:
- python: 2.7
env: TOXENV=py27
- python: 3.3
env: TOXENV=py33
- python: 3.4
env: TOXENV=py34
- python: 3.5
env: TOXENV=py35
- python: pypy
env: TOXENV=pypy
- python: pypy3
env: TOXENV=pypy3.3-5.2-alpha1
- env: TOXENV=lint
sudo: false

# Run.
install: pip install coveralls tox
script: tox -e lint,py35,py34,py33,pypy3.3-5.2-alpha1,pypy,py27
script: tox
after_success: coveralls

# Deploy.
Expand Down
29 changes: 21 additions & 8 deletions appveyor_artifacts.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
-N JOB --job-name=JOB Filter by job name (Python versions, etc).
-o NAME --owner-name=NAME Repository owner/account name.
-p NUM --pull-request=NUM Pull request number of current job.
-s TOKEN --token=TOKEN AppVeyor API token.
-r --raise Don't handle exceptions, raise all the way.
-t NAME --tag-name=NAME Tag name that triggered current job.
-v --verbose Raise exceptions with tracebacks.
Expand All @@ -52,6 +53,7 @@
import pkg_resources
import requests
import requests.exceptions
import requests.utils
from docopt import docopt

__author__ = '@Robpol86'
Expand Down Expand Up @@ -192,26 +194,30 @@ def get_arguments(argv=None, environ=None):
'raise': args['--raise'],
'repo': repo,
'tag': tag,
'token': args['--token'] or '',
'verbose': args['--verbose'],
}

return config


@with_log
def query_api(endpoint, log):
def query_api(endpoint, log, token=None):
"""Query the AppVeyor API.

:raise HandledError: On non HTTP200 responses or invalid JSON response.

:param str endpoint: API endpoint to query (e.g. '/projects/Robpol86/appveyor-artifacts').
:param logging.Logger log: Logger for this function. Populated by with_log() decorator.
:param str token: API token. Optional.

:return: Parsed JSON response.
:rtype: dict
"""
url = API_PREFIX + endpoint
headers = {'content-type': 'application/json'}
if token is not None:
headers['authorization'] = 'Bearer ' + token
response = None
log.debug('Querying %s with headers %s.', url, headers)
for i in range(QUERY_ATTEMPTS):
Expand Down Expand Up @@ -306,7 +312,7 @@ def query_build_version(config, log):

# Query history.
log.debug('Querying AppVeyor history API for %s/%s...', config['owner'], config['repo'])
json_data = query_api(url)
json_data = query_api(url, token=config['token'])
if 'builds' not in json_data:
log.error('Bad JSON reply: "builds" key missing.')
raise HandledError
Expand Down Expand Up @@ -345,7 +351,7 @@ def query_job_ids(build_version, config, log):

# Query version.
log.debug('Querying AppVeyor version API for %s/%s at %s...', config['owner'], config['repo'], build_version)
json_data = query_api(url)
json_data = query_api(url, token=config['token'])
if 'build' not in json_data:
log.error('Bad JSON reply: "build" key missing.')
raise HandledError
Expand All @@ -367,10 +373,11 @@ def query_job_ids(build_version, config, log):


@with_log
def query_artifacts(job_ids, log):
def query_artifacts(job_ids, config, log):
"""Query API again for artifacts.

:param iter job_ids: List of AppVeyor jobIDs.
:param dict config: Dictionary from get_arguments().
:param logging.Logger log: Logger for this function. Populated by with_log() decorator.

:return: List of tuples: (job ID, artifact file name, artifact file size).
Expand All @@ -380,7 +387,7 @@ def query_artifacts(job_ids, log):
for job in job_ids:
url = '/buildjobs/{0}/artifacts'.format(job)
log.debug('Querying AppVeyor artifact API for %s...', job)
json_data = query_api(url)
json_data = query_api(url, token=config['token'])
for artifact in json_data:
jobs_artifacts.append((job, artifact['fileName'], artifact['size']))
return jobs_artifacts
Expand Down Expand Up @@ -417,7 +424,8 @@ def artifacts_urls(config, jobs_artifacts, log):
# Get final URLs and destination file paths.
root_dir = config['dir'] or os.getcwd()
for job, file_name, size in jobs_artifacts:
artifact_url = '{0}/buildjobs/{1}/artifacts/{2}'.format(API_PREFIX, job, file_name)
file_name_urlsafe = requests.utils.quote(file_name, safe='')
artifact_url = '{0}/buildjobs/{1}/artifacts/{2}'.format(API_PREFIX, job, file_name_urlsafe)
artifact_local = os.path.join(root_dir, job if job_dirs else '', file_name)
if artifact_local in artifacts:
if config['no_job_dirs'] == 'skip':
Expand Down Expand Up @@ -486,7 +494,7 @@ def get_urls(config, log):
time.sleep(SLEEP_FOR)

# Get artifacts.
artifacts = query_artifacts([i[0] for i in job_ids])
artifacts = query_artifacts([i[0] for i in job_ids], config)
log.info('Found %d artifact%s.', len(artifacts), '' if len(artifacts) == 1 else 's')
return artifacts_urls(config, artifacts) if artifacts else dict()

Expand All @@ -511,10 +519,15 @@ def download_file(config, local_path, url, expected_size, chunk_size, log):
relative_path = os.path.relpath(local_path, config['dir'] or os.getcwd())
print(' => {0}'.format(relative_path), end=' ', file=sys.stderr)

headers = {}
if config['token']:
headers['authorization'] = 'Bearer ' + config['token']

# Download file.
log.debug('Writing to: %s', local_path)
with open(local_path, 'wb') as handle:
response = requests.get(url, stream=True)

response = requests.get(url, headers=headers, stream=True)
for chunk in response.iter_content(chunk_size):
handle.write(chunk)
print('.', end='', file=sys.stderr)
Expand Down
4 changes: 2 additions & 2 deletions tests/test_artifacts_urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,9 +165,9 @@ def test_subdirectory():
actual = artifacts_urls(config, jobs_artifacts)
expected = dict([
(py.path.local('src/OutputRoot/PackageWeb.1.1.17.nupkg'),
(API_PREFIX + '/buildjobs/r97evl3jva2ejs6b/artifacts/src/OutputRoot/PackageWeb.1.1.17.nupkg', 60301)),
(API_PREFIX + '/buildjobs/r97evl3jva2ejs6b/artifacts/src%2FOutputRoot%2FPackageWeb.1.1.17.nupkg', 60301)),
(py.path.local('src/OutputRoot/PackageWeb.1.1.10.nupkg'),
(API_PREFIX + '/buildjobs/s97evl3jva2ejs6b/artifacts/src/OutputRoot/PackageWeb.1.1.10.nupkg', 50301)),
(API_PREFIX + '/buildjobs/s97evl3jva2ejs6b/artifacts/src%2FOutputRoot%2FPackageWeb.1.1.10.nupkg', 50301)),
])
assert actual == expected

Expand Down
10 changes: 7 additions & 3 deletions tests/test_download_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ def test_success(capsys, tmpdir):

# Run.
local_path = tmpdir.join('appveyor_artifacts.py')
download_file(dict(dir=str(tmpdir)), str(local_path), url, source_file.size(), 1024)
config = dict(dir=str(tmpdir), token='')
download_file(config, str(local_path), url, source_file.size(), 1024)

# Check.
assert local_path.size() == source_file.size()
Expand All @@ -47,7 +48,8 @@ def test_success_subdir(capsys, tmpdir):

# Run.
local_path = tmpdir.join('src', 'files', 'appveyor_artifacts.py')
download_file(dict(dir=str(tmpdir)), str(local_path), url, source_file.size(), 1024)
config = dict(dir=str(tmpdir), token='')
download_file(config, str(local_path), url, source_file.size(), 1024)

# Check.
assert local_path.size() == source_file.size()
Expand All @@ -70,11 +72,13 @@ def test_errors(tmpdir, caplog, file_exists):
url = 'https://ci.appveyor.com/api/buildjobs/abc1def2ghi3jkl4/artifacts/appveyor_artifacts.py'
httpretty.register_uri(httpretty.GET, url, body=iter(source_file.readlines()), streaming=True)

config = dict(dir=str(tmpdir), token='')

local_path = tmpdir.join('appveyor_artifacts.py')
if file_exists:
local_path.ensure()
with pytest.raises(HandledError):
download_file(dict(dir=str(tmpdir)), str(local_path), url, source_file.size() + 32, 1024)
download_file(config, str(local_path), url, source_file.size() + 32, 1024)

if file_exists:
assert caplog.records[-2].message == 'File already exists: ' + str(local_path)
Expand Down
4 changes: 4 additions & 0 deletions tests/test_get_arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ def different_cli_argv():
'raise': False,
'repo': '',
'tag': '',
'token': '',
'verbose': False,
}
yield argv, expected
Expand All @@ -62,6 +63,7 @@ def different_cli_argv():
'raise': False,
'repo': 'koala',
'tag': 'v1.0.0',
'token': '',
'verbose': False,
'ignore_errors': False,
}
Expand All @@ -74,6 +76,7 @@ def different_cli_argv():
'-J', 'overwrite',
'-m',
'-N', r'Environment: PYTHON=C:\Python27',
'-s', 'aSecret',
'-v',
]
expected = {
Expand All @@ -89,6 +92,7 @@ def different_cli_argv():
'raise': False,
'repo': '',
'tag': '',
'token': 'aSecret',
'verbose': True,
}
yield argv, expected
Expand Down
16 changes: 8 additions & 8 deletions tests/test_get_urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,12 @@ def test_instant_success(monkeypatch, artifacts):
:param monkeypatch: pytest fixture.
:param bool artifacts: If simulation should have or lack artifacts.
"""
monkeypatch.setattr('appveyor_artifacts.query_build_version', lambda _: '1.0.1')
monkeypatch.setattr('appveyor_artifacts.query_build_version', lambda *_: '1.0.1')
monkeypatch.setattr('appveyor_artifacts.query_job_ids', lambda *_: [('abc1def2ghi3jkl4', 'success')])
monkeypatch.setattr('appveyor_artifacts.query_artifacts',
lambda _: [('abc1def2ghi3jkl4', 'README.md', 1234)] if artifacts else [])
lambda *_: [('abc1def2ghi3jkl4', 'README.md', 1234)] if artifacts else [])

config = dict(always_job_dirs=False, no_job_dirs=None, dir=None)
config = dict(always_job_dirs=False, no_job_dirs=None, dir=None, token='')
actual = get_urls(config)
expected = {py.path.local('README.md'): (PREFIX % ('abc1def2ghi3jkl4', 'README.md'), 1234)} if artifacts else dict()
assert actual == expected
Expand All @@ -36,9 +36,9 @@ def test_wait_for_job_queue(monkeypatch, caplog, timeout):
"""
answers = [None, '1.0.1']
monkeypatch.setattr('appveyor_artifacts.SLEEP_FOR', 0.01)
monkeypatch.setattr('appveyor_artifacts.query_build_version', lambda _: None if timeout else answers.pop(0))
monkeypatch.setattr('appveyor_artifacts.query_build_version', lambda *_: None if timeout else answers.pop(0))
monkeypatch.setattr('appveyor_artifacts.query_job_ids', lambda *_: [('abc1def2ghi3jkl4', 'success')])
monkeypatch.setattr('appveyor_artifacts.query_artifacts', lambda _: list())
monkeypatch.setattr('appveyor_artifacts.query_artifacts', lambda *_: list())

if timeout:
with pytest.raises(HandledError):
Expand Down Expand Up @@ -66,11 +66,11 @@ def test_queued_running_success_or_failed(monkeypatch, caplog, success):
"""
answers = (['bad'] if success is None else []) + ['queued', 'running'] + (['success'] if success else ['failed'])
monkeypatch.setattr('appveyor_artifacts.SLEEP_FOR', 0.01)
monkeypatch.setattr('appveyor_artifacts.query_build_version', lambda _: '1.0.1')
monkeypatch.setattr('appveyor_artifacts.query_build_version', lambda *_: '1.0.1')
monkeypatch.setattr('appveyor_artifacts.query_job_ids', lambda *_: [('abc1def2ghi3jkl4', answers.pop(0))])
monkeypatch.setattr('appveyor_artifacts.query_artifacts', lambda _: [('abc1def2ghi3jkl4', 'README.md', 1234)])
monkeypatch.setattr('appveyor_artifacts.query_artifacts', lambda *_: [('abc1def2ghi3jkl4', 'README.md', 1234)])

config = dict(always_job_dirs=False, no_job_dirs=None, dir=None, owner='me', repo='project')
config = dict(always_job_dirs=False, no_job_dirs=None, dir=None, owner='me', repo='project', token='')
if not success:
with pytest.raises(HandledError):
get_urls(config)
Expand Down
12 changes: 6 additions & 6 deletions tests/test_main.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def test_no_paths(monkeypatch, caplog):
"""
monkeypatch.setattr('appveyor_artifacts.get_urls', lambda _: dict())
monkeypatch.setattr('appveyor_artifacts.validate', lambda _: None)
appveyor_artifacts.main(dict(dir=None, mangle_coverage=False))
appveyor_artifacts.main(dict(dir=None, mangle_coverage=False, token=''))
assert caplog.records[-2].message == 'No artifacts; nothing to download.'


Expand All @@ -43,7 +43,7 @@ def test_one_file(capsys, monkeypatch, tmpdir, caplog):
httpretty.register_uri(httpretty.GET, url, body=body, streaming=True)
monkeypatch.setattr('appveyor_artifacts.get_urls', lambda _: paths_and_urls)
monkeypatch.setattr('appveyor_artifacts.validate', lambda _: None)
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=False))
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=False, token=''))

messages = [r.message for r in caplog.records if r.levelname != 'DEBUG']
expected = [
Expand Down Expand Up @@ -76,7 +76,7 @@ def test_multiple_files(capsys, monkeypatch, tmpdir, caplog):
httpretty.register_uri(httpretty.GET, url, body=body, streaming=True)
monkeypatch.setattr('appveyor_artifacts.get_urls', lambda _: paths_and_urls)
monkeypatch.setattr('appveyor_artifacts.validate', lambda _: None)
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=False))
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=False, token=''))

messages = [r.message for r in caplog.records if r.levelname != 'DEBUG']
expected = [
Expand Down Expand Up @@ -116,7 +116,7 @@ def test_small_files(capsys, monkeypatch, tmpdir, caplog):
httpretty.register_uri(httpretty.GET, url, body=body, streaming=True)
monkeypatch.setattr('appveyor_artifacts.get_urls', lambda _: paths_and_urls)
monkeypatch.setattr('appveyor_artifacts.validate', lambda _: None)
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=False))
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=False, token=''))

messages = [r.message for r in caplog.records if r.levelname != 'DEBUG']
expected = [
Expand Down Expand Up @@ -154,7 +154,7 @@ def test_large_files(capsys, monkeypatch, tmpdir, caplog):
httpretty.register_uri(httpretty.GET, url, body=body, streaming=True)
monkeypatch.setattr('appveyor_artifacts.get_urls', lambda _: paths_and_urls)
monkeypatch.setattr('appveyor_artifacts.validate', lambda _: None)
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=True))
appveyor_artifacts.main(dict(dir=str(tmpdir), mangle_coverage=True, token=''))

messages = [r.message for r in caplog.records if r.levelname != 'DEBUG']
expected = [
Expand All @@ -172,7 +172,7 @@ def test_large_files(capsys, monkeypatch, tmpdir, caplog):
assert stderr == expected


@pytest.mark.skipif('(os.environ.get("CI"), os.environ.get("TRAVIS")) != ("true", "true")')
@pytest.mark.skipif((os.environ.get("CI") == 'true') and (os.environ.get("TRAVIS") == 'true'), reason='on CI')
@pytest.mark.parametrize('direct', [False, True])
def test_subprocess(tmpdir, direct):
"""Test executing script through entry_points and directly.
Expand Down
14 changes: 10 additions & 4 deletions tests/test_query_artifacts.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,11 @@
from appveyor_artifacts import query_artifacts


def mock_query_api(url, token, replies):
    """Return the canned JSON reply registered for a URL.

    Stands in for appveyor_artifacts.query_api() in tests: accepts the same
    positional arguments but simply looks the URL up in a prepared mapping.

    :param str url: URL used as the lookup key.
    :param str token: API token; accepted only for signature compatibility, unused.
    :param dict replies: Mapping of URL -> mock JSON reply, supplied by test functions.

    :return: The mock reply registered for ``url``.
    """
    _ = token  # present only to mirror the real query_api() signature
    reply = replies[url]
    return reply
Expand All @@ -19,9 +20,13 @@ def test(monkeypatch):

:param monkeypatch: pytest fixture.
"""
config = dict(
token='',
)

# Test empty.
monkeypatch.setattr('appveyor_artifacts.query_api', lambda _: list())
assert query_artifacts(['spfxkimxcj6faq57']) == list()
monkeypatch.setattr('appveyor_artifacts.query_api', lambda _, **kwargs: list())
assert query_artifacts(['spfxkimxcj6faq57'], config) == list()

# Test multiple jobs.
replies = {
Expand All @@ -38,8 +43,9 @@ def test(monkeypatch):
{'fileName': 'no_ext', 'size': 101, 'type': 'File'},
],
}

monkeypatch.setattr('appveyor_artifacts.query_api', partial(mock_query_api, replies=replies))
actual = query_artifacts(['v5wnn9k8auqcqovw', 'bpgcbvqmawv1jw06'])
actual = query_artifacts(['v5wnn9k8auqcqovw', 'bpgcbvqmawv1jw06'], config)

expected = [
('v5wnn9k8auqcqovw', 'luajit.exe', 675840),
Expand Down
Loading