Skip to content

Commit

Permalink
Extension reference doc generation scripts & CI check (#80)
Browse files Browse the repository at this point in the history
  • Loading branch information
derekbekoe committed Feb 26, 2018
1 parent 29e7405 commit 141b7db
Show file tree
Hide file tree
Showing 15 changed files with 563 additions and 122 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -103,3 +103,6 @@ ENV/

# mypy
.mypy_cache/

# Sphinx
_build/
6 changes: 5 additions & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,11 @@ jobs:
python: 3.6
- stage: verify
env: PURPOSE='IndexVerify'
script: python ./scripts/ci/test_integration.py -v
script: python ./scripts/ci/test_index.py -v
python: 3.6
- stage: verify
env: PURPOSE='IndexRefDocVerify'
script: ./scripts/ci/test_index_ref_doc.sh
python: 3.6
fast_finish: true
allow_failures:
Expand Down
69 changes: 69 additions & 0 deletions scripts/ci/index_ref_doc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
#!/usr/bin/env python

# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from __future__ import print_function

import os
import sys
import tempfile
import unittest
import shutil
from subprocess import check_call
from pkg_resources import parse_version

from six import with_metaclass

from util import get_index_data, get_whl_from_url, get_repo_root


# Script that generates Sphinx reference docs for a single extension wheel.
REF_GEN_SCRIPT = os.path.join(get_repo_root(), 'scripts', 'refdoc', 'generate.py')

# Where generated docs land; a throwaway temp dir unless overridden via env var.
REF_DOC_OUT_DIR = os.environ.get('AZ_EXT_REF_DOC_OUT_DIR', tempfile.mkdtemp())

if not os.path.isdir(REF_DOC_OUT_DIR):
    print('{} is not a directory'.format(REF_DOC_OUT_DIR))
    sys.exit(1)

# One (extension name, download url, wheel filename) tuple per extension,
# taken from the highest version listed in the index.
ALL_TESTS = []

for extension_name, exts in get_index_data()['extensions'].items():
    latest = max(exts, key=lambda ext: parse_version(ext['metadata']['version']))
    ALL_TESTS.append((extension_name, latest['downloadUrl'], latest['filename']))


class TestIndexRefDocsMeta(type):
    """Metaclass that adds one ref-doc generation test per extension in the index."""

    def __new__(mcs, name, bases, _dict):

        def make_test(ext_name, ext_url, filename):
            # Bind the loop variables through arguments so each generated
            # test closes over its own extension, not the last one.
            def test(self):
                whl_file = get_whl_from_url(ext_url, filename, self.whl_dir)
                out_dir = os.path.join(REF_DOC_OUT_DIR, ext_name)
                if not os.path.isdir(out_dir):
                    os.mkdir(out_dir)
                check_call([sys.executable, REF_GEN_SCRIPT,
                            '--extension-file', whl_file,
                            '--output-dir', out_dir])
            return test

        for ext_name, ext_url, filename in ALL_TESTS:
            _dict["test_ref_doc_%s" % ext_name] = make_test(ext_name, ext_url, filename)
        return type.__new__(mcs, name, bases, _dict)


class IndexRefDocs(with_metaclass(TestIndexRefDocsMeta, unittest.TestCase)):
    """Test case whose test methods are generated by TestIndexRefDocsMeta."""

    def setUp(self):
        # Fresh directory to download extension wheels into for each test.
        whl_dir = tempfile.mkdtemp()
        self.whl_dir = whl_dir

    def tearDown(self):
        # Drop the downloaded wheels.
        shutil.rmtree(self.whl_dir)


if __name__ == '__main__':
    # Run the dynamically generated ref-doc tests when invoked directly.
    unittest.main()
116 changes: 1 addition & 115 deletions scripts/ci/test_integration.py → scripts/ci/test_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,50 +13,10 @@
import json
import tempfile
import unittest
import zipfile
import hashlib
import shutil
import subprocess
from wheel.install import WHEEL_INFO_RE
from util import get_repo_root

INDEX_PATH = os.path.join(get_repo_root(), 'src', 'index.json')
SRC_PATH = os.path.join(get_repo_root(), 'src')

# Extensions to skip dep. check. Aim to keep this list empty.
SKIP_DEP_CHECK = ['azure-cli-iot-ext']


def catch_dup_keys(pairs):
    """object_pairs_hook for json.load that rejects duplicate keys
    instead of silently keeping the last occurrence."""
    result = {}
    for key, value in pairs:
        if key in result:
            raise ValueError("duplicate key {}".format(key))
        result[key] = value
    return result


def get_index_data():
    """Load and parse the extension index JSON, failing the test run on
    invalid JSON or duplicate keys."""
    try:
        with open(INDEX_PATH) as index_file:
            return json.load(index_file, object_pairs_hook=catch_dup_keys)
    except ValueError as err:
        raise AssertionError("Invalid JSON in {}: {}".format(INDEX_PATH, err))


def get_whl_from_url(url, filename, tmp_dir, whl_cache):
    """Download the wheel at `url` into tmp_dir and return its local path,
    memoizing downloads in `whl_cache` keyed by url."""
    if url in whl_cache:
        return whl_cache[url]
    import requests
    response = requests.get(url, stream=True)
    assert response.status_code == 200, "Request to {} failed with {}".format(url, response.status_code)
    ext_file = os.path.join(tmp_dir, filename)
    with open(ext_file, 'wb') as whl_file:
        for chunk in response.iter_content(chunk_size=1024):
            if chunk:  # ignore keep-alive new chunks
                whl_file.write(chunk)
    whl_cache[url] = ext_file
    return ext_file
from util import get_ext_metadata, get_whl_from_url, get_index_data, SKIP_DEP_CHECK


def get_sha256sum(a_file):
Expand All @@ -66,50 +26,6 @@ def get_sha256sum(a_file):
return sha256.hexdigest()


def get_extension_modname(ext_dir):
    """Return the single 'azext_*' package directory inside an extracted extension."""
    # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L153
    EXTENSIONS_MOD_PREFIX = 'azext_'
    candidates = []
    for entry in os.listdir(ext_dir):
        if entry.startswith(EXTENSIONS_MOD_PREFIX) and os.path.isdir(os.path.join(ext_dir, entry)):
            candidates.append(entry)
    if len(candidates) != 1:
        raise AssertionError("Expected 1 module to load starting with "
                             "'{}': got {}".format(EXTENSIONS_MOD_PREFIX, candidates))
    return candidates[0]


def get_azext_metadata(ext_dir):
    """Load azext_metadata.json from the extension's module dir, or None if absent."""
    # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L109
    AZEXT_METADATA_FILENAME = 'azext_metadata.json'
    ext_modname = get_extension_modname(ext_dir=ext_dir)
    metadata_path = os.path.join(ext_dir, ext_modname, AZEXT_METADATA_FILENAME)
    if not os.path.isfile(metadata_path):
        return None
    with open(metadata_path) as metadata_file:
        return json.load(metadata_file)


def get_ext_metadata(ext_dir, ext_file, ext_name):
    """Extract the wheel `ext_file` into `ext_dir` and return its metadata:
    azext_metadata.json (if any) merged with the wheel's metadata.json."""
    # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L89
    WHL_METADATA_FILENAME = 'metadata.json'
    with zipfile.ZipFile(ext_file, 'r') as zip_ref:
        zip_ref.extractall(ext_dir)
    metadata = {}
    azext_metadata = get_azext_metadata(ext_dir)
    if azext_metadata:
        metadata.update(azext_metadata)
    # Wheel dist-info directories use underscores where the extension name has dashes.
    expected_pkg = ext_name.replace('-', '_')
    for dist_info_dirname in os.listdir(ext_dir):
        if not dist_info_dirname.endswith('.dist-info'):
            continue
        parsed = WHEEL_INFO_RE(dist_info_dirname)
        if parsed and parsed.groupdict().get('name') == expected_pkg:
            whl_metadata_filepath = os.path.join(ext_dir, dist_info_dirname, WHL_METADATA_FILENAME)
            if os.path.isfile(whl_metadata_filepath):
                with open(whl_metadata_filepath) as whl_metadata_file:
                    metadata.update(json.load(whl_metadata_file))
    return metadata


class TestIndex(unittest.TestCase):

@classmethod
Expand Down Expand Up @@ -219,35 +135,5 @@ def test_metadata(self):
shutil.rmtree(extensions_dir)


class TestSourceWheels(unittest.TestCase):
    """Build each source extension into a wheel and validate the wheel metadata."""

    def test_source_wheels(self):
        # Test we can build all sources into wheels and that metadata from the wheel is valid
        from subprocess import PIPE
        built_whl_dir = tempfile.mkdtemp()
        candidates = (os.path.join(SRC_PATH, name) for name in os.listdir(SRC_PATH))
        for src in candidates:
            # Only build directories that actually contain a setup.py.
            if not (os.path.isdir(src) and os.path.isfile(os.path.join(src, 'setup.py'))):
                continue
            try:
                subprocess.check_call(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir],
                                      cwd=src, stdout=PIPE, stderr=PIPE)
            except subprocess.CalledProcessError as err:
                self.fail("Unable to build extension {} : {}".format(src, err))
        for filename in os.listdir(built_whl_dir):
            ext_file = os.path.join(built_whl_dir, filename)
            ext_dir = tempfile.mkdtemp(dir=built_whl_dir)
            ext_name = WHEEL_INFO_RE(filename).groupdict().get('name')
            metadata = get_ext_metadata(ext_dir, ext_file, ext_name)
            run_requires = metadata.get('run_requires')
            # Extensions must not depend on other azure-* packages.
            if run_requires and ext_name not in SKIP_DEP_CHECK:
                deps = run_requires[0]['requires']
                self.assertTrue(all(not dep.startswith('azure-') for dep in deps),
                                "Dependencies of {} use disallowed extension dependencies. "
                                "Remove these dependencies: {}".format(filename, deps))
        shutil.rmtree(built_whl_dir)


if __name__ == '__main__':
    # Run the index verification tests when invoked directly.
    unittest.main()
13 changes: 13 additions & 0 deletions scripts/ci/test_index_ref_doc.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
#!/usr/bin/env bash
# CI gate: verify reference docs can be generated for every extension in the index.
# Fail fast on any error and echo each command for the CI log.
set -ex

# Install CLI
echo "Installing azure-cli..."

pip install "azure-cli" -q
pip install "sphinx==1.7.0" -q
echo "Installed."

# Run the per-extension ref-doc generation tests.
python ./scripts/ci/index_ref_doc.py -v

echo "OK."
40 changes: 34 additions & 6 deletions scripts/ci/test_source.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,20 +12,19 @@
import tempfile
import unittest
import shutil
from subprocess import check_call
from subprocess import check_call, CalledProcessError, PIPE

import mock
from wheel.install import WHEEL_INFO_RE
from six import with_metaclass

from util import get_repo_root
from util import get_ext_metadata, SRC_PATH, SKIP_DEP_CHECK


SOURCES = os.path.join(get_repo_root(), 'src')

ALL_TESTS = []

for src_d in os.listdir(SOURCES):
src_d_full = os.path.join(SOURCES, src_d)
for src_d in os.listdir(SRC_PATH):
src_d_full = os.path.join(SRC_PATH, src_d)
if os.path.isdir(src_d_full):
pkg_name = next((d for d in os.listdir(src_d_full) if d.startswith('azext_')), None)
# Find the package and check it has tests
Expand Down Expand Up @@ -63,5 +62,34 @@ def tearDown(self):
shutil.rmtree(self.ext_dir)


class TestSourceWheels(unittest.TestCase):
    """Build every source extension into a wheel and validate its metadata."""

    def test_source_wheels(self):
        # Test we can build all sources into wheels and that metadata from the wheel is valid
        built_whl_dir = tempfile.mkdtemp()
        for src_dir in [os.path.join(SRC_PATH, n) for n in os.listdir(SRC_PATH)]:
            if not os.path.isdir(src_dir):
                continue
            # Skip directories without a setup.py -- nothing to build.
            if not os.path.isfile(os.path.join(src_dir, 'setup.py')):
                continue
            try:
                check_call(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir],
                           cwd=src_dir, stdout=PIPE, stderr=PIPE)
            except CalledProcessError as err:
                self.fail("Unable to build extension {} : {}".format(src_dir, err))
        for whl_name in os.listdir(built_whl_dir):
            whl_path = os.path.join(built_whl_dir, whl_name)
            extract_dir = tempfile.mkdtemp(dir=built_whl_dir)
            whl_pkg_name = WHEEL_INFO_RE(whl_name).groupdict().get('name')
            metadata = get_ext_metadata(extract_dir, whl_path, whl_pkg_name)
            run_requires = metadata.get('run_requires')
            # Extensions must not depend on other azure-* packages.
            if run_requires and whl_pkg_name not in SKIP_DEP_CHECK:
                deps = run_requires[0]['requires']
                self.assertTrue(all(not dep.startswith('azure-') for dep in deps),
                                "Dependencies of {} use disallowed extension dependencies. "
                                "Remove these dependencies: {}".format(whl_name, deps))
        shutil.rmtree(built_whl_dir)


if __name__ == '__main__':
    # Run the source-extension tests when invoked directly.
    unittest.main()
Loading

0 comments on commit 141b7db

Please sign in to comment.