diff --git a/.gitignore b/.gitignore index e0c2b3809cc..27970066f69 100644 --- a/.gitignore +++ b/.gitignore @@ -103,3 +103,6 @@ ENV/ # mypy .mypy_cache/ + +# Sphinx +_build/ diff --git a/.travis.yml b/.travis.yml index a26d15a9472..b73e284461c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,11 @@ jobs: python: 3.6 - stage: verify env: PURPOSE='IndexVerify' - script: python ./scripts/ci/test_integration.py -v + script: python ./scripts/ci/test_index.py -v + python: 3.6 + - stage: verify + env: PURPOSE='IndexRefDocVerify' + script: ./scripts/ci/test_index_ref_doc.sh python: 3.6 fast_finish: true allow_failures: diff --git a/scripts/ci/index_ref_doc.py b/scripts/ci/index_ref_doc.py new file mode 100644 index 00000000000..b86ec1879c1 --- /dev/null +++ b/scripts/ci/index_ref_doc.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +from __future__ import print_function + +import os +import sys +import tempfile +import unittest +import shutil +from subprocess import check_call +from pkg_resources import parse_version + +from six import with_metaclass + +from util import get_index_data, get_whl_from_url, get_repo_root + + +REF_GEN_SCRIPT = os.path.join(get_repo_root(), 'scripts', 'refdoc', 'generate.py') + +REF_DOC_OUT_DIR = os.environ.get('AZ_EXT_REF_DOC_OUT_DIR', tempfile.mkdtemp()) + +if not os.path.isdir(REF_DOC_OUT_DIR): + print('{} is not a directory'.format(REF_DOC_OUT_DIR)) + sys.exit(1) + +ALL_TESTS = [] + +for extension_name, exts in get_index_data()['extensions'].items(): + candidates_sorted = sorted(exts, key=lambda c: parse_version(c['metadata']['version']), reverse=True) + chosen = candidates_sorted[0] + ALL_TESTS.append((extension_name, chosen['downloadUrl'], chosen['filename'])) + + +class TestIndexRefDocsMeta(type): + def __new__(mcs, name, bases, _dict): + + def gen_test(ext_name, ext_url, filename): + def test(self): + ext_file = get_whl_from_url(ext_url, filename, self.whl_dir) + ref_doc_out_dir = os.path.join(REF_DOC_OUT_DIR, ext_name) + if not os.path.isdir(ref_doc_out_dir): + os.mkdir(ref_doc_out_dir) + script_args = [sys.executable, REF_GEN_SCRIPT, '--extension-file', ext_file, '--output-dir', + ref_doc_out_dir] + check_call(script_args) + return test + + for ext_name, ext_url, filename in ALL_TESTS: + test_name = "test_ref_doc_%s" % ext_name + _dict[test_name] = gen_test(ext_name, ext_url, filename) + return type.__new__(mcs, name, bases, _dict) + + +class IndexRefDocs(with_metaclass(TestIndexRefDocsMeta, unittest.TestCase)): + + def setUp(self): + self.whl_dir = tempfile.mkdtemp() + + def tearDown(self): + shutil.rmtree(self.whl_dir) + + +if __name__ == '__main__': + unittest.main() diff --git a/scripts/ci/test_integration.py b/scripts/ci/test_index.py similarity index 
56% rename from scripts/ci/test_integration.py rename to scripts/ci/test_index.py index a7aa904b44f..04dcc965fd7 100755 --- a/scripts/ci/test_integration.py +++ b/scripts/ci/test_index.py @@ -13,50 +13,10 @@ import json import tempfile import unittest -import zipfile import hashlib import shutil -import subprocess from wheel.install import WHEEL_INFO_RE -from util import get_repo_root - -INDEX_PATH = os.path.join(get_repo_root(), 'src', 'index.json') -SRC_PATH = os.path.join(get_repo_root(), 'src') - -# Extensions to skip dep. check. Aim to keep this list empty. -SKIP_DEP_CHECK = ['azure-cli-iot-ext'] - - -def catch_dup_keys(pairs): - seen = {} - for k, v in pairs: - if k in seen: - raise ValueError("duplicate key {}".format(k)) - seen[k] = v - return seen - - -def get_index_data(): - try: - with open(INDEX_PATH) as f: - return json.load(f, object_pairs_hook=catch_dup_keys) - except ValueError as err: - raise AssertionError("Invalid JSON in {}: {}".format(INDEX_PATH, err)) - - -def get_whl_from_url(url, filename, tmp_dir, whl_cache): - if url in whl_cache: - return whl_cache[url] - import requests - r = requests.get(url, stream=True) - assert r.status_code == 200, "Request to {} failed with {}".format(url, r.status_code) - ext_file = os.path.join(tmp_dir, filename) - with open(ext_file, 'wb') as f: - for chunk in r.iter_content(chunk_size=1024): - if chunk: # ignore keep-alive new chunks - f.write(chunk) - whl_cache[url] = ext_file - return ext_file +from util import get_ext_metadata, get_whl_from_url, get_index_data, SKIP_DEP_CHECK def get_sha256sum(a_file): @@ -66,50 +26,6 @@ def get_sha256sum(a_file): return sha256.hexdigest() -def get_extension_modname(ext_dir): - # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L153 - EXTENSIONS_MOD_PREFIX = 'azext_' - pos_mods = [n for n in os.listdir(ext_dir) - if n.startswith(EXTENSIONS_MOD_PREFIX) and os.path.isdir(os.path.join(ext_dir, n))] - if len(pos_mods) != 
1: - raise AssertionError("Expected 1 module to load starting with " - "'{}': got {}".format(EXTENSIONS_MOD_PREFIX, pos_mods)) - return pos_mods[0] - - -def get_azext_metadata(ext_dir): - # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L109 - AZEXT_METADATA_FILENAME = 'azext_metadata.json' - azext_metadata = None - ext_modname = get_extension_modname(ext_dir=ext_dir) - azext_metadata_filepath = os.path.join(ext_dir, ext_modname, AZEXT_METADATA_FILENAME) - if os.path.isfile(azext_metadata_filepath): - with open(azext_metadata_filepath) as f: - azext_metadata = json.load(f) - return azext_metadata - - -def get_ext_metadata(ext_dir, ext_file, ext_name): - # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L89 - WHL_METADATA_FILENAME = 'metadata.json' - zip_ref = zipfile.ZipFile(ext_file, 'r') - zip_ref.extractall(ext_dir) - zip_ref.close() - metadata = {} - dist_info_dirs = [f for f in os.listdir(ext_dir) if f.endswith('.dist-info')] - azext_metadata = get_azext_metadata(ext_dir) - if azext_metadata: - metadata.update(azext_metadata) - for dist_info_dirname in dist_info_dirs: - parsed_dist_info_dir = WHEEL_INFO_RE(dist_info_dirname) - if parsed_dist_info_dir and parsed_dist_info_dir.groupdict().get('name') == ext_name.replace('-', '_'): - whl_metadata_filepath = os.path.join(ext_dir, dist_info_dirname, WHL_METADATA_FILENAME) - if os.path.isfile(whl_metadata_filepath): - with open(whl_metadata_filepath) as f: - metadata.update(json.load(f)) - return metadata - - class TestIndex(unittest.TestCase): @classmethod @@ -219,35 +135,5 @@ def test_metadata(self): shutil.rmtree(extensions_dir) -class TestSourceWheels(unittest.TestCase): - - def test_source_wheels(self): - # Test we can build all sources into wheels and that metadata from the wheel is valid - from subprocess import PIPE - built_whl_dir = tempfile.mkdtemp() - source_extensions = 
[os.path.join(SRC_PATH, n) for n in os.listdir(SRC_PATH) - if os.path.isdir(os.path.join(SRC_PATH, n))] - for s in source_extensions: - if not os.path.isfile(os.path.join(s, 'setup.py')): - continue - try: - subprocess.check_call(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir], - cwd=s, stdout=PIPE, stderr=PIPE) - except subprocess.CalledProcessError as err: - self.fail("Unable to build extension {} : {}".format(s, err)) - for filename in os.listdir(built_whl_dir): - ext_file = os.path.join(built_whl_dir, filename) - ext_dir = tempfile.mkdtemp(dir=built_whl_dir) - ext_name = WHEEL_INFO_RE(filename).groupdict().get('name') - metadata = get_ext_metadata(ext_dir, ext_file, ext_name) - run_requires = metadata.get('run_requires') - if run_requires and ext_name not in SKIP_DEP_CHECK: - deps = run_requires[0]['requires'] - self.assertTrue(all(not dep.startswith('azure-') for dep in deps), - "Dependencies of {} use disallowed extension dependencies. " - "Remove these dependencies: {}".format(filename, deps)) - shutil.rmtree(built_whl_dir) - - if __name__ == '__main__': unittest.main() diff --git a/scripts/ci/test_index_ref_doc.sh b/scripts/ci/test_index_ref_doc.sh new file mode 100755 index 00000000000..cabc21466ea --- /dev/null +++ b/scripts/ci/test_index_ref_doc.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash +set -ex + +# Install CLI +echo "Installing azure-cli..." + +pip install "azure-cli" -q +pip install "sphinx==1.7.0" -q +echo "Installed." + +python ./scripts/ci/index_ref_doc.py -v + +echo "OK." 
diff --git a/scripts/ci/test_source.py b/scripts/ci/test_source.py index fdda3e6f024..a9985afa640 100755 --- a/scripts/ci/test_source.py +++ b/scripts/ci/test_source.py @@ -12,20 +12,19 @@ import tempfile import unittest import shutil -from subprocess import check_call +from subprocess import check_call, CalledProcessError, PIPE import mock +from wheel.install import WHEEL_INFO_RE from six import with_metaclass -from util import get_repo_root +from util import get_ext_metadata, SRC_PATH, SKIP_DEP_CHECK -SOURCES = os.path.join(get_repo_root(), 'src') - ALL_TESTS = [] -for src_d in os.listdir(SOURCES): - src_d_full = os.path.join(SOURCES, src_d) +for src_d in os.listdir(SRC_PATH): + src_d_full = os.path.join(SRC_PATH, src_d) if os.path.isdir(src_d_full): pkg_name = next((d for d in os.listdir(src_d_full) if d.startswith('azext_')), None) # Find the package and check it has tests @@ -63,5 +62,34 @@ def tearDown(self): shutil.rmtree(self.ext_dir) +class TestSourceWheels(unittest.TestCase): + + def test_source_wheels(self): + # Test we can build all sources into wheels and that metadata from the wheel is valid + built_whl_dir = tempfile.mkdtemp() + source_extensions = [os.path.join(SRC_PATH, n) for n in os.listdir(SRC_PATH) + if os.path.isdir(os.path.join(SRC_PATH, n))] + for s in source_extensions: + if not os.path.isfile(os.path.join(s, 'setup.py')): + continue + try: + check_call(['python', 'setup.py', 'bdist_wheel', '-q', '-d', built_whl_dir], + cwd=s, stdout=PIPE, stderr=PIPE) + except CalledProcessError as err: + self.fail("Unable to build extension {} : {}".format(s, err)) + for filename in os.listdir(built_whl_dir): + ext_file = os.path.join(built_whl_dir, filename) + ext_dir = tempfile.mkdtemp(dir=built_whl_dir) + ext_name = WHEEL_INFO_RE(filename).groupdict().get('name') + metadata = get_ext_metadata(ext_dir, ext_file, ext_name) + run_requires = metadata.get('run_requires') + if run_requires and ext_name not in SKIP_DEP_CHECK: + deps = 
run_requires[0]['requires'] + self.assertTrue(all(not dep.startswith('azure-') for dep in deps), + "Dependencies of {} use disallowed extension dependencies. " + "Remove these dependencies: {}".format(filename, deps)) + shutil.rmtree(built_whl_dir) + + if __name__ == '__main__': unittest.main() diff --git a/scripts/ci/util.py b/scripts/ci/util.py index c873d3bd83f..439c1a30c66 100644 --- a/scripts/ci/util.py +++ b/scripts/ci/util.py @@ -4,6 +4,12 @@ # -------------------------------------------------------------------------------------------- import os +import json +import zipfile +from wheel.install import WHEEL_INFO_RE + +# Extensions to skip dep. check. Aim to keep this list empty. +SKIP_DEP_CHECK = ['azure-cli-iot-ext'] def get_repo_root(): @@ -11,3 +17,85 @@ def get_repo_root(): while not os.path.exists(os.path.join(current_dir, 'CONTRIBUTING.rst')): current_dir = os.path.dirname(current_dir) return current_dir + + +def _get_extension_modname(ext_dir): + # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L153 + EXTENSIONS_MOD_PREFIX = 'azext_' + pos_mods = [n for n in os.listdir(ext_dir) + if n.startswith(EXTENSIONS_MOD_PREFIX) and os.path.isdir(os.path.join(ext_dir, n))] + if len(pos_mods) != 1: + raise AssertionError("Expected 1 module to load starting with " + "'{}': got {}".format(EXTENSIONS_MOD_PREFIX, pos_mods)) + return pos_mods[0] + + +def _get_azext_metadata(ext_dir): + # Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L109 + AZEXT_METADATA_FILENAME = 'azext_metadata.json' + azext_metadata = None + ext_modname = _get_extension_modname(ext_dir=ext_dir) + azext_metadata_filepath = os.path.join(ext_dir, ext_modname, AZEXT_METADATA_FILENAME) + if os.path.isfile(azext_metadata_filepath): + with open(azext_metadata_filepath) as f: + azext_metadata = json.load(f) + return azext_metadata + + +def get_ext_metadata(ext_dir, ext_file, ext_name): + 
# Modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/extension.py#L89 + WHL_METADATA_FILENAME = 'metadata.json' + zip_ref = zipfile.ZipFile(ext_file, 'r') + zip_ref.extractall(ext_dir) + zip_ref.close() + metadata = {} + dist_info_dirs = [f for f in os.listdir(ext_dir) if f.endswith('.dist-info')] + azext_metadata = _get_azext_metadata(ext_dir) + if azext_metadata: + metadata.update(azext_metadata) + for dist_info_dirname in dist_info_dirs: + parsed_dist_info_dir = WHEEL_INFO_RE(dist_info_dirname) + if parsed_dist_info_dir and parsed_dist_info_dir.groupdict().get('name') == ext_name.replace('-', '_'): + whl_metadata_filepath = os.path.join(ext_dir, dist_info_dirname, WHL_METADATA_FILENAME) + if os.path.isfile(whl_metadata_filepath): + with open(whl_metadata_filepath) as f: + metadata.update(json.load(f)) + return metadata + + +def get_whl_from_url(url, filename, tmp_dir, whl_cache=None): + if not whl_cache: + whl_cache = {} + if url in whl_cache: + return whl_cache[url] + import requests + r = requests.get(url, stream=True) + assert r.status_code == 200, "Request to {} failed with {}".format(url, r.status_code) + ext_file = os.path.join(tmp_dir, filename) + with open(ext_file, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # ignore keep-alive new chunks + f.write(chunk) + whl_cache[url] = ext_file + return ext_file + + +SRC_PATH = os.path.join(get_repo_root(), 'src') +INDEX_PATH = os.path.join(SRC_PATH, 'index.json') + + +def _catch_dup_keys(pairs): + seen = {} + for k, v in pairs: + if k in seen: + raise ValueError("duplicate key {}".format(k)) + seen[k] = v + return seen + + +def get_index_data(): + try: + with open(INDEX_PATH) as f: + return json.load(f, object_pairs_hook=_catch_dup_keys) + except ValueError as err: + raise AssertionError("Invalid JSON in {}: {}".format(INDEX_PATH, err)) diff --git a/scripts/refdoc/README.md b/scripts/refdoc/README.md new file mode 100644 index 
00000000000..4ed3027b121 --- /dev/null +++ b/scripts/refdoc/README.md @@ -0,0 +1,18 @@ +# Ref doc gen # + +Scripts for reference documentation generation for Azure CLI Extensions using [sphinx](http://www.sphinx-doc.org/en/master/) + +# How to generate the Sphinx help file output # + +## Set up environment ## + +1. Ensure the CLI is installed in your Python virtual environment. +2. Inside the Python virtual environment, run `pip install sphinx==1.7.0` + +## Run Sphinx ## + +1. Run the generate script `python scripts/refdoc/generate.py -e PATH_TO_WHL.whl` + +## Retrieve output ## + +1. By default, the XML output is stored in `ref-doc-out-*/ind.xml` diff --git a/scripts/refdoc/azhelpgen/__init__.py b/scripts/refdoc/azhelpgen/__init__.py new file mode 100644 index 00000000000..34913fb394d --- /dev/null +++ b/scripts/refdoc/azhelpgen/__init__.py @@ -0,0 +1,4 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- diff --git a/scripts/refdoc/azhelpgen/azhelpgen.py b/scripts/refdoc/azhelpgen/azhelpgen.py new file mode 100644 index 00000000000..110b87b53ac --- /dev/null +++ b/scripts/refdoc/azhelpgen/azhelpgen.py @@ -0,0 +1,135 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +import argparse +import json +from docutils import nodes +from docutils.statemachine import ViewList +from docutils.parsers.rst import Directive +from sphinx.util.nodes import nested_parse_with_titles + +from knack.help_files import helps + +from knack.help import GroupHelpFile +from azure.cli.core import MainCommandsLoader, AzCli +from azure.cli.core.commands import AzCliCommandInvoker, ExtensionCommandSource +from azure.cli.core.parser import AzCliCommandParser +from azure.cli.core._help import AzCliHelp, CliCommandHelpFile, ArgumentGroupRegistry + + +def get_extension_help_files(cli_ctx): + invoker = cli_ctx.invocation_cls(cli_ctx=cli_ctx, commands_loader_cls=cli_ctx.commands_loader_cls, + parser_cls=cli_ctx.parser_cls, help_cls=cli_ctx.help_cls) + cli_ctx.invocation = invoker + cmd_table = invoker.commands_loader.load_command_table(None) + # Filter the command table to only get commands from extensions + cmd_table = {k: v for k, v in cmd_table.items() if isinstance(v.command_source, ExtensionCommandSource)} + invoker.commands_loader.command_table = cmd_table + print('FOUND {} command(s) from the extension.'.format(len(cmd_table))) + for cmd_name in cmd_table: + invoker.commands_loader.load_arguments(cmd_name) + invoker.parser.load_command_table(invoker.commands_loader.command_table) + + parser_keys = [] + parser_values = [] + sub_parser_keys = [] + sub_parser_values = [] + _store_parsers(invoker.parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values) + for cmd, parser in zip(parser_keys, parser_values): + if cmd not in sub_parser_keys: + sub_parser_keys.append(cmd) + sub_parser_values.append(parser) + help_files = [] + for cmd, parser in zip(sub_parser_keys, sub_parser_values): + try: + help_file = GroupHelpFile(cmd, parser) if _is_group(parser) else CliCommandHelpFile(cmd, parser) + help_file.load(parser) + help_files.append(help_file) + except 
Exception as ex: + print("Skipped '{}' due to '{}'".format(cmd, ex)) + help_files = sorted(help_files, key=lambda x: x.command) + return help_files + +class AzHelpGenDirective(Directive): + def make_rst(self): + INDENT = ' ' + DOUBLEINDENT = INDENT * 2 + + az_cli = AzCli(cli_name='az', + commands_loader_cls=MainCommandsLoader, + invocation_cls=AzCliCommandInvoker, + parser_cls=AzCliCommandParser, + help_cls=AzCliHelp) + help_files = get_extension_help_files(az_cli) + + for help_file in help_files: + is_command = isinstance(help_file, CliCommandHelpFile) + yield '.. cli{}:: {}'.format('command' if is_command else 'group', help_file.command if help_file.command else 'az') #it is top level group az if command is empty + yield '' + yield '{}:summary: {}'.format(INDENT, help_file.short_summary) + yield '{}:description: {}'.format(INDENT, help_file.long_summary) + yield '' + + if is_command and help_file.parameters: + group_registry = ArgumentGroupRegistry([p.group_name for p in help_file.parameters if p.group_name]) + + for arg in sorted(help_file.parameters, + key=lambda p: group_registry.get_group_priority(p.group_name) + + str(not p.required) + p.name): + yield '{}.. 
cliarg:: {}'.format(INDENT, arg.name) + yield '' + yield '{}:required: {}'.format(DOUBLEINDENT, arg.required) + short_summary = arg.short_summary or '' + possible_values_index = short_summary.find(' Possible values include') + short_summary = short_summary[0:possible_values_index + if possible_values_index >= 0 else len(short_summary)] + short_summary = short_summary.strip() + yield '{}:summary: {}'.format(DOUBLEINDENT, short_summary) + yield '{}:description: {}'.format(DOUBLEINDENT, arg.long_summary) + if arg.choices: + yield '{}:values: {}'.format(DOUBLEINDENT, ', '.join(sorted([str(x) for x in arg.choices]))) + if arg.default and arg.default != argparse.SUPPRESS: + yield '{}:default: {}'.format(DOUBLEINDENT, arg.default) + if arg.value_sources: + yield '{}:source: {}'.format(DOUBLEINDENT, ', '.join(arg.value_sources)) + yield '' + yield '' + if len(help_file.examples) > 0: + for e in help_file.examples: + yield '{}.. cliexample:: {}'.format(INDENT, e.name) + yield '' + yield DOUBLEINDENT + e.text + yield '' + + def run(self): + node = nodes.section() + node.document = self.state.document + result = ViewList() + for line in self.make_rst(): + result.append(line, '') + + nested_parse_with_titles(self.state, result, node) + return node.children + +def setup(app): + app.add_directive('azhelpgen', AzHelpGenDirective) + + +def _store_parsers(parser, parser_keys, parser_values, sub_parser_keys, sub_parser_values): + for s in parser.subparsers.values(): + parser_keys.append(_get_parser_name(s)) + parser_values.append(s) + if _is_group(s): + for c in s.choices.values(): + sub_parser_keys.append(_get_parser_name(c)) + sub_parser_values.append(c) + _store_parsers(c, parser_keys, parser_values, sub_parser_keys, sub_parser_values) + +def _is_group(parser): + return getattr(parser, '_subparsers', None) is not None \ + or getattr(parser, 'choices', None) is not None + +def _get_parser_name(s): + return (s._prog_prefix if hasattr(s, '_prog_prefix') else s.prog)[3:] diff --git 
a/scripts/refdoc/cligroup/__init__.py b/scripts/refdoc/cligroup/__init__.py new file mode 100644 index 00000000000..34913fb394d --- /dev/null +++ b/scripts/refdoc/cligroup/__init__.py @@ -0,0 +1,4 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- diff --git a/scripts/refdoc/cligroup/cligroup.py b/scripts/refdoc/cligroup/cligroup.py new file mode 100644 index 00000000000..ed6ecfc8955 --- /dev/null +++ b/scripts/refdoc/cligroup/cligroup.py @@ -0,0 +1,67 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- +import copy +from docutils import nodes +from sphinx import addnodes +from sphinx.directives import ObjectDescription +from docutils.parsers.rst import Directive +from sphinx.util.docfields import Field + +cli_field_types = [ + Field('summary', label='Summary', has_arg=False, + names=('summary', 'shortdesc')), + Field('description', label='Description', has_arg=False, + names=('description', 'desc', 'longdesc')) + ] + +class CliBaseDirective(ObjectDescription): + def handle_signature(self, sig, signode): + signode += addnodes.desc_addname(sig, sig) + return sig + + def needs_arglist(self): + return False + + def add_target_and_index(self, name, sig, signode): + signode['ids'].append(name) + + def get_index_text(self, modname, name): + return name + +class CliGroupDirective(CliBaseDirective): + doc_field_types = copy.copy(cli_field_types) + doc_field_types.append( + Field('docsource', label='Doc Source', has_arg=False, + names=('docsource', 'documentsource')) + ) + +class CliCommandDirective(CliBaseDirective): + doc_field_types = copy.copy(cli_field_types) + doc_field_types.append( + Field('docsource', label='Doc Source', has_arg=False, + names=('docsource', 'documentsource')) + ) + +class CliArgumentDirective(CliBaseDirective): + doc_field_types = copy.copy(cli_field_types) + doc_field_types.extend([ + Field('required', label='Required', has_arg=False, + names=('required')), + Field('values', label='Allowed values', has_arg=False, + names=('values', 'choices', 'options')), + Field('default', label='Default value', has_arg=False, + names=('default')), + Field('source', label='Values from', has_arg=False, + names=('source', 'sources')) + ]) + +class CliExampleDirective(CliBaseDirective): + pass + +def setup(app): + app.add_directive('cligroup', CliGroupDirective) + app.add_directive('clicommand', CliCommandDirective) + app.add_directive('cliarg', CliArgumentDirective) + 
app.add_directive('cliexample', CliExampleDirective) diff --git a/scripts/refdoc/conf.py b/scripts/refdoc/conf.py new file mode 100644 index 00000000000..b1a72859f7a --- /dev/null +++ b/scripts/refdoc/conf.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- +# -*- coding: utf-8 -*- + +import os +import sys +sys.path.insert(0, os.getcwd()) + +# For more information on all config options, see http://www.sphinx-doc.org/en/stable/config.html + +extensions = [ + 'sphinx.ext.doctest', + 'sphinx.ext.coverage', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.autodoc', + 'cligroup.cligroup', + 'azhelpgen.azhelpgen' +] + +# The file name extension for the sphinx source files. +source_suffix = '.rst' +# The master toctree document. +master_doc = 'ind' + +# General information about the project. +project = 'az' +copyright = '2018, msft' +author = 'msft' + +# The language for content autogenerated by Sphinx +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] diff --git a/scripts/refdoc/generate.py b/scripts/refdoc/generate.py new file mode 100644 index 00000000000..4261aa93741 --- /dev/null +++ b/scripts/refdoc/generate.py @@ -0,0 +1,81 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +from __future__ import print_function + +import os +import sys +import copy +import shutil +import argparse +import tempfile +import datetime +from subprocess import check_call, CalledProcessError + + +ENV_KEY_AZURE_EXTENSION_DIR = 'AZURE_EXTENSION_DIR' + +def print_status(msg=''): + print('-- '+msg) + + +def generate(ext_file, output_dir): + # Verify sphinx installed in environment before we get started + check_call(['sphinx-build', '--version']) + if not output_dir: + output_dir = tempfile.mkdtemp(prefix='ref-doc-out-', dir=os.getcwd()) + print_status('Using output directory {}'.format(output_dir)) + temp_extension_dir = tempfile.mkdtemp() + try: + pip_cmd = [sys.executable, '-m', 'pip', 'install', '--target', os.path.join(temp_extension_dir, 'extension'), + ext_file, '--disable-pip-version-check', '--no-cache-dir'] + print_status('Executing "{}"'.format(' '.join(pip_cmd))) + check_call(pip_cmd) + # TODO: verify that this sphinx-build invocation uses the intended source (conf) directory. 
+ sphinx_cmd = ['sphinx-build', '-b', 'xml', os.path.dirname(os.path.realpath(__file__)), output_dir] + env = copy.copy(os.environ) + env[ENV_KEY_AZURE_EXTENSION_DIR] = temp_extension_dir + print_status('Executing "{}" with {} set to {}'.format(' '.join(sphinx_cmd), + ENV_KEY_AZURE_EXTENSION_DIR, + env['AZURE_EXTENSION_DIR'])) + check_call(sphinx_cmd, env=env) + finally: + shutil.rmtree(temp_extension_dir) + print_status('Cleaned up temp directory {}'.format(temp_extension_dir)) + print_status('Ref doc output available at {}'.format(output_dir)) + print_status('Done.') + + +def _type_ext_file(val): + ext_file = os.path.realpath(os.path.expanduser(val)) + if os.path.isdir(ext_file): + raise argparse.ArgumentTypeError('{} is a directory not an extension file.'.format(ext_file)) + if not os.path.isfile(ext_file): + raise argparse.ArgumentTypeError('{} does not exist.'.format(ext_file)) + if os.path.splitext(ext_file)[1] != '.whl': + raise argparse.ArgumentTypeError('{} Extension files should end with .whl'.format(ext_file)) + return ext_file + + +def _type_path(val): + out_path = os.path.realpath(os.path.expanduser(val)) + if not os.path.isdir(out_path): + raise argparse.ArgumentTypeError('{} is not a directory. Create it or specify different directory.'.format(out_path)) + if os.listdir(out_path): + raise argparse.ArgumentTypeError('{} is not empty. Empty output directory required.'.format(out_path)) + return out_path + + +# A small command line interface for the script +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Script to generate reference documentation for a single Azure CLI extension.') + + parser.add_argument('-e', '--extension-file', dest='ext_file', + help='Path to the extension .whl file.', required=True, type=_type_ext_file) + parser.add_argument('-o', '--output-dir', dest='output_dir', + help='Path to place the generated documentation. 
By default, a temporary directory will be created.', required=False, type=_type_path) + + args = parser.parse_args() + generate(args.ext_file, args.output_dir) diff --git a/scripts/refdoc/ind.rst b/scripts/refdoc/ind.rst new file mode 100644 index 00000000000..5fd5f988675 --- /dev/null +++ b/scripts/refdoc/ind.rst @@ -0,0 +1,2 @@ +.. azhelpgen:: + \ No newline at end of file