From 2b45dbea5aa8b90c6415728ccbb8f2619305f6e7 Mon Sep 17 00:00:00 2001 From: Alexander Lanin Date: Tue, 29 Jul 2025 18:02:32 +0200 Subject: [PATCH 1/3] Change how docs-as-code is used MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Maximilian Sören Pollak --- .devcontainer/devcontainer.json | 2 +- .github/workflows/consumer_test.yml | 2 +- .github/workflows/docs.yml | 2 +- .github/workflows/format.yml | 2 +- .github/workflows/test.yml | 2 +- .vscode/settings.json | 2 +- BUILD | 13 +- MODULE.bazel | 11 +- README.md | 28 +-- docs.bzl | 196 ++++-------------- docs/BUILD | 41 ---- docs/conf.py | 9 +- docs/internals/benchmark_results.md | 6 +- docs/requirements/requirements.rst | 1 - src/BUILD | 38 +--- src/extensions/score_layout/__init__.py | 10 +- src/extensions/score_metamodel/__init__.py | 22 +- .../score_metamodel/external_needs.py | 174 ++++++++++++++++ .../tests/test_external_needs.py | 43 ++++ src/extensions/score_plantuml.py | 11 +- src/find_runfiles/__init__.py | 2 +- src/incremental.py | 13 +- src/requirements.txt | 12 +- src/tests/README.md | 16 +- src/tests/test_consumer.py | 21 +- 25 files changed, 344 insertions(+), 335 deletions(-) delete mode 100644 docs/BUILD create mode 100644 src/extensions/score_metamodel/external_needs.py create mode 100644 src/extensions/score_metamodel/tests/test_external_needs.py diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index c3798082..bf388945 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -2,5 +2,5 @@ "name": "eclipse-s-core", "image": "ghcr.io/eclipse-score/devcontainer:latest", "initializeCommand": "mkdir -p ${localEnv:HOME}/.cache/bazel", - "updateContentCommand": "bazel run //docs:ide_support" + "updateContentCommand": "bazel run //:ide_support" } diff --git a/.github/workflows/consumer_test.yml b/.github/workflows/consumer_test.yml index 3295f1b6..08827c75 100644 --- a/.github/workflows/consumer_test.yml +++ b/.github/workflows/consumer_test.yml @@ -61,7 +61,7 @@ jobs: - name: Run Consumer tests id: consumer_tests run: | - bazel run //docs:ide_support + bazel run //:ide_support .venv_docs/bin/python -m pytest -s -v src/tests/ env: FORCE_COLOR: "1" diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 454b37f0..8ffcb83e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -38,5 +38,5 @@ jobs: id-token: write with: - bazel-target: "//docs:incremental_release -- --github_user=${{ github.repository_owner }} --github_repo=${{ github.event.repository.name }}" + bazel-target: "//:docs -- --github_user=${{ github.repository_owner }} --github_repo=${{ github.event.repository.name }}" retention-days: 3 diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index a2fd8c00..05664bac 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -37,5 +37,5 @@ jobs: bazelisk-cache: true - name: Run formatting checks run: | - bazel run //docs:ide_support + bazel run //:ide_support bazel test //src:format.check diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e696d567..31ea1171 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -37,5 +37,5 @@ jobs: bazelisk-cache: true - name: Run test targets run: | - bazel run //docs:ide_support + bazel run //:ide_support bazel test //src/... 
diff --git a/.vscode/settings.json b/.vscode/settings.json index 477fe16e..9d8c9f45 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -68,7 +68,7 @@ "esbonio.sphinx.buildCommand": [ "docs", "_build", - "-T", // show details in case of errors in extensions + "-T", // show more details in case of errors "--jobs", "auto", "--conf-dir", diff --git a/BUILD b/BUILD index 121235d7..29f3e61e 100644 --- a/BUILD +++ b/BUILD @@ -12,6 +12,8 @@ # ******************************************************************************* load("@score_cr_checker//:cr_checker.bzl", "copyright_checker") +load("//:docs.bzl", "docs") + package(default_visibility = ["//visibility:public"]) @@ -27,7 +29,10 @@ copyright_checker( visibility = ["//visibility:public"], ) -exports_files([ - "MODULE.bazel", - "BUILD", -]) + +docs( + source_dir = "docs", + data = [ + "@score_process//:needs_json" + ], +) diff --git a/MODULE.bazel b/MODULE.bazel index 2661c653..a6485c03 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -13,8 +13,8 @@ module( name = "score_docs_as_code", - version = "0.5.0", - compatibility_level = 0, + version = "1.0.0-RC1", + compatibility_level = 1, ) ############################################################################### @@ -96,3 +96,10 @@ bazel_dep(name = "score_dash_license_checker", version = "0.1.1") # docs dependency bazel_dep(name = "score_process", version = "1.0.4") + +# TODO: REMOVE BEFORE MERGE +git_override( + module_name="score_process", + remote = "https://github.com/MaximilianSoerenPollak/process_description", + commit = "6c2c2707c4b9bdeb608dd499a727c57e1f9a503d" +) diff --git a/README.md b/README.md index 74b0c9bc..0f2d5762 100644 --- a/README.md +++ b/README.md @@ -2,44 +2,30 @@ Docs-as-code tooling for Eclipse S-CORE -## Overview - -The S-CORE docs Sphinx configuration and build code. +Full documentation is on [GitHub Pages](https://eclipse-score.github.io/docs-as-code/). > [!NOTE] > This repository offers a [DevContainer](https://containers.dev/). > For setting this up read [eclipse-score/devcontainer/README.md#inside-the-container](https://github.com/eclipse-score/devcontainer/blob/main/README.md#inside-the-container). -## Building documentation - -#### Run a documentation build: - -#### Integrate latest score main branch - -```bash -bazel run //docs:incremental_latest -``` - -#### Access your documentation at: - -- `_build/` for incremental +## Development of docs-as-code -#### Getting IDE support +### Getting IDE support for docs-as-code development -Create the virtual environment via `bazel run //docs:ide_support`.\ +Create the virtual environment via `bazel run //:ide_support`. If your IDE does not automatically ask you to activate the newly created environment you can activate it. - In VSCode via `ctrl+p` => `Select Python Interpreter` then select `.venv_docs/bin/python` -- In the terminal via `source .venv_docs/bin/activate` +- In the terminal via `. 
.venv_docs/bin/activate` -#### Format your documentation with: +### Format your documentation with: ```bash bazel test //src:format.check bazel run //src:format.fix ``` -#### Find & fix missing copyright +### Find & fix missing copyright ```bash bazel run //:copyright-check diff --git a/docs.bzl b/docs.bzl index 8d7cb482..ae2d7205 100644 --- a/docs.bzl +++ b/docs.bzl @@ -46,151 +46,72 @@ load("@rules_python//sphinxdocs:sphinx.bzl", "sphinx_build_binary", "sphinx_docs load("@rules_python//sphinxdocs:sphinx_docs_library.bzl", "sphinx_docs_library") load("@score_python_basics//:defs.bzl", "score_virtualenv") -sphinx_requirements = all_requirements + [ - "@score_docs_as_code//src:plantuml_for_python", - "@score_docs_as_code//src/extensions:score_plantuml", - "@score_docs_as_code//src/find_runfiles:find_runfiles", - "@score_docs_as_code//src/extensions/score_draw_uml_funcs:score_draw_uml_funcs", - "@score_docs_as_code//src/extensions/score_header_service:score_header_service", - "@score_docs_as_code//src/extensions/score_layout:score_layout", - "@score_docs_as_code//src/extensions/score_metamodel:score_metamodel", - "@score_docs_as_code//src/extensions/score_source_code_linker:score_source_code_linker", -] - -def docs(source_files_to_scan_for_needs_links = None, source_dir = "docs", conf_dir = "docs", build_dir_for_incremental = "_build", docs_targets = [], deps = []): +def docs(source_dir = "docs", data = [], deps = []): """ Creates all targets related to documentation. By using this function, you'll get any and all updates for documentation targets in one place. - Current restrictions: - * only callable from 'docs/BUILD' """ - # We are iterating over all provided 'targets' in order to allow for automatic generation of them without - # needing to modify the underlying 'docs.bzl' file. - for target in docs_targets: - suffix = "_" + target["suffix"] if target["suffix"] else "" - external_needs_deps = target.get("target", []) - external_needs_def = target.get("external_needs_info", []) - - sphinx_build_binary( - name = "sphinx_build" + suffix, - visibility = ["//visibility:public"], - data = ["@score_docs_as_code//src:docs_assets", "@score_docs_as_code//src:docs_as_code_py_modules"] + external_needs_deps, - deps = sphinx_requirements + deps, - ) - _incremental( - incremental_name = "incremental" + suffix, - live_name = "live_preview" + suffix, - conf_dir = conf_dir, - source_dir = source_dir, - build_dir = build_dir_for_incremental, - external_needs_deps = external_needs_deps, - external_needs_def = external_needs_def, - extra_dependencies = deps, - ) - _docs( - name = "docs" + suffix, - suffix = suffix, - format = "html", - external_needs_deps = external_needs_deps, - external_needs_def = external_needs_def, - ) - _docs( - name = "docs_needs" + suffix, - suffix = suffix, - format = "needs", - external_needs_deps = external_needs_deps, - external_needs_def = external_needs_def, - ) - - # Virtual python environment for working on the documentation (esbonio). - # incl. python support when working on conf.py and sphinx extensions. - # creates :ide_support target for virtualenv - _ide_support(deps) - - # creates 'needs.json' build target - -def _incremental(incremental_name = "incremental", live_name = "live_preview", source_dir = "docs", conf_dir = "docs", build_dir = "_build", extra_dependencies = list(), external_needs_deps = list(), external_needs_def = None): - """ - A target for building docs incrementally at runtime, incl live preview. 
- Args: - source_code_linker: The source code linker target to be used for linking source code to documentation. - source_code_links: The output from the source code linker. - source_dir: Directory containing the source files for documentation. - conf_dir: Directory containing the Sphinx configuration. - build_dir: Directory to output the built documentation. - extra_dependencies: Additional dependencies besides the centrally maintained "sphinx_requirements". - """ - - dependencies = sphinx_requirements + extra_dependencies + ["@rules_python//python/runfiles"] - - # Create description tags for the incremental targets. - call_path = native.package_name() - incremental_tag = "cli_help=Build documentation incrementally:\nbazel run //" + call_path + ":" + incremental_name - - if incremental_name == "incremental_latest": - incremental_tag = ( - "cli_help=Build documentation incrementally (use current main branch of imported docs repositories " + - "(e.g. process_description)):\n" + - "bazel run //" + call_path + ":incremental_latest" - ) - elif incremental_name == "incremental_release": - incremental_tag = ( - "cli_help=Build documentation incrementally (use release version imported in MODULE.bazel):\n" + - "bazel run //" + call_path + ":incremental_release" - ) + data = data + ["@score_docs_as_code//src:docs_assets"] + + deps = deps + all_requirements + [ + "@score_docs_as_code//src:plantuml_for_python", + "@score_docs_as_code//src/extensions:score_plantuml", + "@score_docs_as_code//src/find_runfiles:find_runfiles", + "@score_docs_as_code//src/extensions/score_draw_uml_funcs:score_draw_uml_funcs", + "@score_docs_as_code//src/extensions/score_header_service:score_header_service", + "@score_docs_as_code//src/extensions/score_layout:score_layout", + "@score_docs_as_code//src/extensions/score_metamodel:score_metamodel", + "@score_docs_as_code//src/extensions/score_source_code_linker:score_source_code_linker", + ] + + sphinx_build_binary( + name = "sphinx_build", + visibility = ["//visibility:private"], + data = data, + deps = deps, + ) py_binary( - name = incremental_name, + name = "docs", + tags = ["cli_help=Build documentation [run]"], srcs = ["@score_docs_as_code//src:incremental.py"], - deps = dependencies, - # TODO: Figure out if we need all dependencies as data here or not. 
- data = ["@score_docs_as_code//src:plantuml", "@score_docs_as_code//src:docs_assets"] + dependencies + external_needs_deps, + data = data, + deps = deps, env = { "SOURCE_DIRECTORY": source_dir, - "CONF_DIRECTORY": conf_dir, - "BUILD_DIRECTORY": build_dir, - "EXTERNAL_NEEDS_INFO": json.encode(external_needs_def), + "DATA": str(data), "ACTION": "incremental", }, - tags = [incremental_tag], ) py_binary( - name = live_name, + name = "live_preview", + tags = ["cli_help=Live preview documentation in the browser [run]"], srcs = ["@score_docs_as_code//src:incremental.py"], - deps = dependencies, - data = ["@score_docs_as_code//src:plantuml", "@score_docs_as_code//src:docs_assets"] + dependencies + external_needs_deps, + data = data, + deps = deps, env = { "SOURCE_DIRECTORY": source_dir, - "CONF_DIRECTORY": conf_dir, - "BUILD_DIRECTORY": build_dir, - "EXTERNAL_NEEDS_INFO": json.encode(external_needs_def), + "DATA": str(data), "ACTION": "live_preview", }, ) -def _ide_support(extra_dependencies): - call_path = native.package_name() score_virtualenv( name = "ide_support", + tags = ["cli_help=Create virtual environment (.venv_docs) for documentation support [run]"], venv_name = ".venv_docs", - reqs = sphinx_requirements + extra_dependencies, - tags = [ - "cli_help=Create virtual environment for documentation:\n" + - "bazel run //" + call_path + ":ide_support", - ], + reqs = deps, + # Add dependencies to ide_support, so esbonio has access to them. + data = data, ) -def _docs(name = "docs", suffix = "", format = "html", external_needs_deps = list(), external_needs_def = list()): - ext_needs_arg = "--define=external_needs_source=" + json.encode(external_needs_def) - - # Clean suffix used in all generated target names - target_suffix = "" if name == "docs" else "_" + name[len("docs"):] - + # creates 'needs.json' build target sphinx_docs( - name = name, + name = "needs_json", srcs = native.glob([ + # TODO: we do not need images etc to generate the json file. 
"**/*.png", "**/*.svg", "**/*.md", @@ -206,43 +127,16 @@ def _docs(name = "docs", suffix = "", format = "html", external_needs_deps = lis "**/*.csv", "**/*.inc", ], exclude = ["**/tests/*"], allow_empty = True), - config = ":conf.py", + config = ":" + source_dir + "/conf.py", extra_opts = [ "-W", "--keep-going", - ] + [ext_needs_arg], - formats = [ - format, + "-T", # show more details in case of errors + "--jobs", + "auto", + "--define=external_needs_source=" + str(data), ], - sphinx = ":sphinx_build" + suffix, - tags = [ - "manual", - ], - tools = [ - "@score_docs_as_code//src:plantuml", - "@score_docs_as_code//src:docs_assets", - ] + external_needs_deps, - visibility = ["//visibility:public"], - ) - - native.filegroup( - name = "assets" + target_suffix, - srcs = native.glob(["_assets/**"], allow_empty = True), - visibility = ["//visibility:public"], - ) - - native.filegroup( - name = "html" + target_suffix, - srcs = [":" + name], - visibility = ["//visibility:public"], - ) - - pkg_files( - name = "html_files" + target_suffix, - srcs = [":html" + target_suffix], - ) - - pkg_tar( - name = "github_pages" + target_suffix, - srcs = [":html_files" + target_suffix], + formats = ["needs"], + sphinx = ":sphinx_build", + tools = data, ) diff --git a/docs/BUILD b/docs/BUILD deleted file mode 100644 index 8da3d561..00000000 --- a/docs/BUILD +++ /dev/null @@ -1,41 +0,0 @@ -# ******************************************************************************* -# Copyright (c) 2025 Contributors to the Eclipse Foundation -# -# See the NOTICE file(s) distributed with this work for additional -# information regarding copyright ownership. -# -# This program and the accompanying materials are made available under the -# terms of the Apache License Version 2.0 which is available at -# https://www.apache.org/licenses/LICENSE-2.0 -# -# SPDX-License-Identifier: Apache-2.0 -# ******************************************************************************* - -load("@aspect_rules_py//py:defs.bzl", "py_library") -load("//:docs.bzl", "docs") - -# Creates all documentation targets: -# - `docs:incremental` for building docs incrementally at runtime -# - `docs:live_preview` for live preview in the browser without an IDE -# - `docs:ide_support` for creating python virtualenv for IDE support -# - `docs:docs` for building documentation at build-time - -docs( - conf_dir = "docs", - docs_targets = [ - { - "suffix": "release", # The version imported from MODULE.bazel - "target": [ - "@score_process//process:docs_needs_latest", - ], - "external_needs_info": [ - { - "base_url": "https://eclipse-score.github.io/process_description/main", - "json_path": "/score_process+/process/docs_needs_latest/_build/needs/needs.json", - "id_prefix": "process_", - }, - ], - }, - ], - source_dir = "docs", -) diff --git a/docs/conf.py b/docs/conf.py index bc919911..96074dce 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -21,6 +21,8 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information project = "Score Docs-as-Code" +project_url = "https://eclipse-score.github.io/docs-as-code/" +project_prefix = "DOCS_" author = "S-CORE" version = "0.1" @@ -44,11 +46,11 @@ exclude_patterns = [ # The following entries are not required when building the documentation via 'bazel - # build //docs:docs', as that command runs in a sandboxed environment. However, when - # building the documentation via 'bazel run //docs:incremental' or esbonio, these + # build //:docs', as that command runs in a sandboxed environment. 
However, when + # building the documentation via 'bazel run //:docs' or esbonio, these # entries are required to prevent the build from failing. "bazel-*", - ".venv_docs", + ".venv*", ] # Enable markdown rendering @@ -62,3 +64,4 @@ # Enable numref numfig = True +# needs_builder_filter = "" diff --git a/docs/internals/benchmark_results.md b/docs/internals/benchmark_results.md index 0cb03ffb..adcaa265 100644 --- a/docs/internals/benchmark_results.md +++ b/docs/internals/benchmark_results.md @@ -26,7 +26,7 @@ Repository = [process description](https://github.com/eclipse-score/process_desc --- -## Benchmark 1: `bazel run //process:incremental_latest` +## Benchmark 1: `bazel run //:docs` | Scenario | Run 1 | Run 2 | Run 3 | Average | |------------------|---------------|---------------|---------------|-----------| @@ -37,7 +37,7 @@ Repository = [process description](https://github.com/eclipse-score/process_desc --- -## Benchmark 2: `bazel build //process:docs_needs_latest` +## Benchmark 2: `bazel build //:needs_json` | Scenario | Run 1 | Run 2 | Run 3 | Average | |------------------|---------------|---------------|---------------|-----------| @@ -48,7 +48,7 @@ Repository = [process description](https://github.com/eclipse-score/process_desc --- -## Benchmark 3: `bazel run //process:live_preview_latest` +## Benchmark 3: `bazel run //:live_preview` | Scenario | Run 1 | Run 2 | Run 3 | Average | |------------------|---------------|---------------|---------------|-----------| diff --git a/docs/requirements/requirements.rst b/docs/requirements/requirements.rst index 3fd79994..192d46f7 100644 --- a/docs/requirements/requirements.rst +++ b/docs/requirements/requirements.rst @@ -117,7 +117,6 @@ This section provides an overview of current process requirements and their clar :id: tool_req__docs_common_attr_desc_wording :tags: Common Attributes :implemented: YES - :satisfies: PROCESS_gd_req__req__attr_desc_weak :parent_covered: YES diff --git a/src/BUILD b/src/BUILD index 4a61e6ad..d23883d1 100644 --- a/src/BUILD +++ b/src/BUILD @@ -55,10 +55,10 @@ py_library( ) # In order to update the requirements, change the `requirements.txt` file and run: -# `bazel run //docs:requirements`. +# `bazel run //src:requirements`. # This will update the `requirements_lock.txt` file. # To upgrade all dependencies to their latest versions, run: -# `bazel run //docs:requirements -- --upgrade`. +# `bazel run //src:requirements -- --upgrade`. compile_pip_requirements( name = "requirements", srcs = [ @@ -71,40 +71,6 @@ compile_pip_requirements( ], ) -filegroup( - name = "html", - srcs = [":docs"], - output_group = "html", -) - -pkg_files( - name = "html_files", - srcs = [":html"], - strip_prefix = "html", - #renames={"html": ""}, -) - -pkg_tar( - name = "github-pages", - srcs = [":html_files"], -) - -# 'source_code_linker' needs all targets to be passed to it. 
-# This is a convenient gathering of all the 'python internal modules' to avoid writing them individiually -py_library( - name = "docs_as_code_py_modules", - srcs = [ - "@score_docs_as_code//src:plantuml_for_python", - "@score_docs_as_code//src/extensions:score_plantuml", - "@score_docs_as_code//src/extensions/score_draw_uml_funcs", - "@score_docs_as_code//src/extensions/score_header_service", - "@score_docs_as_code//src/extensions/score_layout", - "@score_docs_as_code//src/extensions/score_metamodel", - "@score_docs_as_code//src/extensions/score_source_code_linker", - "@score_docs_as_code//src/find_runfiles", - ], - visibility = ["//visibility:public"], -) filegroup( name = "docs_assets", diff --git a/src/extensions/score_layout/__init__.py b/src/extensions/score_layout/__init__.py index 18e36571..3e534819 100644 --- a/src/extensions/score_layout/__init__.py +++ b/src/extensions/score_layout/__init__.py @@ -10,13 +10,13 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* -from typing import Any - -from sphinx.application import Sphinx import os from pathlib import Path +from typing import Any + import html_options import sphinx_options +from sphinx.application import Sphinx def setup(app: Sphinx) -> dict[str, str | bool]: @@ -39,10 +39,10 @@ def update_config(app: Sphinx, _config: Any): # For now this seems the only place this is used / needed. # In the future it might be a good idea to make this available in other places, maybe via the 'find_runfiles' lib if r := os.getenv("RUNFILES_DIR"): - dirs = [str(x) for x in Path(r).glob("*score_docs_as_code~")] + dirs = [str(x) for x in Path(r).glob("*score_docs_as_code+")] if dirs: # Happens if 'score_docs_as_code' is used as Module - p = str(r) + "/score_docs_as_code~/src/assets" + p = str(r) + "/score_docs_as_code+/src/assets" else: # Only happens in 'score_docs_as_code' repository p = str(r) + "/_main/src/assets" diff --git a/src/extensions/score_metamodel/__init__.py b/src/extensions/score_metamodel/__init__.py index 565f11aa..da5daf13 100644 --- a/src/extensions/score_metamodel/__init__.py +++ b/src/extensions/score_metamodel/__init__.py @@ -24,6 +24,7 @@ from sphinx_needs.config import NeedType from sphinx_needs.data import NeedsInfoType, NeedsView, SphinxNeedsData +from .external_needs import connect_external_needs from .log import CheckLogger logger = logging.get_logger(__name__) @@ -298,25 +299,6 @@ def default_options() -> list[str]: "arch", ] - -def parse_external_needs_sources(app: Sphinx, config): - # HACK: maybe there is a nicer way for this - if app.config.external_needs_source not in ["[]", ""]: - x = None - # NOTE: Due to upgrades in modules, encoding changed. Need to clean string in order to read it right again. - clean_str = app.config.external_needs_source.replace('\\"', "") - x = json.loads(clean_str) - if r := os.getenv("RUNFILES_DIR"): - if x[0].get("json_path", None): - for a in x: - # This is needed to allow for the needs.json to be found locally - if "json_path" in a.keys(): - a["json_path"] = r + a["json_path"] - app.config.needs_external_needs = x - # Making the prefixes uppercase here to match sphinx_needs, as it does this internally too. 
- app.config.allowed_external_prefixes = [z["id_prefix"].upper() for z in x] - - def setup(app: Sphinx) -> dict[str, str | bool]: app.add_config_value("external_needs_source", "", rebuild="env") app.add_config_value("allowed_external_prefixes", [], rebuild="env") @@ -340,7 +322,7 @@ def setup(app: Sphinx) -> dict[str, str | bool]: app.config.needs_reproducible_json = True app.config.needs_json_remove_defaults = True - _ = app.connect("config-inited", parse_external_needs_sources) + _ = app.connect("config-inited", connect_external_needs) discover_checks() diff --git a/src/extensions/score_metamodel/external_needs.py b/src/extensions/score_metamodel/external_needs.py new file mode 100644 index 00000000..132836fd --- /dev/null +++ b/src/extensions/score_metamodel/external_needs.py @@ -0,0 +1,174 @@ +import json +import os +import subprocess +import sys +from dataclasses import dataclass +from pathlib import Path + +from sphinx.application import Sphinx +from sphinx.config import Config +from sphinx.util import logging +from sphinx_needs.needsfile import NeedsList + +logger = logging.getLogger(__name__) + + +@dataclass +class ExternalNeedsSource: + bazel_module: str + path_to_target: str + target: str + + +def _parse_bazel_external_need(s: str) -> ExternalNeedsSource | None: + if not s.startswith("@"): + # Local target, not an external needs source + return None + + if "//" not in s or ":" not in s: + raise ValueError( + f"Unsupported external data dependency: '{s}'. Must contain '//' & ':'" + ) + repo_and_path, target = s.split( + ":", 1 + ) # @score_process//:needs_json => [@score_process//, needs_json] + repo, path_to_target = repo_and_path.split("//", 1) + repo = repo.lstrip("@") + + if path_to_target == "" and target == "needs_json": + return ExternalNeedsSource( + bazel_module=repo, path_to_target=path_to_target, target=target + ) + else: + # Unknown data target. Probably not a needs.json file. + return None + + +def parse_external_needs_sources_from_DATA(v: str) -> list[ExternalNeedsSource]: + if v in ["[]", ""]: + return [] + + logger.debug(f"Parsing external needs sources: {v}") + data = json.loads(v) + + res = [res for el in data if (res := _parse_bazel_external_need(el))] + logger.debug(f"Parsed external needs sources: {res}") + return res + + +def parse_external_needs_sources_from_bazel_query() -> list[ExternalNeedsSource]: + """ + Query bazel for the 'data' dependencies of the '//:docs' target and parse + them into external needs sources. + + This fallback is used when running without Bazel (e.g. esbonio), where the + `external_needs_source` config value is not provided by the Bazel rule. + """ + + logger.debug("Detected execution without Bazel. Fetching external needs config...") + # Currently dependencies are stored in the top level BUILD file. + # We could parse it or query bazel. + # Parsing would be MUCH faster, but querying bazel would be more robust. + p = subprocess.run( + ["bazel", "query", "labels(data, //:docs)"], + check=True, + capture_output=True, + text=True, + ) + res = [ + res + for line in p.stdout.splitlines() + if line.strip() + if (res := _parse_bazel_external_need(line)) + ] + logger.debug(f"Parsed external needs sources: {res}") + return res + + +def extend_needs_json_exporter(config: Config, params: list[str]) -> None: + """ + This will add each param to app.config as a config value. + Then it will overwrite the needs.json exporter to include these values.
+ """ + + for p in params: + config.add(p, default="", rebuild="env", types=(), description="") + + if not getattr(config, p): + logger.error( + f"Config value '{p}' is not set. " + + "Please set it in your Sphinx config." + ) + + # Patch json exporter to include our custom fields + # Note: yeah, NeedsList is the json exporter! + orig_function = NeedsList._finalise # pyright: ignore[reportPrivateUsage] + + def temp(self: NeedsList): + for p in params: + self.needs_list[p] = getattr(config, p) # pyright: ignore[reportUnknownMemberType] + + orig_function(self) + + NeedsList._finalise = temp # pyright: ignore[reportPrivateUsage] + + +def connect_external_needs(app: Sphinx, config: Config): + extend_needs_json_exporter(config, ["project_url", "project_prefix"]) + + bazel = app.config.external_needs_source or os.getenv("RUNFILES_DIR") + + if bazel: + external_needs = parse_external_needs_sources_from_DATA( + app.config.external_needs_source + ) # pyright: ignore[reportAny] + else: + external_needs = parse_external_needs_sources_from_bazel_query() # pyright: ignore[reportAny] + + for e in external_needs: + assert not e.path_to_target # path_to_target is always empty + json_file = f"{e.bazel_module}+/{e.target}/_build/needs/needs.json" + if r := os.getenv("RUNFILES_DIR"): + logger.debug("Using runfiles to determine external needs JSON file.") + fixed_json_file = Path(r) / json_file + else: + logger.debug( + "Running outside bazel. Determining git root for external needs JSON file." + ) + git_root = Path.cwd().resolve() + while not (git_root / ".git").exists(): + git_root = git_root.parent + if git_root == Path("/"): + sys.exit("Could not find git root.") + logger.debug(f"Git root found: {git_root}") + fixed_json_file = ( + git_root / "bazel-bin" / "ide_support.runfiles" / json_file + ) + + logger.debug(f"Fixed JSON file path: {json_file} -> {fixed_json_file}") + json_file = fixed_json_file + + try: + needs_json_data = json.loads(Path(json_file).read_text(encoding="utf-8")) # pyright: ignore[reportAny] + except FileNotFoundError: + logger.error( + f"Could not find external needs JSON file at {json_file}. " + + "Something went terribly wrong. " + + "Try running `bazel clean --async && rm -rf _build`." + ) + continue + + assert isinstance(app.config.needs_external_needs, list) # pyright: ignore[reportUnknownMemberType] + app.config.needs_external_needs.append( # pyright: ignore[reportUnknownMemberType] + { + "id_prefix": needs_json_data["project_prefix"], + "base_url": needs_json_data["project_url"] + + "/main", # for now always "main" + "json_path": json_file, + } + ) + # Making the prefixes uppercase here to match sphinx_needs, as it does this internally too. 
+ assert isinstance(app.config.allowed_external_prefixes, list) # pyright: ignore[reportAny] + app.config.allowed_external_prefixes.append( # pyright: ignore[reportUnknownMemberType] + needs_json_data["project_prefix"].upper() # pyright: ignore[reportAny] + ) diff --git a/src/extensions/score_metamodel/tests/test_external_needs.py b/src/extensions/score_metamodel/tests/test_external_needs.py new file mode 100644 index 00000000..ea57c69c --- /dev/null +++ b/src/extensions/score_metamodel/tests/test_external_needs.py @@ -0,0 +1,43 @@ +import pytest + +from ..external_needs import ExternalNeedsSource, parse_external_needs_sources_from_DATA + + +def test_empty_list(): + assert parse_external_needs_sources_from_DATA("[]") == [] + +def test_single_entry_with_path(): + result = parse_external_needs_sources_from_DATA('["@repo//foo/bar:baz"]') + # IF a target has a path, it will not be reported as external needs + assert result == [] + +def test_single_entry_no_path(): + result = parse_external_needs_sources_from_DATA('["@repo//:target"]') + # If a target is not named "needs_json", it will not be reported as external needs + assert result == [] + +def test_single_entry_json_no_path(): + result = parse_external_needs_sources_from_DATA('["@repo//:needs_json"]') + assert result == [ExternalNeedsSource(bazel_module="repo", path_to_target="", target="needs_json")] + +def test_multiple_entries(): + result = parse_external_needs_sources_from_DATA('["@repo1//:needs_json", "@repo2//:needs_json"]') + assert result == [ + ExternalNeedsSource(bazel_module="repo1", path_to_target="", target="needs_json"), + ExternalNeedsSource(bazel_module="repo2", path_to_target="", target="needs_json") + ] + +def test_multiple_entries_2(): + result = parse_external_needs_sources_from_DATA('["@repo1//:needs_json", "@repo2//path:needs_json"]') + + assert result == [ + ExternalNeedsSource(bazel_module="repo1", path_to_target="", target="needs_json") + ] + +def test_invalid_entry(): + with pytest.raises(ValueError): + _ = parse_external_needs_sources_from_DATA('["@not_a_valid_string"]') + + +def test_parser(): + ... diff --git a/src/extensions/score_plantuml.py b/src/extensions/score_plantuml.py index 1690315a..b360dfb2 100644 --- a/src/extensions/score_plantuml.py +++ b/src/extensions/score_plantuml.py @@ -24,7 +24,6 @@ In addition it sets common PlantUML options, like output to svg_obj. """ -from gettext import find import os import sys from pathlib import Path @@ -39,14 +38,14 @@ def get_runfiles_dir() -> Path: if r := os.getenv("RUNFILES_DIR"): # Runfiles are only available when running in Bazel. # bazel build and bazel run are both supported. - # i.e. `bazel build //docs:docs` and `bazel run //docs:incremental`. + # i.e. `bazel build //:docs` and `bazel run //:docs`. logger.debug("Using runfiles to determine plantuml path.") runfiles_dir = Path(r) else: # The only way to land here is when running from within the virtual - # environment created by the `docs:ide_support` rule in the BUILD file. + # environment created by the `:ide_support` rule in the BUILD file. # i.e. esbonio or manual sphinx-build execution within the virtual # environment. # We'll still use the plantuml binary from the bazel build. 
@@ -59,7 +58,7 @@ def get_runfiles_dir() -> Path: if git_root == Path("/"): sys.exit("Could not find git root.") - runfiles_dir = git_root / "bazel-bin" / "docs" / "ide_support.runfiles" + runfiles_dir = git_root / "bazel-bin" / "ide_support.runfiles" if not runfiles_dir.exists(): sys.exit( @@ -73,10 +72,10 @@ def find_correct_path(runfiles: str) -> str: """ This ensures that the 'plantuml' binary path is found in local 'score_docs_as_code' and module use. """ - dirs = [str(x) for x in Path(runfiles).glob("*score_docs_as_code~")] + dirs = [str(x) for x in Path(runfiles).glob("*score_docs_as_code+")] if dirs: # Happens if 'score_docs_as_code' is used as Module - p = runfiles + "/score_docs_as_code~/src/plantuml" + p = runfiles + "/score_docs_as_code+/src/plantuml" else: # Only happens in 'score_docs_as_code' repository p = runfiles + "/../plantuml" diff --git a/src/find_runfiles/__init__.py b/src/find_runfiles/__init__.py index a1aec645..c40ea977 100644 --- a/src/find_runfiles/__init__.py +++ b/src/find_runfiles/__init__.py @@ -60,7 +60,7 @@ def get_runfiles_dir_impl( if env_runfiles: # Runfiles are only available when running in Bazel. # bazel build and bazel run are both supported. - # i.e. `bazel build //docs:docs` and `bazel run //docs:incremental`. + # i.e. `bazel build //:docs` and `bazel run //:docs`. _log_debug("Using env[runfiles] to find the runfiles...") if env_runfiles.is_absolute(): diff --git a/src/incremental.py b/src/incremental.py index 16f18981..1c789ea6 100644 --- a/src/incremental.py +++ b/src/incremental.py @@ -33,8 +33,8 @@ def get_env(name: str) -> str: if __name__ == "__main__": - # Add debuging functionality parser = argparse.ArgumentParser() + # Add debuging functionality parser.add_argument( "-dp", "--debug_port", help="port to listen to debugging client", default=5678 ) @@ -65,8 +65,6 @@ def get_env(name: str) -> str: debugpy.wait_for_client() workspace = os.getenv("BUILD_WORKSPACE_DIRECTORY") - # if workspace: - # os.chdir(workspace) if workspace: workspace += "/" else: @@ -74,15 +72,13 @@ def get_env(name: str) -> str: base_arguments = [ workspace + get_env("SOURCE_DIRECTORY"), - workspace + get_env("BUILD_DIRECTORY"), + workspace + "_build", "-W", # treat warning as errors "--keep-going", # do not abort after one error "-T", # show details in case of errors in extensions "--jobs", "auto", - "--conf-dir", - workspace + get_env("CONF_DIRECTORY"), - f"--define=external_needs_source={get_env('EXTERNAL_NEEDS_INFO')}", + f"--define=external_needs_source={get_env('DATA')}", ] # configure sphinx build with GitHub user and repo from CLI @@ -94,8 +90,7 @@ def get_env(name: str) -> str: action = get_env("ACTION") if action == "live_preview": - build_dir = Path(get_env("BUILD_DIRECTORY")) - (workspace / build_dir / "score_source_code_linker_cache.json").unlink( + Path(workspace + "/_build/score_source_code_linker_cache.json").unlink( missing_ok=False ) sphinx_autobuild_main( diff --git a/src/requirements.txt b/src/requirements.txt index 3bfc13bb..a179bd7b 100644 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -37,7 +37,7 @@ babel==2.17.0 \ basedpyright==1.29.2 \ --hash=sha256:12c49186003b9f69a028615da883ef97035ea2119a9e3f93a00091b3a27088a6 \ --hash=sha256:f389e2997de33d038c5065fd85bff351fbdc62fa6d6371c7b947fc3bce8d437d - # via -r external/score_python_basics~/requirements.txt + # via -r external/score_python_basics+/requirements.txt beautifulsoup4==4.13.4 \ --hash=sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b \ 
--hash=sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195 @@ -438,7 +438,7 @@ iniconfig==2.1.0 \ --hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \ --hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760 # via - # -r external/score_python_basics~/requirements.txt + # -r external/score_python_basics+/requirements.txt # pytest jinja2==3.1.6 \ --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ @@ -669,7 +669,7 @@ nodejs-wheel-binaries==22.16.0 \ --hash=sha256:d695832f026df3a0cf9a089d222225939de9d1b67f8f0a353b79f015aabbe7e2 \ --hash=sha256:dbfccbcd558d2f142ccf66d8c3a098022bf4436db9525b5b8d32169ce185d99e # via - # -r external/score_python_basics~/requirements.txt + # -r external/score_python_basics+/requirements.txt # basedpyright numpy==2.2.5 \ --hash=sha256:0255732338c4fdd00996c0421884ea8a3651eea555c3a56b84892b66f696eb70 \ @@ -734,7 +734,7 @@ packaging==25.0 \ --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \ --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f # via - # -r external/score_python_basics~/requirements.txt + # -r external/score_python_basics+/requirements.txt # matplotlib # pytest # sphinx @@ -829,7 +829,7 @@ pluggy==1.6.0 \ --hash=sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3 \ --hash=sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746 # via - # -r external/score_python_basics~/requirements.txt + # -r external/score_python_basics+/requirements.txt # pytest pycparser==2.22 \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ @@ -882,7 +882,7 @@ pyspellchecker==0.8.2 \ pytest==8.3.5 \ --hash=sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820 \ --hash=sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845 - # via -r external/score_python_basics~/requirements.txt + # via -r external/score_python_basics+/requirements.txt python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 diff --git a/src/tests/README.md b/src/tests/README.md index ef68ce41..66dcfaab 100644 --- a/src/tests/README.md +++ b/src/tests/README.md @@ -1,24 +1,24 @@ # Docs-As-Code Consumer Tests -This test validates that changes to the docs-as-code system don't break downstream consumers. +This test validates that changes to the docs-as-code system don't break downstream consumers. It tests both local changes and git-based overrides against real consumer repositories. ## Use in CI -If you want to start the consumer tests on a PR inside `docs-as-code`, then all you have to do is comment +If you want to start the consumer tests on a PR inside `docs-as-code`, then all you have to do is comment `/consumer-test` on the PR and this should trigger them. ## Quick Start ```bash # Create the virtual environment -bazel run //docs:ide_support +bazel run //:ide_support # Run with std. 
configuration .venv_docs/bin/python -m pytest -s src/tests # Run with more verbose output (up to -vvv) -.venv_docs/bin/python -m pytest -s -v src/tests +.venv_docs/bin/python -m pytest -s -v src/tests # Run specific repositories only .venv_docs/bin/python -m pytest -s src/tests --repo=score @@ -89,16 +89,16 @@ For each repository, the test: ```bash # Create the virtual environment -bazel run //docs:ide_support +bazel run //:ide_support # First run - clones everything fresh -.venv_docs/bin/python -m pytest -s -v src/tests --repo=score +.venv_docs/bin/python -m pytest -s -v src/tests --repo=score # Make changes to docs-as-code... # Subsequent runs - much faster due to caching -.venv_docs/bin/python -m pytest -s -v src/tests --repo=score +.venv_docs/bin/python -m pytest -s -v src/tests --repo=score # Final validation - test all repos without cache -.venv_docs/bin/python -m pytest -s -v src/tests --disable-cache +.venv_docs/bin/python -m pytest -s -v src/tests --disable-cache ``` diff --git a/src/tests/test_consumer.py b/src/tests/test_consumer.py index 4c345655..c0fb091b 100644 --- a/src/tests/test_consumer.py +++ b/src/tests/test_consumer.py @@ -10,10 +10,8 @@ # # SPDX-License-Identifier: Apache-2.0 # ******************************************************************************* -import logging import os import re -import argparse import subprocess from collections import defaultdict from dataclasses import dataclass, field @@ -39,11 +37,12 @@ - The script itself takes quiet a bit of time, roughly 5+ min for a full run. - If you need more output, enable it via `-v` or `-vv` - Start the script via the following command: - - bazel run //docs:ide_support + - bazel run //:ide_support - .venv_docs/bin/python -m pytest -s src/tests (If you need more verbosity add `-v` or `-vv`) """ # Max width of the printout +# Trial and error has shown that 80 the best value is for GH CI output len_max = 80 CACHE_DIR = Path.home() / ".cache" / "docs_as_code_consumer_tests" @@ -79,18 +78,16 @@ class Result: ConsumerRepo( name="process_description", git_url="https://github.com/eclipse-score/process_description.git", - commands=["bazel run //process:incremental_latest"], + commands=["bazel run //:docs"], test_commands=[], ), ConsumerRepo( name="score", git_url="https://github.com/eclipse-score/score.git", commands=[ - "bazel run //docs:ide_support", - "bazel run //docs:incremental_latest", - "bazel run //docs:incremental_release", - "bazel build //docs:docs_release", - "bazel build //docs:docs_latest", + "bazel run //:ide_support", + "bazel run //:docs", + "bazel build //:needs_json", ], test_commands=[], ), @@ -98,9 +95,9 @@ class Result: name="module_template", git_url="https://github.com/eclipse-score/module_template.git", commands=[ - "bazel run //docs:ide_support", - "bazel run //docs:incremental", - "bazel build //docs:docs", + "bazel run //:ide_support", + "bazel run //:docs", + "bazel build //:needs_json", ], test_commands=[ "bazel test //tests/...", From 9674313f6dcbf9045f6820875d85cfb318652821 Mon Sep 17 00:00:00 2001 From: Alexander Lanin Date: Tue, 29 Jul 2025 18:13:47 +0200 Subject: [PATCH 2/3] format and copyright --- BUILD | 6 +-- MODULE.bazel | 7 --- pyproject.toml | 8 ++-- src/BUILD | 1 - src/extensions/score_metamodel/__init__.py | 1 + .../score_metamodel/external_needs.py | 13 ++++++ .../tests/test_external_needs.py | 45 +++++++++++++++---- 7 files changed, 57 insertions(+), 24 deletions(-) diff --git a/BUILD b/BUILD index 29f3e61e..cb382a56 100644 --- a/BUILD +++ b/BUILD @@ -14,7 +14,6 @@ 
load("@score_cr_checker//:cr_checker.bzl", "copyright_checker") load("//:docs.bzl", "docs") - package(default_visibility = ["//visibility:public"]) copyright_checker( @@ -29,10 +28,9 @@ copyright_checker( visibility = ["//visibility:public"], ) - docs( - source_dir = "docs", data = [ - "@score_process//:needs_json" + "@score_process//:needs_json", ], + source_dir = "docs", ) diff --git a/MODULE.bazel b/MODULE.bazel index a6485c03..3b7236c9 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -96,10 +96,3 @@ bazel_dep(name = "score_dash_license_checker", version = "0.1.1") # docs dependency bazel_dep(name = "score_process", version = "1.0.4") - -# TODO: REMOVE BEFORE MERGE -git_override( - module_name="score_process", - remote = "https://github.com/MaximilianSoerenPollak/process_description", - commit = "6c2c2707c4b9bdeb608dd499a727c57e1f9a503d" -) diff --git a/pyproject.toml b/pyproject.toml index e509ec2e..eae04ccc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,21 +1,21 @@ # This file is at the root level, as it applies to all Python code, # not only to docs or to tools. [tool.pyright] -extends = "bazel-bin/docs/ide_support.runfiles/score_python_basics+/pyproject.toml" +extends = "bazel-bin/ide_support.runfiles/score_python_basics+/pyproject.toml" exclude = [ "**/__pycache__", "**/.*", "**/bazel-*", - "venv/**", + ".venv*/**", ] [tool.ruff] -extend = "bazel-bin/docs/ide_support.runfiles/score_python_basics+/pyproject.toml" +extend = "bazel-bin/ide_support.runfiles/score_python_basics+/pyproject.toml" extend-exclude = [ "**/__pycache__", "/.*", "bazel-*", - "venv/**", + ".venv*/**", ] diff --git a/src/BUILD b/src/BUILD index d23883d1..2446069d 100644 --- a/src/BUILD +++ b/src/BUILD @@ -71,7 +71,6 @@ compile_pip_requirements( ], ) - filegroup( name = "docs_assets", srcs = glob( diff --git a/src/extensions/score_metamodel/__init__.py b/src/extensions/score_metamodel/__init__.py index da5daf13..210cd9fe 100644 --- a/src/extensions/score_metamodel/__init__.py +++ b/src/extensions/score_metamodel/__init__.py @@ -299,6 +299,7 @@ def default_options() -> list[str]: "arch", ] + def setup(app: Sphinx) -> dict[str, str | bool]: app.add_config_value("external_needs_source", "", rebuild="env") app.add_config_value("allowed_external_prefixes", [], rebuild="env") diff --git a/src/extensions/score_metamodel/external_needs.py b/src/extensions/score_metamodel/external_needs.py index 132836fd..b5db7ebf 100644 --- a/src/extensions/score_metamodel/external_needs.py +++ b/src/extensions/score_metamodel/external_needs.py @@ -1,3 +1,16 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. 
+# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* + import json import os import subprocess diff --git a/src/extensions/score_metamodel/tests/test_external_needs.py b/src/extensions/score_metamodel/tests/test_external_needs.py index ea57c69c..d94d19a4 100644 --- a/src/extensions/score_metamodel/tests/test_external_needs.py +++ b/src/extensions/score_metamodel/tests/test_external_needs.py @@ -1,3 +1,15 @@ +# ******************************************************************************* +# Copyright (c) 2025 Contributors to the Eclipse Foundation +# +# See the NOTICE file(s) distributed with this work for additional +# information regarding copyright ownership. +# +# This program and the accompanying materials are made available under the +# terms of the Apache License Version 2.0 which is available at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# SPDX-License-Identifier: Apache-2.0 +# ******************************************************************************* import pytest from ..external_needs import ExternalNeedsSource, parse_external_needs_sources_from_DATA @@ -6,38 +18,55 @@ def test_empty_list(): assert parse_external_needs_sources_from_DATA("[]") == [] + def test_single_entry_with_path(): result = parse_external_needs_sources_from_DATA('["@repo//foo/bar:baz"]') # IF a target has a path, it will not be reported as external needs assert result == [] + def test_single_entry_no_path(): result = parse_external_needs_sources_from_DATA('["@repo//:target"]') # If a target is not named "needs_json", it will not be reported as external needs assert result == [] + def test_single_entry_json_no_path(): result = parse_external_needs_sources_from_DATA('["@repo//:needs_json"]') - assert result == [ExternalNeedsSource(bazel_module="repo", path_to_target="", target="needs_json")] + assert result == [ + ExternalNeedsSource(bazel_module="repo", path_to_target="", target="needs_json") + ] + def test_multiple_entries(): - result = parse_external_needs_sources_from_DATA('["@repo1//:needs_json", "@repo2//:needs_json"]') + result = parse_external_needs_sources_from_DATA( + '["@repo1//:needs_json", "@repo2//:needs_json"]' + ) assert result == [ - ExternalNeedsSource(bazel_module="repo1", path_to_target="", target="needs_json"), - ExternalNeedsSource(bazel_module="repo2", path_to_target="", target="needs_json") + ExternalNeedsSource( + bazel_module="repo1", path_to_target="", target="needs_json" + ), + ExternalNeedsSource( + bazel_module="repo2", path_to_target="", target="needs_json" + ), ] + def test_multiple_entries_2(): - result = parse_external_needs_sources_from_DATA('["@repo1//:needs_json", "@repo2//path:needs_json"]') + result = parse_external_needs_sources_from_DATA( + '["@repo1//:needs_json", "@repo2//path:needs_json"]' + ) assert result == [ - ExternalNeedsSource(bazel_module="repo1", path_to_target="", target="needs_json") + ExternalNeedsSource( + bazel_module="repo1", path_to_target="", target="needs_json" + ) ] + def test_invalid_entry(): with pytest.raises(ValueError): _ = parse_external_needs_sources_from_DATA('["@not_a_valid_string"]') -def test_parser(): - ... +def test_parser(): ... 
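A minimal sketch of the workflow this series enables (illustrative only, not part of the patch itself): the producer's `needs_json` target embeds `project_url` and `project_prefix` into needs.json via `extend_needs_json_exporter`, and a consumer that lists that target under `data` in its `docs()` call gets a matching `needs_external_needs` entry generated by `connect_external_needs`. The concrete values below are assumptions taken from the process_description defaults previously hard-coded in the deleted `docs/BUILD`.

# Sketch under the above assumptions: the entry appended to
# app.config.needs_external_needs for data = ["@score_process//:needs_json"],
# given a producer needs.json carrying project_prefix = "process_" and
# project_url = "https://eclipse-score.github.io/process_description".
entry = {
    "id_prefix": "process_",
    "base_url": "https://eclipse-score.github.io/process_description" + "/main",  # always "main" for now
    # Relative runfiles path; resolved against RUNFILES_DIR under bazel,
    # or against bazel-bin/ide_support.runfiles when run from the IDE venv.
    "json_path": "score_process+/needs_json/_build/needs/needs.json",
}
allowed_prefix = entry["id_prefix"].upper()  # sphinx_needs compares prefixes upper-cased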
From ad2c758c5bf68a6317fd6d1f3fe41052aff779a3 Mon Sep 17 00:00:00 2001 From: Alexander Lanin Date: Wed, 30 Jul 2025 09:45:32 +0200 Subject: [PATCH 3/3] bugfixes --- src/extensions/score_metamodel/external_needs.py | 2 ++ src/extensions/score_source_code_linker/__init__.py | 6 +++--- src/incremental.py | 2 +- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/extensions/score_metamodel/external_needs.py b/src/extensions/score_metamodel/external_needs.py index b5db7ebf..d7a9de90 100644 --- a/src/extensions/score_metamodel/external_needs.py +++ b/src/extensions/score_metamodel/external_needs.py @@ -105,6 +105,8 @@ def extend_needs_json_exporter(config: Config, params: list[str]) -> None: """ for p in params: + # Note: we are currently adding these values to config after config-inited. + # This is wrong. But good enough. config.add(p, default="", rebuild="env", types=(), description="") if not getattr(config, p): diff --git a/src/extensions/score_source_code_linker/__init__.py b/src/extensions/score_source_code_linker/__init__.py index 3ea1d3b3..3a4be37a 100644 --- a/src/extensions/score_source_code_linker/__init__.py +++ b/src/extensions/score_source_code_linker/__init__.py @@ -22,8 +22,8 @@ from typing import cast from sphinx.application import Sphinx -from sphinx.environment import BuildEnvironment from sphinx.config import Config +from sphinx.environment import BuildEnvironment from sphinx_needs.data import NeedsInfoType, NeedsMutable, SphinxNeedsData from sphinx_needs.logging import get_logger @@ -33,9 +33,9 @@ generate_source_code_links_json, ) from src.extensions.score_source_code_linker.needlinks import ( + DefaultNeedLink, NeedLink, load_source_code_links_json, - DefaultNeedLink, ) LOGGER = get_logger(__name__) @@ -105,7 +105,7 @@ def setup_once(app: Sphinx, config: Config): def setup(app: Sphinx) -> dict[str, str | bool]: # Esbonio will execute setup() on every iteration. # setup_once will only be called once. - app.connect("config-inited", setup_once) + setup_once(app, app.config) return { "version": "0.1", diff --git a/src/incremental.py b/src/incremental.py index 1c789ea6..857b6d98 100644 --- a/src/incremental.py +++ b/src/incremental.py @@ -91,7 +91,7 @@ def get_env(name: str) -> str: action = get_env("ACTION") if action == "live_preview": Path(workspace + "/_build/score_source_code_linker_cache.json").unlink( - missing_ok=False + missing_ok=True ) sphinx_autobuild_main( base_arguments