Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We'll occasionally send you account related emails.

Already on GitHub? Sign in to your account

🔧 Consolidate needs data post-processing #1039

Merged
merged 1 commit into from Sep 28, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/contributing.rst
Expand Up @@ -257,7 +257,7 @@ The following is an outline of the build events which this extension adds to the
- Check for dead links (``process_need_nodes -> check_links``)
- Generate back links (``process_need_nodes -> create_back_links``)
- Process constraints, for each ``Need`` node (``process_need_nodes -> process_constraints``)
- Perform all modifications on need data items, due to ``Needextend`` nodes (``process_need_nodes -> process_needextend``)
- Perform all modifications on need data items, due to ``Needextend`` nodes (``process_need_nodes -> extend_needs_data``)
- Format each ``Need`` node to give the desired visual output (``process_need_nodes -> print_need_nodes``)
- Process all other need specific nodes, replacing them with the desired visual output (``process_creator``)

Expand Down
46 changes: 10 additions & 36 deletions sphinx_needs/data.py
Expand Up @@ -30,23 +30,6 @@ class NeedsFilterType(TypedDict):
amount: int


class NeedsWorkflowType(TypedDict):
    """
    Used to store workflow status information for already executed tasks.
    Some tasks like backlink_creation need be performed only once.
    But most sphinx-events get called several times (for each single document file),
    which would also execute our code several times...
    """

    # Each flag starts False and is flipped to True once the corresponding
    # post-processing step has run, so repeated event callbacks can skip it.
    backlink_creation_links: bool
    # whether dynamic [[...]] function values have been resolved
    dynamic_values_resolved: bool
    # whether dead-link checking has been performed
    links_checked: bool
    # whether section information has been attached to needs
    add_sections: bool
    # whether variant option values have been resolved
    variant_option_resolved: bool
    # whether ``needextend`` modifications have been applied
    needs_extended: bool
    # whether need constraints have been processed
    needs_constraints: bool


class NeedsBaseDataType(TypedDict):
"""A base type for all data."""

Expand Down Expand Up @@ -448,27 +431,18 @@ def get_or_create_docs(self) -> dict[str, list[str]]:
self.env.needs_all_docs = {"all": []}
return self.env.needs_all_docs

def get_or_create_workflow(self) -> NeedsWorkflowType:
"""Get workflow information.

This is lazily created and cached in the environment.
"""
@property
def needs_is_post_processed(self) -> bool:
"""Whether needs have been post-processed."""
try:
return self.env.needs_workflow
return self.env.needs_is_post_processed
except AttributeError:
self.env.needs_workflow = {
"backlink_creation_links": False,
"dynamic_values_resolved": False,
"links_checked": False,
"add_sections": False,
"variant_option_resolved": False,
"needs_extended": False,
"needs_constraints": False,
}
for link_type in self.env.app.config.needs_extra_links:
self.env.needs_workflow["backlink_creation_{}".format(link_type["option"])] = False

return self.env.needs_workflow # type: ignore[return-value]
self.env.needs_is_post_processed = False
return self.env.needs_is_post_processed

@needs_is_post_processed.setter
def needs_is_post_processed(self, value: bool) -> None:
self.env.needs_is_post_processed = value

def get_or_create_services(self) -> ServiceManager:
"""Get information about services.
Expand Down
114 changes: 52 additions & 62 deletions sphinx_needs/directives/need.py
Expand Up @@ -17,7 +17,11 @@
from sphinx_needs.data import SphinxNeedsData
from sphinx_needs.debug import measure_time
from sphinx_needs.defaults import NEED_DEFAULT_OPTIONS
from sphinx_needs.directives.needextend import process_needextend
from sphinx_needs.directives.needextend import (
Needextend,
extend_needs_data,
remove_needextend_node,
)
from sphinx_needs.functions import (
find_and_replace_node_content,
resolve_dynamic_values,
Expand Down Expand Up @@ -376,29 +380,23 @@ def process_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -
return

env = app.env
needs_data = SphinxNeedsData(env)

# If no needs were defined, we do not need to do anything
if not hasattr(env, "needs_all_needs"):
if not needs_data.get_or_create_needs():
return

# Call dynamic functions and replace related node data with their return values
resolve_dynamic_values(env)

# Apply variant handling on options and replace its values with their return values
resolve_variants_options(env)

# check if we have dead links
check_links(env)

# Create back links of common links and extra links
for links in needs_config.extra_links:
create_back_links(env, links["option"])

process_constraints(app)
if not needs_data.needs_is_post_processed:
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is the main change; we move all needs post-processing under a single if block, which only gets run once (not for every document)

resolve_dynamic_values(env)
resolve_variants_options(env)
check_links(env)
create_back_links(env)
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

looping through all link types is also moved internal to the create_back_links function

process_constraints(app)
extend_needs_data(app)
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is process_needextend, but with the removal of nodes

needs_data.needs_is_post_processed = True

# We call process_needextend here by our own, so that we are able
# to give print_need_nodes the already found need_nodes.
process_needextend(app, doctree, fromdocname)
for extend_node in doctree.findall(Needextend):
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is split out from the original process_needextend function, since it is not part of the needs data post-processing

remove_needextend_node(extend_node)

print_need_nodes(app, doctree, fromdocname, list(doctree.findall(Need)))

Expand Down Expand Up @@ -432,18 +430,16 @@ def print_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, fou


def check_links(env: BuildEnvironment) -> None:
"""Checks if set links are valid or are dead (referenced need does not exist.)

For needs with dead links, an extra ``has_dead_links`` field is added and,
if the link is not allowed to be dead,
the ``has_forbidden_dead_links`` field is also added.
"""
Checks if set links are valid or are dead (referenced need does not exist.)
:param env: Sphinx environment
:return:
"""
config = NeedsSphinxConfig(env.config)
data = SphinxNeedsData(env)
workflow = data.get_or_create_workflow()
if workflow["links_checked"]:
return

needs = data.get_or_create_needs()
extra_links = getattr(env.config, "needs_extra_links", [])
extra_links = config.extra_links
for need in needs.values():
for link_type in extra_links:
dead_links_allowed = link_type.get("allow_dead_links", False)
Expand All @@ -464,45 +460,39 @@ def check_links(env: BuildEnvironment) -> None:
need["has_forbidden_dead_links"] = True
break # One found dead link is enough

# Finally set a flag so that this function gets not executed several times
workflow["links_checked"] = True


def create_back_links(env: BuildEnvironment, option: str) -> None:
"""
Create back-links in all found needs.
But do this only once, as all needs are already collected and this sorting is for all
needs and not only for the ones of the current document.
def create_back_links(env: BuildEnvironment) -> None:
"""Create back-links in all found needs.

:param env: sphinx environment
These are fields for each link type, ``<link_name>_back``,
which contain a list of all IDs of needs that link to the current need.
"""
data = SphinxNeedsData(env)
workflow = data.get_or_create_workflow()
option_back = f"{option}_back"
if workflow[f"backlink_creation_{option}"]: # type: ignore[literal-required]
return

needs_config = NeedsSphinxConfig(env.config)
needs = data.get_or_create_needs()
for key, need in needs.items():
need_link_value = [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
for link in need_link_value:
link_main = link.split(".")[0]
try:
link_part = link.split(".")[1]
except IndexError:
link_part = None

if link_main in needs:
if key not in needs[link_main][option_back]: # type: ignore[literal-required]
needs[link_main][option_back].append(key) # type: ignore[literal-required]

# Handling of links to need_parts inside a need
if link_part and link_part in needs[link_main]["parts"]:
if option_back not in needs[link_main]["parts"][link_part].keys():
needs[link_main]["parts"][link_part][option_back] = [] # type: ignore[literal-required]
needs[link_main]["parts"][link_part][option_back].append(key) # type: ignore[literal-required]

workflow[f"backlink_creation_{option}"] = True # type: ignore[literal-required]

for links in needs_config.extra_links:
option = links["option"]
option_back = f"{option}_back"

for key, need in needs.items():
need_link_value = [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
for link in need_link_value:
link_main = link.split(".")[0]
try:
link_part = link.split(".")[1]
except IndexError:
link_part = None

if link_main in needs:
if key not in needs[link_main][option_back]: # type: ignore[literal-required]
needs[link_main][option_back].append(key) # type: ignore[literal-required]

# Handling of links to need_parts inside a need
if link_part and link_part in needs[link_main]["parts"]:
if option_back not in needs[link_main]["parts"][link_part].keys():
needs[link_main]["parts"][link_part][option_back] = [] # type: ignore[literal-required]
needs[link_main]["parts"][link_part][option_back].append(key) # type: ignore[literal-required]


def _fix_list_dyn_func(list: List[str]) -> List[str]:
Expand Down