Merge 24.5.x back into main #5358

Merged: 3 commits merged into main from 24.5.x on May 24, 2024

4 changes: 2 additions & 2 deletions .authors.yml
@@ -612,7 +612,7 @@
first_commit: 2015-08-30 06:44:37
- name: Marcel Bargull
email: marcel.bargull@udo.edu
num_commits: 87
num_commits: 88
first_commit: 2016-09-26 11:45:54
github: mbargull
alternate_emails:
@@ -1202,7 +1202,7 @@
alternate_emails:
- clee@anaconda.com
- name: Ken Odegard
num_commits: 203
num_commits: 204
email: kodegard@anaconda.com
first_commit: 2020-09-08 19:53:41
github: kenodegard
20 changes: 20 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,25 @@
[//]: # (current developments)

## 24.5.1 (2024-05-23)

### Bug fixes

* Fix issue with modifying a `frozendict` when specifying `outputs/files` in `meta.yaml`. (#5342 via #5345)
* Fix excessive memory use in `inspect_linkages_lief`. (#5267 via #5348)

### Deprecations

* Mark `conda_build.metadata.toposort` as deprecated. Use `conda_build.metadata.toposort_outputs` instead. (#5342 via #5345)
* Mark `conda_build.metadata.check_circular_dependencies` as deprecated. Use `conda_build.metadata._check_circular_dependencies` instead. (#5342 via #5345)

### Contributors

* @beeankha
* @kenodegard
* @mbargull



## 24.5.0 (2024-05-06)

### Enhancements
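The two deprecations in this changelog entry swap dict-keyed helpers for tuple-based ones; the corresponding call-site change appears in `get_output_metadata_set` further down in this diff. A minimal sketch of the new call pattern (the `order_outputs` wrapper and its names are illustrative, not part of conda-build):

```python
# Sketch only: mirrors the replacement made in get_output_metadata_set below.
# `output_tuples` is a list of (output_dict, MetaData) pairs; the old helpers
# took a mapping keyed by frozen output dicts, which is what gh-5342 hit.
from conda_build.metadata import _check_circular_dependencies, _toposort_outputs


def order_outputs(output_tuples, config=None):
    render_order = _toposort_outputs(output_tuples)  # replaces toposort()
    _check_circular_dependencies(render_order, config=config)  # replaces check_circular_dependencies()
    return render_order
```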
148 changes: 125 additions & 23 deletions conda_build/metadata.py
@@ -23,6 +23,7 @@

from . import exceptions, utils
from .config import Config, get_or_merge_config
from .deprecations import deprecated
from .features import feature_list
from .license_family import ensure_valid_license_family
from .utils import (
@@ -45,7 +46,10 @@
)

if TYPE_CHECKING:
from typing import Any, Literal
from typing import Any, Literal, Self

OutputDict = dict[str, Any]
OutputTuple = tuple[OutputDict, "MetaData"]

try:
import yaml
@@ -408,7 +412,17 @@ def _get_all_dependencies(metadata, envs=("host", "build", "run")):
return reqs


def check_circular_dependencies(render_order, config=None):
@deprecated(
"24.5.1",
"24.7.0",
addendum="Use `conda_build.metadata._check_circular_dependencies` instead.",
)
def check_circular_dependencies(
render_order: dict[dict[str, Any], MetaData],
config: Config | None = None,
):
# deprecated since the input type (render_order) changed
envs: tuple[str, ...]
if config and config.host_subdir != config.build_subdir:
# When cross compiling build dependencies are already built
# and cannot come from the recipe as subpackages
@@ -433,6 +447,39 @@ def check_circular_dependencies(render_order, config=None):
raise exceptions.RecipeError(error)


def _check_circular_dependencies(
render_order: list[OutputTuple],
config: Config | None = None,
) -> None:
envs: tuple[str, ...]
if config and config.host_subdir != config.build_subdir:
# When cross compiling build dependencies are already built
# and cannot come from the recipe as subpackages
envs = ("host", "run")
else:
envs = ("build", "host", "run")

pairs: list[tuple[str, str]] = []
for idx, (_, metadata) in enumerate(render_order):
name = metadata.name()
for _, other_metadata in render_order[idx + 1 :]:
other_name = other_metadata.name()
if any(
name == dep.split(" ")[0]
for dep in _get_all_dependencies(other_metadata, envs=envs)
) and any(
other_name == dep.split(" ")[0]
for dep in _get_all_dependencies(metadata, envs=envs)
):
pairs.append((name, other_name))

if pairs:
error = "Circular dependencies in recipe: \n"
for pair in pairs:
error += " {} <-> {}\n".format(*pair)
raise exceptions.RecipeError(error)


def _variants_equal(metadata, output_metadata):
match = True
for key, val in metadata.config.variant.items():
@@ -846,14 +893,13 @@ def _get_dependencies_from_environment(env_name_or_path):
return {"requirements": {"build": bootstrap_requirements}}


def toposort(output_metadata_map):
"""This function is used to work out the order to run the install scripts
for split packages based on any interdependencies. The result is just
a re-ordering of outputs such that we can run them in that order and
reset the initial set of files in the install prefix after each. This
will naturally lead to non-overlapping files in each package and also
the correct files being present during the install and test procedures,
provided they are run in this order."""
@deprecated(
"24.5.1",
"24.7.0",
addendum="Use `conda_build.metadata.toposort_outputs` instead.",
)
def toposort(output_metadata_map: dict[OutputDict, MetaData]):
# deprecated since input type (output_metadata_map) and output changed
from conda.common.toposort import _toposort

# We only care about the conda packages built by this recipe. Non-conda
@@ -863,9 +909,9 @@ def toposort(output_metadata_map):
for output_d in output_metadata_map
if output_d.get("type", "conda").startswith("conda")
]
topodict = dict()
order = dict()
endorder = set()
topodict: dict[str, set[str]] = dict()
order: dict[str, int] = dict()
endorder: set[int] = set()

for idx, (output_d, output_m) in enumerate(output_metadata_map.items()):
if output_d.get("type", "conda").startswith("conda"):
@@ -907,6 +953,63 @@ def toposort(output_metadata_map):
return result


def _toposort_outputs(output_tuples: list[OutputTuple]) -> list[OutputTuple]:
"""This function is used to work out the order to run the install scripts
for split packages based on any interdependencies. The result is just
a re-ordering of outputs such that we can run them in that order and
reset the initial set of files in the install prefix after each. This
will naturally lead to non-overlapping files in each package and also
the correct files being present during the install and test procedures,
provided they are run in this order."""
from conda.common.toposort import _toposort

# We only care about the conda packages built by this recipe. Non-conda
# packages get sorted to the end.
conda_outputs: dict[str, list[OutputTuple]] = {}
non_conda_outputs: list[OutputTuple] = []
for output_tuple in output_tuples:
output_d, _ = output_tuple
if output_d.get("type", "conda").startswith("conda"):
# conda packages must have a name
# the same package name may be seen multiple times (variants)
conda_outputs.setdefault(output_d["name"], []).append(output_tuple)
elif "name" in output_d:
non_conda_outputs.append(output_tuple)
else:
# TODO: is it even possible to get here? and if so should we silently ignore or error?
utils.get_logger(__name__).warn("Found an output without a name, skipping")

# Iterate over conda packages, creating a mapping of package names to their
# dependencies to be used in toposort
name_to_dependencies: dict[str, set[str]] = {}
for name, same_name_outputs in conda_outputs.items():
for output_d, output_metadata in same_name_outputs:
# dependencies for all of the variants
dependencies = (
*output_metadata.get_value("requirements/run", []),
*output_metadata.get_value("requirements/host", []),
*(
output_metadata.get_value("requirements/build", [])
if not output_metadata.is_cross
else []
),
)
name_to_dependencies.setdefault(name, set()).update(
dependency_name
for dependency in dependencies
if (dependency_name := dependency.split(" ")[0]) in conda_outputs
)

return [
*(
output
for name in _toposort(name_to_dependencies)
for output in conda_outputs[name]
),
*non_conda_outputs,
]


def get_output_dicts_from_metadata(
metadata: MetaData,
outputs: list[dict[str, Any]] | None = None,
@@ -2268,7 +2371,7 @@ def validate_features(self):
"character in your recipe."
)

def copy(self):
def copy(self: Self) -> MetaData:
new = copy.copy(self)
new.config = self.config.copy()
new.config.variant = copy.deepcopy(self.config.variant)
@@ -2520,10 +2623,10 @@ def get_output_metadata_set(
permit_undefined_jinja: bool = False,
permit_unsatisfiable_variants: bool = False,
bypass_env_check: bool = False,
) -> list[tuple[dict[str, Any], MetaData]]:
) -> list[OutputTuple]:
from .source import provide

out_metadata_map = {}
output_tuples: list[OutputTuple] = []
if self.final:
outputs = get_output_dicts_from_metadata(self)
output_tuples = [(outputs[0], self)]
@@ -2579,27 +2682,26 @@ def get_output_metadata_set(
}
),
] = (out, out_metadata)
out_metadata_map[deepfreeze(out)] = out_metadata
output_tuples.append((out, out_metadata))
ref_metadata.other_outputs = out_metadata.other_outputs = (
all_output_metadata
)
except SystemExit:
if not permit_undefined_jinja:
raise
out_metadata_map = {}
output_tuples = []

assert out_metadata_map, (
assert output_tuples, (
"Error: output metadata set is empty. Please file an issue"
" on the conda-build tracker at https://github.com/conda/conda-build/issues"
)

# format here is {output_dict: metadata_object}
render_order = toposort(out_metadata_map)
check_circular_dependencies(render_order, config=self.config)
render_order: list[OutputTuple] = _toposort_outputs(output_tuples)
_check_circular_dependencies(render_order, config=self.config)
conda_packages = OrderedDict()
non_conda_packages = []

for output_d, m in render_order.items():
for output_d, m in render_order:
if not output_d.get("type") or output_d["type"] in (
"conda",
"conda_v2",
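`_toposort_outputs` delegates the ordering itself to conda's generic `_toposort`: each conda output is keyed by name, its intra-recipe dependencies are gathered from `requirements/run`, `requirements/host`, and (when not cross-compiling) `requirements/build`, and the sort yields names with their dependencies first, with non-conda outputs appended at the end. A toy illustration of that ordering, using hypothetical package names:

```python
# Toy example of the ordering _toposort_outputs relies on: outputs that depend
# on sibling outputs sort after them.
from conda.common.toposort import _toposort

name_to_dependencies = {
    "libfoo": set(),               # base output, no intra-recipe dependencies
    "foo": {"libfoo"},             # runtime output depends on libfoo
    "foo-dev": {"libfoo", "foo"},  # dev output depends on both
}

print(list(_toposort(name_to_dependencies)))
# expected: ['libfoo', 'foo', 'foo-dev'] (dependencies before dependents)
```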
30 changes: 15 additions & 15 deletions conda_build/os_utils/liefldd.py
@@ -353,12 +353,12 @@ def _get_path_dirs(prefix):
yield "/".join((prefix, "bin"))


def get_uniqueness_key(file):
def get_uniqueness_key(filename, file):
binary = ensure_binary(file)
if not binary:
return EXE_FORMATS.UNKNOWN
elif binary.format == EXE_FORMATS.MACHO:
return str(file)
return filename
elif binary.format == EXE_FORMATS.ELF and ( # noqa
binary.type == lief.ELF.ELF_CLASS.CLASS32
or binary.type == lief.ELF.ELF_CLASS.CLASS64
@@ -369,8 +369,8 @@ def get_uniqueness_key(file):
]
if result:
return result[0]
return str(file)
return str(file)
return filename
return filename


def _get_resolved_location(
@@ -505,13 +505,13 @@ def inspect_linkages_lief(
for element in todo:
todo.pop(0)
filename2 = element[0]
binary = element[1]
if not binary:
binary2 = element[1]
if not binary2:
continue
uniqueness_key = get_uniqueness_key(binary)
uniqueness_key = get_uniqueness_key(filename2, binary2)
if uniqueness_key not in already_seen:
parent_exe_dirname = None
if binary.format == EXE_FORMATS.PE:
if binary2.format == EXE_FORMATS.PE:
tmp_filename = filename2
while tmp_filename:
if (
@@ -527,17 +527,17 @@
if ".pyd" in filename2 or (os.sep + "DLLs" + os.sep) in filename2:
parent_exe_dirname = envroot.replace(os.sep, "/") + "/DLLs"
rpaths_by_binary[filename2] = get_rpaths(
binary, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot
binary2, parent_exe_dirname, envroot.replace(os.sep, "/"), sysroot
)
tmp_filename = filename2
rpaths_transitive = []
if binary.format == EXE_FORMATS.PE:
if binary2.format == EXE_FORMATS.PE:
rpaths_transitive = rpaths_by_binary[tmp_filename]
else:
while tmp_filename:
rpaths_transitive[:0] = rpaths_by_binary[tmp_filename]
tmp_filename = parents_by_filename[tmp_filename]
libraries = get_libraries(binary)
libraries = get_libraries(binary2)
if filename2 in libraries: # Happens on macOS, leading to cycles.
libraries.remove(filename2)
# RPATH is implicit everywhere except macOS, make it explicit to simplify things.
@@ -546,14 +546,14 @@
"$RPATH/" + lib
if not lib.startswith("/")
and not lib.startswith("$")
and binary.format != EXE_FORMATS.MACHO # noqa
and binary2.format != EXE_FORMATS.MACHO # noqa
else lib
)
for lib in libraries
]
for lib, orig in zip(libraries, these_orig):
resolved = _get_resolved_location(
binary,
binary2,
orig,
exedir,
exedir,
Expand All @@ -568,7 +568,7 @@ def inspect_linkages_lief(
# can be run case-sensitively if the user wishes.
#
"""
if binary.format == EXE_FORMATS.PE:
if binary2.format == EXE_FORMATS.PE:
import random
path_fixed = (
os.path.dirname(path_fixed)
@@ -596,7 +596,7 @@
if recurse:
if os.path.exists(resolved[0]):
todo.append([resolved[0], lief.parse(resolved[0])])
already_seen.add(get_uniqueness_key(binary))
already_seen.add(uniqueness_key)
return results


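The `liefldd` changes implement the memory fix noted in the changelog (#5267 via #5348): the key used to de-duplicate already-visited binaries is now the file path (or the ELF soname) instead of `str(binary)`, and the key computed at the top of the loop is reused when marking a binary as seen. A rough sketch of that pattern, using a hypothetical `walk_linkages` helper that is not part of conda-build:

```python
# Sketch of the de-duplication pattern above (hypothetical helper). Keying the
# "seen" set on a cheap, stable string such as the filename avoids building
# str(parsed_binary), which can produce a very large dump of the parsed object.
import lief


def walk_linkages(paths: list[str]) -> None:
    already_seen: set[str] = set()
    todo = [(path, lief.parse(path)) for path in paths]
    for filename, binary in todo:
        if not binary:
            continue
        key = filename  # previously: str(binary)
        if key in already_seen:
            continue
        # ... resolve libraries and rpaths for `binary` here ...
        already_seen.add(key)  # reuse the key computed above
```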
15 changes: 15 additions & 0 deletions tests/test-recipes/metadata/gh-5342/meta.yaml
@@ -0,0 +1,15 @@
{% set name = "gh-5342" %}

package:
name: {{ name }}
version: 1.0

outputs:
- name: {{ name }}
build:
skip: true

- name: {{ name }}-dev
build:
files:
- file
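The new `gh-5342` recipe exercises the `outputs/files` path that previously tried to modify a `frozendict` during rendering. A minimal way to render it and list its outputs, assuming conda-build's Python API and the working directory set to a checkout of this repository:

```python
# Sketch: render the regression recipe and print its output names/versions.
from conda_build.api import render

rendered = render(
    "tests/test-recipes/metadata/gh-5342",
    finalize=False,
    bypass_env_check=True,
)
for metadata, _, _ in rendered:
    print(metadata.name(), metadata.version())
```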