Factor parts of repo_helper.conda into shippinglabel.
domdfcoding committed Dec 16, 2020
1 parent e3582b7 commit 51a98f0
Showing 2 changed files with 3 additions and 153 deletions.
153 changes: 2 additions & 151 deletions repo_helper/conda.py
@@ -23,159 +23,17 @@
# MA 02110-1301, USA.
#

# stdlib
import difflib
from datetime import datetime, timedelta
from typing import Iterable, List, Mapping, Union

# 3rd party
import appdirs # type: ignore
import jinja2
from apeye import SlumberURL
from domdf_python_tools.paths import PathPlus
from domdf_python_tools.stringlist import DelimitedList
from domdf_python_tools.typing import PathLike
from packaging.requirements import InvalidRequirement, Requirement
from shippinglabel import normalize
from shippinglabel.requirements import ComparableRequirement, combine_requirements, read_requirements
from shippinglabel.conda import compile_requirements, validate_requirements

# this package
from repo_helper.configuration import parse_yaml
from repo_helper.templates import template_dir

__all__ = [
    "CONDA_API",
    "get_from_cache",
    "compile_requirements",
    "validate_requirements",
    "make_recipe",
]

CONDA_API = SlumberURL("https://conda.anaconda.org", append_slash=False)
"""
Instance of :class:`apeye.slumber_url.SlumberURL` for accessing the conda API.
.. versionadded:: 2020.11.10
"""


def get_from_cache(channel_name: str) -> List[str]:
    """
    Obtain the list of packages in the given Conda channel, either from the cache or from the conda API.

    Responses are cached for 48 hours.

    :param channel_name:

    .. versionadded:: 2020.11.10
    """

    cache_dir = PathPlus(appdirs.user_cache_dir("repo_helper", "domdfcoding")) / "conda_cache"
    cache_dir.maybe_make(parents=True)

    filename = cache_dir / f"{channel_name}.json"

    if filename.is_file():
        data = filename.load_json()
        if datetime.fromtimestamp(data["expires"]) > datetime.now():
            return data["packages"]

    conda_packages = set()

    for package in (CONDA_API / channel_name / "noarch" / "repodata.json").get()["packages"].values():
        conda_packages.add(package["name"])
    for package in (CONDA_API / channel_name / "linux-64" / "repodata.json").get()["packages"].values():
        conda_packages.add(package["name"])

    data = {"expires": (datetime.now() + timedelta(hours=48)).timestamp(), "packages": sorted(conda_packages)}

    filename.dump_json(data, indent=2)

    return data["packages"]


def compile_requirements(
    repo_dir: PathPlus,
    extras: Iterable[str],
) -> List[ComparableRequirement]:
    """
    Compile a list of requirements for the package from the requirements.txt file and any extra dependencies.

    :param repo_dir:
    :param extras: Additional requirements to include, e.g. those from the project's "extras".

    .. versionadded:: 2020.11.10

    .. versionchanged:: 2020.11.12  ``extras`` is now an iterable of strings.
    """

    all_requirements: List[ComparableRequirement] = []
    extra_requirements = [ComparableRequirement(r) for r in extras]

    for requirement in sorted(
        combine_requirements(
            *read_requirements(repo_dir / "requirements.txt")[0],
            *extra_requirements,
        ),
    ):
        if requirement.url:  # pragma: no cover
            continue

        # TODO: add the extra requirements
        if requirement.extras:
            requirement.extras = set()
        if requirement.marker:
            requirement.marker = None

        all_requirements.append(requirement)

    return all_requirements


def validate_requirements(
    requirements: Iterable[ComparableRequirement],
    conda_channels: Iterable[str],
) -> List[ComparableRequirement]:
    """
    Ensure that all requirements are available from the given conda channels,
    and normalize the names to those in the conda channel.

    :param requirements:
    :param conda_channels:

    .. versionadded:: 2020.11.10
    """  # noqa: D400

    validated_requirements = []

    conda_packages = set()
    channels = DelimitedList(conda_channels)

    for channel in channels:
        for package in get_from_cache(channel):
            conda_packages.add(package)

    for requirement in requirements:

        # Check alias_mapping first
        if requirement.name in alias_mapping:
            requirement.name = alias_mapping[requirement.name]
            validated_requirements.append(requirement)
            continue

        matches = difflib.get_close_matches(requirement.name, conda_packages)
        for match in matches:
            if normalize(match) == requirement.name:
                requirement.name = match
                validated_requirements.append(requirement)
                break
        else:
            raise InvalidRequirement(
                f"Cannot satisfy the requirement {requirement.name!r} "
                f"from any of the channels: '{channels:', '}'."
            )

    return validated_requirements
__all__ = ["make_recipe"]


def make_recipe(repo_dir: PathLike, recipe_file: PathLike) -> None:
@@ -224,10 +82,3 @@ def make_recipe(repo_dir: PathLike, recipe_file: PathLike) -> None:
# - {{ import_name }} = {{ import_name }}:main
# skip_compile_pyc:
# - "*/templates/*.py" # These should not (and cannot) be compiled


#: Mapping of normalised names to names on conda, if they differ for some reason.
alias_mapping = {
    "ruamel-yaml": "ruamel.yaml",
}
# Really just due to https://github.com/conda-forge/ruamel.yaml-feedstock/issues/7
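
With this commit, downstream code obtains these helpers from shippinglabel.conda instead, as the new import near the top of the file shows. A minimal usage sketch, assuming the factored-out functions keep the signatures of the versions removed above; the extra requirement and channel names below are illustrative:

# Sketch only: assumes shippinglabel.conda keeps the signatures of the functions
# removed from repo_helper.conda above.
from domdf_python_tools.paths import PathPlus
from shippinglabel.conda import compile_requirements, validate_requirements

repo_dir = PathPlus(".")  # repository root containing requirements.txt

# Combine requirements.txt with any extra requirements (illustrative value).
requirements = compile_requirements(repo_dir, extras=["typing-extensions>=3.7.4.3"])

# Check each requirement is available on the given channels and normalise its
# name to the conda spelling; raises InvalidRequirement if no channel has it.
validated = validate_requirements(requirements, conda_channels=["conda-forge", "domdfcoding"])

for req in validated:
    print(req)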
3 changes: 1 addition & 2 deletions requirements.txt
@@ -1,5 +1,4 @@
apeye>=0.4.0
appdirs>=1.4.4
attrs>=20.2.0
click==7.1.2
configconfig>=0.4.0
@@ -15,7 +14,7 @@ packaging>=20.4
pre-commit>=2.7.1
requests>=2.25.0
ruamel-yaml>=0.16.12
shippinglabel>=0.5.2
shippinglabel>=0.7.0
southwark>=0.6.0
tomlkit>=0.7.0
typing-extensions>=3.7.4.3
