diff --git a/docs/source/courses/advanced/dd_versions.rst b/docs/source/courses/advanced/dd_versions.rst
index 3f7f19f..ab87097 100644
--- a/docs/source/courses/advanced/dd_versions.rst
+++ b/docs/source/courses/advanced/dd_versions.rst
@@ -60,7 +60,7 @@ Bundled Data Dictionary definitions
IMAS-Python comes bundled [#DDdefs]_ with many versions of the Data Dictionary definitions.
You can find out which versions are available by calling
-:py:meth:`imas.dd_zip.dd_xml_versions`.
+``imas.dd_zip.dd_xml_versions``.
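+
+For example, to list the bundled versions from Python (the exact list depends on the
+installed ``imas-data-dictionaries`` package):
+
+.. code-block:: python
+
+    import imas
+
+    # All Data Dictionary versions that can be loaded in this environment
+    print(imas.dd_zip.dd_xml_versions())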
Converting an IDS between Data Dictionary versions
@@ -290,6 +290,6 @@ build, you can use them like you normally would.
.. rubric:: Footnotes
-.. [#DDdefs] To be more precise, the Data Dictionary definitions are generated when the
- IMAS-Python package is created. See :ref:`this reference
- ` for more
- details.
+.. [#DDdefs] To be more precise, the Data Dictionary definitions are provided by the
+ `IMAS Data Dictionaries `__
+ package.
diff --git a/docs/source/imas_architecture.rst b/docs/source/imas_architecture.rst
index b1764be..182d2a0 100644
--- a/docs/source/imas_architecture.rst
+++ b/docs/source/imas_architecture.rst
@@ -72,11 +72,7 @@ Data Dictionary building and loading
The following submodules are responsible for building the Data Dictionary and loading DD
definitions at runtime.
-- :py:mod:`imas.dd_helpers` handles building the ``IDSDef.zip`` file, containing all
- versions of the Data Dictionary since ``3.22.0``.
-
- :py:mod:`imas.dd_zip` handles loading the Data Dictionary definitions at run time.
- These definitions can be loaded from an ``IDSDef.zip`` or from a custom XML file.
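+
+  For example, the definition tree of a specific version (any version reported by
+  ``dd_xml_versions``) can be loaded with ``dd_etree``, which also accepts an
+  ``xml_path`` argument pointing to a custom Data Dictionary XML file:
+
+  .. code-block:: python
+
+      from imas.dd_zip import dd_etree
+
+      # Parsed XML tree with the IDS definitions of the requested DD version
+      tree = dd_etree(version="3.41.0")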
.. _imas_architecture/IDS_nodes:
diff --git a/docs/source/multi-dd.rst b/docs/source/multi-dd.rst
index b63d18e..bef1fe5 100644
--- a/docs/source/multi-dd.rst
+++ b/docs/source/multi-dd.rst
@@ -207,21 +207,14 @@ Automated tests have been provided that check the loading of all of the DD
versions tagged in the data-dictionary git repository.
-Extending the DD set
-''''''''''''''''''''
+Data Dictionary definitions
+'''''''''''''''''''''''''''
-Use the command ``python setup.py build_DD`` to build a new ``IDSDef.zip``. This
-fetches all tags from the data dictionary git repository and builds the ``IDSDef.zip``.
+The Data Dictionary definitions used by IMAS-Python are provided by the `IMAS Data
+Dictionaries `__ package.
+Please update this package if you need a more recent version of the Data Dictionary,
+for example using ``pip``:
-IMAS-Python searches for an ``IDSDef.zip`` in the following locations:
+.. code-block:: bash
-1. The environment variable ``$IMAS_DDZIP`` (path to a zip file)
-2. The file ``./IDSDef.zip`` in the current working directory
-3. In the local configuration folder: ``~/.config/imas/IDSDef.zip``, or
- ``$XDG_CONFIG_DIR/imas/IDSDef.zip`` (if the environment variable
- ``$XDG_CONFIG_DIR`` is set)
-4. The zipfile bundled with the IMAS-Python installation: ``assets/IDSDef.zip``
-
-All paths are searched in order when loading the definitions of a specific data
-dictionary version: the first zip file that contains the definitions of the requested
-version is used.
+ pip install --upgrade imas-data-dictionaries
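+
+After upgrading you can verify which versions are available, for example from Python
+(the same functions are re-exported by ``imas.dd_zip`` for backwards compatibility):
+
+.. code-block:: python
+
+    import imas_data_dictionaries
+
+    # All Data Dictionary versions shipped with the installed package
+    print(imas_data_dictionaries.dd_xml_versions())
+
+    # Identifier definitions bundled alongside the IDS definitions
+    print(imas_data_dictionaries.dd_identifiers())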
diff --git a/imas/__init__.py b/imas/__init__.py
index 0ed1040..58a6699 100644
--- a/imas/__init__.py
+++ b/imas/__init__.py
@@ -20,7 +20,6 @@
# Load the IMAS-Python IMAS AL/DD core
from . import (
db_entry,
- dd_helpers,
dd_zip,
util,
)
diff --git a/imas/dd_helpers.py b/imas/dd_helpers.py
deleted file mode 100644
index 446a999..0000000
--- a/imas/dd_helpers.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# This file is part of IMAS-Python.
-# You should have received the IMAS-Python LICENSE file with this project.
-"""Helper functions to build IDSDef.xml"""
-
-import logging
-import os
-import shutil
-from pathlib import Path
-from typing import Tuple
-from zipfile import ZIP_DEFLATED, ZipFile
-
-from packaging.version import Version as V
-from saxonche import PySaxonProcessor
-
-logger = logging.getLogger(__name__)
-
-_idsdef_zip_relpath = Path("imas/assets/IDSDef.zip")
-_build_dir = Path("build")
-
-
-def prepare_data_dictionaries():
- """Build IMAS IDSDef.xml files for each tagged version in the DD repository
- 1. Use saxonche for transformations
- 2. Clone the DD repository (ask for user/pass unless ssh key access is available)
- 3. Generate IDSDef.xml and rename to IDSDef_${version}.xml
- 4. Zip all these IDSDefs together and include in wheel
- """
- from git import Repo
-
- repo: Repo = get_data_dictionary_repo()
- if repo:
- newest_version_and_tag = (V("0"), None)
- for tag in repo.tags:
- version_and_tag = (V(str(tag)), tag)
- if V(str(tag)) > V("3.21.1"):
- newest_version_and_tag = max(newest_version_and_tag, version_and_tag)
- logger.debug("Building data dictionary version %s", tag)
- build_data_dictionary(repo, tag)
-
- logger.info("Creating zip file of DD versions")
-
- if _idsdef_zip_relpath.is_file():
- logger.warning("Overwriting '%s'", _idsdef_zip_relpath)
-
- with ZipFile(
- _idsdef_zip_relpath,
- mode="w", # this needs w, since zip can have multiple same entries
- compression=ZIP_DEFLATED,
- ) as dd_zip:
- for filename in _build_dir.glob("[0-9]*.xml"):
- arcname = Path("data-dictionary").joinpath(*filename.parts[1:])
- dd_zip.write(filename, arcname=arcname)
- # Include identifiers from latest tag in zip file
- repo.git.checkout(newest_version_and_tag[1], force=True)
- # DD layout <= 4.0.0
- for filename in Path("data-dictionary").glob("*/*identifier.xml"):
- arcname = Path("identifiers").joinpath(*filename.parts[1:])
- dd_zip.write(filename, arcname=arcname)
- # DD layout > 4.0.0
- for filename in Path("data-dictionary").glob("schemas/*/*identifier.xml"):
- arcname = Path("identifiers").joinpath(*filename.parts[2:])
- dd_zip.write(filename, arcname=arcname)
-
-
-def get_data_dictionary_repo() -> Tuple[bool, bool]:
- try:
- import git # Import git here, the user might not have it!
- except ModuleNotFoundError:
- raise RuntimeError(
- "Could not find 'git' module, try 'pip install gitpython'. \
- Will not build Data Dictionaries!"
- )
-
- # We need the actual source code (for now) so grab it from ITER
- dd_repo_path = "data-dictionary"
-
- if "DD_DIRECTORY" in os.environ:
- logger.info("Found DD_DIRECTORY, copying")
- try:
- shutil.copytree(os.environ["DD_DIRECTORY"], dd_repo_path)
- except FileExistsError:
- pass
- else:
- logger.info("Trying to pull data dictionary git repo from ITER")
-
- # Set up a bare repo and fetch the data-dictionary repository in it
- os.makedirs(dd_repo_path, exist_ok=True)
- try:
- repo = git.Repo(dd_repo_path)
- except git.exc.InvalidGitRepositoryError:
- repo = git.Repo.init(dd_repo_path)
- logger.info("Set up local git repository {!s}".format(repo))
-
- try:
- origin = repo.remote()
- except ValueError:
- dd_repo_url = "https://github.com/iterorganization/imas-data-dictionary.git"
- origin = repo.create_remote("origin", url=dd_repo_url)
- logger.info("Set up remote '{!s}' linking to '{!s}'".format(origin, origin.url))
-
- try:
- origin.fetch(tags=True)
- except git.exc.GitCommandError as ee:
- logger.warning(
- "Could not fetch tags from %s. Git reports:\n %s." "\nTrying to continue",
- list(origin.urls),
- ee,
- )
- else:
- logger.info("Remote tags fetched")
- return repo
-
-
-def _run_xsl_transformation(
- xsd_file: Path, xsl_file: Path, tag: str, output_file: Path
-) -> None:
- """
- This function performs an XSL transformation using Saxon-HE (saxonche)
- with the provided XSD file, XSL file, tag, and output file.
-
- Args:
- xsd_file (Path): XML Schema Definition (XSD) file
- xsl_file (Path): The `xsl_file` parameter
- tag (str): tag name to provide to 'DD_GIT_DESCRIBE' parameter
- output_file (Path): The `output_file` parameter for resulting xml
- """
- with PySaxonProcessor(license=False) as proc:
- logger.debug("Initializing Saxon Processor")
- xsltproc = proc.new_xslt30_processor()
- xdm_ddgit = proc.make_string_value(tag)
- xsltproc.set_parameter("DD_GIT_DESCRIBE", xdm_ddgit)
- xsltproc.transform_to_file(
- source_file=str(xsd_file),
- stylesheet_file=str(xsl_file),
- output_file=str(output_file),
- )
-
-
-def build_data_dictionary(repo, tag: str, rebuild=False) -> None:
- """Build a single version of the data dictionary given by the tag argument
- if the IDS does not already exist.
-
- In the data-dictionary repository sometimes IDSDef.xml is stored
- directly, in which case we do not call make.
-
- Args:
- repo: Repository object containing the DD source code
- tag: The DD version tag that will be build
- rebuild: If true, overwrites existing pre-build tagged DD version
- """
- _build_dir.mkdir(exist_ok=True)
- result_xml = _build_dir / f"{tag}.xml"
-
- if result_xml.exists() and not rebuild:
- logger.debug(f"XML for tag '{tag}' already exists, skipping")
- return
-
- repo.git.checkout(tag, force=True)
-
- # Perform the XSL transformation with saxonche
- dd_xsd = Path("data-dictionary/dd_data_dictionary.xml.xsd")
- dd_xsl = Path("data-dictionary/dd_data_dictionary.xml.xsl")
- _run_xsl_transformation(dd_xsd, dd_xsl, tag.name, result_xml)
-
-
-if __name__ == "__main__":
- prepare_data_dictionaries()
diff --git a/imas/dd_zip.py b/imas/dd_zip.py
index 2d62224..e4cce36 100644
--- a/imas/dd_zip.py
+++ b/imas/dd_zip.py
@@ -1,103 +1,27 @@
# This file is part of IMAS-Python.
# You should have received the IMAS-Python LICENSE file with this project.
-""" Extract DD versions from a zip file.
+"""Extract DD versions from the imas-data-dictionaries distribution."""
-The zip file contains files as
-* `data-dictionary/3.30.0.xml`
-* `data-dictionary/3.29.0.xml`
-
-multiple paths are checked. See `ZIPFILE_LOCATIONS`.
-First the environment variable IMAS_DDZIP is checked.
-If that exists and points to a file we will attempt to open it.
-Then, IDSDef.zip is searched in site-packages, the current folder,
-in .config/imas/ (`$$XDG_CONFIG_HOME`) and in
-the assets/ folder within the IMAS-Python package.
-
-1. `$$IMAS_DDZIP`
-2. The virtual environment
-3. USER_BASE`imas/IDSDef.zip`
-4. All `site-packages/imas/IDSDef.zip`
-5. `./IDSDef.zip`
-6. `~/.config/imas/IDSDef.zip`
-7. `__file__/../../imas/assets/IDSDef.zip`
-
-All files are checked, i.e. if your .config/imas/IDSDef.zip is outdated
-the IMAS-Python-packaged version will be used.
-
-The `assets/IDSDef.zip` provided with the package can be updated
-with the `python setup.py build_DD` command, which is also performed on install
-if you have access to the ITER data-dictionary git repo.
-Reinstalling imas thus also will give you access to the latest DD versions.
-"""
import logging
import os
-import re
import xml.etree.ElementTree as ET
-from contextlib import contextmanager, nullcontext
from functools import lru_cache
from pathlib import Path
-from typing import Dict, Iterator, List, Tuple, Union
-from zipfile import ZipFile
-
-try:
- from importlib.resources import as_file, files
-
- try:
- from importlib.resources.abc import Traversable
- except ModuleNotFoundError: # Python 3.9/3.10 support
- from importlib.abc import Traversable
-
-except ImportError: # Python 3.8 support
- from importlib_resources import as_file, files
- from importlib_resources.abc import Traversable
-from packaging.version import InvalidVersion, Version
+# These functions used to be defined in this module and are now provided by
+# imas_data_dictionaries. We import them here for backwards compatibility:
+from imas_data_dictionaries import dd_identifiers # noqa: F401
+from imas_data_dictionaries import get_dd_xml_crc # noqa: F401
+from imas_data_dictionaries import get_identifier_xml # noqa: F401
+from imas_data_dictionaries import dd_xml_versions, get_dd_xml, parse_dd_version
+from packaging.version import InvalidVersion
import imas
-from imas.exception import UnknownDDVersion
+from imas.exception import UnknownDDVersion # noqa: F401
logger = logging.getLogger(__name__)
-def _get_xdg_config_dir():
- """
- Return the XDG config directory, according to the XDG base directory spec:
-
- https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
- """
- return os.environ.get("XDG_CONFIG_HOME") or str(Path.home() / ".config")
-
-
-def _generate_zipfile_locations() -> Iterator[Union[Path, Traversable]]:
- """Build a list of potential data dictionary locations.
- We start with the path (if any) of the IMAS_DDZIP env var.
- Then we look for IDSDef.zip in the current folder, in the
- default XDG config dir (~/.config/imas/IDSDef.zip) and
- finally in the assets distributed with this package.
- """
- zip_name = "IDSDef.zip"
-
- environ = os.environ.get("IMAS_DDZIP")
- if environ:
- yield Path(environ).resolve()
-
- yield Path(zip_name).resolve()
- yield Path(_get_xdg_config_dir()).resolve() / "imas" / zip_name
- yield files(imas) / "assets" / zip_name
-
-
-def parse_dd_version(version: str) -> Version:
- try:
- return Version(version)
- except InvalidVersion:
- # This is probably a dev build of the DD, of which the version is obtained with
- # `git describe` in the format X.Y.Z--g with X.Y.Z the previous
- # released version: try again after converting the first dash to a + and treat
- # it like a `local` version specifier, which is recognized as newer.
- # https://packaging.python.org/en/latest/specifications/version-specifiers/
- return Version(version.replace("-", "+", 1))
-
-
# Expected use case is one, maximum two DD versions
# Cache is bigger than that: in pytest we currently use the following DD versions:
# - 3.22.0
@@ -112,7 +36,6 @@ def parse_dd_version(version: str) -> Version:
# - IDS_minimal_struct_array.xml
# - IDS_minimal_types.xml
_DD_CACHE_SIZE = 8
-ZIPFILE_LOCATIONS = list(_generate_zipfile_locations())
def dd_etree(version=None, xml_path=None):
@@ -168,117 +91,6 @@ def _load_etree(version, xml_path):
return tree
-@contextmanager
-def _open_zipfile(path: Union[Path, Traversable]) -> Iterator[ZipFile]:
- """Open a zipfile, given a Path or Traversable."""
- if isinstance(path, Path):
- ctx = nullcontext(path)
- else:
- ctx = as_file(path)
- with ctx as file:
- with ZipFile(file) as zipfile:
- yield zipfile
-
-
-@lru_cache
-def _read_dd_versions() -> Dict[str, Tuple[Union[Path, Traversable], str]]:
- """Traverse all possible DD zip files and return a map of known versions.
-
- Returns:
- version_map: version -> (zipfile path, filename)
- """
- versions = {}
- xml_re = re.compile(r"^data-dictionary/([0-9.]+)\.xml$")
- for path in ZIPFILE_LOCATIONS:
- if not path.is_file():
- continue
- with _open_zipfile(path) as zipfile:
- for fname in zipfile.namelist():
- match = xml_re.match(fname)
- if match:
- version = match.group(1)
- if version not in versions:
- versions[version] = (path, fname)
- if not versions:
- raise RuntimeError(
- "Could not find any data dictionary definitions. "
- f"Looked in: {', '.join(map(repr, ZIPFILE_LOCATIONS))}."
- )
- return versions
-
-
-@lru_cache
-def _read_identifiers() -> Dict[str, Tuple[Union[Path, Traversable], str]]:
- """Traverse all possible DD zip files and return a map of known identifiers.
-
- Returns:
- identifier_map: identifier -> (zipfile path, filename)
- """
- identifiers = {}
- xml_re = re.compile(r"^identifiers/\w+/(\w+_identifier).xml$")
- for path in ZIPFILE_LOCATIONS:
- if not path.is_file():
- continue
- with _open_zipfile(path) as zipfile:
- for fname in zipfile.namelist():
- match = xml_re.match(fname)
- if match:
- identifier_name = match.group(1)
- if identifier_name not in identifiers:
- identifiers[identifier_name] = (path, fname)
- return identifiers
-
-
-@lru_cache
-def dd_xml_versions() -> List[str]:
- """Parse IDSDef.zip to find version numbers available"""
-
- def sort_key(version):
- try:
- return parse_dd_version(version)
- except InvalidVersion:
- # Don't fail when a malformatted version is present in the DD zip
- logger.error(
- f"Could not convert DD XML version {version} to a Version.", exc_info=1
- )
- return Version(0)
-
- return sorted(_read_dd_versions(), key=sort_key)
-
-
-@lru_cache
-def dd_identifiers() -> List[str]:
- """Parse IDSDef.zip to find available identifiers"""
-
- return sorted(_read_identifiers())
-
-
-def get_dd_xml(version):
- """Read XML file for the given data dictionary version."""
- dd_versions = dd_xml_versions()
- if version not in dd_versions:
- raise UnknownDDVersion(version, dd_versions)
- path, fname = _read_dd_versions()[version]
- with _open_zipfile(path) as zipfile:
- return zipfile.read(fname)
-
-
-def get_dd_xml_crc(version):
- """Given a version string, return its CRC checksum"""
- # Note, by this time get_dd_xml is already called, so we don't need to check if the
- # version is known
- path, fname = _read_dd_versions()[version]
- with _open_zipfile(path) as zipfile:
- return zipfile.getinfo(fname).CRC
-
-
-def get_identifier_xml(identifier_name):
- """Get identifier XML for the given identifier name"""
- path, fname = _read_identifiers()[identifier_name]
- with _open_zipfile(path) as zipfile:
- return zipfile.read(fname)
-
-
def print_supported_version_warning(version):
try:
if parse_dd_version(version) < imas.OLDEST_SUPPORTED_VERSION:
diff --git a/imas/exception.py b/imas/exception.py
index 513c2ca..737680c 100644
--- a/imas/exception.py
+++ b/imas/exception.py
@@ -1,11 +1,14 @@
# This file is part of IMAS-Python.
# You should have received the IMAS-Python LICENSE file with this project.
-"""Exception classes used in IMAS-Python.
-"""
+"""Exception classes used in IMAS-Python."""
import difflib
import logging
-from typing import TYPE_CHECKING, List
+from typing import TYPE_CHECKING
+
+# This exception used to be defined in this module and is now provided by
+# imas_data_dictionaries. We import it here for backwards compatibility:
+from imas_data_dictionaries import UnknownDDVersion # noqa: F401
from imas.backends.imas_core import imas_interface as _imas_interface
@@ -23,20 +26,6 @@
ALException = None
-class UnknownDDVersion(ValueError):
- """Error raised when an unknown DD version is specified."""
-
- def __init__(self, version: str, available: List[str], note: str = "") -> None:
- close_matches = difflib.get_close_matches(version, available, n=1)
- if close_matches:
- suggestions = f"Did you mean {close_matches[0]!r}?"
- else:
- suggestions = f"Available versions are {', '.join(reversed(available))}"
- super().__init__(
- f"Data dictionary version {version!r} cannot be found. {suggestions}{note}"
- )
-
-
class IDSNameError(ValueError):
"""Error raised by DBEntry.get(_slice) when providing an invalid IDS name."""
diff --git a/imas/test/test_dd_helpers.py b/imas/test/test_dd_helpers.py
deleted file mode 100644
index 07d1d2b..0000000
--- a/imas/test/test_dd_helpers.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from pathlib import Path
-import shutil
-import pytest
-import os
-import zipfile
-
-from imas.dd_helpers import prepare_data_dictionaries, _idsdef_zip_relpath, _build_dir
-
-_idsdef_unzipped_relpath = Path("idsdef_unzipped")
-
-
-@pytest.mark.skip(reason="skipping IDSDef.zip generation")
-def test_prepare_data_dictionaries():
- prepare_data_dictionaries()
- assert os.path.exists(
- _idsdef_zip_relpath
- ), f"IDSDef.zip file does not exist at path: {_idsdef_zip_relpath}"
-
- expected_xml_files = [
- _build_dir / "3.40.0.xml",
- _build_dir / "3.41.0.xml",
- _build_dir / "3.42.0.xml",
- _build_dir / "4.0.0.xml",
- ]
-
- for xml_file in expected_xml_files:
- assert os.path.exists(xml_file), f"{xml_file} does not exist"
-
- with zipfile.ZipFile(_idsdef_zip_relpath, "r") as zip_ref:
- zip_ref.extractall(_idsdef_unzipped_relpath)
-
- expected_ids_directories = [
- _idsdef_unzipped_relpath / "data-dictionary" / "3.40.0.xml",
- _idsdef_unzipped_relpath / "data-dictionary" / "3.41.0.xml",
- _idsdef_unzipped_relpath / "data-dictionary" / "3.42.0.xml",
- _idsdef_unzipped_relpath / "data-dictionary" / "4.0.0.xml",
- _idsdef_unzipped_relpath
- / "identifiers"
- / "core_sources"
- / "core_source_identifier.xml",
- _idsdef_unzipped_relpath
- / "identifiers"
- / "equilibrium"
- / "equilibrium_profiles_2d_identifier.xml",
- ]
-
- for file_path in expected_ids_directories:
- assert os.path.exists(
- file_path
- ), f"Expected_ids_directories {file_path} does not exist"
-
- if _build_dir.exists():
- shutil.rmtree(_idsdef_unzipped_relpath)
diff --git a/pyproject.toml b/pyproject.toml
index db5111c..6134366 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -64,7 +64,8 @@ dependencies = [
"packaging",
"xxhash >= 2",
"saxonche",
- "gitpython"
+ "gitpython",
+ "imas_data_dictionaries",
]
[project.optional-dependencies]
@@ -108,7 +109,6 @@ test = [
]
[project.scripts]
-build_DD = "imas.dd_helpers:prepare_data_dictionaries"
imas = "imas.command.cli:cli"
[project.urls]
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 486b56d..0000000
--- a/setup.py
+++ /dev/null
@@ -1,140 +0,0 @@
-# pylint: disable=wrong-import-position
-# This file is part of IMAS-Python.
-# You should have received the IMAS-Python LICENSE file with this project.
-"""
-Packaging settings. Inspired by a minimal setup.py file, the Pandas cython build
-and the access-layer setup template.
-
-The installable IMAS-Python package tries to follow in the following order:
-- The style guide for Python code [PEP8](https://www.python.org/dev/peps/pep-0008/)
-- The [PyPA guide on packaging projects](
- https://packaging.python.org/guides/distributing-packages-using-setuptools/#distributing-packages)
-- The [PyPA tool recommendations](
- https://packaging.python.org/guides/tool-recommendations/), specifically:
- * Installing: [pip](https://pip.pypa.io/en/stable/)
- * Environment management: [venv](https://docs.python.org/3/library/venv.html)
- * Dependency management: [pip-tools](https://github.com/jazzband/pip-tools)
- * Packaging source distributions: [setuptools](https://setuptools.readthedocs.io/)
- * Packaging built distributions: [wheels](https://pythonwheels.com/)
-
-On the ITER cluster we handle the environment by using the `IMAS` module load.
-So instead, we install packages to the `USER_SITE` there, and do not use
-`pip`s `build-isolation`. See [IMAS-584](https://jira.iter.org/browse/IMAS-584)
-"""
-import importlib
-import importlib.util
-import site
-import traceback
-# Allow importing local files, see https://snarky.ca/what-the-heck-is-pyproject-toml/
-import sys
-import warnings
-# Import other stdlib packages
-from pathlib import Path
-
-# Use setuptools to build packages. Advised to import setuptools before distutils
-import setuptools
-from packaging.version import Version as V
-from setuptools import __version__ as setuptools_version
-from setuptools import setup
-from setuptools.command.build_ext import build_ext
-from setuptools.command.build_py import build_py
-from setuptools.command.sdist import sdist
-
-try:
- from wheel.bdist_wheel import bdist_wheel
-except ImportError:
- bdist_wheel = None
-
-# Ensure the current folder is on the import path:
-sys.path.append(str(Path(__file__).parent.resolve()))
-
-cannonical_python_command = "module load Python/3.8.6-GCCcore-10.2.0"
-
-if sys.version_info < (3, 7):
- sys.exit(
- "Sorry, Python < 3.7 is not supported. Use a different"
- f" python e.g. '{cannonical_python_command}'"
- )
-if sys.version_info < (3, 8):
- warnings.warn("Python < 3.8 support on best-effort basis", FutureWarning)
-
-
-# Check setuptools version before continuing for legacy builds
-# Version 61 is required for pyproject.toml support
-if V(setuptools_version) < V("61"):
- raise RuntimeError(
- "Setuptools version outdated. Found"
- f" {V(setuptools_version)} need at least {V('61')}"
- )
-
-# Workaround for https://github.com/pypa/pip/issues/7953
-# Cannot install into user site directory with editable source
-site.ENABLE_USER_SITE = "--user" in sys.argv[1:]
-
-
-# We need to know where we are for many things
-this_file = Path(__file__)
-this_dir = this_file.parent.resolve()
-
-# Start: Load dd_helpers
-dd_helpers_file = this_dir / "imas/dd_helpers.py"
-assert dd_helpers_file.is_file()
-spec = importlib.util.spec_from_file_location("dd_helpers", dd_helpers_file)
-module = importlib.util.module_from_spec(spec)
-spec.loader.exec_module(module)
-sys.modules["imas.dd_helpers"] = module
-from imas.dd_helpers import prepare_data_dictionaries # noqa
-
-# End: Load dd_helpers
-
-
-# Define building of the Data Dictionary as custom build step
-class BuildDDCommand(setuptools.Command):
- """A custom command to build the data dictionaries."""
-
- description = "build IDSDef.zip"
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- """Prepare DDs if they can be git pulled"""
- prepare_data_dictionaries()
-
-
-# Inject prepare_data_dictionaries() into the setuptool's build steps. So far it covers
-# all installation cases:
-# - `pip install -e .`` (from git clone)
-# - `python -m build``
-# - Source tarball from git-archive. Note: version only picked up when doing git-archive
-# from a tagged release,
-# `git archive HEAD -v -o imas.tar.gz && pip install imas.tar.gz`
-cmd_class = {}
-build_overrides = {"build_ext": build_ext, "build_py": build_py, "sdist": sdist}
-if bdist_wheel:
- build_overrides["bdist_wheel"] = bdist_wheel
-for name, cls in build_overrides.items():
-
- class build_DD_before(cls):
- """Build DD before executing original distutils command"""
-
- def run(self):
- try:
- prepare_data_dictionaries()
- except Exception:
- traceback.print_exc()
- print("Failed to build DD during setup, continuing without.")
- super().run()
-
- cmd_class[name] = build_DD_before
-
-
-if __name__ == "__main__":
- setup(
- zip_safe=False, # https://mypy.readthedocs.io/en/latest/installed_packages.html
- cmdclass={"build_DD": BuildDDCommand, **cmd_class}
- )
\ No newline at end of file