From a5acf1ef3a44d26481160f9f65ec9d5ee7469beb Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 9 Jul 2024 13:55:07 +0200 Subject: [PATCH 01/97] Implement `DBEntry.get_sample` in IMASPy --- imaspy/backends/db_entry_impl.py | 32 +- imaspy/backends/imas_core/al_context.py | 36 ++ imaspy/backends/imas_core/db_entry_al.py | 27 +- imaspy/backends/imas_core/imas_interface.py | 7 + imaspy/db_entry.py | 130 ++++++- imaspy/test/test_get_sample.py | 393 ++++++++++++++++++++ 6 files changed, 603 insertions(+), 22 deletions(-) create mode 100644 imaspy/test/test_get_sample.py diff --git a/imaspy/backends/db_entry_impl.py b/imaspy/backends/db_entry_impl.py index 7f86e622..dbbb1329 100644 --- a/imaspy/backends/db_entry_impl.py +++ b/imaspy/backends/db_entry_impl.py @@ -2,13 +2,34 @@ # You should have received the IMASPy LICENSE file with this project. from abc import ABC, abstractmethod -from typing import Any, List, Optional +from dataclasses import dataclass +from typing import Any, List, Optional, Union + +import numpy from imaspy.ids_convert import NBCPathMap from imaspy.ids_factory import IDSFactory from imaspy.ids_toplevel import IDSToplevel +@dataclass +class GetSliceParameters: + """Helper class to store parameters to get_slice.""" + + time_requested: float + interpolation_method: int + + +@dataclass +class GetSampleParameters: + """Helper class to store parameters to get_sample.""" + + tmin: float + tmax: float + dtime: Optional[numpy.ndarray] + interpolation_method: Optional[int] + + class DBEntryImpl(ABC): """Interface for DBEntry implementations.""" @@ -47,20 +68,17 @@ def get( self, ids_name: str, occurrence: int, - time_requested: Optional[float], - interpolation_method: int, + parameters: Union[None, GetSliceParameters, GetSampleParameters], destination: IDSToplevel, lazy: bool, nbc_map: Optional[NBCPathMap], ) -> None: - """Implement DBEntry.get()/get_slice(). Load data from the data source. + """Implement DBEntry.get/get_slice/get_sample. 
Load data from the data source. Args: ids_name: Name of the IDS to load. occurrence: Which occurence of the IDS to load. - time_requested: None for get(), requested time slice for get_slice(). - interpolation_method: Requested interpolation method (ignore when - time_requested is None). + parameters: Additional parameters for a get_slice/get_sample call. destination: IDS object to store data in. lazy: Use lazy loading. nbc_map: NBCPathMap to use for implicit conversion. When None, no implicit diff --git a/imaspy/backends/imas_core/al_context.py b/imaspy/backends/imas_core/al_context.py index 07f37dec..d14f6bfd 100644 --- a/imaspy/backends/imas_core/al_context.py +++ b/imaspy/backends/imas_core/al_context.py @@ -8,6 +8,8 @@ from contextlib import contextmanager from typing import TYPE_CHECKING, Any, Callable, Iterator, List, Optional, Tuple +import numpy + from imaspy.backends.imas_core.imas_interface import ll_interface from imaspy.exception import LowlevelError from imaspy.ids_defs import ( @@ -105,6 +107,21 @@ def slice_action( raise LowlevelError("slice_action", status) return ALContext(ctx) + def timerange_action( + self, + path: str, + rwmode: int, + tmin: float, + tmax: float, + dtime: Optional[numpy.ndarray], + interpolation_method: int, + ) -> "ALContext": + """Begin a new timerange action for use in a ``with`` context.""" + ctx = ll_interface.begin_timerange_action( + self.ctx, path, rwmode, tmin, tmax, dtime, interpolation_method + ) + return ALContext(ctx) + def arraystruct_action( self, path: str, timebase: str, size: int ) -> "ALArrayStructContext": @@ -317,6 +334,25 @@ def slice_action( (path, rwmode, time_requested, interpolation_method), ) + @contextmanager + def timerange_action( + self, + path: str, + rwmode: int, + tmin: float, + tmax: float, + dtime: Optional[numpy.ndarray], + interpolation_method: int, + ) -> Iterator["LazyALContext"]: + """Lazily start a lowlevel timerange action, see + :meth:`ALContext.timerange_action`. 
+ """ + yield LazyALContext( + self, + ALContext.timerange_action, + (path, rwmode, tmin, tmax, dtime, interpolation_method), + ) + def arraystruct_action( self, path: str, timebase: str, size: int ) -> "LazyALArrayStructContext": diff --git a/imaspy/backends/imas_core/db_entry_al.py b/imaspy/backends/imas_core/db_entry_al.py index e126bf9b..89cf3625 100644 --- a/imaspy/backends/imas_core/db_entry_al.py +++ b/imaspy/backends/imas_core/db_entry_al.py @@ -5,9 +5,10 @@ import logging import os from collections import deque -from typing import Any, Deque, List, Optional +from typing import Any, Deque, List, Optional, Union from urllib.parse import urlparse +from imaspy.backends.db_entry_impl import GetSampleParameters, GetSliceParameters from imaspy.db_entry import DBEntryImpl from imaspy.exception import DataEntryException, LowlevelError from imaspy.ids_convert import NBCPathMap, dd_version_map_from_factories @@ -216,8 +217,7 @@ def get( self, ids_name: str, occurrence: int, - time_requested: Optional[float], - interpolation_method: int, + parameters: Union[None, GetSliceParameters, GetSampleParameters], destination: IDSToplevel, lazy: bool, nbc_map: Optional[NBCPathMap], @@ -245,13 +245,28 @@ def get( else: context = self._db_ctx # Now fill the IDSToplevel - if time_requested is None or destination.metadata.type is IDSType.CONSTANT: + if parameters is None or destination.metadata.type is IDSType.CONSTANT: # called from get(), or when the IDS is constant (see IMAS-3330) manager = context.global_action(ll_path, READ_OP) - else: # get_slice + elif isinstance(parameters, GetSliceParameters): manager = context.slice_action( - ll_path, READ_OP, time_requested, interpolation_method + ll_path, + READ_OP, + parameters.time_requested, + parameters.interpolation_method, ) + elif isinstance(parameters, GetSampleParameters): + manager = context.timerange_action( + ll_path, + READ_OP, + parameters.tmin, + parameters.tmax, + parameters.dtime, + parameters.interpolation_method, + ) 
+ else: + raise TypeError(f"Incorrect type for parameters: {type(parameters)}.") + with manager as read_ctx: if lazy: destination._set_lazy_context(read_ctx) diff --git a/imaspy/backends/imas_core/imas_interface.py b/imaspy/backends/imas_core/imas_interface.py index 07f4783e..cca7d42f 100644 --- a/imaspy/backends/imas_core/imas_interface.py +++ b/imaspy/backends/imas_core/imas_interface.py @@ -215,6 +215,13 @@ def get_occurrences(self, ctx, ids_name): def get_al_version(self): return self._al_version_str + # New methods added in AL 5.3 + + def begin_timerange_action( + self, ctx, path, rwmode, tmin, tmax, dtime, interpolation_method + ): + raise self._minimal_version("5.3") + # Dummy documentation for interface: for funcname in dir(LowlevelInterface): diff --git a/imaspy/db_entry.py b/imaspy/db_entry.py index 9ca826b7..ba5bcac6 100644 --- a/imaspy/db_entry.py +++ b/imaspy/db_entry.py @@ -5,10 +5,16 @@ import logging import os -from typing import Any, List, Optional, Tuple, Type, overload +from typing import Any, List, Optional, Tuple, Type, Union, overload + +import numpy import imaspy -from imaspy.backends.db_entry_impl import DBEntryImpl +from imaspy.backends.db_entry_impl import ( + DBEntryImpl, + GetSampleParameters, + GetSliceParameters, +) from imaspy.dd_zip import dd_xml_versions from imaspy.exception import IDSNameError, UnknownDDVersion, ValidationError from imaspy.ids_base import IDSBase @@ -347,7 +353,6 @@ def get( ids_name, occurrence, None, - 0, destination, lazy, autoconvert, @@ -416,8 +421,117 @@ def get_slice( return self._get( ids_name, occurrence, - time_requested, - interpolation_method, + GetSliceParameters(time_requested, interpolation_method), + destination, + lazy, + autoconvert, + ignore_unknown_dd_version, + ) + + def get_sample( + self, + ids_name: str, + tmin: float, + tmax: float, + dtime: Optional[Union[float, numpy.ndarray]] = None, + interpolation_method: Optional[int] = None, + occurrence: int = 0, + *, + lazy: bool = False, + 
autoconvert: bool = True, + ignore_unknown_dd_version: bool = False, + destination: Optional[IDSToplevel] = None, + ) -> IDSToplevel: + """Read a range of time slices from an IDS in this Database Entry. + + This method has three different modes, depending on the provided arguments: + + 1. No interpolation. This method is selected when :param:`dtime` and + :param:`interpolation_method` are not provided. + + This mode returns an IDS object with all constant/static data filled. The + dynamic data is retrieved for the provided time range [tmin, tmax]. + + 2. Interpolate dynamic data on a uniform time base. This method is selected + when :param:`dtime` and :param:`interpolation_method` are provided. + :param:`dtime` must be a number or a numpy array of size 1. + + This mode will generate an IDS with a homogeneous time vector ``[tmin, tmin + + dtime, tmin + 2*dtime, ...`` up to ``tmax``. The returned IDS always has + ``ids_properties.homogeneous_time = 1``. + + 3. Interpolate dynamic data on an explicit time base. This method is selected + when :param:`dtime` and :param:`interpolation_method` are provided. + :param:`dtime` must be a numpy array of size larger than 1. + + This mode will generate an IDS with a homogeneous time vector equal to + :param:`dtime`. :param:`tmin` and :param:`tmax` are ignored in this mode. + The returned IDS always has ``ids_properties.homogeneous_time = 1``. + + Args: + ids_name: Name of the IDS to read from the backend + tmin: Lower bound of the requested time range + tmax: Upper bound of the requested time range, must be larger than or + equal to :param:`tmin` + dtime: Interval to use when interpolating, must be positive, or numpy array + containing an explicit time base to interpolate. + interpolation_method: Interpolation method to use. 
Available options: + + - :const:`~imaspy.ids_defs.CLOSEST_INTERP` + - :const:`~imaspy.ids_defs.PREVIOUS_INTERP` + - :const:`~imaspy.ids_defs.LINEAR_INTERP` + + occurrence: Which occurrence of the IDS to read. + + Keyword Args: + lazy: When set to ``True``, values in this IDS will be retrieved only when + needed (instead of getting the full IDS immediately). See :ref:`Lazy + loading` for more details. + autoconvert: Automatically convert IDSs. + + If enabled (default), a call to ``get_sample()`` will return + an IDS from the Data Dictionary version attached to this Data Entry. + Data is automatically converted between the on-disk version and the + in-memory version. + + When set to ``False``, the IDS will be returned in the DD version it was + stored in. + ignore_unknown_dd_version: When an IDS is stored with an unknown DD version, + do not attempt automatic conversion and fetch the data in the Data + Dictionary version attached to this Data Entry. + destination: Populate this IDSToplevel instead of creating an empty one. + + Returns: + The loaded IDS. + + Example: + .. 
code-block:: python + + import imaspy + import numpy + from imaspy import ids_defs + + imas_entry = imaspy.DBEntry( + "imas:mdsplus?user=public;pulse=131024;run=41;database=ITER", "r") + + # All time slices between t=200 and t=370 + core_profiles = imas_entry.get_sample("core_profiles", 200, 370) + + # Closest points to [0, 100, 200, ..., 1000] + core_profiles_interp = imas_entry.get_sample( + "core_profiles", 0, 1000, 100, ids_defs.CLOSEST_INTERP) + + # Linear interpolation for [10, 11, 12, 14, 16, 20, 30, 40, 50] + times = numpy.array([10, 11, 12, 14, 16, 20, 30, 40, 50]) + core_profiles_interp = imas_entry.get_sample( + "core_profiles", 0, 0, times, ids_defs.LINEAR_INTERP) + """ + if dtime is not None: + dtime = numpy.atleast_1d(dtime) # Convert floats and 0D arrays to 1D array + return self._get( + ids_name, + occurrence, + GetSampleParameters(tmin, tmax, dtime, interpolation_method), destination, lazy, autoconvert, @@ -428,8 +542,7 @@ def _get( self, ids_name: str, occurrence: int, - time_requested: Optional[float], - interpolation_method: int, + parameters: Union[None, GetSliceParameters, GetSampleParameters], destination: Optional[IDSToplevel], lazy: bool, autoconvert: bool, @@ -492,8 +605,7 @@ def _get( return self._dbe_impl.get( ids_name, occurrence, - time_requested, - interpolation_method, + parameters, destination, lazy, nbc_map, diff --git a/imaspy/test/test_get_sample.py b/imaspy/test/test_get_sample.py new file mode 100644 index 00000000..7c3b210f --- /dev/null +++ b/imaspy/test/test_get_sample.py @@ -0,0 +1,393 @@ +import numpy as np +import pytest + +import imaspy +from imaspy.backends.imas_core.imas_interface import lowlevel +from imaspy.exception import DataEntryException +from imaspy.ids_defs import ( + CLOSEST_INTERP, + EMPTY_FLOAT, + HDF5_BACKEND, + IDS_TIME_MODE_HETEROGENEOUS, + IDS_TIME_MODE_HOMOGENEOUS, + LINEAR_INTERP, + MDSPLUS_BACKEND, + PREVIOUS_INTERP, +) + + +@pytest.fixture() +def test_db_uri(backend, worker_id, tmp_path_factory): + 
# Check if begin_timerange_action is available in imas_core + if not hasattr(lowlevel, "al_begin_timerange_action"): + pytest.skip("imas_core version doesn't support begin_timerange_action.") + + if backend not in [HDF5_BACKEND, MDSPLUS_BACKEND]: + pytest.skip("Backend doesn't support time range operations.") + + tmp_path = tmp_path_factory.mktemp(f"testdb.{worker_id}") + backend_str = {HDF5_BACKEND: "hdf5", MDSPLUS_BACKEND: "mdsplus"}[backend] + uri = f"imas:{backend_str}?path={tmp_path}" + entry = imaspy.DBEntry(uri, "x") + + # Homogeneous core profiles: + cp = entry.factory.core_profiles() + cp.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS + N_time = 32 + cp.time = np.linspace(0, 1, N_time) + cp.profiles_1d.resize(N_time) + for i in range(N_time): + # FLT_1D: + cp.profiles_1d[i].grid.rho_tor_norm = np.array([0.0, 1.0]) + cp.profiles_1d[i].t_i_average = np.array([2.0, 1.0]) * (i + 1) + cp.profiles_1d[i].ion.resize(1) + # STR_0D: + cp.profiles_1d[i].ion[0].label = "D" + # FLT_0D + cp.profiles_1d[i].ion[0].z_ion = 1.0 + cp.profiles_1d[i].ion[0].temperature = cp.profiles_1d[i].t_i_average + # INT_0D + cp.profiles_1d[i].ion[0].temperature_validity = 0 + cp.global_quantities.ip = (2 - cp.time) ** 0.5 + entry.put(cp) + + # Inhomogeneous equilibrium + eq = entry.factory.equilibrium() + eq.ids_properties.homogeneous_time = IDS_TIME_MODE_HETEROGENEOUS + eq.time = np.linspace(0, 2, 512) + # GGD Grid with 1 time slice + eq.grids_ggd.resize(1) + eq.grids_ggd[0].time = 0.0 + eq.grids_ggd[0].grid.resize(1) + eq.grids_ggd[0].grid[0].path = "wall:0/description_ggd(1)/grid_ggd" + # multiple time slices with data + N_time = 6 + eq.time_slice.resize(N_time) + for i in range(N_time): + # FLT_0D + eq.time_slice[i].time = i / 5.0 + eq.time_slice[i].profiles_2d.resize(1) + # FLT_1D + eq.time_slice[i].profiles_2d[0].grid.dim1 = np.array([0.0, 1.0]) + eq.time_slice[i].profiles_2d[0].grid.dim2 = np.array([3.0, 4.0]) + # STR_0D + eq.time_slice[i].profiles_2d[0].grid_type.name 
= f"test {i}" + eq.time_slice[i].profiles_2d[0].grid_type.description = "test description" + # INT_0D + eq.time_slice[i].profiles_2d[0].grid_type.index = -1 + # FLT_2D + eq.time_slice[i].profiles_2d[0].r = np.array([[0.0, 0.0], [1.0, 1.0]]) + eq.time_slice[i].profiles_2d[0].z = np.array([[3.0, 4.0], [3.0, 4.0]]) + eq.time_slice[i].profiles_2d[0].psi = ( + eq.time_slice[i].profiles_2d[0].r - eq.time_slice[i].profiles_2d[0].z + ) * (1 + eq.time_slice[i].time) ** 2 + entry.put(eq) + + # Equilibrium only has dynamic AOS and no other non-homogenous time nodes + # Use magnetics to test that case: + mag = entry.factory.magnetics() + mag.ids_properties.homogeneous_time = IDS_TIME_MODE_HETEROGENEOUS + mag.time = np.array([0.0]) + mag.flux_loop.resize(3) + for i in range(3): + mag.flux_loop[i].flux.time = np.linspace(0.0123, 1, 5 + i) + mag.flux_loop[i].flux.data = 2 + 2 * mag.flux_loop[i].flux.time + mag.flux_loop[i].voltage.time = np.linspace(0.0123, 1, 8 + i) + mag.flux_loop[i].voltage.data = 2 - 5 * mag.flux_loop[i].voltage.time + entry.put(mag) + + entry.close() + return uri + + +def test_invalid_arguments(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + with pytest.raises(ValueError): + entry.get_sample("core_profiles", 0.3, 0.2) # tmin > tmax + with pytest.raises(DataEntryException): + entry.get_sample("core_profiles", 0.1, 0.2, occurrence="invalid") + with pytest.raises(ValueError): + entry.get_sample("core_profiles", 0.1, 0.2, 0.05) # no interpolation method + + +def test_get_sample_homogeneous(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + cp = entry.get_sample("core_profiles", 0.3, 14 / 31) + assert np.array_equal(cp.time, np.linspace(0, 1, 32)[10:15]) + + for i, p1d in enumerate(cp.profiles_1d): + assert np.array_equal(p1d.grid.rho_tor_norm, [0.0, 1.0]) + assert np.array_equal(p1d.t_i_average, np.array([2.0, 1.0]) * (i + 11)) + assert len(p1d.ion) == 1 + assert p1d.ion[0].label == "D" + assert p1d.ion[0].z_ion == 1 + assert 
np.array_equal(p1d.ion[0].temperature, p1d.t_i_average) + assert p1d.ion[0].temperature_validity == 0 + + assert np.array_equal(cp.global_quantities.ip, (2 - cp.time) ** 0.5) + + +def test_get_sample_heterogeneous(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + eq = entry.get_sample("equilibrium", -1.0, 0.2) + # Main time array + assert np.array_equal(eq.time, np.linspace(0, 2, 512)[:52]) + # grids_ggd AoS + assert len(eq.grids_ggd) == 1 + assert eq.grids_ggd[0].time == 0.0 + assert eq.grids_ggd[0].grid[0].path == "wall:0/description_ggd(1)/grid_ggd" + # time_slice AoS + assert len(eq.time_slice) == 2 + assert eq.time_slice[0].time == 0.0 + assert eq.time_slice[1].time == 0.2 + + for i in range(2): + p2d = eq.time_slice[i].profiles_2d[0] + assert np.array_equal(p2d.grid.dim1, [0.0, 1.0]) + assert np.array_equal(p2d.grid.dim2, [3.0, 4.0]) + assert p2d.grid_type.name == f"test {i}" + assert p2d.grid_type.index == -1 + assert np.array_equal(p2d.r, [[0.0, 0.0], [1.0, 1.0]]) + assert np.array_equal(p2d.z, [[3.0, 4.0], [3.0, 4.0]]) + expected_psi = (p2d.r - p2d.z) * (1 + eq.time_slice[i].time) ** 2 + assert np.array_equal(p2d.psi, expected_psi) + + mag = entry.get_sample("magnetics", 0.25, 0.75) + assert mag.ids_properties.homogeneous_time == IDS_TIME_MODE_HETEROGENEOUS + assert len(mag.time) == 0 + assert len(mag.flux_loop) == 3 + for i in range(3): + fl = mag.flux_loop[i] + + flux_time = np.linspace(0.0123, 1, 5 + i) + flux_time = flux_time[0.25 <= flux_time] + flux_time = flux_time[flux_time <= 0.75] + assert np.array_equal(fl.flux.time, flux_time) + assert np.array_equal(fl.flux.data, 2 + 2 * flux_time) + + voltage_time = np.linspace(0.0123, 1, 8 + i) + voltage_time = voltage_time[0.25 <= voltage_time] + voltage_time = voltage_time[voltage_time <= 0.75] + assert np.array_equal(fl.voltage.time, voltage_time) + assert np.array_equal(fl.voltage.data, 2 - 5 * voltage_time) + + +def test_get_sample_homogeneous_linear_interp(test_db_uri): + entry = 
imaspy.DBEntry(test_db_uri, "r") + # Note requesting 0.401 and not 0.4, since + # (0.3 + 0.02 + 0.02 + 0.02 + 0.02 + 0.02) = 0.4 + 5e-17 + cp = entry.get_sample("core_profiles", 0.3, 0.401, 0.02, LINEAR_INTERP) + assert np.allclose(cp.time, np.linspace(0.3, 0.4, 6), rtol=1e-14, atol=0) + + assert len(cp.profiles_1d) == 6 + # Check some interpolated values + for i in range(6): + # Check rho_tor_norm + rho_tor_norm = cp.profiles_1d[i].grid.rho_tor_norm + assert np.array_equal(rho_tor_norm, np.array([0.0, 1.0])) + # Check t_i_average + expected = np.array([2.0, 1.0]) * (1 + 31 * cp.time[i]) + t_i_average = cp.profiles_1d[i].t_i_average + assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) + + +def test_get_sample_homogeneous_explicit_timebase(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + times = [0.1, 0.2345, 0.5, np.sqrt(2) / 2] + cp = entry.get_sample("core_profiles", 0, 0, times, LINEAR_INTERP) + assert np.allclose(cp.time, times, rtol=1e-14, atol=0) + + assert len(cp.profiles_1d) == 4 + # Check some interpolated values + for i in range(4): + # Check rho_tor_norm + rho_tor_norm = cp.profiles_1d[i].grid.rho_tor_norm + assert np.array_equal(rho_tor_norm, np.array([0.0, 1.0])) + # Check t_i_average + expected = np.array([2.0, 1.0]) * (1 + 31 * cp.time[i]) + t_i_average = cp.profiles_1d[i].t_i_average + assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) + + +def test_get_sample_homogeneous_previous_interp(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + # Note requesting 0.401 and not 0.4, since + # (0.3 + 0.02 + 0.02 + 0.02 + 0.02 + 0.02) = 0.4 + 5e-17 + cp = entry.get_sample("core_profiles", 0.3, 0.401, 0.02, PREVIOUS_INTERP) + assert np.allclose(cp.time, np.linspace(0.3, 0.4, 6), rtol=1e-14, atol=0) + + assert len(cp.profiles_1d) == 6 + # Check some interpolated values + for i in range(6): + # Check rho_tor_norm + rho_tor_norm = cp.profiles_1d[i].grid.rho_tor_norm + assert np.array_equal(rho_tor_norm, np.array([0.0, 1.0])) 
+ # Check t_i_average + expected = np.array([2.0, 1.0]) * [10, 10, 11, 12, 12, 13][i] + t_i_average = cp.profiles_1d[i].t_i_average + assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) + + +def test_get_sample_homogeneous_closest_interp(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + # Note requesting 0.401 and not 0.4, since + # (0.3 + 0.02 + 0.02 + 0.02 + 0.02 + 0.02) = 0.4 + 5e-17 + cp = entry.get_sample("core_profiles", 0.3, 0.401, 0.02, CLOSEST_INTERP) + assert np.allclose(cp.time, np.linspace(0.3, 0.4, 6), rtol=1e-14, atol=0) + + assert len(cp.profiles_1d) == 6 + # Check some interpolated values + for i in range(6): + # Check rho_tor_norm + rho_tor_norm = cp.profiles_1d[i].grid.rho_tor_norm + assert np.array_equal(rho_tor_norm, np.array([0.0, 1.0])) + # Check t_i_average + expected = np.array([2.0, 1.0]) * [10, 11, 12, 12, 13, 13][i] + t_i_average = cp.profiles_1d[i].t_i_average + assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) + + +def test_get_sample_heterogeneous_linear_interp(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + eq = entry.get_sample("equilibrium", 0.2, 0.501, 0.05, LINEAR_INTERP) + N_samples = 7 + # IDS becomes homogeneous after resampling + assert np.allclose(eq.time, np.linspace(0.2, 0.5, N_samples)) + assert eq.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS + + # Check interpolated grids_ggd + assert len(eq.grids_ggd) == N_samples + for i in range(N_samples): + assert eq.grids_ggd[i].time == EMPTY_FLOAT + assert len(eq.grids_ggd[i].grid) == 1 + assert eq.grids_ggd[i].grid[0].path == "wall:0/description_ggd(1)/grid_ggd" + + # Check interpolated time_slice + assert len(eq.time_slice) == N_samples + for i in range(N_samples): + assert eq.time_slice[i].time == EMPTY_FLOAT + assert len(eq.time_slice[i].profiles_2d) == 1 + p2d = eq.time_slice[i].profiles_2d[0] + assert np.array_equal(p2d.grid.dim1, [0.0, 1.0]) + assert np.array_equal(p2d.grid.dim2, [3.0, 4.0]) + + # Determine the data 
as we have stored it in test_db_uri() + time = eq.time[i] + original_times = [0, 0.2, 0.4, 0.6, 0.8, 1.0] + index = np.searchsorted(original_times, time) + prevtime = original_times[index - 1] + nexttime = original_times[index] + prevpsi = (p2d.r - p2d.z) * (1 + prevtime) ** 2 + nextpsi = (p2d.r - p2d.z) * (1 + nexttime) ** 2 + # Linear interpolation + expected_psi = (nextpsi * (time - prevtime) + prevpsi * (nexttime - time)) / ( + nexttime - prevtime + ) + assert np.allclose(p2d.psi, expected_psi, rtol=1e-14, atol=0) + + mag = entry.get_sample("magnetics", 0.2, 0.501, 0.05, LINEAR_INTERP) + assert mag.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS + assert np.allclose(mag.time, np.linspace(0.2, 0.5, N_samples)) + + assert len(mag.flux_loop) == 3 + for i in range(3): + fl = mag.flux_loop[i] + assert np.allclose(fl.flux.data, 2 + 2 * mag.time, rtol=1e-14, atol=0) + assert np.allclose(fl.voltage.data, 2 - 5 * mag.time, rtol=1e-14, atol=2e-16) + + +def test_get_sample_heterogeneous_previous_interp(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + eq = entry.get_sample("equilibrium", 0.2, 0.501, 0.05, PREVIOUS_INTERP) + N_samples = 7 + # IDS becomes homogeneous after resampling + assert np.allclose(eq.time, np.linspace(0.2, 0.5, N_samples)) + assert eq.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS + + # Check interpolated grids_ggd + assert len(eq.grids_ggd) == N_samples + for i in range(N_samples): + assert eq.grids_ggd[i].time == EMPTY_FLOAT + assert len(eq.grids_ggd[i].grid) == 1 + assert eq.grids_ggd[i].grid[0].path == "wall:0/description_ggd(1)/grid_ggd" + + # Check interpolated time_slice + assert len(eq.time_slice) == N_samples + for i in range(N_samples): + assert eq.time_slice[i].time == EMPTY_FLOAT + assert len(eq.time_slice[i].profiles_2d) == 1 + p2d = eq.time_slice[i].profiles_2d[0] + assert np.array_equal(p2d.grid.dim1, [0.0, 1.0]) + assert np.array_equal(p2d.grid.dim2, [3.0, 4.0]) + + origtime = [0.2, 0.2, 0.2, 0.2, 
0.4, 0.4, 0.4][i] + expected_psi = (p2d.r - p2d.z) * (1 + origtime) ** 2 + assert np.allclose(p2d.psi, expected_psi, rtol=1e-14, atol=0) + + mag = entry.get_sample("magnetics", 0.2, 0.501, 0.05, PREVIOUS_INTERP) + assert mag.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS + assert np.allclose(mag.time, np.linspace(0.2, 0.5, N_samples)) + + assert len(mag.flux_loop) == 3 + for i in range(3): + fl = mag.flux_loop[i] + + flux_time = np.linspace(0.0123, 1, 5 + i) + flux_time = flux_time[np.searchsorted(flux_time, mag.time, side="right") - 1] + assert np.array_equal(fl.flux.data, 2 + 2 * flux_time) + + voltage_time = np.linspace(0.0123, 1, 8 + i) + voltage_time = voltage_time[ + np.searchsorted(voltage_time, mag.time, side="right") - 1 + ] + assert np.array_equal(fl.voltage.data, 2 - 5 * voltage_time) + + +def test_get_sample_heterogeneous_closest_interp(test_db_uri): + entry = imaspy.DBEntry(test_db_uri, "r") + eq = entry.get_sample("equilibrium", 0.2, 0.501, 0.05, CLOSEST_INTERP) + N_samples = 7 + # IDS becomes homogeneous after resampling + assert np.allclose(eq.time, np.linspace(0.2, 0.5, N_samples)) + assert eq.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS + + # Check interpolated grids_ggd + assert len(eq.grids_ggd) == N_samples + for i in range(N_samples): + assert eq.grids_ggd[i].time == EMPTY_FLOAT + assert len(eq.grids_ggd[i].grid) == 1 + assert eq.grids_ggd[i].grid[0].path == "wall:0/description_ggd(1)/grid_ggd" + + # Check interpolated time_slice + assert len(eq.time_slice) == N_samples + for i in range(N_samples): + assert eq.time_slice[i].time == EMPTY_FLOAT + assert len(eq.time_slice[i].profiles_2d) == 1 + p2d = eq.time_slice[i].profiles_2d[0] + assert np.array_equal(p2d.grid.dim1, [0.0, 1.0]) + assert np.array_equal(p2d.grid.dim2, [3.0, 4.0]) + + # Note: CLOSEST appears to round up: 0.4 is closer to 0.3 than 0.2 + origtime = [0.2, 0.2, 0.4, 0.4, 0.4, 0.4, 0.6][i] + expected_psi = (p2d.r - p2d.z) * (1 + origtime) ** 2 + assert 
np.allclose(p2d.psi, expected_psi, rtol=1e-14, atol=0) + + mag = entry.get_sample("magnetics", 0.2, 0.501, 0.05, CLOSEST_INTERP) + assert mag.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS + assert np.allclose(mag.time, np.linspace(0.2, 0.5, N_samples)) + + assert len(mag.flux_loop) == 3 + for i in range(3): + fl = mag.flux_loop[i] + + flux_time = np.linspace(0.0123, 1, 5 + i) + flux_time = flux_time[ + np.argmin(np.abs(flux_time[None, :] - mag.time[:, None]), axis=1) + ] + assert np.array_equal(fl.flux.data, 2 + 2 * flux_time) + + voltage_time = np.linspace(0.0123, 1, 8 + i) + voltage_time = voltage_time[ + np.argmin(np.abs(voltage_time[None, :] - mag.time[:, None]), axis=1) + ] + assert np.array_equal(fl.voltage.data, 2 - 5 * voltage_time) From fd49e02171ab03cb997303bb31eb9543db2ae251 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 9 Jul 2024 14:16:14 +0200 Subject: [PATCH 02/97] Add docstrings for GetSliceParameters / GetSampleParameters --- imaspy/backends/db_entry_impl.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/imaspy/backends/db_entry_impl.py b/imaspy/backends/db_entry_impl.py index dbbb1329..9fa42bd8 100644 --- a/imaspy/backends/db_entry_impl.py +++ b/imaspy/backends/db_entry_impl.py @@ -17,7 +17,9 @@ class GetSliceParameters: """Helper class to store parameters to get_slice.""" time_requested: float + """See :param:`imaspy.db_entry.DBEntry.get_slice.time_requested`.""" interpolation_method: int + """See :param:`imaspy.db_entry.DBEntry.get_slice.interpolation_method`.""" @dataclass @@ -25,9 +27,13 @@ class GetSampleParameters: """Helper class to store parameters to get_sample.""" tmin: float + """See :param:`imaspy.db_entry.DBEntry.get_sample.tmin`.""" tmax: float + """See :param:`imaspy.db_entry.DBEntry.get_sample.tmax`.""" dtime: Optional[numpy.ndarray] + """See :param:`imaspy.db_entry.DBEntry.get_sample.dtime`.""" interpolation_method: Optional[int] + """See 
:param:`imaspy.db_entry.DBEntry.get_sample.interpolation_method`.""" class DBEntryImpl(ABC): From 73268bb212c06c1dc23223a24e31537d8cffbee6 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Mon, 11 Nov 2024 10:11:44 +0100 Subject: [PATCH 03/97] Update tests for DD 4.0.0 --- imaspy/test/test_get_sample.py | 54 +++++++++++++++------------------- 1 file changed, 24 insertions(+), 30 deletions(-) diff --git a/imaspy/test/test_get_sample.py b/imaspy/test/test_get_sample.py index 7c3b210f..beffe52d 100644 --- a/imaspy/test/test_get_sample.py +++ b/imaspy/test/test_get_sample.py @@ -6,7 +6,6 @@ from imaspy.exception import DataEntryException from imaspy.ids_defs import ( CLOSEST_INTERP, - EMPTY_FLOAT, HDF5_BACKEND, IDS_TIME_MODE_HETEROGENEOUS, IDS_TIME_MODE_HOMOGENEOUS, @@ -28,7 +27,7 @@ def test_db_uri(backend, worker_id, tmp_path_factory): tmp_path = tmp_path_factory.mktemp(f"testdb.{worker_id}") backend_str = {HDF5_BACKEND: "hdf5", MDSPLUS_BACKEND: "mdsplus"}[backend] uri = f"imas:{backend_str}?path={tmp_path}" - entry = imaspy.DBEntry(uri, "x") + entry = imaspy.DBEntry(uri, "x", dd_version="4.0.0") # Homogeneous core profiles: cp = entry.factory.core_profiles() @@ -42,7 +41,7 @@ def test_db_uri(backend, worker_id, tmp_path_factory): cp.profiles_1d[i].t_i_average = np.array([2.0, 1.0]) * (i + 1) cp.profiles_1d[i].ion.resize(1) # STR_0D: - cp.profiles_1d[i].ion[0].label = "D" + cp.profiles_1d[i].ion[0].name = "D" # FLT_0D cp.profiles_1d[i].ion[0].z_ion = 1.0 cp.profiles_1d[i].ion[0].temperature = cp.profiles_1d[i].t_i_average @@ -100,8 +99,12 @@ def test_db_uri(backend, worker_id, tmp_path_factory): return uri -def test_invalid_arguments(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +@pytest.fixture() +def entry(test_db_uri): + return imaspy.DBEntry(test_db_uri, "r", dd_version="4.0.0") + + +def test_invalid_arguments(entry): with pytest.raises(ValueError): entry.get_sample("core_profiles", 0.3, 0.2) # tmin > tmax with pytest.raises(DataEntryException): @@ 
-110,8 +113,7 @@ def test_invalid_arguments(test_db_uri): entry.get_sample("core_profiles", 0.1, 0.2, 0.05) # no interpolation method -def test_get_sample_homogeneous(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_homogeneous(entry): cp = entry.get_sample("core_profiles", 0.3, 14 / 31) assert np.array_equal(cp.time, np.linspace(0, 1, 32)[10:15]) @@ -119,7 +121,7 @@ def test_get_sample_homogeneous(test_db_uri): assert np.array_equal(p1d.grid.rho_tor_norm, [0.0, 1.0]) assert np.array_equal(p1d.t_i_average, np.array([2.0, 1.0]) * (i + 11)) assert len(p1d.ion) == 1 - assert p1d.ion[0].label == "D" + assert p1d.ion[0].name == "D" assert p1d.ion[0].z_ion == 1 assert np.array_equal(p1d.ion[0].temperature, p1d.t_i_average) assert p1d.ion[0].temperature_validity == 0 @@ -127,8 +129,7 @@ def test_get_sample_homogeneous(test_db_uri): assert np.array_equal(cp.global_quantities.ip, (2 - cp.time) ** 0.5) -def test_get_sample_heterogeneous(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_heterogeneous(entry): eq = entry.get_sample("equilibrium", -1.0, 0.2) # Main time array assert np.array_equal(eq.time, np.linspace(0, 2, 512)[:52]) @@ -172,8 +173,7 @@ def test_get_sample_heterogeneous(test_db_uri): assert np.array_equal(fl.voltage.data, 2 - 5 * voltage_time) -def test_get_sample_homogeneous_linear_interp(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_homogeneous_linear_interp(entry): # Note requesting 0.401 and not 0.4, since # (0.3 + 0.02 + 0.02 + 0.02 + 0.02 + 0.02) = 0.4 + 5e-17 cp = entry.get_sample("core_profiles", 0.3, 0.401, 0.02, LINEAR_INTERP) @@ -191,8 +191,7 @@ def test_get_sample_homogeneous_linear_interp(test_db_uri): assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) -def test_get_sample_homogeneous_explicit_timebase(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_homogeneous_explicit_timebase(entry): times = [0.1, 0.2345, 0.5, 
np.sqrt(2) / 2] cp = entry.get_sample("core_profiles", 0, 0, times, LINEAR_INTERP) assert np.allclose(cp.time, times, rtol=1e-14, atol=0) @@ -209,8 +208,7 @@ def test_get_sample_homogeneous_explicit_timebase(test_db_uri): assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) -def test_get_sample_homogeneous_previous_interp(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_homogeneous_previous_interp(entry): # Note requesting 0.401 and not 0.4, since # (0.3 + 0.02 + 0.02 + 0.02 + 0.02 + 0.02) = 0.4 + 5e-17 cp = entry.get_sample("core_profiles", 0.3, 0.401, 0.02, PREVIOUS_INTERP) @@ -228,8 +226,7 @@ def test_get_sample_homogeneous_previous_interp(test_db_uri): assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) -def test_get_sample_homogeneous_closest_interp(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_homogeneous_closest_interp(entry): # Note requesting 0.401 and not 0.4, since # (0.3 + 0.02 + 0.02 + 0.02 + 0.02 + 0.02) = 0.4 + 5e-17 cp = entry.get_sample("core_profiles", 0.3, 0.401, 0.02, CLOSEST_INTERP) @@ -247,8 +244,7 @@ def test_get_sample_homogeneous_closest_interp(test_db_uri): assert np.allclose(t_i_average, expected, rtol=1e-14, atol=0) -def test_get_sample_heterogeneous_linear_interp(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_heterogeneous_linear_interp(entry): eq = entry.get_sample("equilibrium", 0.2, 0.501, 0.05, LINEAR_INTERP) N_samples = 7 # IDS becomes homogeneous after resampling @@ -258,14 +254,14 @@ def test_get_sample_heterogeneous_linear_interp(test_db_uri): # Check interpolated grids_ggd assert len(eq.grids_ggd) == N_samples for i in range(N_samples): - assert eq.grids_ggd[i].time == EMPTY_FLOAT + # assert eq.grids_ggd[i].time == EMPTY_FLOAT assert len(eq.grids_ggd[i].grid) == 1 assert eq.grids_ggd[i].grid[0].path == "wall:0/description_ggd(1)/grid_ggd" # Check interpolated time_slice assert len(eq.time_slice) == N_samples for i in 
range(N_samples): - assert eq.time_slice[i].time == EMPTY_FLOAT + # assert eq.time_slice[i].time == EMPTY_FLOAT assert len(eq.time_slice[i].profiles_2d) == 1 p2d = eq.time_slice[i].profiles_2d[0] assert np.array_equal(p2d.grid.dim1, [0.0, 1.0]) @@ -296,8 +292,7 @@ def test_get_sample_heterogeneous_linear_interp(test_db_uri): assert np.allclose(fl.voltage.data, 2 - 5 * mag.time, rtol=1e-14, atol=2e-16) -def test_get_sample_heterogeneous_previous_interp(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_heterogeneous_previous_interp(entry): eq = entry.get_sample("equilibrium", 0.2, 0.501, 0.05, PREVIOUS_INTERP) N_samples = 7 # IDS becomes homogeneous after resampling @@ -307,14 +302,14 @@ def test_get_sample_heterogeneous_previous_interp(test_db_uri): # Check interpolated grids_ggd assert len(eq.grids_ggd) == N_samples for i in range(N_samples): - assert eq.grids_ggd[i].time == EMPTY_FLOAT + # assert eq.grids_ggd[i].time == EMPTY_FLOAT assert len(eq.grids_ggd[i].grid) == 1 assert eq.grids_ggd[i].grid[0].path == "wall:0/description_ggd(1)/grid_ggd" # Check interpolated time_slice assert len(eq.time_slice) == N_samples for i in range(N_samples): - assert eq.time_slice[i].time == EMPTY_FLOAT + # assert eq.time_slice[i].time == EMPTY_FLOAT assert len(eq.time_slice[i].profiles_2d) == 1 p2d = eq.time_slice[i].profiles_2d[0] assert np.array_equal(p2d.grid.dim1, [0.0, 1.0]) @@ -343,8 +338,7 @@ def test_get_sample_heterogeneous_previous_interp(test_db_uri): assert np.array_equal(fl.voltage.data, 2 - 5 * voltage_time) -def test_get_sample_heterogeneous_closest_interp(test_db_uri): - entry = imaspy.DBEntry(test_db_uri, "r") +def test_get_sample_heterogeneous_closest_interp(entry): eq = entry.get_sample("equilibrium", 0.2, 0.501, 0.05, CLOSEST_INTERP) N_samples = 7 # IDS becomes homogeneous after resampling @@ -354,14 +348,14 @@ def test_get_sample_heterogeneous_closest_interp(test_db_uri): # Check interpolated grids_ggd assert len(eq.grids_ggd) == 
N_samples for i in range(N_samples): - assert eq.grids_ggd[i].time == EMPTY_FLOAT + # assert eq.grids_ggd[i].time == EMPTY_FLOAT assert len(eq.grids_ggd[i].grid) == 1 assert eq.grids_ggd[i].grid[0].path == "wall:0/description_ggd(1)/grid_ggd" # Check interpolated time_slice assert len(eq.time_slice) == N_samples for i in range(N_samples): - assert eq.time_slice[i].time == EMPTY_FLOAT + # assert eq.time_slice[i].time == EMPTY_FLOAT assert len(eq.time_slice[i].profiles_2d) == 1 p2d = eq.time_slice[i].profiles_2d[0] assert np.array_equal(p2d.grid.dim1, [0.0, 1.0]) From bc9ea191da4626ec92905b68352d3e12d2a10c08 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Mon, 11 Nov 2024 10:47:46 +0100 Subject: [PATCH 04/97] Update NCDBEntryImpl for get_sample and raise NotImplementedError --- imaspy/backends/netcdf/db_entry_nc.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imaspy/backends/netcdf/db_entry_nc.py index ba7334fc..f04630db 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imaspy/backends/netcdf/db_entry_nc.py @@ -1,9 +1,13 @@ """DBEntry implementation using NetCDF as a backend.""" import logging -from typing import List +from typing import List, Optional, Union -from imaspy.backends.db_entry_impl import DBEntryImpl +from imaspy.backends.db_entry_impl import ( + DBEntryImpl, + GetSampleParameters, + GetSliceParameters, +) from imaspy.backends.netcdf.ids2nc import IDS2NC from imaspy.backends.netcdf.nc2ids import nc2ids from imaspy.exception import DataEntryException @@ -74,15 +78,18 @@ def get( self, ids_name: str, occurrence: int, - time_requested: float | None, - interpolation_method: int, + parameters: Union[None, GetSliceParameters, GetSampleParameters], destination: IDSToplevel, lazy: bool, - nbc_map: NBCPathMap | None, + nbc_map: Optional[NBCPathMap], ) -> None: # Feature compatibility checks - if time_requested is not None: - raise NotImplementedError("`get_slice` is 
not available for netCDF files.") + if parameters is not None: + if isinstance(parameters, GetSliceParameters): + func = "get_slice" + else: + func = "get_sample" + raise NotImplementedError(f"`{func}` is not available for netCDF files.") if lazy: raise NotImplementedError( "Lazy loading is not implemented for netCDF files." From 674460bbece63ccbd51f03e1d644a7916d250a6f Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Mon, 11 Nov 2024 10:49:14 +0100 Subject: [PATCH 05/97] Set `begin_timerange_action` as available since AL core 5.4 --- imaspy/backends/imas_core/imas_interface.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/imaspy/backends/imas_core/imas_interface.py b/imaspy/backends/imas_core/imas_interface.py index cca7d42f..b92438b1 100644 --- a/imaspy/backends/imas_core/imas_interface.py +++ b/imaspy/backends/imas_core/imas_interface.py @@ -215,12 +215,12 @@ def get_occurrences(self, ctx, ids_name): def get_al_version(self): return self._al_version_str - # New methods added in AL 5.3 + # New methods added in AL 5.4 def begin_timerange_action( self, ctx, path, rwmode, tmin, tmax, dtime, interpolation_method ): - raise self._minimal_version("5.3") + raise self._minimal_version("5.4") # Dummy documentation for interface: From 0a47f94e97467d6e57c65e335590daab25f021b0 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Thu, 14 Nov 2024 10:17:49 +0100 Subject: [PATCH 06/97] Make the NC2IDS reader class-based In preparation for validating the NC data --- imaspy/backends/netcdf/nc2ids.py | 157 ++++++++++++++++++------------- 1 file changed, 89 insertions(+), 68 deletions(-) diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index d071a3ba..2877b297 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -54,10 +54,98 @@ def _tree_iter( yield from _tree_iter(node, paths, curindex + (i,)) +class NC2IDS: + """Class responsible for reading an IDS from a NetCDF group.""" + + def 
__init__(self, group: netCDF4.Group, ids: IDSToplevel) -> None: + """Initialize NC2IDS converter. + + Args: + group: NetCDF group that stores the IDS data. + ids: Corresponding IDS toplevel to store the data in. + """ + self.group = group + """NetCDF Group that the IDS is stored in.""" + self.ids = ids + """IDS to store the data in.""" + + self.ncmeta = NCMetadata(ids.metadata) + """NetCDF related metadata.""" + self.variables = list(group.variables) + """List of variable names stored in the netCDF group.""" + # TODO: validate ids_properties.homogeneous_time + self.homogeneous_time = ( + group["ids_properties.homogeneous_time"][()] == IDS_TIME_MODE_HOMOGENEOUS + ) + """True iff the IDS time mode is homogeneous.""" + + # Don't use masked arrays: they're slow and we'll handle most of the unset + # values through the `:shape` arrays + self.group.set_auto_mask(False) + + def run(self) -> None: + # FIXME: ensure that var_names are sorted properly + # Current assumption is that creation-order is fine + for var_name in self.variables: + if var_name.endswith(":shape"): + continue # TODO: validate that this is used + + # FIXME: error handling: + metadata = self.ids.metadata[var_name] + + # TODO: validate metadata (data type, units, etc.) conforms to DD + + if metadata.data_type is IDSDataType.STRUCTURE: + continue # This only contains DD metadata we already know + + var = self.group[var_name] + if metadata.data_type is IDSDataType.STRUCT_ARRAY: + if "sparse" in var.ncattrs(): + shapes = self.group[var_name + ":shape"][()] + for index, node in tree_iter(self.ids, metadata): + node.resize(shapes[index][0]) + + else: + # FIXME: extract dimension name from nc file? 
+ dim = self.ncmeta.get_dimensions( + metadata.path_string, self.homogeneous_time + )[-1] + size = self.group.dimensions[dim].size + for _, node in tree_iter(self.ids, metadata): + node.resize(size) + + continue + + # FIXME: this may be a gigantic array, not required for sparse data + var = self.group[var_name] + data = var[()] + + if "sparse" in var.ncattrs(): + if metadata.ndim: + shapes = self.group[var_name + ":shape"][()] + for index, node in tree_iter(self.ids, metadata): + shape = shapes[index] + if shape.all(): + node.value = data[index + tuple(map(slice, shapes[index]))] + else: + for index, node in tree_iter(self.ids, metadata): + value = data[index] + if value != getattr(var, "_FillValue", None): + node.value = data[index] + + elif metadata.path_string not in self.ncmeta.aos: + # Shortcut for assigning untensorized data + self.ids[metadata.path] = data + + else: + for index, node in tree_iter(self.ids, metadata): + node.value = data[index] + + def nc2ids(group: netCDF4.Group, ids: IDSToplevel): """Get data from the netCDF group and store it in the provided IDS.""" try: - _nc2ids(group, ids) + NC2IDS(group, ids).run() except Exception as exc: raise RuntimeError( "An error occurred while reading data from the netCDF file " @@ -66,70 +154,3 @@ def nc2ids(group: netCDF4.Group, ids: IDSToplevel): "may cause errors in IMASPy. A more robust mechanism to load IDS data from " "netCDF files will be included in the next release of IMASPy." 
) from exc - - -def _nc2ids(group: netCDF4.Group, ids: IDSToplevel): - var_names = list(group.variables) - # FIXME: ensure that var_names are sorted properly - # Current assumption is that creation-order is fine - homogeneous_time = ( - group["ids_properties.homogeneous_time"][()] == IDS_TIME_MODE_HOMOGENEOUS - ) - ncmeta = NCMetadata(ids.metadata) - - # Never return masked arrays, they're slow and we'll handle most of the unset values - # through the `:shape` arrays - group.set_auto_mask(False) - - for var_name in var_names: - if var_name.endswith(":shape"): - continue # TODO: validate that this is used - - # FIXME: error handling: - metadata = ids.metadata[var_name] - - # TODO: validate metadata (data type, units, etc.) conforms to DD - - if metadata.data_type is IDSDataType.STRUCTURE: - continue # This only contains DD metadata we already know - - var = group[var_name] - if metadata.data_type is IDSDataType.STRUCT_ARRAY: - if "sparse" in var.ncattrs(): - shapes = group[var_name + ":shape"][()] - for index, node in tree_iter(ids, metadata): - node.resize(shapes[index][0]) - - else: - # FIXME: extract dimension name from nc file? 
- dim = ncmeta.get_dimensions(metadata.path_string, homogeneous_time)[-1] - size = group.dimensions[dim].size - for _, node in tree_iter(ids, metadata): - node.resize(size) - - continue - - # FIXME: this may be a gigantic array, not required for sparse data - var = group[var_name] - data = var[()] - - if "sparse" in var.ncattrs(): - if metadata.ndim: - shapes = group[var_name + ":shape"][()] - for index, node in tree_iter(ids, metadata): - shape = shapes[index] - if shape.all(): - node.value = data[index + tuple(map(slice, shapes[index]))] - else: - for index, node in tree_iter(ids, metadata): - value = data[index] - if value != getattr(var, "_FillValue", None): - node.value = data[index] - - elif metadata.path_string not in ncmeta.aos: - # Shortcut for assigning untensorized data - ids[metadata.path] = data - - else: - for index, node in tree_iter(ids, metadata): - node.value = data[index] From d59fcabc29d5691004649bcf48016255010d7fa7 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Thu, 14 Nov 2024 14:35:30 +0100 Subject: [PATCH 07/97] Add missing docstring --- imaspy/backends/netcdf/nc2ids.py | 1 + 1 file changed, 1 insertion(+) diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index 2877b297..cc3ebc25 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -84,6 +84,7 @@ def __init__(self, group: netCDF4.Group, ids: IDSToplevel) -> None: self.group.set_auto_mask(False) def run(self) -> None: + """Load the data from the netCDF group into the IDS.""" # FIXME: ensure that var_names are sorted properly # Current assumption is that creation-order is fine for var_name in self.variables: From ee385b736cb3f10a7378343f2de59e46815b26b7 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 15 Nov 2024 11:46:33 +0100 Subject: [PATCH 08/97] Disable MDSplus backend tests for get_sample Feature not yet implemented, see IMAS-5593 --- imaspy/test/test_get_sample.py | 3 ++- 1 file changed, 2 insertions(+), 
1 deletion(-) diff --git a/imaspy/test/test_get_sample.py b/imaspy/test/test_get_sample.py index beffe52d..0f5fed3e 100644 --- a/imaspy/test/test_get_sample.py +++ b/imaspy/test/test_get_sample.py @@ -21,7 +21,8 @@ def test_db_uri(backend, worker_id, tmp_path_factory): if not hasattr(lowlevel, "al_begin_timerange_action"): pytest.skip("imas_core version doesn't support begin_timerange_action.") - if backend not in [HDF5_BACKEND, MDSPLUS_BACKEND]: + # TODO: add MDSPLUS_BACKEND once implemented, see IMAS-5593 + if backend not in [HDF5_BACKEND]: pytest.skip("Backend doesn't support time range operations.") tmp_path = tmp_path_factory.mktemp(f"testdb.{worker_id}") From b007316a44e07a80a8ccad67f62b017a537b2332 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 13 Nov 2024 16:32:25 +0100 Subject: [PATCH 09/97] Add validation for the ids_properties.homogeneous time variable in netCDF IDSs. --- imaspy/backends/netcdf/ids2nc.py | 8 +++--- imaspy/backends/netcdf/nc2ids.py | 47 +++++++++++++++++++++++++++---- imaspy/exception.py | 4 +++ imaspy/test/test_nc_validation.py | 36 +++++++++++++++++++++++ 4 files changed, 85 insertions(+), 10 deletions(-) create mode 100644 imaspy/test/test_nc_validation.py diff --git a/imaspy/backends/netcdf/ids2nc.py b/imaspy/backends/netcdf/ids2nc.py index 9fad4044..34e63101 100644 --- a/imaspy/backends/netcdf/ids2nc.py +++ b/imaspy/backends/netcdf/ids2nc.py @@ -23,10 +23,10 @@ IDSDataType.CPX: netCDF4.default_fillvals["f8"] * (1 + 1j), } dtypes = { - IDSDataType.INT: numpy.int32, + IDSDataType.INT: numpy.dtype(numpy.int32), IDSDataType.STR: str, - IDSDataType.FLT: numpy.float64, - IDSDataType.CPX: numpy.complex128, + IDSDataType.FLT: numpy.dtype(numpy.float64), + IDSDataType.CPX: numpy.dtype(numpy.complex128), } SHAPE_DTYPE = numpy.int32 @@ -188,7 +188,7 @@ def create_variables(self) -> None: kwargs = {} if dtype is not str: # Enable compression: kwargs.update(compression="zlib", complevel=1) - if dtype is not numpy.complex128: # Set 
fillvalue + if dtype is not dtypes[IDSDataType.CPX]: # Set fillvalue kwargs.update(fill_value=default_fillvals[metadata.data_type]) # Create variable dimensions = get_dimensions(path, self.homogeneous_time) diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index cc3ebc25..24cbc7b3 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -2,7 +2,9 @@ import netCDF4 +from imaspy.backends.netcdf import ids2nc from imaspy.backends.netcdf.nc_metadata import NCMetadata +from imaspy.exception import InvalidNetCDFEntry from imaspy.ids_base import IDSBase from imaspy.ids_data_type import IDSDataType from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS @@ -73,16 +75,27 @@ def __init__(self, group: netCDF4.Group, ids: IDSToplevel) -> None: """NetCDF related metadata.""" self.variables = list(group.variables) """List of variable names stored in the netCDF group.""" - # TODO: validate ids_properties.homogeneous_time - self.homogeneous_time = ( - group["ids_properties.homogeneous_time"][()] == IDS_TIME_MODE_HOMOGENEOUS - ) - """True iff the IDS time mode is homogeneous.""" - # Don't use masked arrays: they're slow and we'll handle most of the unset # values through the `:shape` arrays self.group.set_auto_mask(False) + # Validate and get value of ids_properties.homogeneous_time + self.homogeneous_time = True # Must be initialized for self._validate_variable + """True iff the IDS time mode is homogeneous.""" + + if "ids_properties.homogeneous_time" not in self.variables: + raise InvalidNetCDFEntry( + "Mandatory variable `ids_properties.homogeneous_time` does not exist." + ) + var = group["ids_properties.homogeneous_time"] + self._validate_variable(var, ids.ids_properties.homogeneous_time.metadata) + if var[()] not in [0, 1, 2]: + raise InvalidNetCDFEntry( + f"Invalid value for ids_properties.homogeneous_time: {var[()]}. " + "Was expecting: 0, 1 or 2." 
+ ) + self.homogeneous_time = var[()] == IDS_TIME_MODE_HOMOGENEOUS + def run(self) -> None: """Load the data from the netCDF group into the IDS.""" # FIXME: ensure that var_names are sorted properly @@ -142,6 +155,28 @@ def run(self) -> None: for index, node in tree_iter(self.ids, metadata): node.value = data[index] + def _validate_variable(self, var: netCDF4.Variable, metadata: IDSMetadata) -> None: + """Validate that the variable has correct metadata, raise an exception if not. + + Args: + var: NetCDF variable + metadata: IDSMetadata of the corresponding IDS object + """ + if var.dtype != ids2nc.dtypes[metadata.data_type]: + raise InvalidNetCDFEntry( + f"Variable {var.name} has incorrect data type: {var.dtype}. " + f"Was expecting: {ids2nc.dtypes[metadata.data_type]}." + ) + # Dimensions + expected_dims = self.ncmeta.get_dimensions( + metadata.path_string, self.homogeneous_time + ) + if var.dimensions != expected_dims: + raise InvalidNetCDFEntry( + f"Variable {var.name} has incorrect dimensions: {var.dimensions}. " + f"Was expecting: {expected_dims}." 
+ ) + def nc2ids(group: netCDF4.Group, ids: IDSToplevel): """Get data from the netCDF group and store it in the provided IDS.""" diff --git a/imaspy/exception.py b/imaspy/exception.py index 8377d13b..550ce2ed 100644 --- a/imaspy/exception.py +++ b/imaspy/exception.py @@ -101,3 +101,7 @@ def __init__(self, node, dimension, expected_size, coor_path): super().__init__( f"Element `{node._path}` has incorrect shape {node.shape}: {details}" ) + + +class InvalidNetCDFEntry(Exception): + """Error raised when loading an IDS from a NetCDF file that fails validation.""" diff --git a/imaspy/test/test_nc_validation.py b/imaspy/test/test_nc_validation.py new file mode 100644 index 00000000..dc5309f0 --- /dev/null +++ b/imaspy/test/test_nc_validation.py @@ -0,0 +1,36 @@ +import netCDF4 +import pytest + +from imaspy.backends.netcdf.nc2ids import NC2IDS +from imaspy.exception import InvalidNetCDFEntry +from imaspy.ids_factory import IDSFactory + + +@pytest.fixture() +def memfile(): + with netCDF4.Dataset("-", "w", diskless=True) as memfile: + yield memfile + + +def test_invalid_homogeneous_time(memfile): + empty_group = memfile.createGroup("empty_group") + # Invalid dtype + invalid_dtype = memfile.createGroup("invalid_dtype") + invalid_dtype.createVariable("ids_properties.homogeneous_time", float, ())[()] = 0 + # Invalid shape: 1D instead of 0D + invalid_shape = memfile.createGroup("invalid_shape") + invalid_shape.createDimension("dim") + invalid_shape.createVariable("ids_properties.homogeneous_time", "i4", ("dim",)) + # Invalid value: not 0, 1 or 2 + invalid_value = memfile.createGroup("invalid_value") + invalid_value.createVariable("ids_properties.homogeneous_time", "i4", ()) + + ids = IDSFactory().core_profiles() + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(empty_group, ids) # ids_properties.homogeneous_time does not exist + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(invalid_dtype, ids) + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(invalid_shape, ids) + with 
pytest.raises(InvalidNetCDFEntry): + NC2IDS(invalid_value, ids) From f7be3845994c6510be29a35d47b58b7504fbaa7a Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 15 Nov 2024 16:05:27 +0100 Subject: [PATCH 10/97] Additional validation checks and tests --- imaspy/backends/netcdf/nc2ids.py | 109 ++++++++++++++++++++++++++---- imaspy/test/test_nc_validation.py | 69 ++++++++++++++++++- 2 files changed, 164 insertions(+), 14 deletions(-) diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index 24cbc7b3..e2cf65b3 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -1,3 +1,4 @@ +import logging from typing import Iterator, List, Tuple import netCDF4 @@ -12,6 +13,15 @@ from imaspy.ids_structure import IDSStructure from imaspy.ids_toplevel import IDSToplevel +logger = logging.getLogger(__name__) + + +def variable_error(var, issue, value, expected=None) -> InvalidNetCDFEntry: + return InvalidNetCDFEntry( + f"Variable `{var.name}` has incorrect {issue}: `{value}`." + + (f" Was expecting `{expected}`." 
if expected is not None else "") + ) + def split_on_aos(metadata: IDSMetadata): paths = [] @@ -98,6 +108,7 @@ def __init__(self, group: netCDF4.Group, ids: IDSToplevel) -> None: def run(self) -> None: """Load the data from the netCDF group into the IDS.""" + self._validate_variables() # FIXME: ensure that var_names are sorted properly # Current assumption is that creation-order is fine for var_name in self.variables: @@ -155,6 +166,42 @@ def run(self) -> None: for index, node in tree_iter(self.ids, metadata): node.value = data[index] + def _validate_variables(self) -> None: + """Validate that all variables in the netCDF Group exist and match the DD.""" + self.variables.sort() + for var_name in self.variables: + if var_name.endswith(":shape"): + # Check that there is a corresponding variable + data_var = var_name.rpartition(":shape")[0] + if data_var not in self.variables: + raise InvalidNetCDFEntry( + f"Invalid netCDF variable: {var_name}. " + f"Shape information provided for non-existing {data_var}." + ) + # Corresponding variable must be sparse + if "sparse" not in self.group[data_var].ncattrs(): + raise InvalidNetCDFEntry( + f"Shape information provided for {data_var}, but this variable " + "is not sparse." + ) + # That's all for :shape arrays + continue + + # Check that the DD defines this variable, and validate its metadata + var = self.group[var_name] + try: + metadata = self.ids.metadata[var_name] + except KeyError: + raise InvalidNetCDFEntry( + f"Invalid variable {var_name}: no such variable exists in the " + f"{self.ids.metadata.name} IDS." + ) + self._validate_variable(var, metadata) + + # Validate sparsity metadata + if "sparse" in var.ncattrs(): + ... # TODO + def _validate_variable(self, var: netCDF4.Variable, metadata: IDSMetadata) -> None: """Validate that the variable has correct metadata, raise an exception if not. 
@@ -162,20 +209,58 @@ def _validate_variable(self, var: netCDF4.Variable, metadata: IDSMetadata) -> No var: NetCDF variable metadata: IDSMetadata of the corresponding IDS object """ - if var.dtype != ids2nc.dtypes[metadata.data_type]: - raise InvalidNetCDFEntry( - f"Variable {var.name} has incorrect data type: {var.dtype}. " - f"Was expecting: {ids2nc.dtypes[metadata.data_type]}." + attrs: dict = vars(var).copy() + attrs.pop("_FillValue", None) + if metadata.data_type not in [IDSDataType.STRUCTURE, IDSDataType.STRUCT_ARRAY]: + # Data type + expected_dtype = ids2nc.dtypes[metadata.data_type] + if var.dtype != expected_dtype: + raise variable_error(var, "data type", var.dtype, expected_dtype) + + # Dimensions + expected_dims = self.ncmeta.get_dimensions( + metadata.path_string, self.homogeneous_time ) - # Dimensions - expected_dims = self.ncmeta.get_dimensions( - metadata.path_string, self.homogeneous_time - ) - if var.dimensions != expected_dims: - raise InvalidNetCDFEntry( - f"Variable {var.name} has incorrect dimensions: {var.dimensions}. " - f"Was expecting: {expected_dims}." 
+ if var.dimensions != expected_dims: + raise variable_error(var, "dimensions", var.dimensions, expected_dims) + + # Coordinates + coordinates = str(attrs.pop("coordinates", "")) + expected_coordinates = self.ncmeta.get_coordinates( + metadata.path_string, self.homogeneous_time ) + if any(coord not in expected_coordinates for coord in coordinates.split()): + raise variable_error( + var, "coordinates", coordinates, " ".join(expected_coordinates) + ) + + # Ancillary variables + ancvar = attrs.pop("ancillary_variables", None) + if ancvar: + allowed_ancvar = [f"{var.name}_error_upper", f"{var.name}_error_lower"] + if any(var not in allowed_ancvar for var in ancvar.split()): + raise variable_error( + var, "ancillary_variables", ancvar, " ".join(allowed_ancvar) + ) + + # Units + units = attrs.pop("units", None) + if metadata.units and metadata.units != units: + raise variable_error(var, "units", units, metadata.units) + + # Sparse + sparse = attrs.pop("sparse", None) + if sparse is not None: + ... 
# TODO + + # Documentation + doc = attrs.pop("documentation", None) + if metadata.documentation != doc: + logger.warning("Documentation of variable %s differs from the DD", var.name) + + # Unknown attrs + if attrs: + raise variable_error(var, "attributes", list(attrs.keys())) def nc2ids(group: netCDF4.Group, ids: IDSToplevel): diff --git a/imaspy/test/test_nc_validation.py b/imaspy/test/test_nc_validation.py index dc5309f0..d3bf8c09 100644 --- a/imaspy/test/test_nc_validation.py +++ b/imaspy/test/test_nc_validation.py @@ -1,8 +1,10 @@ import netCDF4 import pytest +from imaspy.backends.netcdf.ids2nc import IDS2NC from imaspy.backends.netcdf.nc2ids import NC2IDS from imaspy.exception import InvalidNetCDFEntry +from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS from imaspy.ids_factory import IDSFactory @@ -12,7 +14,26 @@ def memfile(): yield memfile -def test_invalid_homogeneous_time(memfile): +@pytest.fixture() +def factory(): + return IDSFactory("4.0.0") + + +@pytest.fixture() +def memfile_with_ids(memfile, factory): + ids = factory.core_profiles() + ids.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS + ids.time = [1.0, 2.0, 3.0] + ids.profiles_1d.resize(2) + for i in range(2): + ids.profiles_1d[i].grid.rho_tor_norm = [0.0, 0.2, 0.4, 0.6, 0.8, 1.0] + IDS2NC(ids, memfile).run() + # This one is valid: + NC2IDS(memfile, factory.core_profiles()).run() + return memfile + + +def test_invalid_homogeneous_time(memfile, factory): empty_group = memfile.createGroup("empty_group") # Invalid dtype invalid_dtype = memfile.createGroup("invalid_dtype") @@ -25,7 +46,7 @@ def test_invalid_homogeneous_time(memfile): invalid_value = memfile.createGroup("invalid_value") invalid_value.createVariable("ids_properties.homogeneous_time", "i4", ()) - ids = IDSFactory().core_profiles() + ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): NC2IDS(empty_group, ids) # ids_properties.homogeneous_time does not exist with pytest.raises(InvalidNetCDFEntry): @@ -34,3 
+55,47 @@ def test_invalid_homogeneous_time(memfile): NC2IDS(invalid_shape, ids) with pytest.raises(InvalidNetCDFEntry): NC2IDS(invalid_value, ids) + + +def test_invalid_units(memfile_with_ids, factory): + memfile_with_ids["time"].units = "hours" + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + + +def test_invalid_documentation(memfile_with_ids, factory, caplog): + with caplog.at_level("WARNING"): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + assert not caplog.records + # Invalid docstring logs a warning + memfile_with_ids["time"].documentation = "https://en.wikipedia.org/wiki/Time" + with caplog.at_level("WARNING"): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + assert len(caplog.records) == 1 + + +def test_invalid_dimension_name(memfile_with_ids, factory): + memfile_with_ids.renameDimension("time", "T") + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + + +def test_invalid_coordinates(memfile_with_ids, factory): + memfile_with_ids["profiles_1d.grid.rho_tor_norm"].coordinates = "xyz" + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + + +def test_invalid_ancillary_variables(memfile_with_ids, factory): + memfile_with_ids["time"].ancillary_variables = "xyz" + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + + +def test_extra_attributes(memfile_with_ids, factory): + memfile_with_ids["time"].new_attribute = [1, 2, 3] + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + + +# TODO: tests for sparsity information From e5246464d588069af3f0f25e5a0e00d41d7fd4ef Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 19 Nov 2024 10:21:18 +0100 Subject: [PATCH 11/97] Fix a bug with lazy loading multiple IDSs from the same HDF5 DBEntry Ensure lazy contexts belonging to a different IDS are always 
closed. See IMAS-5603 for more details. --- imaspy/backends/imas_core/al_context.py | 4 ++++ imaspy/test/test_lazy_loading.py | 20 ++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/imaspy/backends/imas_core/al_context.py b/imaspy/backends/imas_core/al_context.py index 07f37dec..10c0bf45 100644 --- a/imaspy/backends/imas_core/al_context.py +++ b/imaspy/backends/imas_core/al_context.py @@ -299,6 +299,10 @@ def get_context(self) -> ALContext: # from the cache else: + # Purge the cache to close open contexts from other IDSs (IMAS-5603) + cache = self.dbentry._lazy_ctx_cache + while cache: + cache.pop().close() return self.dbentry_ctx @contextmanager diff --git a/imaspy/test/test_lazy_loading.py b/imaspy/test/test_lazy_loading.py index 8c3b2fef..c0e54aad 100644 --- a/imaspy/test/test_lazy_loading.py +++ b/imaspy/test/test_lazy_loading.py @@ -163,3 +163,23 @@ def test_lazy_load_with_new_aos(requires_imas): assert len(lazy_et.model[0].ggd[0].electrons.particles.d_radial) == 0 dbentry.close() + + +def test_lazy_load_multiple_ids(backend, worker_id, tmp_path): + if backend == ASCII_BACKEND: + pytest.skip("Lazy loading is not supported by the ASCII backend.") + + with open_dbentry(backend, "w", worker_id, tmp_path) as dbentry: + cp = dbentry.factory.core_profiles() + cp.ids_properties.homogeneous_time = 1 + cp.time = [0.0, 1.0] + dbentry.put(cp) + eq = dbentry.factory.equilibrium() + eq.ids_properties.homogeneous_time = 1 + eq.time = [1.0, 2.0] + dbentry.put(eq) + + lazy_cp = dbentry.get("core_profiles", lazy=True) + lazy_eq = dbentry.get("equilibrium", lazy=True) + assert all(cp.time - eq.time == -1) + assert all(lazy_cp.time - lazy_eq.time == -1) From d47566baca31d9b53ca46ed0dfd06dde70030211 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 19 Nov 2024 13:26:29 +0100 Subject: [PATCH 12/97] Validate netCDF sparsity metadata --- imaspy/backends/netcdf/nc2ids.py | 60 +++++++++++++++++++++++-------- imaspy/test/test_nc_validation.py | 51 
++++++++++++++++++++++++-- 2 files changed, 93 insertions(+), 18 deletions(-) diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index e2cf65b3..3666d49e 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -1,5 +1,5 @@ import logging -from typing import Iterator, List, Tuple +from typing import Iterator, List, Optional, Tuple import netCDF4 @@ -108,18 +108,13 @@ def __init__(self, group: netCDF4.Group, ids: IDSToplevel) -> None: def run(self) -> None: """Load the data from the netCDF group into the IDS.""" + self.variables.sort() self._validate_variables() - # FIXME: ensure that var_names are sorted properly - # Current assumption is that creation-order is fine for var_name in self.variables: if var_name.endswith(":shape"): - continue # TODO: validate that this is used - - # FIXME: error handling: + continue metadata = self.ids.metadata[var_name] - # TODO: validate metadata (data type, units, etc.) conforms to DD - if metadata.data_type is IDSDataType.STRUCTURE: continue # This only contains DD metadata we already know @@ -168,7 +163,6 @@ def run(self) -> None: def _validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" - self.variables.sort() for var_name in self.variables: if var_name.endswith(":shape"): # Check that there is a corresponding variable @@ -184,7 +178,8 @@ def _validate_variables(self) -> None: f"Shape information provided for {data_var}, but this variable " "is not sparse." ) - # That's all for :shape arrays + # That's all for :shape arrays here, rest is checked in + # _validate_variable (which defers to _validate_sparsity) continue # Check that the DD defines this variable, and validate its metadata @@ -198,10 +193,6 @@ def _validate_variables(self) -> None: ) self._validate_variable(var, metadata) - # Validate sparsity metadata - if "sparse" in var.ncattrs(): - ... 
# TODO - def _validate_variable(self, var: netCDF4.Variable, metadata: IDSMetadata) -> None: """Validate that the variable has correct metadata, raise an exception if not. @@ -251,7 +242,9 @@ def _validate_variable(self, var: netCDF4.Variable, metadata: IDSMetadata) -> No # Sparse sparse = attrs.pop("sparse", None) if sparse is not None: - ... # TODO + shape_name = f"{var.name}:shape" + shape_var = self.group[shape_name] if shape_name in self.variables else None + self._validate_sparsity(var, shape_var, metadata) # Documentation doc = attrs.pop("documentation", None) @@ -262,6 +255,43 @@ def _validate_variable(self, var: netCDF4.Variable, metadata: IDSMetadata) -> No if attrs: raise variable_error(var, "attributes", list(attrs.keys())) + def _validate_sparsity( + self, + var: netCDF4.Variable, + shape_var: Optional[netCDF4.Variable], + metadata: IDSMetadata, + ) -> None: + """Validate that the variable has correct sparsity. + + Args: + var: Variable with a "sparse" attribute + shape_var: Corresponding shape array (if it exists in the NC group) + metadata: IDSMetadata of the corresponding IDS object + """ + if metadata.ndim == 0: + return # Sparsity is stored with _Fillvalue, nothing to validate + + # Dimensions + aos_dimensions = self.ncmeta.get_dimensions( + self.ncmeta.aos.get(metadata.path_string), self.homogeneous_time + ) + shape_dimensions = shape_var.dimensions + if ( + len(shape_dimensions) != len(aos_dimensions) + 1 + or shape_dimensions[:-1] != aos_dimensions + or self.group.dimensions[shape_dimensions[-1]].size != metadata.ndim + ): + expected_dims = aos_dimensions + (f"{metadata.ndim}D",) + raise variable_error( + shape_var, "dimensions", shape_dimensions, expected_dims + ) + + # Data type + if shape_var.dtype.kind not in "ui": # should be (un)signed integer + raise variable_error( + shape_var, "dtype", shape_var.dtype, "any integer type" + ) + def nc2ids(group: netCDF4.Group, ids: IDSToplevel): """Get data from the netCDF group and store it in the 
provided IDS.""" diff --git a/imaspy/test/test_nc_validation.py b/imaspy/test/test_nc_validation.py index d3bf8c09..f7cc029f 100644 --- a/imaspy/test/test_nc_validation.py +++ b/imaspy/test/test_nc_validation.py @@ -1,4 +1,5 @@ import netCDF4 +import numpy as np import pytest from imaspy.backends.netcdf.ids2nc import IDS2NC @@ -24,9 +25,10 @@ def memfile_with_ids(memfile, factory): ids = factory.core_profiles() ids.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS ids.time = [1.0, 2.0, 3.0] - ids.profiles_1d.resize(2) - for i in range(2): + ids.profiles_1d.resize(3) + for i in range(3): ids.profiles_1d[i].grid.rho_tor_norm = [0.0, 0.2, 0.4, 0.6, 0.8, 1.0] + ids.profiles_1d[0].zeff = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] IDS2NC(ids, memfile).run() # This one is valid: NC2IDS(memfile, factory.core_profiles()).run() @@ -98,4 +100,47 @@ def test_extra_attributes(memfile_with_ids, factory): NC2IDS(memfile_with_ids, factory.core_profiles()).run() -# TODO: tests for sparsity information +def test_shape_array_without_data(memfile_with_ids, factory): + memfile_with_ids.createVariable("profiles_1d.t_i_average:shape", int, ()) + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + + +def test_shape_array_without_sparse_data(memfile_with_ids, factory): + memfile_with_ids.createVariable("profiles_1d.grid.rho_tor_norm:shape", int, ()) + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, factory.core_profiles()).run() + + +def test_shape_array_with_invalid_dimensions(memfile_with_ids, factory): + cp = factory.core_profiles() + t_i_average_meta = cp.metadata["profiles_1d.t_i_average"] + t_i_average = memfile_with_ids.createVariable( + "profiles_1d.t_i_average", float, ("time", "profiles_1d.grid.rho_tor_norm:i") + ) + t_i_average.units = t_i_average_meta.units + t_i_average.documentation = t_i_average_meta.documentation + t_i_average.sparse = "Contents don't matter" + memfile_with_ids.createVariable( + 
"profiles_1d.t_i_average:shape", + np.int32, + ("time", "profiles_1d.grid.rho_tor_norm:i"), + ) + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, cp).run() + + +def test_shape_array_with_invalid_dtype(memfile_with_ids, factory): + cp = factory.core_profiles() + t_i_average_meta = cp.metadata["profiles_1d.t_i_average"] + t_i_average = memfile_with_ids.createVariable( + "profiles_1d.t_i_average", float, ("time", "profiles_1d.grid.rho_tor_norm:i") + ) + t_i_average.units = t_i_average_meta.units + t_i_average.documentation = t_i_average_meta.documentation + t_i_average.sparse = "Contents don't matter" + memfile_with_ids.createVariable( + "profiles_1d.t_i_average:shape", float, ("time", "1D") + ) + with pytest.raises(InvalidNetCDFEntry): + NC2IDS(memfile_with_ids, cp).run() From 54d78d6e08fd44343578bb0ae13c8404f7951de2 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 19 Nov 2024 13:49:33 +0100 Subject: [PATCH 13/97] Add environment variable to disable netCDF file validation. --- docs/source/configuring.rst | 7 +++++++ imaspy/backends/netcdf/nc2ids.py | 9 +++++++++ 2 files changed, 16 insertions(+) diff --git a/docs/source/configuring.rst b/docs/source/configuring.rst index 07073faf..dae11b6f 100644 --- a/docs/source/configuring.rst +++ b/docs/source/configuring.rst @@ -29,6 +29,13 @@ This page provides an overview of available variables. you can use :external:py:meth:`logging.getLogger("imaspy").setLevel(...) ` to change the log level programmatically. + +``IMASPY_DISABLE_NC_VALIDATE`` + Disables validation of netCDF files when loading an IDS from an IMAS netCDF file. + + .. caution:: + Disabling the validation may lead to errors when reading data from an IMAS netCDF file. + ``IMAS_VERSION`` Sets :ref:`The default Data Dictionary version` to use. 
diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index 3666d49e..0a69f964 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -1,4 +1,5 @@ import logging +import os from typing import Iterator, List, Optional, Tuple import netCDF4 @@ -163,6 +164,14 @@ def run(self) -> None: def _validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" + disable_validate = os.environ.get("IMASPY_DISABLE_NC_VALIDATE") + if disable_validate and disable_validate != "0": + logger.info( + "NetCDF file validation disabled: " + "This may lead to errors when reading data!" + ) + return # validation checks are disabled + for var_name in self.variables: if var_name.endswith(":shape"): # Check that there is a corresponding variable From 5ccae5dec8e4994748a42f0e7d970049504cda95 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 19 Nov 2024 15:50:32 +0100 Subject: [PATCH 14/97] Eliminate nc2ids function --- imaspy/backends/netcdf/db_entry_nc.py | 6 +++--- imaspy/backends/netcdf/nc2ids.py | 14 -------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imaspy/backends/netcdf/db_entry_nc.py index ba7334fc..9a0bf9c9 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imaspy/backends/netcdf/db_entry_nc.py @@ -5,7 +5,7 @@ from imaspy.backends.db_entry_impl import DBEntryImpl from imaspy.backends.netcdf.ids2nc import IDS2NC -from imaspy.backends.netcdf.nc2ids import nc2ids +from imaspy.backends.netcdf.nc2ids import NC2IDS from imaspy.exception import DataEntryException from imaspy.ids_convert import NBCPathMap, convert_ids from imaspy.ids_factory import IDSFactory @@ -98,13 +98,13 @@ def get( # Load data into the destination IDS if self._ds_factory.dd_version == destination._dd_version: - nc2ids(group, destination) + NC2IDS(group, destination).run() else: # FIXME: implement automatic conversion using nbc_map # 
As a work-around: do an explicit conversion, but automatic conversion # will also be needed to implement lazy loading. ids = self._ds_factory.new(ids_name) - nc2ids(group, ids) + NC2IDS(group, ids).run() convert_ids(ids, None, target=destination) return destination diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index 0a69f964..b74b4676 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -300,17 +300,3 @@ def _validate_sparsity( raise variable_error( shape_var, "dtype", shape_var.dtype, "any integer type" ) - - -def nc2ids(group: netCDF4.Group, ids: IDSToplevel): - """Get data from the netCDF group and store it in the provided IDS.""" - try: - NC2IDS(group, ids).run() - except Exception as exc: - raise RuntimeError( - "An error occurred while reading data from the netCDF file " - f"'{group.filepath()}'. The netCDF functionality is currently in " - "preview status. Unexpected data in an otherwise valid netCDF file " - "may cause errors in IMASPy. A more robust mechanism to load IDS data from " - "netCDF files will be included in the next release of IMASPy." - ) from exc From afb8c292c6527f03354ac35fa43dfaebd8fb4ef3 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 20 Nov 2024 10:50:59 +0100 Subject: [PATCH 15/97] Update `get_sample` docstring to clarify that the interpolation mode has no effect on the `ids.time` vector. See also https://git.iter.org/projects/IMAS/repos/al-matlab/pull-requests/29/overview?commentId=48957 --- imaspy/db_entry.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/imaspy/db_entry.py b/imaspy/db_entry.py index cb948fea..3834655d 100644 --- a/imaspy/db_entry.py +++ b/imaspy/db_entry.py @@ -459,7 +459,9 @@ def get_sample( :param:`dtime` must be a number or a numpy array of size 1. This mode will generate an IDS with a homogeneous time vector ``[tmin, tmin - + dtime, tmin + 2*dtime, ...`` up to ``tmax``. 
The returned IDS always has + + dtime, tmin + 2*dtime, ...`` up to ``tmax``. The chosen interpolation + method will have no effect on the time vector, but may have an impact on the + other dynamic values. The returned IDS always has ``ids_properties.homogeneous_time = 1``. 3. Interpolate dynamic data on an explicit time base. This method is selected @@ -468,7 +470,9 @@ def get_sample( This mode will generate an IDS with a homogeneous time vector equal to :param:`dtime`. :param:`tmin` and :param:`tmax` are ignored in this mode. - The returned IDS always has ``ids_properties.homogeneous_time = 1``. + The chosen interpolation method will have no effect on the time vector, but + may have an impact on the other dynamic values. The returned IDS always has + ``ids_properties.homogeneous_time = 1``. Args: ids_name: Name of the IDS to read from the backend From 6cd1e44f2ca06d9746a6e0722cf216da2e6fed62 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 22 Nov 2024 15:05:37 +0100 Subject: [PATCH 16/97] Add `validate_nc` command to imaspy CLI Also fix a couple of bugs when opening invalid netCDF files and raise a proper exception. 
--- imaspy/backends/netcdf/db_entry_nc.py | 8 ++-- imaspy/backends/netcdf/nc2ids.py | 4 +- imaspy/backends/netcdf/nc_validate.py | 53 +++++++++++++++++++++++++++ imaspy/command/cli.py | 15 ++++++++ 4 files changed, 75 insertions(+), 5 deletions(-) create mode 100644 imaspy/backends/netcdf/nc_validate.py diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imaspy/backends/netcdf/db_entry_nc.py index 9a0bf9c9..3725c5a9 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imaspy/backends/netcdf/db_entry_nc.py @@ -6,7 +6,7 @@ from imaspy.backends.db_entry_impl import DBEntryImpl from imaspy.backends.netcdf.ids2nc import IDS2NC from imaspy.backends.netcdf.nc2ids import NC2IDS -from imaspy.exception import DataEntryException +from imaspy.exception import DataEntryException, InvalidNetCDFEntry from imaspy.ids_convert import NBCPathMap, convert_ids from imaspy.ids_factory import IDSFactory from imaspy.ids_toplevel import IDSToplevel @@ -45,14 +45,16 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: # Check if there is already data in this dataset: if self._dataset.dimensions or self._dataset.variables or self._dataset.groups: if "data_dictionary_version" not in self._dataset.ncattrs(): - raise RuntimeError( + raise InvalidNetCDFEntry( "Invalid netCDF file: `data_dictionary_version` missing" ) dataset_dd_version = self._dataset.data_dictionary_version if dataset_dd_version != factory.dd_version: self._ds_factory = IDSFactory(dataset_dd_version) - # TODO: [validate] that the data contained in this file adheres to the DD + elif mode not in ["w", "r+", "a"]: + # Reading an empty file... 
+ raise InvalidNetCDFEntry(f"Invalid netCDF file: `{fname}` is empty.") else: # This is an empty netCDF dataset: set global attributes self._dataset.Conventions = "IMAS" diff --git a/imaspy/backends/netcdf/nc2ids.py b/imaspy/backends/netcdf/nc2ids.py index b74b4676..50905ba8 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imaspy/backends/netcdf/nc2ids.py @@ -110,7 +110,7 @@ def __init__(self, group: netCDF4.Group, ids: IDSToplevel) -> None: def run(self) -> None: """Load the data from the netCDF group into the IDS.""" self.variables.sort() - self._validate_variables() + self.validate_variables() for var_name in self.variables: if var_name.endswith(":shape"): continue @@ -162,7 +162,7 @@ def run(self) -> None: for index, node in tree_iter(self.ids, metadata): node.value = data[index] - def _validate_variables(self) -> None: + def validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" disable_validate = os.environ.get("IMASPY_DISABLE_NC_VALIDATE") if disable_validate and disable_validate != "0": diff --git a/imaspy/backends/netcdf/nc_validate.py b/imaspy/backends/netcdf/nc_validate.py new file mode 100644 index 00000000..7b6a1eac --- /dev/null +++ b/imaspy/backends/netcdf/nc_validate.py @@ -0,0 +1,53 @@ +from imaspy.backends.netcdf.db_entry_nc import NCDBEntryImpl +from imaspy.backends.netcdf.nc2ids import NC2IDS +from imaspy.db_entry import DBEntry +from imaspy.exception import InvalidNetCDFEntry + + +def validate_netcdf_file(filename: str) -> None: + """Validate if the provided netCDF file adheres to the IMAS conventions.""" + if not filename.endswith(".nc"): + raise InvalidNetCDFEntry( + f"Invalid filename `{filename}` provided: " + "an IMAS netCDF file should end with `.nc`" + ) + + entry = DBEntry(filename, "r") + entry_impl: NCDBEntryImpl = entry._dbe_impl + dataset = entry_impl._dataset + factory = entry_impl._ds_factory + + ids_names = factory.ids_names() + + # Check that groups in the dataset 
correspond to an IDS/occurrence and no additional + # variables are smuggled inside: groups = [dataset] + [dataset[group] for group in dataset.groups] for group in groups: if group.variables or group.dimensions: raise InvalidNetCDFEntry( "NetCDF file should not have variables or dimensions in the " f"{group.name} group." ) if group is dataset: continue if group.name not in ids_names: raise InvalidNetCDFEntry( f"Invalid group name {group.name}: there is no IDS with this name." ) for subgroup in group.groups: try: int(subgroup) except ValueError: raise InvalidNetCDFEntry( f"Invalid group name {group.name}/{subgroup}: " f"{subgroup} is not a valid occurrence number." ) + for ids_name in ids_names: + for occurrence in entry.list_all_occurrences(ids_name): + group = dataset[f"{ids_name}/{occurrence}"] + try: + NC2IDS(group, factory.new(ids_name)).validate_variables() + except InvalidNetCDFEntry as exc: + occ = f":{occurrence}" if occurrence else "" + raise InvalidNetCDFEntry(f"Invalid IDS {ids_name}{occ}: {exc}") diff --git a/imaspy/command/cli.py b/imaspy/command/cli.py index 246922ce..f894f02d 100644 --- a/imaspy/command/cli.py +++ b/imaspy/command/cli.py @@ -218,5 +218,20 @@ def convert_ids( console.Console().print(timer.get_table("Time required per IDS")) +@cli.command("validate_nc", no_args_is_help=True) +@click.argument("filename", type=click.Path(exists=True, dir_okay=False)) +def validate_nc(filename): + """Validate if the provided netCDF file adheres to the IMAS conventions.""" + from imaspy.backends.netcdf.nc_validate import validate_netcdf_file + + try: + validate_netcdf_file(filename) + except Exception as exc: + click.echo(f"File `{filename}` does not adhere to the IMAS conventions:") + click.echo(exc) + sys.exit(1) + click.echo(f"File `{filename}` is a valid IMAS netCDF file.") + + if __name__ == "__main__": cli() From 6ec21c71dbda710bb9230525e2b2bb830b645d6b Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: 
Mon, 25 Nov 2024 09:50:07 +0100 Subject: [PATCH 17/97] Fix incorrect exception when using mode="x" for netCDF files --- imaspy/backends/netcdf/db_entry_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imaspy/backends/netcdf/db_entry_nc.py index 3725c5a9..da239745 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imaspy/backends/netcdf/db_entry_nc.py @@ -52,7 +52,7 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: if dataset_dd_version != factory.dd_version: self._ds_factory = IDSFactory(dataset_dd_version) - elif mode not in ["w", "r+", "a"]: + elif mode not in ["w", "x", "r+", "a"]: # Reading an empty file... raise InvalidNetCDFEntry(f"Invalid netCDF file: `{fname}` is empty.") else: From 1f6c6fe9730dc063443408cca81fc5416d34a184 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Mon, 25 Nov 2024 15:06:38 +0100 Subject: [PATCH 18/97] Close netCDF datasets when an exception is raised --- imaspy/backends/netcdf/db_entry_nc.py | 10 +++- imaspy/backends/netcdf/nc_validate.py | 68 +++++++++++++-------------- 2 files changed, 43 insertions(+), 35 deletions(-) diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imaspy/backends/netcdf/db_entry_nc.py index da239745..a66154f1 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imaspy/backends/netcdf/db_entry_nc.py @@ -39,9 +39,17 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: """NetCDF4 dataset.""" self._factory = factory """Factory (DD version) that the user wishes to use.""" - self._ds_factory = factory # Overwritten if data exists, see below + self._ds_factory = factory # Overwritten if data exists, see _init_dd_version """Factory (DD version) that the data is stored in.""" + try: + self._init_dd_version(fname, mode, factory) + except Exception: + self._dataset.close() + raise + + def _init_dd_version(self, fname: str, mode: str, factory: IDSFactory) -> None: + """Check or setup data 
dictionary version.""" # Check if there is already data in this dataset: if self._dataset.dimensions or self._dataset.variables or self._dataset.groups: if "data_dictionary_version" not in self._dataset.ncattrs(): diff --git a/imaspy/backends/netcdf/nc_validate.py b/imaspy/backends/netcdf/nc_validate.py index 7b6a1eac..49a14283 100644 --- a/imaspy/backends/netcdf/nc_validate.py +++ b/imaspy/backends/netcdf/nc_validate.py @@ -12,42 +12,42 @@ def validate_netcdf_file(filename: str) -> None: "an IMAS netCDF file should end with `.nc`" ) - entry = DBEntry(filename, "r") - entry_impl: NCDBEntryImpl = entry._dbe_impl - dataset = entry_impl._dataset - factory = entry_impl._ds_factory + with DBEntry(filename, "r") as entry: + entry_impl: NCDBEntryImpl = entry._dbe_impl + dataset = entry_impl._dataset + factory = entry_impl._ds_factory - ids_names = factory.ids_names() + ids_names = factory.ids_names() - # Check that groups in the dataset correspond to an IDS/occurrence and no additional - # variables are smuggled inside: - groups = [dataset] + [dataset[group] for group in dataset.groups] - for group in groups: - if group.variables or group.dimensions: - raise InvalidNetCDFEntry( - "NetCDF file should not have variables or dimensions in the " - f"{group.name} group." - ) - if group is dataset: - continue - if group.name not in ids_names: - raise InvalidNetCDFEntry( - f"Invalid group name {group.name}: there is no IDS with this name." - ) - for subgroup in group.groups: - try: - int(subgroup) - except ValueError: + # Check that groups in the dataset correspond to an IDS/occurrence and no + # additional variables are smuggled inside: + groups = [dataset] + [dataset[group] for group in dataset.groups] + for group in groups: + if group.variables or group.dimensions: raise InvalidNetCDFEntry( - f"Invalid group name {group.name}/{subgroup}: " - f"{subgroup} is not a valid occurrence number." + "NetCDF file should not have variables or dimensions in the " + f"{group.name} group." 
) + if group is dataset: + continue + if group.name not in ids_names: + raise InvalidNetCDFEntry( + f"Invalid group name {group.name}: there is no IDS with this name." + ) + for subgroup in group.groups: + try: + int(subgroup) + except ValueError: + raise InvalidNetCDFEntry( + f"Invalid group name {group.name}/{subgroup}: " + f"{subgroup} is not a valid occurrence number." + ) - for ids_name in ids_names: - for occurrence in entry.list_all_occurrences(ids_name): - group = dataset[f"{ids_name}/{occurrence}"] - try: - NC2IDS(group, factory.new(ids_name)).validate_variables() - except InvalidNetCDFEntry as exc: - occ = f":{occurrence}" if occurrence else "" - raise InvalidNetCDFEntry(f"Invalid IDS {ids_name}{occ}: {exc}") + for ids_name in ids_names: + for occurrence in entry.list_all_occurrences(ids_name): + group = dataset[f"{ids_name}/{occurrence}"] + try: + NC2IDS(group, factory.new(ids_name)).validate_variables() + except InvalidNetCDFEntry as exc: + occ = f":{occurrence}" if occurrence else "" + raise InvalidNetCDFEntry(f"Invalid IDS {ids_name}{occ}: {exc}") From b7d89635a270f623b0af2583c595b37c8a64420a Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Mon, 25 Nov 2024 15:06:54 +0100 Subject: [PATCH 19/97] Add unit tests for `nc_validate.py` --- imaspy/test/test_nc_validation.py | 53 ++++++++++++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/imaspy/test/test_nc_validation.py b/imaspy/test/test_nc_validation.py index f7cc029f..efd25420 100644 --- a/imaspy/test/test_nc_validation.py +++ b/imaspy/test/test_nc_validation.py @@ -4,7 +4,8 @@ from imaspy.backends.netcdf.ids2nc import IDS2NC from imaspy.backends.netcdf.nc2ids import NC2IDS -from imaspy.exception import InvalidNetCDFEntry +from imaspy.backends.netcdf.nc_validate import validate_netcdf_file +from imaspy.exception import InvalidNetCDFEntry, UnknownDDVersion from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS from imaspy.ids_factory import IDSFactory @@ -144,3 +145,53 
@@ def test_shape_array_with_invalid_dtype(memfile_with_ids, factory): ) with pytest.raises(InvalidNetCDFEntry): NC2IDS(memfile_with_ids, cp).run() + + +def test_validate_nc(tmpdir): + fname = str(tmpdir / "test.nc") + + # Wrong extension + with pytest.raises(InvalidNetCDFEntry): + validate_netcdf_file("test.h5") # invalid extension + + # Empty file + netCDF4.Dataset(fname, "w").close() + with pytest.raises(InvalidNetCDFEntry): + validate_netcdf_file(fname) + + # Invalid DD version + with netCDF4.Dataset(fname, "w") as dataset: + dataset.data_dictionary_version = "invalid" + dataset.createGroup("core_profiles") + with pytest.raises(UnknownDDVersion): + validate_netcdf_file(fname) + + # Invalid group + with netCDF4.Dataset(fname, "w") as dataset: + dataset.data_dictionary_version = "4.0.0" + dataset.createGroup("X") + with pytest.raises(InvalidNetCDFEntry): + validate_netcdf_file(fname) + + # Invalid occurrence + with netCDF4.Dataset(fname, "w") as dataset: + dataset.data_dictionary_version = "4.0.0" + dataset.createGroup("core_profiles/a") + with pytest.raises(InvalidNetCDFEntry): + validate_netcdf_file(fname) + + # Invalid variable in root group + with netCDF4.Dataset(fname, "w") as dataset: + dataset.data_dictionary_version = "4.0.0" + dataset.createVariable("core_profiles", int, ()) + with pytest.raises(InvalidNetCDFEntry): + validate_netcdf_file(fname) + + # Missing ids_properties.homogeneous_time + with netCDF4.Dataset(fname, "w") as dataset: + dataset.data_dictionary_version = "4.0.0" + dataset.createGroup("core_profiles/1") + with pytest.raises(InvalidNetCDFEntry): + validate_netcdf_file(fname) + + # All other validations are handled by NC2IDS and tested above From fc2cbf20b3cf1c549761214109d3d89b4ee34091 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 27 Nov 2024 16:31:59 +0100 Subject: [PATCH 20/97] Additional documentation for the `imaspy validate_nc` command line tool --- docs/source/netcdf.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff 
--git a/docs/source/netcdf.rst b/docs/source/netcdf.rst index dd3bf431..7a7593e6 100644 --- a/docs/source/netcdf.rst +++ b/docs/source/netcdf.rst @@ -102,3 +102,11 @@ your directory. Let's open this file with ``xarray.load_dataset``: Attributes: Conventions: IMAS data_dictionary_version: 3.41.0 + + +Validating an IMAS netCDF file +------------------------------ + +IMAS netCDF files can be validated with IMASPy through the command line ``imaspy +validate_nc <path>``. See also :ref:`IMASPy Command Line tool` or type +``imaspy validate_nc --help`` in a command line. From 7c56b5f0713e7083fc3887884af3bf5dc2852f78 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 4 Dec 2024 15:35:31 +0100 Subject: [PATCH 21/97] Explicitly set `IDSDEF_PATH` when opening a DBEntry with the UDA backend --- imaspy/backends/imas_core/db_entry_al.py | 19 ++++++++ imaspy/backends/imas_core/uda_support.py | 56 ++++++++++++++++++++++++ imaspy/test/test_uda_support.py | 12 +++++ 3 files changed, 87 insertions(+) create mode 100644 imaspy/backends/imas_core/uda_support.py create mode 100644 imaspy/test/test_uda_support.py diff --git a/imaspy/backends/imas_core/db_entry_al.py b/imaspy/backends/imas_core/db_entry_al.py index a90e4d6a..34a3ab32 100644 --- a/imaspy/backends/imas_core/db_entry_al.py +++ b/imaspy/backends/imas_core/db_entry_al.py @@ -41,6 +41,7 @@ from .db_entry_helpers import delete_children, get_children, put_children from .imas_interface import LLInterfaceError, has_imas, ll_interface from .mdsplus_model import ensure_data_dir, mdsplus_model_dir +from .uda_support import extract_idsdef, get_dd_version_from_idsdef_xml _BACKEND_NAME = { ASCII_BACKEND: "ascii", @@ -186,6 +187,24 @@ def _setup_backend( pass # nothing to set up elif backend == "uda":
+ elif "IMAS_PREFIX" in os.environ: + # Check if UDA can use the IDSDef.xml stored in $IMAS_PREFIX/include/ + idsdef_path = os.environ["IMAS_PREFIX"] + "/include/IDSDef.xml" + if get_dd_version_from_idsdef_xml(idsdef_path) != factory.version: + idsdef_path = None + + if idsdef_path is None: + # Extract XML from the DD zip and point UDA to it + idsdef_path = extract_idsdef(factory.version) + + os.environ["IDSDEF_PATH"] = idsdef_path logger.warning( "The UDA backend is not tested with IMASPy and may not work properly. " "Please raise any issues you find." diff --git a/imaspy/backends/imas_core/uda_support.py b/imaspy/backends/imas_core/uda_support.py new file mode 100644 index 00000000..8b599faa --- /dev/null +++ b/imaspy/backends/imas_core/uda_support.py @@ -0,0 +1,56 @@ +import logging +from pathlib import Path +from typing import Union +from xml.etree import ElementTree as ET + +from imaspy import dd_zip + +from .mdsplus_model import _get_xdg_cache_dir + +logger = logging.getLogger(__name__) + + +def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> str: + """Parse the IDSDef.xml up to the point where the Data Dictionary version is set. + + Returns: + The Data Dictionary version for the provided file, or None if the file cannot be + parsed / contains no Data Dictionary version. + """ + try: + for _, elem in ET.iterparse(path): + if elem.tag == "version": + return elem.text + except OSError: + pass # File not found, etc. + except Exception: + logger.warning("Could not read DD version from file '%s'.", path, exc_info=True) + return None + + +def extract_idsdef(dd_version: str) -> str: + """Extract the IDSDef.xml for the given version and return its path. 
+ + The IDSDef.xml is extracted to the imaspy cache folder: + + - If the file imaspy/uda/<version>.xml already exists, we assume it is correct + """ + cache_dir_path = Path(_get_xdg_cache_dir()) / "imaspy" / "uda" + cache_dir_path.mkdir(parents=True, exist_ok=True)  # ensure cache folder exists + idsdef_path = cache_dir_path / (dd_version + ".xml") + + if idsdef_path.exists(): + extract = False + # Check if the file is fine + if get_dd_version_from_idsdef_xml(idsdef_path) != dd_version: + # File is corrupt, I guess? We'll overwrite: + extract = True + else: + extract = True + + if extract: + # Extract XML from the dd_zip and store + data = dd_zip.get_dd_xml(dd_version) + idsdef_path.write_bytes(data) + + return str(idsdef_path) diff --git a/imaspy/test/test_uda_support.py b/imaspy/test/test_uda_support.py new file mode 100644 index 00000000..f623219a --- /dev/null +++ b/imaspy/test/test_uda_support.py @@ -0,0 +1,12 @@ +from pathlib import Path +from zlib import crc32 + +from imaspy import dd_zip +from imaspy.backends.imas_core.uda_support import extract_idsdef + + +def test_extract_idsdef(): + fname = extract_idsdef("4.0.0") + expected_crc = dd_zip.get_dd_xml_crc("4.0.0") + actual_crc = crc32(Path(fname).read_bytes()) + assert expected_crc == actual_crc From 74d2e3afe41a3fb907bc0a3729fcea29590aa4ca Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Mon, 9 Dec 2024 15:14:44 +0100 Subject: [PATCH 22/97] Make prepare_data_dictionaries compatible with DD>4.0.0 (change in schemas layout) --- imaspy/dd_helpers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/imaspy/dd_helpers.py b/imaspy/dd_helpers.py index 0506482f..21a7775f 100644 --- a/imaspy/dd_helpers.py +++ b/imaspy/dd_helpers.py @@ -58,9 +58,14 @@ def prepare_data_dictionaries(): dd_zip.write(filename, arcname=arcname) # Include identifiers from latest tag in zip file repo.git.checkout(newest_version_and_tag[1], force=True) + # DD layout <= 4.0.0 for filename in Path("data-dictionary").glob("*/*identifier.xml"):
arcname = Path("identifiers").joinpath(*filename.parts[1:]) dd_zip.write(filename, arcname=arcname) + # DD layout > 4.0.0 + for filename in Path("data-dictionary").glob("schemas/*/*identifier.xml"): + arcname = Path("identifiers").joinpath(*filename.parts[2:]) + dd_zip.write(filename, arcname=arcname) # pre 3.30.0 versions of the DD have the `saxon9he.jar` file path hardcoded From 77fb044a48b709ddaaef9091b1101484526338cd Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Wed, 11 Dec 2024 17:03:11 +0100 Subject: [PATCH 23/97] Updating the license and readme --- LICENSE.md | 46 --------------- LICENSE.txt | 165 ++++++++++++++++++++++++++++++++++++++++++++++++++++ README.md | 121 ++++++-------------------------------- 3 files changed, 182 insertions(+), 150 deletions(-) delete mode 100644 LICENSE.md create mode 100644 LICENSE.txt diff --git a/LICENSE.md b/LICENSE.md deleted file mode 100644 index ea4a5d46..00000000 --- a/LICENSE.md +++ /dev/null @@ -1,46 +0,0 @@ -Copyright (c) 2020-2023 ITER Organization, Route de Vinon-sur-Verdon, CS 90 046, - 13067 St-Paul-lez-Durance Cedex, France - -Copyright (c) 2020-2023 Karel Lucas van de Plassche - -Copyright (c) 2020 Dutch Institute for Fundamental Energy Research - -Copyright (c) 2020-2022 Daan van Vugt - -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - -* Use and redistribution, for peaceful purposes only, are granted solely to the - ITER Members (the People's Republic of China, the European Atomic Energy - Community, the Republic of India, Japan, the Republic of Korea, the Russian - Federation, and the United States of America), with the right to sub-license - within their territory for the purpose of fusion research and development. 
- Organizations, bodies or individuals of non-ITER Members shall seek specific - written permission from the ITER Organization before use or redistribution of - this software. - -* All modifications/derivatives shall be made available to the ITER Organization. - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, this - list of conditions and the following disclaimer in the documentation and/or - other materials provided with the distribution. - -* Neither the name of the ITER Organization nor the names of its contributors - may be used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE ITER ORGANIZATION OR ITS CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED -AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 00000000..33bb3680 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,165 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. 
+ + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. + + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. 
+ + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. 
+ + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. 
diff --git a/README.md b/README.md index ce753f5a..23e83fde 100644 --- a/README.md +++ b/README.md @@ -1,99 +1,24 @@ # IMASPy IMASPy is a pure-python library to handle arbitrarily nested data structures. -IMASPy is designed for, but not necessarily bound to, interacting with -Interface Data Structures (IDSs) as defined by the -Integrated Modelling & Analysis Suite (IMAS) Data Model. +IMASPy is designed for, but not necessarily bound to, interacting with Interface +Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) +Data Model. -It provides: -* An easy-to-install and easy-to-get started package by - * Not requiring an IMAS installation - * Not strictly requiring matching a Data Dictionary (DD) version -* An pythonic alternative to the IMAS Python High Level Interface (HLI) -* Checking of correctness on assign time, instead of database write time -* Dynamically created in-memory pre-filled data trees from DD XML specifications +## Install -This package is developed on [ITER bitbucket](https://git.iter.org/projects/IMAS/repos/imaspy). -For user support, contact the IMAS team on the [IMAS user slack](https://imasusers.slack.com), -open a [JIRA issue](https://jira.iter.org/projects/IMAS), or email the -support team on . +Install steps are described in the documentation generated from `/docs/source/installing.rst`. -## Installation - -### On ITER system, EuroFusion gateway - -There is a `module` available on ITER and the Gateway, so you can run - -```bash -module load IMASPy -``` - -IMASPy can work with either Access Layer versions 4 or 5 (the used version is -automatically detected when importing the `imaspy` module). IMASPy still works (with -limited functionality) when no IMAS module is loaded. - -### Local - -We recommend using a `venv`: - -```bash -python3 -m venv ./venv -. 
venv/bin/activate -``` - -Then clone this repository, and run `pip install`: - -```bash -git clone ssh://git@git.iter.org/imas/imaspy.git -cd imaspy -pip install . -# Optional: also install `imas-core` with the HDF5 backend in the venv: -pip install .[imas-core] -``` - -If you get strange errors you might want to upgrade your `setuptools` and `pip`. -(you might want to add the `--user` flag to your pip installs when not in a `venv`) - -### Development installation - -For development an installation in editable mode may be more convenient, and -you will need some extra dependencies to run the test suite and build -documentation. - -```bash -pip install -e .[test,docs] -``` +Documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) +and can be found at the [ITER sharepoint](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/IMASPy-doc/index.html) -Test your installation by trying +The documentation can be manually generated by installing sphinx and running: ```bash -cd ~ -python -c "import imaspy; print(imaspy.__version__)" +make -C docs html ``` -which should return your just installed version number. - -### Installation without ITER access - -The installation script tries to access the [ITER IMAS Core Data Dictionary repository](https://git.iter.org/projects/IMAS/repos/data-dictionary/browse) -to fetch the latest versions. If you do not have git+ssh access there, you can -try to find this repository elsewhere, and do a `git fetch --tags`. - -Alternatively you could try to obtain an `IDSDef.zip` and place it in `~/.config/imaspy/`. - -Test your installation by trying - -```bash -python -c "import imaspy; factory = imaspy.IDSFactory()" -``` -If the following error is raised: -```bash -RuntimeError: Could not find any data dictionary definitions. -``` -it means that the Data Dictionary definitions weren't created during the install. -You can generate these definitions by executing `build_DD` in the command line. 
-Missing packages can include among others: [GitPython](https://github.com/gitpython-developers/GitPython), and Java. ## How to use @@ -106,32 +31,20 @@ print(equilibrium) equilibrium.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS equilibrium.ids_properties.comment = "testing" -dbentry = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "ITER", 1, 1) -dbentry.create() -dbentry.put(equilibrium) - -# TODO: find an example with a significant change between versions (rename?) -older_dbentry = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "ITER", 1, 1, version="3.35.0") -equilibrium2 = older_root.get("equilibrium") -print(equilibrium2.ids_properties.comment) +with imaspy.DBEntry("imas:hdf5?path=./testdb","w") as dbentry: + dbentry.put(equilibrium) ``` -## Documentation - -Documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) -and can be found at the [ITER sharepoint](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/IMASPy-doc/index.html) +A quick 5 minutes introduction is available in the documentation generated from `/docs/sources/intro.rst`. -The documentation can be manually generated by installing sphinx and running: -```bash -make -C docs html -``` +## Legal -## Interacting with IMAS AL +IMASPy is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de +Plassche , Copyright 2020-2022 Daan van Vugt , +and Copyright 2020 Dutch Institute for Fundamental Energy Research . +It is licensed under [LGPL 3.0](LICENSE.txt). -Interaction with the IMAS AL is provided by a Cython interface to the Access Layer. -As Cython code, it needs to be compiled on your local system. -To find the headers, the Access Layer `include` folder needs to be in your `INCLUDE_PATH`. On most HPC systems, a `module load IMAS` is enough. 
## Acknowledgments From d80778fe6cdadf50255a50905a00ed9e2b458c8c Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Thu, 12 Dec 2024 10:23:31 +0100 Subject: [PATCH 24/97] Replace references to LICENSE.md to LICENSE.txt --- docs/source/index.rst | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/index.rst b/docs/source/index.rst index 19e3985b..c5a3f24c 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -77,5 +77,5 @@ Manual LICENSE ------- -.. literalinclude:: ../../LICENSE.md +.. literalinclude:: ../../LICENSE.txt :language: text diff --git a/pyproject.toml b/pyproject.toml index 1c1ce2cc..dccd6912 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ authors = [ description = "Pythonic wrappers for the IMAS Access Layer" readme = {file = "README.md", content-type = "text/markdown"} requires-python = ">=3.7" -license = {file = "LICENSE.md"} +license = {file = "LICENSE.txt"} classifiers = [ "Development Status :: 3 - Alpha", "Environment :: Console", From cef46674cc1f032d9ae65dfe4507060493a43ddf Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 17 Dec 2024 13:57:11 +0100 Subject: [PATCH 25/97] Fix a bug with lazy loading Bug: IMASPy runs into an attribute error when lazy loading a child quantity that was added in a newer DD version than stored on disk. Example: 1. Equilibrium IDS stored in DD 3.33.0 2. Lazy loading IDS with DD 4.0.0 3. Try to access `eq.time_slice[0].boundary.dr_dz_zero_point.r` resulted in an AttributeError Root cause: IMASPy did not handle correctly that the `dr_dz_zero_point` was added between 3.33.0 and 4.0.0. This commit fixes the bug. 
--- imaspy/backends/imas_core/db_entry_helpers.py | 6 +++++- imaspy/test/test_lazy_loading.py | 16 ++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/imaspy/backends/imas_core/db_entry_helpers.py b/imaspy/backends/imas_core/db_entry_helpers.py index de1d9323..f69eafd3 100644 --- a/imaspy/backends/imas_core/db_entry_helpers.py +++ b/imaspy/backends/imas_core/db_entry_helpers.py @@ -77,11 +77,15 @@ def get_children( getattr(structure, name)._IDSPrimitive__value = data -def _get_child(child: IDSBase, ctx: LazyALContext): +def _get_child(child: IDSBase, ctx: Optional[LazyALContext]): """Get a single child when required (lazy loading).""" # NOTE: changes in this method must be propagated to _get_children and vice versa # Performance: this method is specialized for the lazy get + # ctx can be None when the parent structure does not exist in the on-disk DD version + if ctx is None: + return # There is no data to be loaded + time_mode = ctx.time_mode if time_mode == IDS_TIME_MODE_INDEPENDENT and child.metadata.type.is_dynamic: return # skip dynamic (time-dependent) nodes diff --git a/imaspy/test/test_lazy_loading.py b/imaspy/test/test_lazy_loading.py index c0e54aad..1d34e2a1 100644 --- a/imaspy/test/test_lazy_loading.py +++ b/imaspy/test/test_lazy_loading.py @@ -165,6 +165,22 @@ def test_lazy_load_with_new_aos(requires_imas): dbentry.close() +def test_lazy_load_with_new_structure(requires_imas): + dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1, dd_version="3.30.0") + dbentry.create() + + eq = dbentry.factory.equilibrium() + eq.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS + eq.time = [0.0] + eq.time_slice.resize(1) + dbentry.put(eq) + + entry2 = DBEntry(MEMORY_BACKEND, "ITER", 1, 1, data_version="3", dd_version="4.0.0") + entry2.open() + lazy_eq = entry2.get("equilibrium", lazy=True) + assert not lazy_eq.time_slice[0].boundary.dr_dz_zero_point.r.has_value + + def test_lazy_load_multiple_ids(backend, worker_id, tmp_path): if backend 
== ASCII_BACKEND: pytest.skip("Lazy loading is not supported by the ASCII backend.") From 4beab9fcbcf590356b0d92b7b65894f907157962 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Fri, 10 Jan 2025 18:03:27 +0100 Subject: [PATCH 26/97] Add contributing guidelines --- CODE_OF_CONDUCT.md | 72 ++++++++++++++++++++++++++++++++++++++++++++++ CONTRIBUTING.md | 45 +++++++++++++++++++++++++++++ 2 files changed, 117 insertions(+) create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..df8ba3bd --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,72 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at . All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..ac28e400 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,45 @@ +# Contributing guidelines + +We welcome any kind of contribution to `imas-python`, +from a simple comment, a question or even a full fledged pull +request. +Please first make sure you read and follow the +[Code of Conduct](CODE_OF_CONDUCT.md). + +## You think you found a bug in the code, or have a question in its use +1. use the [issue search](https://github.com/iterorganization/ +imas-python/issues) to check if someone already created +a similar issue; +2. if not, make a **new issue** to describe your problem or question. +In the case of a bug suspicion, please try to give all the relevant +information to allow reproducing the error or identifying +its root cause (version of the imas-python, OS and relevant +dependencies, snippet of code); +3. apply relevant labels to the issue. + +## You want to make or ask some change to the code +1. use the [issue search](https://github.com/iterorganization/ +imas-python/issues) to check if someone already proposed +a similar idea/change; +2. 
if not, create a **new issue** to describe what change you would like to see +implemented and specify it if you intend to work on it yourself or if some help +will be needed; +3. wait until some kind of consensus is reached about your idea being relevant, +at which time the issue will be assigned (to you or someone else who can work on +this topic); +4. if you do the development yourself, fork the repository to your own Github +profile and create your own feature branch off of the latest develop commit. +Make sure to regularly sync your branch with the latest commits from `develop` +(find instructions +[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/ +working-with-forks/syncing-a-fork); +5. when your development is ready, create a pull request (find instructions +[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/ +proposing-changes-to-your-work-with-pull-requests/ +creating-a-pull-request-from-a-fork)). + + +While we will try to answer questions quickly and to address issues in a timely +manner, it can may sometimes take longer than expected. A friendly ping in the +discussion or the issue thread can help draw attention if you find that it was +stalled. From 2eb385e77e953ffef5a46a274897da6f4fb52d87 Mon Sep 17 00:00:00 2001 From: gautambaabu Date: Fri, 13 Dec 2024 23:10:14 +0530 Subject: [PATCH 27/97] fixed readme.md for imas --- README.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 23e83fde..03f00ce5 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ -# IMASPy +# IMAS -IMASPy is a pure-python library to handle arbitrarily nested data structures. -IMASPy is designed for, but not necessarily bound to, interacting with Interface +IMAS is a pure-python library to handle arbitrarily nested data structures. 
+IMAS is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -11,7 +11,7 @@ Data Model. Install steps are described in the documentation generated from `/docs/source/installing.rst`. Documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) -and can be found at the [ITER sharepoint](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/IMASPy-doc/index.html) +and can be found at the [ITER sharepoint](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/IMAS-doc/index.html) The documentation can be manually generated by installing sphinx and running: @@ -23,15 +23,15 @@ make -C docs html ## How to use ```python -import imaspy -factory = imaspy.IDSFactory() +import imas +factory = imas.IDSFactory() equilibrium = factory.equilibrium() print(equilibrium) -equilibrium.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS +equilibrium.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS equilibrium.ids_properties.comment = "testing" -with imaspy.DBEntry("imas:hdf5?path=./testdb","w") as dbentry: +with imas.DBEntry("imas:hdf5?path=./testdb","w") as dbentry: dbentry.put(equilibrium) ``` @@ -40,7 +40,7 @@ A quick 5 minutes introduction is available in the documentation generated from ## Legal -IMASPy is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de +IMAS is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de Plassche , Copyright 2020-2022 Daan van Vugt , and Copyright 2020 Dutch Institute for Fundamental Energy Research . It is licensed under [LGPL 3.0](LICENSE.txt). 
From 133f78c30803cdb4ad8e3afab9f10e8b652c0d58 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Tue, 17 Dec 2024 14:56:13 +0100 Subject: [PATCH 28/97] Apply suggestion on naming --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 03f00ce5..9fc27d68 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ -# IMAS +# IMAS-Python -IMAS is a pure-python library to handle arbitrarily nested data structures. -IMAS is designed for, but not necessarily bound to, interacting with Interface +IMAS-Python is a pure-python library to handle arbitrarily nested data structures. +IMAS-Python is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -40,7 +40,7 @@ A quick 5 minutes introduction is available in the documentation generated from ## Legal -IMAS is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de +IMAS-Python is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de Plassche , Copyright 2020-2022 Daan van Vugt , and Copyright 2020 Dutch Institute for Fundamental Energy Research . It is licensed under [LGPL 3.0](LICENSE.txt). From 6f871f5b98f268b5329310fcd0e572c109cb6539 Mon Sep 17 00:00:00 2001 From: Gautam raj Date: Tue, 17 Dec 2024 19:32:31 +0530 Subject: [PATCH 29/97] Update README.md Co-authored-by: Simon Pinches --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9fc27d68..14d4b81e 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ # IMAS-Python IMAS-Python is a pure-python library to handle arbitrarily nested data structures. 
-IMAS-Python is designed for, but not necessarily bound to, interacting with Interface +It is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. From 693f035cdfa1e71e3c9e3062703997211f2adb5f Mon Sep 17 00:00:00 2001 From: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> Date: Mon, 20 Jan 2025 10:08:22 +0100 Subject: [PATCH 30/97] Update CONTRIBUTING.md Fix Markdown links: newlines inside a URL are not rendered properly --- CONTRIBUTING.md | 29 ++++++++++++----------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ac28e400..661eedb0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -7,36 +7,31 @@ Please first make sure you read and follow the [Code of Conduct](CODE_OF_CONDUCT.md). ## You think you found a bug in the code, or have a question in its use -1. use the [issue search](https://github.com/iterorganization/ -imas-python/issues) to check if someone already created -a similar issue; -2. if not, make a **new issue** to describe your problem or question. +1. use the [issue search](https://github.com/iterorganization/imas-python/issues) +to check if someone already created a similar issue; +3. if not, make a **new issue** to describe your problem or question. In the case of a bug suspiscion, please try to give all the relevant information to allow reproducing the error or identifying its root cause (version of the imas-python, OS and relevant dependencies, snippet of code); -3. apply relevant labels to the issue. +4. apply relevant labels to the issue. ## You want to make or ask some change to the code -1. use the [issue search](https://github.com/iterorganization/ -imas-python/issues) to check if someone already proposed -a similar idea/change; -2. if not, create a **new issue** to describe what change you would like to see +1. 
use the [issue search](https://github.com/iterorganization/imas-python/issues) +to check if someone already proposed a similar idea/change; +3. if not, create a **new issue** to describe what change you would like to see implemented and specify it if you intend to work on it yourself or if some help will be needed; -3. wait until some kind of consensus is reached about your idea being relevant, +4. wait until some kind of consensus is reached about your idea being relevant, at which time the issue will be assigned (to you or someone else who can work on this topic); -4. if you do the development yourself, fork the repository to your own Github +5. if you do the development yourself, fork the repository to your own Github profile and create your own feature branch off of the latest develop commit. Make sure to regularly sync your branch with the latest commits from `develop` (find instructions -[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/ -working-with-forks/syncing-a-fork); -5. when your development is ready, create a pull request (find instructions -[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/ -proposing-changes-to-your-work-with-pull-requests/ -creating-a-pull-request-from-a-fork)). +[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/syncing-a-fork)); +6. when your development is ready, create a pull request (find instructions +[here](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request-from-a-fork)). 
While we will try to answer questions quickly and to address issues in a timely From b2913f6910a83f155cdc45c868d18126554af01e Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Wed, 22 Jan 2025 09:49:23 +0100 Subject: [PATCH 31/97] renamed imaspy to imas --- .gitignore | 13 +- MANIFEST.in | 12 +- README.md | 10 +- asv.conf.json | 6 +- benchmarks/core_profiles.py | 27 +- benchmarks/edge_profiles.py | 30 +- benchmarks/technical.py | 17 +- benchmarks/utils.py | 29 +- ci/build_dd_zip.sh | 2 +- ci/build_docs_and_dist.sh | 4 +- ci/linting.sh | 4 +- ci/run_benchmark.sh | 26 +- ci/run_pytest.sh | 6 +- conftest.py | 40 +- docs/Makefile | 2 +- docs/source/_static/{imaspy.css => imas.css} | 0 .../{imaspy_200x200.png => imas_200x200.png} | Bin docs/source/api-hidden.rst | 2 +- docs/source/api.rst | 10 +- docs/source/benchmarking.rst | 44 +- docs/source/changelog.rst | 52 +- docs/source/ci_config.rst | 40 +- docs/source/cli.rst | 76 +- docs/source/code_style.rst | 10 +- docs/source/conf.py | 45 +- docs/source/configuring.rst | 30 +- docs/source/courses/advanced/dd_versions.rst | 92 +-- docs/source/courses/advanced/explore.rst | 38 +- docs/source/courses/advanced/hashing.rst | 14 +- .../alternative_coordinates.py | 6 +- .../autoconvert_get.py | 18 +- .../autoconvert_put.py | 12 +- .../calc_with_units.py | 6 +- .../coordinates.py | 10 +- .../dd_versions.py | 10 +- .../explore_data.py | 12 +- .../explore_structures.py | 6 +- .../hashing.py | 20 +- .../ids_convert.py | 20 +- .../ids_to_xarray.py | 6 +- .../tensorized_ids_to_xarray.py | 6 +- docs/source/courses/advanced/metadata.rst | 52 +- docs/source/courses/advanced/xarray.rst | 6 +- .../source/courses/advanced_user_training.rst | 10 +- .../al4_snippets/create_core_profiles.py | 40 -- .../al4_snippets/iterate_core_profiles.py | 12 - .../plot_core_profiles_ne_timeslice.py | 35 - .../courses/basic/al4_snippets/print_idss.py | 5 - .../read_core_profiles_ne_timeslice.py | 21 - .../read_equilibrium_time_array.py | 26 - 
.../al4_snippets/read_whole_equilibrium.py | 20 - .../basic/al4_snippets/transform_grid.py | 105 --- docs/source/courses/basic/analyze.rst | 97 ++- docs/source/courses/basic/create.rst | 95 +-- docs/source/courses/basic/explore.rst | 69 +- .../{imaspy_inspect.png => imas_inspect.png} | Bin .../create_core_profiles.py | 14 +- .../explore_public_ec_launchers.py | 15 + .../imas_snippets/explore_public_pf_active.py | 15 + .../imas_snippets/explore_training_data.py | 13 + .../courses/basic/imas_snippets/find_paths.py | 16 + .../iterate_core_profiles.py | 4 +- .../plot_core_profiles_ne_timeslice.py | 6 +- .../plot_core_profiles_te.py | 6 +- .../print_idss.py | 8 +- .../read_core_profiles_ne_timeslice.py | 6 +- .../read_equilibrium_time_array.py | 4 +- .../read_whole_equilibrium.py | 4 +- .../transform_grid.py | 10 +- .../explore_public_ec_launchers.py | 15 - .../explore_public_pf_active.py | 15 - .../imaspy_snippets/explore_training_data.py | 13 - .../basic/imaspy_snippets/find_paths.py | 16 - docs/source/courses/basic/setup.rst | 15 +- docs/source/courses/basic/transform.rst | 70 +- docs/source/courses/basic_user_training.rst | 18 +- docs/source/identifiers.rst | 56 +- ...architecture.rst => imas_architecture.rst} | 182 +++-- ...maspy_structure.png => imas_structure.png} | Bin docs/source/index.rst | 22 +- docs/source/installing.rst | 65 +- docs/source/intro.rst | 48 +- docs/source/lazy_loading.rst | 46 +- docs/source/mdsplus.rst | 10 +- docs/source/metadata.rst | 48 +- docs/source/multi-dd.rst | 56 +- docs/source/netcdf.rst | 40 +- docs/source/netcdf/conventions.rst | 2 +- .../{release_imaspy.rst => release_imas.rst} | 43 +- docs/source/resampling.rst | 18 +- docs/source/validation.rst | 20 +- imas/__init__.py | 31 + imas/__main__.py | 17 + {imaspy => imas}/_util.py | 24 +- .../core_instant_changes_identifier.xml | 15 + .../core_sources/core_source_identifier.xml | 76 ++ .../core_transport_identifier.xml | 25 + .../edge_sources/edge_source_identifier.xml | 31 + 
.../edge_transport_identifier.xml | 25 + .../em_coupling_quantity_identifier.xml | 27 + .../equilibrium_profiles_2d_identifier.xml | 21 + .../magnetics_flux_loop_type_identifier.xml | 14 + .../magnetics_probe_type_identifier.xml | 16 + ...magnetics_rogowski_measured_identifier.xml | 13 + .../mhd_linear_ballooning_identifier.xml | 13 + .../mhd_linear_equations_identifier.xml | 15 + .../mhd_linear_model_identifier.xml | 14 + .../mhd_linear_perturbation_identifier.xml | 24 + .../neutron_event_identifier.xml | 18 + .../neutron_mode_identifier.xml | 15 + .../operational_sensor_type_identifier.xml | 15 + .../pf_active_coil_function_identifier.xml | 10 + .../plasma_source_identifier.xml | 70 ++ .../plasma_transport_identifier.xml | 38 + .../radiation/radiation_identifier.xml | 23 + .../refractometer_formula_identifier.xml | 12 + .../e_field_critical_identifier.xml | 12 + ...momentum_critical_avalanche_identifier.xml | 11 + .../momentum_critical_hot_tail_identifier.xml | 11 + ...spectrometer_visible_method_identifier.xml | 13 + .../crystal_mesh_identifier.xml | 12 + ...ectro_x_instrument_function_identifier.xml | 14 + .../spi/shatter_cone_identifier.xml | 11 + .../utilities/coordinate_identifier.xml | 51 ++ .../curved_object_curvature_identifier.xml | 16 + .../curved_object_geometry_identifier.xml | 14 + .../utilities/data_type_identifier.xml | 12 + .../distribution_source_identifier.xml | 36 + .../utilities/emission_grid_identifier.xml | 9 + .../ggd_geometry_content_identifier.xml | 21 + .../identifiers/utilities/ggd_identifier.xml | 29 + .../utilities/ggd_space_identifier.xml | 18 + .../utilities/ggd_subset_identifier.xml | 70 ++ .../utilities/materials_identifier.xml | 43 ++ .../utilities/midplane_identifier.xml | 14 + .../utilities/neutrals_identifier.xml | 20 + .../utilities/occurrence_type_identifier.xml | 11 + .../utilities/optical_element_identifier.xml | 13 + .../optical_element_material_identifier.xml | 12 + .../utilities/orbit_type_identifier.xml | 20 + 
.../poloidal_plane_coordinates_identifier.xml | 119 ++++ .../species_reference_identifier.xml | 19 + .../utilities/statistics_type_identifier.xml | 17 + .../utilities/surface_geometry_identifier.xml | 16 + .../identifiers/utilities/wave_identifier.xml | 15 + .../wall/wall_component_identifier.xml | 18 + .../wall_description_2d_type_identifier.xml | 13 + {imaspy => imas}/assets/IDSDef2MDSpreTree.xsl | 0 .../core_instant_changes_identifier.xml | 15 + .../core_sources/core_source_identifier.xml | 76 ++ .../core_transport_identifier.xml | 25 + .../edge_sources/edge_source_identifier.xml | 31 + .../edge_transport_identifier.xml | 25 + .../em_coupling_quantity_identifier.xml | 27 + .../equilibrium_profiles_2d_identifier.xml | 21 + .../magnetics_flux_loop_type_identifier.xml | 14 + .../magnetics_probe_type_identifier.xml | 16 + ...magnetics_rogowski_measured_identifier.xml | 13 + .../mhd_linear_ballooning_identifier.xml | 13 + .../mhd_linear_equations_identifier.xml | 15 + .../mhd_linear_model_identifier.xml | 14 + .../mhd_linear_perturbation_identifier.xml | 24 + .../neutron_event_identifier.xml | 18 + .../neutron_mode_identifier.xml | 15 + .../operational_sensor_type_identifier.xml | 15 + .../pf_active_coil_function_identifier.xml | 10 + .../plasma_source_identifier.xml | 70 ++ .../plasma_transport_identifier.xml | 38 + .../radiation/radiation_identifier.xml | 23 + .../refractometer_formula_identifier.xml | 12 + .../e_field_critical_identifier.xml | 12 + ...momentum_critical_avalanche_identifier.xml | 11 + .../momentum_critical_hot_tail_identifier.xml | 11 + ...spectrometer_visible_method_identifier.xml | 13 + .../crystal_mesh_identifier.xml | 12 + ...ectro_x_instrument_function_identifier.xml | 14 + .../spi/shatter_cone_identifier.xml | 11 + .../utilities/coordinate_identifier.xml | 51 ++ .../curved_object_curvature_identifier.xml | 16 + .../curved_object_geometry_identifier.xml | 14 + .../utilities/data_type_identifier.xml | 12 + .../distribution_source_identifier.xml 
| 36 + .../utilities/emission_grid_identifier.xml | 9 + .../ggd_geometry_content_identifier.xml | 21 + .../identifiers/utilities/ggd_identifier.xml | 29 + .../utilities/ggd_space_identifier.xml | 18 + .../utilities/ggd_subset_identifier.xml | 70 ++ .../utilities/materials_identifier.xml | 43 ++ .../utilities/midplane_identifier.xml | 14 + .../utilities/neutrals_identifier.xml | 20 + .../utilities/occurrence_type_identifier.xml | 11 + .../utilities/optical_element_identifier.xml | 13 + .../optical_element_material_identifier.xml | 12 + .../utilities/orbit_type_identifier.xml | 20 + .../poloidal_plane_coordinates_identifier.xml | 119 ++++ .../species_reference_identifier.xml | 19 + .../utilities/statistics_type_identifier.xml | 17 + .../utilities/surface_geometry_identifier.xml | 16 + .../identifiers/utilities/wave_identifier.xml | 15 + .../wall/wall_component_identifier.xml | 18 + .../wall_description_2d_type_identifier.xml | 13 + {imaspy => imas}/assets/IDS_fake_toplevel.xml | 0 {imaspy => imas}/assets/IDS_minimal.xml | 2 +- {imaspy => imas}/assets/IDS_minimal_2.xml | 2 +- .../assets/IDS_minimal_struct_array.xml | 2 +- {imaspy => imas}/assets/IDS_minimal_types.xml | 2 +- .../assets/ITER_134173_106_core_profiles.ids | 0 .../assets/ITER_134173_106_equilibrium.ids | 0 {imaspy => imas}/assets/README.md | 0 {imaspy => imas}/assets/core_profiles.ids | 0 {imaspy => imas}/assets/equilibrium.ids | 0 {imaspy => imas}/backends/__init__.py | 4 +- {imaspy => imas}/backends/db_entry_impl.py | 22 +- imas/backends/imas_core/__init__.py | 4 + .../backends/imas_core/al_context.py | 20 +- .../backends/imas_core/db_entry_al.py | 21 +- .../backends/imas_core/db_entry_helpers.py | 18 +- .../backends/imas_core/imas_interface.py | 45 +- .../backends/imas_core/mdsplus_model.py | 90 ++- .../backends/imas_core/uda_support.py | 8 +- imas/backends/netcdf/__init__.py | 4 + .../backends/netcdf/db_entry_nc.py | 16 +- {imaspy => imas}/backends/netcdf/ids2nc.py | 20 +- {imaspy => 
imas}/backends/netcdf/nc2ids.py | 20 +- .../backends/netcdf/nc_metadata.py | 12 +- .../backends/netcdf/nc_validate.py | 8 +- {imaspy => imas}/command/cli.py | 51 +- {imaspy => imas}/command/db_analysis.py | 16 +- {imaspy => imas}/command/helpers.py | 16 +- {imaspy => imas}/command/timer.py | 4 +- {imaspy => imas}/db_entry.py | 93 +-- imas/dd_helpers.py | 168 +++++ {imaspy => imas}/dd_zip.py | 44 +- {imaspy => imas}/exception.py | 10 +- {imaspy => imas}/ids_base.py | 18 +- {imaspy => imas}/ids_convert.py | 28 +- {imaspy => imas}/ids_coordinates.py | 32 +- {imaspy => imas}/ids_data_type.py | 6 +- {imaspy => imas}/ids_defs.py | 8 +- {imaspy => imas}/ids_factory.py | 16 +- {imaspy => imas}/ids_identifiers.py | 10 +- {imaspy => imas}/ids_metadata.py | 31 +- {imaspy => imas}/ids_path.py | 12 +- {imaspy => imas}/ids_primitive.py | 14 +- {imaspy => imas}/ids_struct_array.py | 20 +- {imaspy => imas}/ids_structure.py | 38 +- {imaspy => imas}/ids_toplevel.py | 58 +- {imaspy => imas}/setup_logging.py | 12 +- {imaspy => imas}/test/test_all_dd_versions.py | 4 +- {imaspy => imas}/test/test_cli.py | 14 +- {imaspy => imas}/test/test_dbentry.py | 46 +- imas/test/test_dd_helpers.py | 56 ++ imas/test/test_dd_helpers_old.py | 92 +++ {imaspy => imas}/test/test_dd_zip.py | 4 +- {imaspy => imas}/test/test_deepcopy.py | 10 +- {imaspy => imas}/test/test_exception.py | 6 +- {imaspy => imas}/test/test_get_sample.py | 12 +- {imaspy => imas}/test/test_hash.py | 64 +- {imaspy => imas}/test/test_helpers.py | 18 +- {imaspy => imas}/test/test_identifiers.py | 8 +- {imaspy => imas}/test/test_ids2nc.py | 8 +- {imaspy => imas}/test/test_ids_ascii_data.py | 12 +- {imaspy => imas}/test/test_ids_convert.py | 18 +- {imaspy => imas}/test/test_ids_coordinates.py | 8 +- {imaspy => imas}/test/test_ids_data_type.py | 2 +- {imaspy => imas}/test/test_ids_factory.py | 4 +- {imaspy => imas}/test/test_ids_metadata.py | 4 +- {imaspy => imas}/test/test_ids_mixin.py | 4 +- {imaspy => imas}/test/test_ids_path.py | 4 +- 
{imaspy => imas}/test/test_ids_primitive.py | 10 +- .../test/test_ids_struct_array.py | 4 +- {imaspy => imas}/test/test_ids_structure.py | 8 +- {imaspy => imas}/test/test_ids_toplevel.py | 6 +- {imaspy => imas}/test/test_ids_validate.py | 10 +- .../test/test_latest_dd_autofill.py | 8 +- .../test/test_latest_dd_manual.py | 6 +- .../test/test_latest_dd_resample.py | 14 +- {imaspy => imas}/test/test_lazy_loading.py | 12 +- .../test/test_list_occurrences.py | 10 +- {imaspy => imas}/test/test_minimal.py | 2 +- .../test/test_minimal_conversion.py | 4 +- {imaspy => imas}/test/test_minimal_io.py | 4 +- .../test/test_minimal_struct_array_io.py | 4 +- {imaspy => imas}/test/test_minimal_types.py | 10 +- .../test/test_minimal_types_io.py | 4 +- .../test/test_minimal_types_io_automatic.py | 8 +- {imaspy => imas}/test/test_nbc_change.py | 16 +- {imaspy => imas}/test/test_nc_autofill.py | 4 +- {imaspy => imas}/test/test_nc_entry.py | 8 +- {imaspy => imas}/test/test_nc_metadata.py | 4 +- {imaspy => imas}/test/test_nc_validation.py | 12 +- {imaspy => imas}/test/test_snippets.py | 1 + {imaspy => imas}/test/test_static_ids.py | 14 +- {imaspy => imas}/test/test_str_1d.py | 4 +- {imaspy => imas}/test/test_time_slicing.py | 9 +- {imaspy => imas}/test/test_uda_support.py | 4 +- {imaspy => imas}/test/test_util.py | 36 +- {imaspy => imas}/training.py | 22 +- {imaspy => imas}/util.py | 90 +-- imaspy/__init__.py | 37 - imaspy/__main__.py | 17 - imaspy/_version.py | 658 ------------------ imaspy/backends/imas_core/__init__.py | 4 - imaspy/backends/netcdf/__init__.py | 4 - imaspy/dd_helpers.py | 300 -------- imaspy/test/test_dd_helpers.py | 134 ---- pyproject.toml | 51 +- setup.cfg | 4 +- setup.py | 25 +- tools/compare_lowlevel_access_patterns.py | 68 +- tools/extract_test_data.py | 4 +- 311 files changed, 4631 insertions(+), 3343 deletions(-) rename docs/source/_static/{imaspy.css => imas.css} (100%) rename docs/source/_static/{imaspy_200x200.png => imas_200x200.png} (100%) rename 
docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/alternative_coordinates.py (91%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/autoconvert_get.py (83%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/autoconvert_put.py (71%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/calc_with_units.py (93%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/coordinates.py (88%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/dd_versions.py (76%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/explore_data.py (68%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/explore_structures.py (85%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/hashing.py (59%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/ids_convert.py (83%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/ids_to_xarray.py (92%) rename docs/source/courses/advanced/{imaspy_snippets => imas_snippets}/tensorized_ids_to_xarray.py (94%) delete mode 100644 docs/source/courses/basic/al4_snippets/create_core_profiles.py delete mode 100644 docs/source/courses/basic/al4_snippets/iterate_core_profiles.py delete mode 100644 docs/source/courses/basic/al4_snippets/plot_core_profiles_ne_timeslice.py delete mode 100644 docs/source/courses/basic/al4_snippets/print_idss.py delete mode 100644 docs/source/courses/basic/al4_snippets/read_core_profiles_ne_timeslice.py delete mode 100644 docs/source/courses/basic/al4_snippets/read_equilibrium_time_array.py delete mode 100644 docs/source/courses/basic/al4_snippets/read_whole_equilibrium.py delete mode 100644 docs/source/courses/basic/al4_snippets/transform_grid.py rename docs/source/courses/basic/{imaspy_inspect.png => imas_inspect.png} (100%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/create_core_profiles.py (73%) create 
mode 100644 docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py create mode 100644 docs/source/courses/basic/imas_snippets/explore_public_pf_active.py create mode 100644 docs/source/courses/basic/imas_snippets/explore_training_data.py create mode 100644 docs/source/courses/basic/imas_snippets/find_paths.py rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/iterate_core_profiles.py (77%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/plot_core_profiles_ne_timeslice.py (83%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/plot_core_profiles_te.py (92%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/print_idss.py (71%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/read_core_profiles_ne_timeslice.py (71%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/read_equilibrium_time_array.py (88%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/read_whole_equilibrium.py (90%) rename docs/source/courses/basic/{imaspy_snippets => imas_snippets}/transform_grid.py (90%) delete mode 100644 docs/source/courses/basic/imaspy_snippets/explore_public_ec_launchers.py delete mode 100644 docs/source/courses/basic/imaspy_snippets/explore_public_pf_active.py delete mode 100644 docs/source/courses/basic/imaspy_snippets/explore_training_data.py delete mode 100644 docs/source/courses/basic/imaspy_snippets/find_paths.py rename docs/source/{imaspy_architecture.rst => imas_architecture.rst} (58%) rename docs/source/{imaspy_structure.png => imas_structure.png} (100%) rename docs/source/{release_imaspy.rst => release_imas.rst} (72%) create mode 100644 imas/__init__.py create mode 100644 imas/__main__.py rename {imaspy => imas}/_util.py (94%) create mode 100644 imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml create 
mode 100644 imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml create mode 100644 
imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml create mode 100644 
imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml create mode 100644 imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml rename {imaspy => imas}/assets/IDSDef2MDSpreTree.xsl (100%) create mode 100644 imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml create mode 100644 
imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml create mode 100644 
imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml create mode 100644 
imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml create mode 100644 imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml rename {imaspy => imas}/assets/IDS_fake_toplevel.xml (100%) rename {imaspy => imas}/assets/IDS_minimal.xml (95%) rename {imaspy => imas}/assets/IDS_minimal_2.xml (95%) rename {imaspy => imas}/assets/IDS_minimal_struct_array.xml (96%) rename {imaspy => imas}/assets/IDS_minimal_types.xml (98%) rename {imaspy => imas}/assets/ITER_134173_106_core_profiles.ids (100%) rename {imaspy => imas}/assets/ITER_134173_106_equilibrium.ids (100%) rename {imaspy => imas}/assets/README.md (100%) rename {imaspy => imas}/assets/core_profiles.ids (100%) rename {imaspy => imas}/assets/equilibrium.ids (100%) rename {imaspy => imas}/backends/__init__.py (76%) rename {imaspy => imas}/backends/db_entry_impl.py (83%) create mode 100644 imas/backends/imas_core/__init__.py rename {imaspy => imas}/backends/imas_core/al_context.py (95%) rename {imaspy => imas}/backends/imas_core/db_entry_al.py (96%) rename {imaspy => imas}/backends/imas_core/db_entry_helpers.py (94%) rename {imaspy => imas}/backends/imas_core/imas_interface.py (87%) rename {imaspy => imas}/backends/imas_core/mdsplus_model.py (83%) rename {imaspy => imas}/backends/imas_core/uda_support.py (86%) create mode 100644 imas/backends/netcdf/__init__.py rename {imaspy => imas}/backends/netcdf/db_entry_nc.py (93%) rename {imaspy => imas}/backends/netcdf/ids2nc.py (96%) rename {imaspy => imas}/backends/netcdf/nc2ids.py (95%) rename {imaspy => imas}/backends/netcdf/nc_metadata.py (98%) 
rename {imaspy => imas}/backends/netcdf/nc_validate.py (91%) rename {imaspy => imas}/command/cli.py (83%) rename {imaspy => imas}/command/db_analysis.py (96%) rename {imaspy => imas}/command/helpers.py (69%) rename {imaspy => imas}/command/timer.py (95%) rename {imaspy => imas}/db_entry.py (91%) create mode 100644 imas/dd_helpers.py rename {imaspy => imas}/dd_zip.py (89%) rename {imaspy => imas}/exception.py (92%) rename {imaspy => imas}/ids_base.py (90%) rename {imaspy => imas}/ids_convert.py (98%) rename {imaspy => imas}/ids_coordinates.py (94%) rename {imaspy => imas}/ids_data_type.py (95%) rename {imaspy => imas}/ids_defs.py (95%) rename {imaspy => imas}/ids_factory.py (88%) rename {imaspy => imas}/ids_identifiers.py (94%) rename {imaspy => imas}/ids_metadata.py (92%) rename {imaspy => imas}/ids_path.py (97%) rename {imaspy => imas}/ids_primitive.py (97%) rename {imaspy => imas}/ids_struct_array.py (93%) rename {imaspy => imas}/ids_structure.py (91%) rename {imaspy => imas}/ids_toplevel.py (87%) rename {imaspy => imas}/setup_logging.py (83%) rename {imaspy => imas}/test/test_all_dd_versions.py (89%) rename {imaspy => imas}/test/test_cli.py (77%) rename {imaspy => imas}/test/test_dbentry.py (66%) create mode 100644 imas/test/test_dd_helpers.py create mode 100644 imas/test/test_dd_helpers_old.py rename {imaspy => imas}/test/test_dd_zip.py (87%) rename {imaspy => imas}/test/test_deepcopy.py (63%) rename {imaspy => imas}/test/test_exception.py (67%) rename {imaspy => imas}/test/test_get_sample.py (98%) rename {imaspy => imas}/test/test_hash.py (69%) rename {imaspy => imas}/test/test_helpers.py (97%) rename {imaspy => imas}/test/test_identifiers.py (94%) rename {imaspy => imas}/test/test_ids2nc.py (96%) rename {imaspy => imas}/test/test_ids_ascii_data.py (71%) rename {imaspy => imas}/test/test_ids_convert.py (97%) rename {imaspy => imas}/test/test_ids_coordinates.py (96%) rename {imaspy => imas}/test/test_ids_data_type.py (96%) rename {imaspy => 
imas}/test/test_ids_factory.py (91%) rename {imaspy => imas}/test/test_ids_metadata.py (94%) rename {imaspy => imas}/test/test_ids_mixin.py (84%) rename {imaspy => imas}/test/test_ids_path.py (98%) rename {imaspy => imas}/test/test_ids_primitive.py (93%) rename {imaspy => imas}/test/test_ids_struct_array.py (96%) rename {imaspy => imas}/test/test_ids_structure.py (91%) rename {imaspy => imas}/test/test_ids_toplevel.py (88%) rename {imaspy => imas}/test/test_ids_validate.py (97%) rename {imaspy => imas}/test/test_latest_dd_autofill.py (92%) rename {imaspy => imas}/test/test_latest_dd_manual.py (88%) rename {imaspy => imas}/test/test_latest_dd_resample.py (91%) rename {imaspy => imas}/test/test_lazy_loading.py (95%) rename {imaspy => imas}/test/test_list_occurrences.py (92%) rename {imaspy => imas}/test/test_minimal.py (97%) rename {imaspy => imas}/test/test_minimal_conversion.py (92%) rename {imaspy => imas}/test/test_minimal_io.py (86%) rename {imaspy => imas}/test/test_minimal_struct_array_io.py (93%) rename {imaspy => imas}/test/test_minimal_types.py (98%) rename {imaspy => imas}/test/test_minimal_types_io.py (94%) rename {imaspy => imas}/test/test_minimal_types_io_automatic.py (90%) rename {imaspy => imas}/test/test_nbc_change.py (97%) rename {imaspy => imas}/test/test_nc_autofill.py (73%) rename {imaspy => imas}/test/test_nc_entry.py (82%) rename {imaspy => imas}/test/test_nc_metadata.py (97%) rename {imaspy => imas}/test/test_nc_validation.py (95%) rename {imaspy => imas}/test/test_snippets.py (95%) rename {imaspy => imas}/test/test_static_ids.py (73%) rename {imaspy => imas}/test/test_str_1d.py (96%) rename {imaspy => imas}/test/test_time_slicing.py (96%) rename {imaspy => imas}/test/test_uda_support.py (73%) rename {imaspy => imas}/test/test_util.py (86%) rename {imaspy => imas}/training.py (59%) rename {imaspy => imas}/util.py (85%) delete mode 100644 imaspy/__init__.py delete mode 100644 imaspy/__main__.py delete mode 100644 imaspy/_version.py delete mode 
100644 imaspy/backends/imas_core/__init__.py delete mode 100644 imaspy/backends/netcdf/__init__.py delete mode 100644 imaspy/dd_helpers.py delete mode 100644 imaspy/test/test_dd_helpers.py diff --git a/.gitignore b/.gitignore index 6a19e008..8bff5e88 100644 --- a/.gitignore +++ b/.gitignore @@ -91,21 +91,16 @@ ENV/ *.swo # SCM setuptools -imaspy/version.py - -# Saxon symlink or downloaded file -saxon*.jar +imas/_version.py # IMAS DD data-dictionary -access-layer -containers/arch/imaspy/ +containers/arch/imas/ containers/arch/data-dictionary/ -containers/arch/access-layer/ -imaspy/assets/IDSDef.zip +imas/assets/IDSDef.zip # IDS files -*.ids +# *.ids # ASV folder /.asv diff --git a/MANIFEST.in b/MANIFEST.in index b002b31c..9c3b7df1 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ -include imaspy/assets/IDSDef.zip -include imaspy/assets/IDSDef2MDSpreTree.xsl -include imaspy/assets/ITER_134173_106_equilibrium.ids -include imaspy/assets/ITER_134173_106_core_profiles.ids -include imaspy/assets/equilibrium.ids -include imaspy/assets/core_profiles.ids +include imas/assets/IDSDef.zip +include imas/assets/IDSDef2MDSpreTree.xsl +include imas/assets/ITER_134173_106_equilibrium.ids +include imas/assets/ITER_134173_106_core_profiles.ids +include imas/assets/equilibrium.ids +include imas/assets/core_profiles.ids diff --git a/README.md b/README.md index 14d4b81e..c2afa4ad 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ -# IMAS-Python +# imas-python -IMAS-Python is a pure-python library to handle arbitrarily nested data structures. -It is designed for, but not necessarily bound to, interacting with Interface +imas-python is a pure-python library to handle arbitrarily nested data structures. +imas-python is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -11,7 +11,7 @@ Data Model. 
Install steps are described in the documentation generated from `/docs/source/installing.rst`. Documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) -and can be found at the [ITER sharepoint](https://sharepoint.iter.org/departments/POP/CM/IMDesign/Code%20Documentation/IMAS-doc/index.html) +and can be found at the [readthedocs](https://imas-python.readthedocs.io/en/latest/) The documentation can be manually generated by installing sphinx and running: @@ -40,7 +40,7 @@ A quick 5 minutes introduction is available in the documentation generated from ## Legal -IMAS-Python is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de +imas-python is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de Plassche , Copyright 2020-2022 Daan van Vugt , and Copyright 2020 Dutch Institute for Fundamental Energy Research . It is licensed under [LGPL 3.0](LICENSE.txt). diff --git a/asv.conf.json b/asv.conf.json index b10c743a..0b11cf72 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -4,10 +4,10 @@ "version": 1, // The name of the project being benchmarked - "project": "imaspy", + "project": "imas", // The project's homepage - "project_url": "https://git.iter.org/projects/IMAS/repos/imaspy/browse", + "project_url": "https://github.com/iterorganization/imas-python", // The URL or local path of the source code repository for the // project being benchmarked @@ -53,7 +53,7 @@ //"install_timeout": 600, // the base URL to show a commit for the project. - "show_commit_url": "https://git.iter.org/projects/IMAS/repos/imaspy/commits/", + "show_commit_url": "https://github.com/iterorganization/imas-python/commits/main/", // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. 
diff --git a/benchmarks/core_profiles.py b/benchmarks/core_profiles.py index 743cd12e..047b5afc 100644 --- a/benchmarks/core_profiles.py +++ b/benchmarks/core_profiles.py @@ -3,7 +3,7 @@ import numpy as np -import imaspy +import imas from .utils import ( available_backends, @@ -22,16 +22,16 @@ def fill_slices(core_profiles, times): """Fill a time slice of a core_profiles IDS with generated data. Args: - core_profiles: core_profiles IDS (either from IMASPy or AL HLI) + core_profiles: core_profiles IDS (either from imas-python or AL HLI) times: time values to fill a slice for """ core_profiles.ids_properties.homogeneous_time = 1 # HOMOGENEOUS - core_profiles.ids_properties.comment = "Generated for the IMASPy benchmark suite" + core_profiles.ids_properties.comment = "Generated for the imas-python benchmark suite" core_profiles.ids_properties.creation_date = datetime.date.today().isoformat() - core_profiles.code.name = "IMASPy ASV benchmark" - core_profiles.code.version = imaspy.__version__ + core_profiles.code.name = "imas-python ASV benchmark" + core_profiles.code.version = imas.__version__ core_profiles.code.repository = ( - "https://git.iter.org/projects/IMAS/repos/imaspy/browse" + "https://github.com/iterorganization/imas-python" ) core_profiles.time = np.array(times) @@ -50,7 +50,14 @@ def fill_slices(core_profiles, times): profiles_1d.ion.resize(len(ions)) profiles_1d.neutral.resize(len(ions)) for i, ion in enumerate(ions): - profiles_1d.ion[i].label = profiles_1d.neutral[i].label = ion + if hasattr(profiles_1d.ion[i], 'label'): + profiles_1d.ion[i].label = ion + profiles_1d.neutral[i].label = ion + if hasattr(profiles_1d.ion[i], 'name'): + profiles_1d.ion[i].name = ion + profiles_1d.neutral[i].name = ion + + # profiles_1d.ion[i].label = profiles_1d.neutral[i].label = ion profiles_1d.ion[i].z_ion = 1.0 profiles_1d.ion[i].neutral_index = profiles_1d.neutral[i].ion_index = i + 1 @@ -74,7 +81,7 @@ def setup(self, hli, backend): def time_get_slice(self, hli, 
backend): for t in TIME: - self.dbentry.get_slice("core_profiles", t, imaspy.ids_defs.CLOSEST_INTERP) + self.dbentry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) def teardown(self, hli, backend): if hasattr(self, "dbentry"): # imas + netCDF has no dbentry @@ -96,8 +103,8 @@ class LazyGet: param_names = ["lazy", "backend"] def setup(self, lazy, backend): - self.dbentry = create_dbentry("imaspy", backend) - core_profiles = factory["imaspy"].core_profiles() + self.dbentry = create_dbentry("imas", backend) + core_profiles = factory["imas"].core_profiles() fill_slices(core_profiles, TIME) self.dbentry.put(core_profiles) diff --git a/benchmarks/edge_profiles.py b/benchmarks/edge_profiles.py index 87ff2b51..c2f69c28 100644 --- a/benchmarks/edge_profiles.py +++ b/benchmarks/edge_profiles.py @@ -3,7 +3,7 @@ import numpy as np -import imaspy +import imas from .utils import available_backends, create_dbentry, factory, hlis @@ -17,18 +17,18 @@ def fill_ggd(edge_profiles, times): """Fill nested arrays of structures in grids_ggd and ggd substructures. 
Args: - edge_profiles: edge_profiles IDS object (either from IMASPy or AL HLI) + edge_profiles: edge_profiles IDS object (either from imas-python or AL HLI) times: time values to fill """ edge_profiles.ids_properties.homogeneous_time = ( - imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS + imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS ) - edge_profiles.ids_properties.comment = "Generated for IMASPy benchmark suite" + edge_profiles.ids_properties.comment = "Generated for imas-python benchmark suite" edge_profiles.ids_properties.creation_date = datetime.date.today().isoformat() - edge_profiles.code.name = "IMASPy ASV benchmark" - edge_profiles.code.version = imaspy.__version__ + edge_profiles.code.name = "imas-python ASV benchmark" + edge_profiles.code.version = imas.__version__ edge_profiles.code.repository = ( - "https://git.iter.org/projects/IMAS/repos/imaspy/browse" + "https://github.com/iterorganization/imas-python" ) # This GGD grid is not a valid description, but it's a good stress test for the @@ -46,7 +46,13 @@ def fill_ggd(edge_profiles, times): grid.space[i].identifier.index = 1 grid.space[i].identifier.description = "Description...." 
grid.space[i].geometry_type.index = 0 - grid.space[0].coordinates_type = np.array([4, 5], dtype=np.int32) + grid.space[0].coordinates_type.resize(1) + if imas.__version__ >= "4.0.0": + grid.space[0].coordinates_type = np.array([4, 5], dtype=np.int32) + else: + grid.space[0].coordinates_type[0].name = "coordinates type" + grid.space[0].coordinates_type[0].index = 0 + grid.space[0].coordinates_type[0].name = "example coordinates type" grid.space[0].objects_per_dimension.resize(3) # points, lines, surfaces points = grid.space[0].objects_per_dimension[0].object points.resize(N_POINTS) @@ -61,7 +67,13 @@ def fill_ggd(edge_profiles, times): for i in range(N_SURFACES): surfaces[i].nodes = np.random.randint(1, N_LINES + 1, 4, dtype=np.int32) - grid.space[1].coordinates_type = np.array([6], dtype=np.int32) + grid.space[1].coordinates_type.resize(1) + if imas.__version__ >= "4.0.0": + grid.space[1].coordinates_type = np.array([6], dtype=np.int32) + else: + grid.space[1].coordinates_type[0].name = "coordinates type" + grid.space[1].coordinates_type[0].index = 0 + grid.space[1].coordinates_type[0].name = "example coordinates type" grid.space[1].objects_per_dimension.resize(2) obp = grid.space[1].objects_per_dimension[0] obp.object.resize(2) diff --git a/benchmarks/technical.py b/benchmarks/technical.py index 59072d48..1bbf3a48 100644 --- a/benchmarks/technical.py +++ b/benchmarks/technical.py @@ -1,18 +1,11 @@ -import imaspy import imas -def timeraw_create_default_imaspy_factory(): +def timeraw_create_default_imas_factory(): # timeraw to ensure that nothing is cached return """ - import imaspy - imaspy.IDSFactory() - """ - - -def timeraw_import_imaspy(): - return """ - import imaspy + import imas + imas.IDSFactory() """ @@ -38,6 +31,6 @@ def track_imas_versions(): ) -def track_imaspy_dd_version(): - return imaspy.IDSFactory().version +def track_imas_dd_version(): + return imas.IDSFactory().version """ diff --git a/benchmarks/utils.py b/benchmarks/utils.py index 
5a8beeb8..0d2a9958 100644 --- a/benchmarks/utils.py +++ b/benchmarks/utils.py @@ -3,12 +3,8 @@ import uuid from pathlib import Path -import imaspy -import imaspy.exception - -# Don't directly import imas: code analyzers break on the huge code base -imas = importlib.import_module("imas") - +import imas +import imas.exception # Backend constants HDF5 = "HDF5" @@ -28,11 +24,11 @@ def backend_exists(backend): """Tries to detect if the lowlevel has support for the given backend.""" uri = create_uri(backend, str(uuid.uuid4())) try: - entry = imaspy.DBEntry(uri, "r") + entry = imas.DBEntry(uri, "r") except Exception as exc: if "backend is not available" in str(exc): return False - elif isinstance(exc, (imaspy.exception.ALException, FileNotFoundError)): + elif isinstance(exc, (imas.exception.ALException, FileNotFoundError)): return True return True # Highly unlikely, but it could succeed without error @@ -60,32 +56,27 @@ def backend_exists(backend): backend for backend in available_backends if backend not in [ASCII, NETCDF] ] -hlis = ["imas", "imaspy"] +hlis = ["imas"] DBEntry = { "imas": imas.DBEntry, - "imaspy": imaspy.DBEntry, } factory = { - "imas": imas, - "imaspy": imaspy.IDSFactory(), + "imas": imas.IDSFactory(), } -available_serializers = [imaspy.ids_defs.ASCII_SERIALIZER_PROTOCOL] +available_serializers = [imas.ids_defs.ASCII_SERIALIZER_PROTOCOL] def create_dbentry(hli, backend): if backend == NETCDF: - if hli == "imas": - # Raising NotImplementedError will skip the benchmarks for this combination - raise NotImplementedError("AL-Python HLI doesn't implement netCDF.") - if hli == "imaspy": # check if netcdf backend is available + if hli == "imas": # check if netcdf backend is available try: assert ( - imaspy.DBEntry._select_implementation("x.nc").__name__ + imas.DBEntry._select_implementation("x.nc").__name__ == "NCDBEntryImpl" ) except (AttributeError, AssertionError): raise NotImplementedError( - "This version of IMASPy doesn't implement netCDF." 
+ "This version of imas-python doesn't implement netCDF." ) from None path = Path.cwd() / f"DB-{hli}-{backend}" diff --git a/ci/build_dd_zip.sh b/ci/build_dd_zip.sh index a4f14683..8f704d98 100755 --- a/ci/build_dd_zip.sh +++ b/ci/build_dd_zip.sh @@ -17,4 +17,4 @@ echo "Done loading modules" set -x # Build the DD zip -python imaspy/dd_helpers.py +python imas/dd_helpers.py diff --git a/ci/build_docs_and_dist.sh b/ci/build_docs_and_dist.sh index 2ef4c12a..a83ed031 100755 --- a/ci/build_docs_and_dist.sh +++ b/ci/build_docs_and_dist.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Bamboo CI script to install imaspy and run all tests +# Bamboo CI script to install imas and run all tests # Note: this script should be run from the root of the git repository # Debuggging: @@ -27,7 +27,7 @@ pip install --upgrade pip setuptools wheel build rm -rf dist python -m build . -# Install imaspy and documentation dependencies from the just-built wheel +# Install imas and documentation dependencies from the just-built wheel pip install "`readlink -f dist/*.whl`[docs,netcdf]" # Debugging: diff --git a/ci/linting.sh b/ci/linting.sh index a66eacaf..415ad1bf 100755 --- a/ci/linting.sh +++ b/ci/linting.sh @@ -24,5 +24,5 @@ python -m venv venv # Install and run linters pip install --upgrade 'black >=24,<25' flake8 -black --check imaspy -flake8 imaspy +black --check imas +flake8 imas diff --git a/ci/run_benchmark.sh b/ci/run_benchmark.sh index 74783585..022804fd 100755 --- a/ci/run_benchmark.sh +++ b/ci/run_benchmark.sh @@ -1,16 +1,24 @@ #!/bin/bash -# Bamboo CI script to install imaspy and run all tests +# Bamboo CI script to install imas and run all tests # Note: this script should be run from the root of the git repository # Debuggging: -set -e -o pipefail + echo "Loading modules:" $@ +BENCHMARKS_DIR=$(realpath "$PWD/imas_benchmarks") +if [[ "$(uname -n)" == *"bamboo"* ]]; then + set -e -o pipefail + # create + BENCHMARKS_DIR=$(realpath "/mnt/bamboo_deploy/imas/benchmarks/") +fi # Set up environment 
such that module files can be loaded source /etc/profile.d/modules.sh module purge # Modules are supplied as arguments in the CI job: -module load $@ +# IMAS-AL-Python/5.2.1-intel-2023b-DD-3.41.0 Saxon-HE/12.4-Java-21 +module load IMAS-AL-Core/5.4.3-intel-2023b Saxon-HE/12.4-Java-21 + # Debuggging: echo "Done loading modules" @@ -24,17 +32,17 @@ rm -rf venv # Environment should be clean, but remove directory to be sure python -m venv venv source venv/bin/activate -# Install asv and imaspy +# Install asv and imas pip install --upgrade pip setuptools wheel pip install virtualenv .[test] # Generate MDS+ models cache -python -c 'import imaspy.backends.imas_core.mdsplus_model; print(imaspy.backends.imas_core.mdsplus_model.mdsplus_model_dir(imaspy.IDSFactory()))' +python -c 'import imas.backends.imas_core.mdsplus_model; print(imas.backends.imas_core.mdsplus_model.mdsplus_model_dir(imas.IDSFactory()))' # Copy previous results (if any) -mkdir -p /mnt/bamboo_deploy/imaspy/benchmarks/results +mkdir -p "$BENCHMARKS_DIR/results" mkdir -p .asv -cp -rf /mnt/bamboo_deploy/imaspy/benchmarks/results .asv/ +cp -rf "$BENCHMARKS_DIR/results" .asv/ # Ensure numpy won't do multi-threading export OPENBLAS_NUM_THREADS=1 @@ -47,7 +55,7 @@ asv machine --yes # Run ASV for the current commit, develop and main asv run --skip-existing-successful HEAD^! asv run --skip-existing-successful develop^! -asv run --skip-existing-successful main^! +# asv run --skip-existing-successful main^! 
# Compare results if [ `git rev-parse --abbrev-ref HEAD` == develop ] @@ -61,5 +69,5 @@ fi asv publish # And persistently store them -cp -rf .asv/{results,html} /mnt/bamboo_deploy/imaspy/benchmarks/ +cp -rf .asv/{results,html} "$BENCHMARKS_DIR" diff --git a/ci/run_pytest.sh b/ci/run_pytest.sh index 9579e635..7b204bf8 100755 --- a/ci/run_pytest.sh +++ b/ci/run_pytest.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Bamboo CI script to install imaspy and run all tests +# Bamboo CI script to install imas and run all tests # Note: this script should be run from the root of the git repository # Debuggging: @@ -22,7 +22,7 @@ rm -rf venv # Environment should be clean, but remove directory to be sure python -m venv venv source venv/bin/activate -# Install imaspy and test dependencies +# Install imas and test dependencies pip install --upgrade pip setuptools wheel pip install .[h5py,netcdf,test] @@ -34,4 +34,4 @@ pip freeze rm -f junit.xml rm -rf htmlcov -python -m pytest -n=auto --cov=imaspy --cov-report=term-missing --cov-report=html --junit-xml=junit.xml +python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=html --junit-xml=junit.xml diff --git a/conftest.py b/conftest.py index 20b26679..a7eb12b1 100644 --- a/conftest.py +++ b/conftest.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. 
# # Set up pytest: # - Backend parametrization (and corresponding command line options) @@ -22,19 +22,19 @@ import pytest from packaging.version import Version -from imaspy.backends.imas_core.imas_interface import has_imas as _has_imas -from imaspy.backends.imas_core.imas_interface import ll_interface, lowlevel -from imaspy.dd_zip import dd_etree, dd_xml_versions, latest_dd_version -from imaspy.ids_defs import ( +from imas.backends.imas_core.imas_interface import has_imas as _has_imas +from imas.backends.imas_core.imas_interface import ll_interface, lowlevel +from imas.dd_zip import dd_etree, dd_xml_versions, latest_dd_version +from imas.ids_defs import ( ASCII_BACKEND, HDF5_BACKEND, IDS_TIME_MODE_INDEPENDENT, MDSPLUS_BACKEND, MEMORY_BACKEND, ) -from imaspy.ids_factory import IDSFactory +from imas.ids_factory import IDSFactory -logger = logging.getLogger("imaspy") +logger = logging.getLogger("imas") logger.setLevel(logging.INFO) os.environ["IMAS_AL_DISABLE_VALIDATE"] = "1" @@ -136,33 +136,33 @@ def latest_factory3(): # Fixtures for various assets @pytest.fixture() -def imaspy_assets(): - return files("imaspy") / "assets" +def imas_assets(): + return files("imas") / "assets" @pytest.fixture() -def fake_toplevel_xml(imaspy_assets): - return imaspy_assets / "IDS_fake_toplevel.xml" +def fake_toplevel_xml(imas_assets): + return imas_assets / "IDS_fake_toplevel.xml" @pytest.fixture() -def ids_minimal(imaspy_assets): - return imaspy_assets / "IDS_minimal.xml" +def ids_minimal(imas_assets): + return imas_assets / "IDS_minimal.xml" @pytest.fixture() -def ids_minimal2(imaspy_assets): - return imaspy_assets / "IDS_minimal_2.xml" +def ids_minimal2(imas_assets): + return imas_assets / "IDS_minimal_2.xml" @pytest.fixture() -def ids_minimal_struct_array(imaspy_assets): - return imaspy_assets / "IDS_minimal_struct_array.xml" +def ids_minimal_struct_array(imas_assets): + return imas_assets / "IDS_minimal_struct_array.xml" @pytest.fixture() -def ids_minimal_types(imaspy_assets): - 
return imaspy_assets / "IDS_minimal_types.xml" +def ids_minimal_types(imas_assets): + return imas_assets / "IDS_minimal_types.xml" @pytest.fixture diff --git a/docs/Makefile b/docs/Makefile index 6f98ead9..f0c27f01 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -29,7 +29,7 @@ clean: Makefile # This recipe generates source files, so put result in source # Seems to overwrite autosummary documentation though! So not using this rn -MODULE_EXCLUDE="../imaspy/examples/**" "../imas/**" +MODULE_EXCLUDE="../imas/examples/**" "../imas/**" apidocs: Makefile sphinx-apidoc --implicit-namespaces -o "$(GENERATEDDIR)" "$(PROJECT_ROOT)/imaspy/" $(MODULE_EXCLUDE) diff --git a/docs/source/_static/imaspy.css b/docs/source/_static/imas.css similarity index 100% rename from docs/source/_static/imaspy.css rename to docs/source/_static/imas.css diff --git a/docs/source/_static/imaspy_200x200.png b/docs/source/_static/imas_200x200.png similarity index 100% rename from docs/source/_static/imaspy_200x200.png rename to docs/source/_static/imas_200x200.png diff --git a/docs/source/api-hidden.rst b/docs/source/api-hidden.rst index 41595482..a437b34f 100644 --- a/docs/source/api-hidden.rst +++ b/docs/source/api-hidden.rst @@ -11,4 +11,4 @@ API autosummary :recursive: :template: custom-module-template.rst - imaspy + imas diff --git a/docs/source/api.rst b/docs/source/api.rst index 143ee1fe..87be0471 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -1,16 +1,16 @@ -.. currentmodule:: imaspy +.. currentmodule:: imas API reference ============= -This page provides an auto-generated summary of IMASPy's API. For more details +This page provides an auto-generated summary of imas-python's API. For more details and examples, refer to the relevant chapters in the main part of the documentation. -IMASPy IDS manipulation -------------------------- +imas-python IDS manipulation +---------------------------- -.. currentmodule:: imaspy +.. currentmodule:: imas .. 
autosummary:: diff --git a/docs/source/benchmarking.rst b/docs/source/benchmarking.rst index 4588b173..ce7b0090 100644 --- a/docs/source/benchmarking.rst +++ b/docs/source/benchmarking.rst @@ -1,33 +1,33 @@ -.. _`benchmarking IMASPY`: +.. _`benchmarking IMAS`: -Benchmarking IMASPy -=================== +Benchmarking imas-python +======================== -IMASPy integrates with the `airspeed velocity +imas-python integrates with the `airspeed velocity `_ ``asv`` package for benchmarking. -IMASPy benchmarks ------------------ +imas-python benchmarks +---------------------- -IMASPy benchmarks are stored in the ``benchmarks`` folder in the git repository. We can +imas-python benchmarks are stored in the ``benchmarks`` folder in the git repository. We can currently distinguish three types of benchmarks: Technical benchmarks These are for benchmarking features not directly connected to user-interfacing - functionality. For example benchmarking the time it takes to import the imaspy + functionality. For example benchmarking the time it takes to import the imas package. Basic functional benchmarks These are for benchmarking functionality with an equivalent feature in the IMAS - Access Layer HLI. In addition to tracking the performance of the IMASPy features + Access Layer HLI. In addition to tracking the performance of the imas-python features over time, we can also benchmark the performance against the traditional HLI. For example: putting and getting IDSs. -IMASPy-specific functional benchmarks +imas-python-specific functional benchmarks These are for benchmarking functionality without an equivalent feature in the IMAS - Access Layer HLI. We use these for tracking the IMASPy performance over time. + Access Layer HLI. We use these for tracking the imas-python performance over time. For example: data conversion between DD versions. 
@@ -35,7 +35,7 @@ IMASPy-specific functional benchmarks Running benchmarks (quick) -------------------------- -When you have an existing IMASPy installation, you can run the benchmarks like this: +When you have an existing imas-python installation, you can run the benchmarks like this: .. code-block:: console @@ -67,12 +67,12 @@ contains tabular results. Some examples: hli -------- ------------ imas 22.9±0.4μs - imaspy 408±8μs + imas 408±8μs ======== ============ Here we see the benchmark ``core_profiles.Generate.time_create_core_profiles`` was repeated for multiple values of ``hli``: once for the ``imas`` HLI, and once for the -``imaspy`` HLI. +``imas`` HLI. Some benchmarks are parametrized in multiple dimensions, as in below example. This results in a 2D table of results. @@ -87,7 +87,7 @@ results in a 2D table of results. hli 13 14 11 ======== ========== ============ ========= imas 75.1±1ms 70.2±0.5ms 207±2ms - imaspy 241±4ms 229±2ms 364±6ms + imas 241±4ms 229±2ms 364±6ms ======== ========== ============ ========= .. note:: @@ -103,8 +103,8 @@ Running benchmarks (advanced) ----------------------------- Running benchmarks quickly, as explained in the previous section, is great during -development and for comparing the performance of IMASPy against the imas HLI. However, -``asv`` can also track the performance of benchmarks over various commits of IMASPy. +development and for comparing the performance of imas-python against the imas HLI. However, +``asv`` can also track the performance of benchmarks over various commits of imas-python. Unfortunately this is a bit more tricky to set up. @@ -112,7 +112,7 @@ Setup advanced benchmarking ''''''''''''''''''''''''''' First, some background on how ``asv`` tracks performance: it creates an isolated virtual -environment (using the ``virtualenv`` package) and installs IMASPy for each commit that +environment (using the ``virtualenv`` package) and installs imas-python for each commit that will be benchmarked. 
However, because the virtual environment is isolated, the ``imas`` package won't be available. We need to work around it by setting the environment variable ``ASV_PYTHONPATH``: @@ -125,8 +125,8 @@ variable ``ASV_PYTHONPATH``: .. caution:: - ``imaspy`` must not be available on the ``ASV_PYTHONPATH`` to avoid the interfering - of two imaspy modules (one on the ``PYTHONPATH``, and the other installed by ``asv`` + ``imas`` must not be available on the ``ASV_PYTHONPATH`` to avoid the interfering + of two imas modules (one on the ``PYTHONPATH``, and the other installed by ``asv`` in the virtual environment). @@ -171,7 +171,7 @@ Instead, you can submit a benchmark job to the compute nodes. #!/bin/bash # Set SLURM options: - #SBATCH --job-name=IMASPy-benchmark + #SBATCH --job-name=imas-python-benchmark #SBATCH --time=1:00:00 #SBATCH --partition=gen10_ib # Note: for proper benchmarking we need to exclusively reserve a node, even though @@ -199,7 +199,7 @@ Instead, you can submit a benchmark job to the compute nodes. echo # Activate the virtual environment which has asv installed - . venv_imaspy/bin/activate + . venv_imas/bin/activate # Setup asv machine (using default values) asv machine --yes diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 2601639a..d3a4ef93 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -3,8 +3,8 @@ Changelog ========= -What's new in IMASPy 1.1.1 --------------------------- +What's new in imas-python 1.1.1 +------------------------------- This is a small release that mainly fixes issues related to the recent Data Dictionary 4.0.0 release. @@ -19,7 +19,7 @@ Bug fixes Dictionary 4.0.0 and 3.42.0. In other cases, the Data Dictionary version is now explicitly indicated. -- :issue:`IMAS-5560`: Fix a bug where IMASPy would not correctly recognize that +- :issue:`IMAS-5560`: Fix a bug where imas-python would not correctly recognize that the UDA backend is used. 
- :issue:`IMAS-5541`: Fix a bug when converting a closed contour to Data Dictionary version 4.0.0. @@ -29,15 +29,15 @@ Bug fixes recent Data Dictionary version than the on-disk data was stored with. -What's new in IMASPy 1.1 ------------------------- +What's new in imas-python 1.1 +----------------------------- New features '''''''''''' - :ref:`1.1/improved performance`. - :ref:`1.1/improved conversion`. -- IMASPy 1.1 adds support for Identifiers defined by the Data Dictionary. This +- imas-python 1.1 adds support for Identifiers defined by the Data Dictionary. This functionality is described in detail in :ref:`Identifiers`. - Support for the new :py:const:`~imaspy.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` that is @@ -52,7 +52,7 @@ New features netCDF file, which can be used for sharing and/or archiving data. This feature is in `preview` status, meaning that it may change in upcoming - minor releases of IMASPy. + minor releases of imas-python. - Additional utility functions in :py:mod:`imaspy.util`: @@ -71,18 +71,18 @@ New features - :py:func:`imaspy.util.get_data_dictionary_version` returns the Data Dictionary version for which an IDS was created. -- Add support for IMAS Access Layer Core 5.2 and later. IMASPy can now be used +- Add support for IMAS Access Layer Core 5.2 and later. imas-python can now be used with just the Access Layer Core package available, the full AL-Python HLI is no longer required. Since the Access Layer Core is now installable with ``pip`` as well (requires access to the git repository on - ``__), you can install - ``imaspy`` and ``imas_core`` in one go with: + ``__), you can install + ``imas`` and ``imas_core`` in one go with: .. code-block:: bash - pip install 'imaspy[imas-core] @ git+ssh://git@git.iter.org/imas/imaspy.git' + pip install 'imas[imas-core] @ git+ssh://git@github.com/iterorganization/imas-core.git' - A diff tool for IDSs: :py:func:`imaspy.util.idsdiff`. 
- Implement ``==`` equality checking for IDS Structures and Arrays of Structures @@ -91,16 +91,16 @@ New features backend. During a :py:meth:`~imaspy.db_entry.DBEntry.get` or - :py:meth:`~imaspy.db_entry.DBEntry.get_slice`, IMASPy first reads the version + :py:meth:`~imaspy.db_entry.DBEntry.get_slice`, imas-python first reads the version of the Data Dictionary that was used to store the IDS. When this version is - not known to IMASPy, an error is raised. This error can now be ignored by + not known to imas-python, an error is raised. This error can now be ignored by setting the parameter :py:param:`~imaspy.db_entry.DBEntry.get.ignore_unknown_dd_version` to - ``True``, and IMASPy will do its best to load the data anyway. + ``True``, and imas-python will do its best to load the data anyway. - A new command line tool exists for analyzing which Data Dictionary fields are used in provided Data Entries. This tool is explained in detail in - :ref:`IMASPy Data Entry analysis`. + :ref:`imas-python Data Entry analysis`. - Various improvements to the documentation were made. @@ -110,7 +110,7 @@ Breaking changes .. note:: - We attempt to keep the public API of IMASPy stable with minor releases. The + We attempt to keep the public API of imas-python stable with minor releases. The following breaking change is the result of an upgrade of the IMAS Access Layer. - Starting with Access Layer 5.2 or newer, the Access Layer will raise @@ -121,8 +121,8 @@ Breaking changes You may need to update the :py:class:`Exception` classes in ``try/except`` blocks to the new Exception classes raised by ``imas_core``. - When using an older version of the Access Layer, the behaviour of IMASPy is no - different than in IMASPy 1.0. + When using an older version of the Access Layer, the behaviour of imas-python is no + different than in imas-python 1.0. 
Bug fixes @@ -136,10 +136,10 @@ Bug fixes - Fixed a bug with :py:func:`~imaspy.ids_toplevel.IDSToplevel.serialize` when the IDS is in a non-default Data Dictionary version. - Fixed a bug when assigning ``nan`` to a FLT_0D, which would lead to a - confusing and incorrect log message in IMASPy 1.0. -- Fixed incorrect oldest supported DD version. Previously IMASPy indicated that + confusing and incorrect log message in imas-python 1.0. +- Fixed incorrect oldest supported DD version. Previously imas-python indicated that DD ``3.21.1`` was supported, however ``3.22.0`` is the oldest Data Dictionary - tested (and provided) with IMASPy. :py:attr:`imaspy.OLDEST_SUPPORTED_VERSION` + tested (and provided) with imas-python. :py:attr:`imaspy.OLDEST_SUPPORTED_VERSION` has been updated to reflect this. - Fixed a bug when using numpy functions, such as :external:py:func:`numpy.isclose` on scalar numbers. Previously an error was @@ -158,11 +158,11 @@ Improved performance '''''''''''''''''''' - Improved performance of :py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate`. -- Improved creation of IMASPy IDS objects. This made filling IDSs and loading +- Improved creation of imas-python IDS objects. This made filling IDSs and loading them with :py:meth:`~imaspy.db_entry.DBEntry.get` / :py:meth:`~imaspy.db_entry.DBEntry.get_slice` 10-20% faster. - Improved the performance of lazy loading. This is most noticeable with the - ``HDF5`` backend, which is now up to 40x faster than with IMASPy 1.0. + ``HDF5`` backend, which is now up to 40x faster than with imas-python 1.0. - Improved the performance of :py:meth:`~imaspy.db_entry.DBEntry.get` / :py:meth:`~imaspy.db_entry.DBEntry.get_slice` / :py:meth:`~imaspy.db_entry.DBEntry.put` / @@ -180,9 +180,9 @@ Converting IDSs between Data Dictionary versions has several improvements for recent DD versions. Further details on IDS conversion can be found in :ref:`Conversion of IDSs between DD versions`. 
-- The IMASPy Command Line Interface for converting Data Entries between different +- The imas-python Command Line Interface for converting Data Entries between different versions of the Data Dictionary has been improved. See :ref:`Command line tool - reference` or execute ``imaspy convert --help`` in a shell for further + reference` or execute ``imas convert --help`` in a shell for further details. - Add support for multiple renames in an IDS' path. @@ -190,7 +190,7 @@ recent DD versions. Further details on IDS conversion can be found in For example, in the ``pulse_schedule`` IDS, the node ``ec/beam/power_launched/reference`` in Data Dictionary ``3.40.0`` was renamed from ``ec/launcher/power/reference/data`` in Data Dictionary ``3.39.0``. This - use case is now supported by IMASPy. + use case is now supported by imas-python. - Automatically convert data between 0D and 1D when possible (`IMAS-5170 `__). diff --git a/docs/source/ci_config.rst b/docs/source/ci_config.rst index ced4f52a..2fd284f4 100644 --- a/docs/source/ci_config.rst +++ b/docs/source/ci_config.rst @@ -3,25 +3,25 @@ CI configuration ================ -IMASPy uses `ITER Bamboo `_ for CI. This page provides an overview +imas-python uses `ITER Bamboo `_ for CI. This page provides an overview of the CI Plan and deployment projects. CI Plan ------- -The `IMASPy CI plan `_ consists of 4 types of jobs: +The `imas-python CI plan `_ consists of 4 types of jobs: Linting and DD ZIP This job is responsible for three things: 1. Verify that the ``IDSDef2MDSplusPreTree.xsl`` file matches the one in the Access Layer repository. This file is required for building MDSplus models and the - models built by IMASPy should match those built by the Access Layer. - 2. Linting: run ``black`` and ``flake8`` on the IMASPy code base. See :ref:`code + models built by imas-python should match those built by the Access Layer. + 2. Linting: run ``black`` and ``flake8`` on the imas-python code base. See :ref:`code style and linting`. 
3. Build the Data Dictionary zip file. This Task builds the Data Dictionary for all tagged releases since DD version ``3.22.0``. These are combined into the - ``IDSDef.zip`` file, which is distributed with IMASPy. + ``IDSDef.zip`` file, which is distributed with imas-python. The ZIP file is built in a separate job, such that the subsequent test jobs can reuse this. @@ -32,11 +32,9 @@ Linting and DD ZIP - ``ci/build_dd_zip.sh`` Test with AL - This runs all unit tests with pytest. There are multiple (at the time of writing 3) - Access Layer versions that we test against: AL4.11.7 (from SDCC module - ``IMAS/3.39.0-4.11.7-intel-2020b``), AL5.0.0 (from SDCC module - ``IMAS/3.39.0-5.0.0-intel-2020b``) and AL5.1.0 (from SDCC module - ``IMAS/3.40.0-5.1.0-intel-2020b``). + This runs all unit tests with pytest. + Access Layer version that we test against: + IMAS-AL-Core/5.4.3-intel-2023b The CI script executed in this job is ``ci/run_pytest.sh``, which expects the modules it needs to load as arguments. @@ -51,7 +49,7 @@ Test with AL 5. In the "Script" Task, update the module(s) in the Argument field Benchmark - This job runs the :ref:`ASV benchmarks ` on the CI server. It + This job runs the :ref:`ASV benchmarks ` on the CI server. It is configured such that it can only run on a single CI agent (`io-ls-bamboowk6.iter.org`). There are two reasons for this: @@ -64,7 +62,7 @@ Benchmark The CI script executed in this job is: ``ci/run_benchmark.sh``. Build docs and dists - This job builds the Sphinx documentation and python packages for IMASPy (``sdist`` + This job builds the Sphinx documentation and python packages for imas-python (``sdist`` and ``wheel``). The CI script executed in this job is: ``ci/build_docs_and_dist.sh``. 
@@ -73,18 +71,14 @@ Build docs and dists Deployment projects ------------------- -There are two Bamboo deployment projects for IMASPy: +There is github workflow for imas-python: -`Deploy IMASPy-doc `_ - Deploy the documentation created in the `Build docs and dists` job to `Sharepoint - `_. +`imas-python-PyPi `_ + Deploy the python packages job to the https://pypi.org/ server and https://test.pypi.org/ server. + You can find link here : `imas-python `_ - This deployment project runs for after each successful CI build of the IMASPy main - branch. -`IMASPy-PyPi `_ - Deploy the python packages created in the `Build docs and dists` job to the - https://pypi.iter.org/ server. +`Deploy imas-python-doc `_ + Deploy the documentation using `readthedocs + `_. - This deployment project runs for after each successful CI build of the IMASPy main - branch. diff --git a/docs/source/cli.rst b/docs/source/cli.rst index 61d8251e..0fa3819a 100644 --- a/docs/source/cli.rst +++ b/docs/source/cli.rst @@ -1,32 +1,32 @@ -.. _`IMASPy Command Line tool`: +.. _`imas-python Command Line tool`: -IMASPy Command Line tool -======================== +imas-python Command Line tool +============================= -IMASPy comes with a command line tool: ``imaspy``. This allows you to execute +imas-python comes with a command line tool: ``imas``. This allows you to execute some tasks without writing Python code: -- ``imaspy convert`` can convert Data Entries (or, optionally, single IDSs from +- ``imas convert`` can convert Data Entries (or, optionally, single IDSs from a Data Entry) to a different DD version. This command can also be used to convert IDSs between different backends. -- ``imaspy print`` can print the contents of an IDS to the terminal. -- ``imaspy version`` shows version information of IMASPy. -- ``imaspy analyze-db`` and ``imaspy process-db-analysis`` analyze the contents +- ``imas print`` can print the contents of an IDS to the terminal. 
+- ``imas version`` shows version information of imas-python. +- ``imas analyze-db`` and ``imas process-db-analysis`` analyze the contents of one or more Data Entries (stored in the HDF5 backend format). This tool is - explained in more detail :ref:`below `. + explained in more detail :ref:`below `. You can get further details, including the expected command line arguments and options, by running any tool with the ``--help`` flag. This help is also available in the :ref:`Command line tool reference` below. -.. _`IMASPy Data Entry analysis`: +.. _`imas-python Data Entry analysis`: -IMASPy Data Entry analysis --------------------------- +imas-python Data Entry analysis +------------------------------- -The IMASPy Data Entry analysis tool is a set of two command line programs: -``imaspy analyze-db`` and ``imaspy process-db-analysis``. The tool analyzes the +The imas-python Data Entry analysis tool is a set of two command line programs: +``imas analyze-db`` and ``imas process-db-analysis``. The tool analyzes the files from the HDF5 backend to figure out which IDSs are stored in the Data Entry, and which fields from the Data Dictionary have any data stored. This provides statistical data that is useful for Data Dictionary maintenance: by @@ -37,12 +37,12 @@ adding, changing or removing data fields. Usage ''''' -The ``imaspy analyze-db`` is run first. Its output is then used by ``imaspy +The ``imas analyze-db`` is run first. Its output is then used by ``imas process-db-analysis`` to provide statistics on the collected data. -.. rubric:: ``imaspy analyze-db`` +.. rubric:: ``imas analyze-db`` -``imaspy analyze-db`` analyzes Data Entries. You need to provide one or more +``imas analyze-db`` analyzes Data Entries. You need to provide one or more paths to folders where HDF5-backend IMAS data is stored. .. note:: @@ -59,61 +59,61 @@ paths to folders where HDF5-backend IMAS data is stored. ``/public/imasdb/////`` folder, where ```` is typically ``/home/``. 
-The tool collects a small amount of metadata (see the output of ``imaspy +The tool collects a small amount of metadata (see the output of ``imas analyze-db --help`` for an overview) on top of the filled fields of IDSs. All data (the metadata, and usage data of the provided Data Entries) is stored in a `gzipped `__ `JSON `__ file. -By default this is output in ``imaspy-db-analysis.json.gz`` in the current +By default this is output in ``imas-db-analysis.json.gz`` in the current working directory, but this can be customized with the ``--output/-o`` option. If the output file already exists, the existing data is retained and the additional analysis data is *appended* to the file. .. code-block:: bash - :caption: Example usage of ``imaspy analyze-db`` + :caption: Example usage of ``imas analyze-db`` - # Analyze a single data entry, output to the default imaspy-db-analysis.json.gz - imaspy analyze-db /work/imas/shared/imasdb/iter_scenarios/3/106015/1/ + # Analyze a single data entry, output to the default imas-db-analysis.json.gz + imas analyze-db /work/imas/shared/imasdb/iter_scenarios/3/106015/1/ # Analyze a single data entry, provide a custom output filename - imaspy analyze-db ./test/dataset/ -o test-dataset-analysis.json.gz + imas analyze-db ./test/dataset/ -o test-dataset-analysis.json.gz # Analyze multiple data entries, use shell globbing to select all runs - imaspy analyze-db /work/imas/shared/imasdb/iter_scenarios/3/150601/*/ + imas analyze-db /work/imas/shared/imasdb/iter_scenarios/3/150601/*/ # Analyze **all** HDF5 Data Entries inside a folder # 1. Find all HDF5 Data Entries (by locating their master.h5 files) # in the ~/public/imasdb/ folder # 2. Get the directory names for each of these files - # 3. Pass the directories to imaspy analyze-db + # 3. Pass the directories to imas analyze-db find ~/public/imasdb/ -name master.h5 | \ xargs dirname | \ - xargs imaspy analyze-db + xargs imas analyze-db .. 
note:: - ``imaspy analyze-db`` only works with the HDF5 backend, because the data files + ``imas analyze-db`` only works with the HDF5 backend, because the data files stored by this backend allow for a fast way to check which fields in an IDS are filled. We use the `h5py `__ Python module, which needs to be available to run the tool. An error message instructing to install / activate ``h5py`` is provided when ``h5py`` cannot be loaded. - If your data is stored in another backend than HDF5, you can use ``imaspy + If your data is stored in another backend than HDF5, you can use ``imas convert`` to convert the data to the HDF5 backend. For example: .. code-block:: bash - imaspy convert \ - imas:mdsplus?path=/path/to/mdsplus/data 3.41.0 imas:hdf5?path=/tmp/imaspy-analysis + imas convert \ + imas:mdsplus?path=/path/to/mdsplus/data 3.41.0 imas:hdf5?path=/tmp/imas-analysis -.. rubric:: ``imaspy process-db-analysis`` +.. rubric:: ``imas process-db-analysis`` -Once you have one or more output files from ``imaspy analyze-db``, you can -process these files with ``imaspy process-db-analysis``. This will: +Once you have one or more output files from ``imas analyze-db``, you can +process these files with ``imas process-db-analysis``. This will: 1. Load all analysis results from the provided files, and compare this against the available fields in :ref:`The default Data Dictionary version` (which @@ -139,13 +139,13 @@ process these files with ``imaspy process-db-analysis``. This will: - Enter End Of File: *Ctrl+D*. .. code-block:: bash - :caption: Example usage for ``imaspy process-db-analysis`` + :caption: Example usage for ``imas process-db-analysis`` # Process a single analysis output - imaspy process-db-analysis imaspy-db-analysis.json.gz + imas process-db-analysis imas-db-analysis.json.gz # Process multiple outputs - imaspy process-db-anlysis workflow-1.json.gz workflow-2.json.gz + imas process-db-analysis workflow-1.json.gz workflow-2.json.gz .. 
[#data_fields] Data fields are all fields in an IDS that can contain data. Structures and Arrays of Structures are not included. All data types @@ -158,7 +158,7 @@ process these files with ``imaspy process-db-analysis``. This will: Command line tool reference --------------------------- -.. click:: imaspy.command.cli:cli - :prog: imaspy +.. click:: imas.command.cli:cli + :prog: imas :nested: full \ No newline at end of file diff --git a/docs/source/code_style.rst b/docs/source/code_style.rst index 7729a4d7..e7f3913c 100644 --- a/docs/source/code_style.rst +++ b/docs/source/code_style.rst @@ -7,7 +7,7 @@ Code style and linting Code style ---------- -IMASPy follows `The Black Code Style +imas-python follows `The Black Code Style `_. All Python files should be formatted with the ``black`` command line tool (this is checked in :ref:`CI `). @@ -40,7 +40,7 @@ with pre-commit hooks): .. code-block:: console - $ black imaspy + $ black imas All done! ✨ 🍰 ✨ 66 files left unchanged. @@ -48,8 +48,8 @@ with pre-commit hooks): Linting ------- -IMASPy uses `flake8 `_ for linting (static code -analysis). Flake8 should not report any violations when running it on the ``imaspy`` +imas-python uses `flake8 `_ for linting (static code +analysis). Flake8 should not report any violations when running it on the ``imas`` code base. Again, this is checked in CI. In some exceptions we can ignore a violation. For example, if a violation cannot be @@ -74,5 +74,5 @@ your code introduces any violations: .. 
code-block:: console - $ flake8 imaspy + $ flake8 imas diff --git a/docs/source/conf.py b/docs/source/conf.py index 53b3ad77..d6e32651 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -17,44 +17,41 @@ from jinja2.defaults import DEFAULT_FILTERS from packaging.version import Version -import imaspy +import imas print("python exec:", sys.executable) print("sys.path:", sys.path) # -- Project information ----------------------------------------------------- # The documented project’s name -project = src_project = PROJECT = "IMASPy" -PACKAGE = "imaspy" -src_group = GROUP = "IMAS" +project = src_project = PROJECT = "imas-python" +PACKAGE = "imas" +GROUP = "IMAS" # A copyright statement in the style '2008, Author Name'. copyright = f"2020-{datetime.datetime.now().year}, ITER Organization" # The author name(s) of the document author = "ITER Organization" -src_host = "git.iter.org" +src_host = "https://github.com/iterorganization/" # Parse urls here for convenience, to be re-used - # ITER docs -iter_projects = "https://git.iter.org/projects/" -imas_repos = urljoin(iter_projects, "IMAS/") -imex_repos = urljoin(iter_projects, "IMEX/") -dd_url = urljoin(imas_repos, "repos/data-dictionary/") -al_url = urljoin(imas_repos, "repos/access-layer/") -issue_url = jira_url = "https://jira.iter.org/browse/" +iter_projects = "https://github.com/iterorganization/" +dd_url = urljoin(iter_projects, "imas-data-dictionary/") +al_url = urljoin(iter_projects, "imas-core/") +issue_url = jira_url = "https://github.com/iterorganization/imas-python/issues" -# IMASPy -repository_url = f"{iter_projects}/{src_group}/repos/{src_project}/" -blob_url = urljoin(repository_url, "browse/") -mr_url = urljoin(repository_url, "/pull-requests") +# imas-python +repository_url = f"{iter_projects}/{src_project}/" +blob_url = repository_url +mr_url = urljoin(repository_url, "/pulls") # Configuration of sphinx.ext.extlinks # See https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html # unique 
name: (base URL, label prefix) extlinks = { - "src": (blob_url + "%s", f"{src_group}/{src_project}/%s"), + "src": (blob_url + "%s", "%s"), "issue": (issue_url + "%s", "%s"), "merge": (mr_url + "%s", "!%s"), "dd": (dd_url + "%s", "%s"), @@ -62,7 +59,7 @@ "pypa": ("https://packaging.python.org/%s", None), } -full_version = Version(imaspy.__version__) +full_version = Version(imas.__version__) # version: The major project version, used as the replacement for |version|. # For example, for the Python documentation, this may be something like 2.6. @@ -137,10 +134,10 @@ # and # https://sphinx-immaterial.readthedocs.io/en/latest/customization.html#confval-html_theme_options html_theme_options = { - "repo_url": "https://git.iter.org/projects/IMAS/repos/imaspy", - "repo_name": "IMASPy", + "repo_url": "https://github.com/iterorganization/imas-python", + "repo_name": "imas-python", "icon": { - "repo": "fontawesome/brands/bitbucket", + "repo": "fontawesome/brands/github", }, "features": [ # "navigation.expand", @@ -202,7 +199,7 @@ # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = "_static/imaspy_200x200.png" +html_logo = "_static/imas_200x200.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 @@ -261,7 +258,7 @@ # html_file_suffix = None # Output file base name for HTML help builder. 
-htmlhelp_basename = "imaspy_doc" +htmlhelp_basename = "imas_doc" # -- Extension configuration ------------------------------------------------- @@ -356,7 +353,7 @@ def sphinx_click_process_usage(app, ctx, lines): def setup(app): DEFAULT_FILTERS["escape_underscores"] = escape_underscores - app.add_css_file("imaspy.css") + app.add_css_file("imas.css") # Customize output of sphinx-click app.connect("sphinx-click-process-arguments", sphinx_click_process_arguments) app.connect("sphinx-click-process-description", sphinx_click_process_description) diff --git a/docs/source/configuring.rst b/docs/source/configuring.rst index dae11b6f..388ac813 100644 --- a/docs/source/configuring.rst +++ b/docs/source/configuring.rst @@ -1,7 +1,7 @@ -Configuring IMASPy -================== +Configuring imas-python +======================= -IMASPy has a couple of environment variables that can be used to control its behaviour. +imas-python has a couple of environment variables that can be used to control its behaviour. This page provides an overview of available variables. .. note:: @@ -12,25 +12,25 @@ This page provides an overview of available variables. `_ -``IMASPY_LOGLEVEL`` - Sets the log level used by the IMASPy logger. +``IMAS_LOGLEVEL`` + Sets the log level used by the imas-python logger. By default (when this environment variable is not set), all log messages of ``INFO`` or more severe are logged. You may set this to, for example, - ``IMASPY_LOGLEVEL=WARNING``, to suppress some of the log messages. + ``IMAS_LOGLEVEL=WARNING``, to suppress some of the log messages. See the Python documentation for the :external:py:mod:`logging` module which log levels are available. .. note:: - This environment variable is read when the ``imaspy`` library is initialized - during the first ``import imaspy``. Changing it afterwards has no effect, but - you can use :external:py:meth:`logging.getLogger("imaspy").setLevel(...) 
+ This environment variable is read when the ``imas`` library is initialized + during the first ``import imas``. Changing it afterwards has no effect, but + you can use :external:py:meth:`logging.getLogger("imas").setLevel(...) ` to change the log level programmatically. -``IMASPY_DISABLE_NC_VALIDATE`` +``IMAS_DISABLE_NC_VALIDATE`` Disables validation of netCDF files when loading an IDS from an IMAS netCDF file. .. caution:: @@ -44,16 +44,16 @@ Environment variables shared with the IMAS Python HLI ----------------------------------------------------- ``IMAS_AL_DISABLE_VALIDATE`` - By default, IMASPy :ref:`validates ` IDSs to check that all data is - consistent with their coordinates during a :py:meth:`~imaspy.db_entry.DBEntry.put` - or :py:meth:`~imaspy.db_entry.DBEntry.put_slice`. + By default, imas-python :ref:`validates ` IDSs to check that all data is + consistent with their coordinates during a :py:meth:`~imas.db_entry.DBEntry.put` + or :py:meth:`~imas.db_entry.DBEntry.put_slice`. Setting ``IMAS_AL_DISABLE_VALIDATE=1`` disables this validation. ``IMAS_AL_SERIALIZER_TMP_DIR`` Specify the path to storing temporary data during - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.serialize` and - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.deserialize`. + :py:meth:`~imas.ids_toplevel.IDSToplevel.serialize` and + :py:meth:`~imas.ids_toplevel.IDSToplevel.deserialize`. If it is not set, the default location ``/dev/shm/`` or the current working directory will be chosen. diff --git a/docs/source/courses/advanced/dd_versions.rst b/docs/source/courses/advanced/dd_versions.rst index 7b3eb02e..5ccb2474 100644 --- a/docs/source/courses/advanced/dd_versions.rst +++ b/docs/source/courses/advanced/dd_versions.rst @@ -3,13 +3,13 @@ Working with multiple data dictionary versions ============================================== -Contrary to most high level interface for IMAS, IMASPy code is not tied to a specific -version of the Data Dictionary. 
In this lesson we will explore how IMASPy handles +Contrary to most high level interface for IMAS, imas-python code is not tied to a specific +version of the Data Dictionary. In this lesson we will explore how imas-python handles different DD versions (including development builds of the DD), and how we can convert IDSs between different versions of the Data Dictionary. .. note:: - Most of the time you won't need to worry about DD versions and the default IMASPy + Most of the time you won't need to worry about DD versions and the default imas-python behaviour should be fine. @@ -19,7 +19,7 @@ The default Data Dictionary version ----------------------------------- In the other training lessons, we didn't explicitly work with Data Dictionary versions. -Therefore IMASPy was always using the `default` DD version. Let's find out what that +Therefore imas-python was always using the `default` DD version. Let's find out what that version is: @@ -32,35 +32,35 @@ Exercise 1: The default DD version .. md-tab-item:: Exercise - 1. Create an :py:class:`imaspy.IDSFactory() `. + 1. Create an :py:class:`imas.IDSFactory() `. 2. Print the version of the DD that is used. 3. Create an empty IDS with this IDSFactory (any IDS is fine) and print the DD version of the IDS, see - :py:meth:`~imaspy.util.get_data_dictionary_version`. What do you notice? - 4. Create an :py:class:`imaspy.DBEntry `, you may use - the :py:attr:`MEMORY_BACKEND `. Print the + :py:meth:`~imas.util.get_data_dictionary_version`. What do you notice? + 4. Create an :py:class:`imas.DBEntry `, you may use + the :py:attr:`MEMORY_BACKEND `. Print the DD version that is used. What do you notice? .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/dd_versions.py + .. literalinclude:: imas_snippets/dd_versions.py -Okay, so now you know what your default DD version is. But how is it determined? IMASPy +Okay, so now you know what your default DD version is. But how is it determined? 
imas-python first checks if you have an IMAS environment loaded by checking the environment variable ``IMAS_VERSION``. If you are on a cluster and have used ``module load IMAS`` or similar, this environment variable will indicate what data dictionary version this module is -using. IMASPy will use that version as its default. +using. imas-python will use that version as its default. -If the ``IMAS_VERSION`` environment is not set, IMASPy will take the newest version of +If the ``IMAS_VERSION`` environment is not set, imas-python will take the newest version of the Data Dictionary that came bundled with it. Which brings us to the following topic: Bundled Data Dictionary definitions ----------------------------------- -IMASPy comes bundled [#DDdefs]_ with many versions of the Data Dictionary definitions. +imas-python comes bundled [#DDdefs]_ with many versions of the Data Dictionary definitions. You can find out which versions are available by calling -:py:meth:`imaspy.dd_zip.dd_xml_versions`. +:py:meth:`imas.dd_zip.dd_xml_versions`. Converting an IDS between Data Dictionary versions @@ -74,7 +74,7 @@ things that could change: - Change the data type of an IDS node - Rename an IDS node -IMASPy can convert between different versions of the DD and will migrate the data as +imas-python can convert between different versions of the DD and will migrate the data as much as possible. Let's see how this works in the following exercise. @@ -89,23 +89,23 @@ Exercise 2: Convert an IDS between DD versions for the ``pulse_schedule`` IDS because a number of IDS nodes were renamed for this IDS. - 1. Create an :py:class:`imaspy.IDSFactory() ` + 1. Create an :py:class:`imas.IDSFactory() ` for DD version ``3.25.0``. 2. Create a ``pulse_schedule`` IDS with this IDSFactory and verify that it is using DD version ``3.25.0``. 3. Fill the IDS with some test data: - .. literalinclude:: imaspy_snippets/ids_convert.py + .. literalinclude:: imas_snippets/ids_convert.py :start-after: # 3. 
:end-before: # 4. - 4. Use :py:func:`imaspy.convert_ids ` to + 4. Use :py:func:`imas.convert_ids ` to convert the IDS to DD version 3.39.0. The ``antenna`` structure that we filled in the old version of the DD has since been renamed to ``launcher``, and the ``launching_angle_*`` structures to ``steering_angle``. Check that - IMASPy has converted the data successfully (for example with - :py:func:`imaspy.util.print_tree`). - 5. By default, IMASPy creates a shallow copy of the data, which means that the + imas-python has converted the data successfully (for example with + :py:func:`imas.util.print_tree`). + 5. By default, imas-python creates a shallow copy of the data, which means that the underlying data arrays are shared between the IDSs of both versions. Update the ``time`` data of the original IDS (for example: :code:`pulse_schedule.time[1] = 3`) and print the ``time`` data of the @@ -113,7 +113,7 @@ Exercise 2: Convert an IDS between DD versions .. note:: - :py:func:`imaspy.convert_ids ` has an + :py:func:`imas.convert_ids ` has an optional keyword argument ``deep_copy``. If you set this to ``True``, the converted IDS will not share data with the original IDS. @@ -126,7 +126,7 @@ Exercise 2: Convert an IDS between DD versions .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/ids_convert.py + .. literalinclude:: imas_snippets/ids_convert.py .. 
_`Automatic conversion between DD versions`: @@ -134,22 +134,22 @@ Exercise 2: Convert an IDS between DD versions Automatic conversion between DD versions ---------------------------------------- -When loading data (with :py:meth:`~imaspy.db_entry.DBEntry.get` or -:py:meth:`~imaspy.db_entry.DBEntry.get_slice`) or storing data (with -:py:meth:`~imaspy.db_entry.DBEntry.put` or -:py:meth:`~imaspy.db_entry.DBEntry.put_slice`), IMASPy automatically converts the DD +When loading data (with :py:meth:`~imas.db_entry.DBEntry.get` or +:py:meth:`~imas.db_entry.DBEntry.get_slice`) or storing data (with +:py:meth:`~imas.db_entry.DBEntry.put` or +:py:meth:`~imas.db_entry.DBEntry.put_slice`), imas-python automatically converts the DD version for you. In this section we will see how that works. The ``DBEntry`` DD version '''''''''''''''''''''''''' -A :py:class:`~imaspy.db_entry.DBEntry` object is tied to a specific version of the Data +A :py:class:`~imas.db_entry.DBEntry` object is tied to a specific version of the Data Dictionary. We have already briefly seen this in :ref:`dd version exercise 1`. The DD version can be selected when constructing a new ``DBEntry`` object, through the -:py:param:`~imaspy.db_entry.DBEntry.__init__.dd_version` or -:py:param:`~imaspy.db_entry.DBEntry.__init__.xml_path` (see also :ref:`Using custom +:py:param:`~imas.db_entry.DBEntry.__init__.dd_version` or +:py:param:`~imas.db_entry.DBEntry.__init__.xml_path` (see also :ref:`Using custom builds of the Data Dictionary`) parameters. If you provide neither, the default DD version is used. @@ -174,8 +174,8 @@ Exercise 3: Automatic conversion when storing IDSs .. code-block:: python - new_entry = imaspy.DBEntry( - imaspy.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" + new_entry = imas.DBEntry( + imas.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" ) 4. Put the ``core_profiles`` IDS in the new ``DBEntry``. @@ -184,7 +184,7 @@ Exercise 3: Automatic conversion when storing IDSs .. 
md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/autoconvert_put.py + .. literalinclude:: imas_snippets/autoconvert_put.py Exercise 4: Automatic conversion when loading IDSs @@ -196,24 +196,24 @@ Exercise 4: Automatic conversion when loading IDSs 1. For this exercise we will first create some test data: - .. literalinclude:: imaspy_snippets/autoconvert_get.py + .. literalinclude:: imas_snippets/autoconvert_get.py :start-after: # 1. :end-before: # 2. 2. Reopen the ``DBEntry`` with the default DD version. 3. ``get`` the pulse schedule IDS. Print its ``version_put/data_dictionary`` and Data Dictionary version (with - :py:meth:`~imaspy.util.get_data_dictionary_version`). What do you + :py:meth:`~imas.util.get_data_dictionary_version`). What do you notice? - 4. Use ``imaspy.util.print_tree`` to print all data in the loaded IDS. What do + 4. Use ``imas.util.print_tree`` to print all data in the loaded IDS. What do you notice? 5. Repeat steps 3 and 4, but set - :py:param:`~imaspy.db_entry.DBEntry.get.autoconvert` to ``False``. What do + :py:param:`~imas.db_entry.DBEntry.get.autoconvert` to ``False``. What do you notice this time? .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/autoconvert_get.py + .. literalinclude:: imas_snippets/autoconvert_get.py Use cases for disabling autoconvert @@ -230,7 +230,7 @@ contain large changes between DD versions, such as: .. caution:: - The :py:meth:`~imaspy.ids_convert.convert_ids` method warns you when data is not + The :py:meth:`~imas.ids_convert.convert_ids` method warns you when data is not converted. Due to technical constraints, the ``autoconvert`` logic doesn't log any such warnings. 
@@ -240,7 +240,7 @@ contain large changes between DD versions, such as: >>> # Continuing with the example from Exercise 4: >>> ps_noconvert = entry.get("pulse_schedule", autoconvert=False) - >>> imaspy.convert_ids(ps_noconvert, "3.40.0") + >>> imas.convert_ids(ps_noconvert, "3.40.0") 15:32:32 INFO Parsing data dictionary version 3.40.0 @dd_zip.py:129 15:32:32 INFO Starting conversion of IDS pulse_schedule from version 3.25.0 to version 3.40.0. @ids_convert.py:350 15:32:32 INFO Element 'ec/antenna/phase' does not exist in the target IDS. Data is not copied. @ids_convert.py:396 @@ -255,7 +255,7 @@ contain large changes between DD versions, such as: Using custom builds of the Data Dictionary ------------------------------------------ -In the previous sections we showed how you can direct IMASPy to use a specific released +In the previous sections we showed how you can direct imas-python to use a specific released version of the Data Dictionary definitions. Sometimes it is useful to work with unreleased (development or custom) versions of the data dictionaries as well. @@ -267,21 +267,21 @@ unreleased (development or custom) versions of the data dictionaries as well. might not be read properly in the future. If you build the Data Dictionary, a file called ``IDSDef.xml`` is created. This file -contains all IDS definitions. To work with a custom DD build, you need to point IMASPy +contains all IDS definitions. To work with a custom DD build, you need to point imas-python to this ``IDSDef.xml`` file: .. 
code-block:: python - :caption: Use a custom Data Dictionary build with IMASPy + :caption: Use a custom Data Dictionary build with imas-python my_idsdef_file = "path/to/IDSDef.xml" # Replace with the actual path # Point IDSFactory to this path: - my_factory = imaspy.IDSFactory(xml_path=my_idsdef_file) + my_factory = imas.IDSFactory(xml_path=my_idsdef_file) # Now you can create IDSs using your custom DD build: my_ids = my_factory.new("...") # If you need a DBEntry to put / get IDSs in the custom version: - my_entry = imaspy.DBEntry("imas:hdf5?path=my-testdb", "w", xml_path=my_idsdef_file) + my_entry = imas.DBEntry("imas:hdf5?path=my-testdb", "w", xml_path=my_idsdef_file) Once you have created the ``IDSFactory`` and/or ``DBEntry`` pointing to your custom DD @@ -291,5 +291,5 @@ build, you can use them like you normally would. .. rubric:: Footnotes .. [#DDdefs] To be more precise, the Data Dictionary definitions are generated when the - IMASPy package is created. See :ref:`this reference
` for more + imas-python package is created. See :ref:`this reference
` for more details. diff --git a/docs/source/courses/advanced/explore.rst b/docs/source/courses/advanced/explore.rst index 86b692e8..5fa6fdca 100644 --- a/docs/source/courses/advanced/explore.rst +++ b/docs/source/courses/advanced/explore.rst @@ -1,10 +1,10 @@ Advanced data exploration ========================= -In the :ref:`basic/explore` training we have seen how to explore IMASPy data structures +In the :ref:`basic/explore` training we have seen how to explore imas-python data structures in an interactive way. -In this lesson, we will go a step further and look at methods to explore IMASPy data +In this lesson, we will go a step further and look at methods to explore imas-python data structures programmatically. This can be useful for, for example, writing plotting tools, analysis scripts, etc. @@ -13,26 +13,26 @@ Exploring IDS (sub)structures ----------------------------- An IDS structure is a collection of IDS nodes (which could be structures, or arrays of -structures themselves). In IMASPy this is represented by the -:py:class:`~imaspy.ids_structure.IDSStructure` class. You will find these classes in a +structures themselves). In imas-python this is represented by the +:py:class:`~imas.ids_structure.IDSStructure` class. You will find these classes in a lot of places: - Data Dictionary IDSs is a special case of an IDS structure (implemented by class - :py:class:`~imaspy.ids_toplevel.IDSToplevel`, which is a subclass of + :py:class:`~imas.ids_toplevel.IDSToplevel`, which is a subclass of ``IDSStructure``). - Data Dictionary structures, for example, the ``ids_properties`` structure that is present in every IDS. - Data Dictionary arrays of structures (implemented by - :py:class:`~imaspy.ids_struct_array.IDSStructArray`) contain ``IDSStructure``\ s. + :py:class:`~imas.ids_struct_array.IDSStructArray`) contain ``IDSStructure``\ s. When you have an ``IDSStructure`` object, you can iterate over it to get all child nodes that are contained in this structure. 
See the following example: .. code-block:: python - import imaspy + import imas - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # core_profiles is an IDS toplevel, which is also a structure: print("Core profiles contains the following elements:") @@ -61,15 +61,15 @@ Exercise 1: Explore structures .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/explore_structures.py + .. literalinclude:: imas_snippets/explore_structures.py Explore IDS data nodes and arrays of structures ----------------------------------------------- Besides structures, IDSs contain arrays of structures, and data nodes. Arrays of -structures (modeled by :py:class:`~imaspy.ids_struct_array.IDSStructArray`) are (as the -name applies) arrays containing :py:class:`~imaspy.ids_structure.IDSStructure`\ s. Data +structures (modeled by :py:class:`~imas.ids_struct_array.IDSStructArray`) are (as the +name applies) arrays containing :py:class:`~imas.ids_structure.IDSStructure`\ s. Data nodes can contain scalar or array data of various types. Some methods and properties are defined for all data nodes and arrays of structures: @@ -106,14 +106,14 @@ Some methods and properties are defined for all data nodes and arrays of structu details. .. seealso:: - You can find more details on IDS data node related classes and methods in the IMASPy Architecture documentation: - :ref:`imaspy_architecture/IDS_nodes` + You can find more details on IDS data node related classes and methods in the imas-python Architecture documentation: + :ref:`imas_architecture/IDS_nodes` Apply a function to all nodes in an IDS ''''''''''''''''''''''''''''''''''''''' Before diving into the exercise and use this new knowledge, it is useful to know the -:py:meth:`imaspy.util.visit_children` method. This method allows you to apply a method +:py:meth:`imas.util.visit_children` method. This method allows you to apply a method to all nodes of an IDS. 
Additional keyword arguments can control whether you want to include leaf nodes (data nodes) only, or also include structures and arrays of structure. You can also choose between applying the function to filled nodes only (the @@ -122,7 +122,7 @@ default) or all nodes, including empty ones. .. seealso:: You can find more details in the API documentation: - :py:meth:`imaspy.util.visit_children` + :py:meth:`imas.util.visit_children` Exercise 2: Explore data nodes @@ -134,7 +134,7 @@ Exercise 2: Explore data nodes 1. Load the training data for the ``equilibrium`` IDS. 2. Create a function that prints the path, shape and size of an IDS node. - 3. Use :py:meth:`~imaspy.util.visit_children` to apply the function to all + 3. Use :py:meth:`~imas.util.visit_children` to apply the function to all non-empty nodes in the equilbrium IDS. 4. Update your function such that it skips scalar (0D) IDS nodes. Apply the updated function to the equilibrium IDS. @@ -142,9 +142,9 @@ Exercise 2: Explore data nodes .. hint:: :collapsible: - Review IMASPy Architecture documentation for data node methods: - :ref:`imaspy_architecture/IDS_nodes` + Review imas-python Architecture documentation for data node methods: + :ref:`imas_architecture/IDS_nodes` .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/explore_data.py + .. literalinclude:: imas_snippets/explore_data.py diff --git a/docs/source/courses/advanced/hashing.rst b/docs/source/courses/advanced/hashing.rst index 4bac1fda..bc9d77fe 100644 --- a/docs/source/courses/advanced/hashing.rst +++ b/docs/source/courses/advanced/hashing.rst @@ -1,14 +1,14 @@ Calculating hashes of IMAS data =============================== -IMASPy can calculate *hashes* of IMAS data. As `Wikipedia explains better than I could +imas-python can calculate *hashes* of IMAS data. As `Wikipedia explains better than I could do `__: A hash function is any function that can be used to map data of arbitrary size to fixed-size values, [...]. 
The values returned by a hash function are called *hash values*, *hash codes*, *hash digests*, *digests*, or simply *hashes*. -IMASPy is using the XXH3 hash function from the `xxHash project +imas-python is using the XXH3 hash function from the `xxHash project `__. This is a *non-cryptographic* hash and returns 64-bit hashes. @@ -33,7 +33,7 @@ Exercise 1: Calculate some hashes .. md-tab-item:: Exercise - In this exercise we will use :py:func:`imaspy.util.calc_hash` to calculate + In this exercise we will use :py:func:`imas.util.calc_hash` to calculate hashes of some IDSs. Use :external:py:meth:`bytes.hex` to show a more readable hexidecimal format of the hash. @@ -51,11 +51,11 @@ Exercise 1: Calculate some hashes .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/hashing.py + .. literalinclude:: imas_snippets/hashing.py -Properties of IMASPy's hashes ------------------------------ +Properties of imas-python's hashes +---------------------------------- The implementation of the hash function has the following properties: @@ -91,4 +91,4 @@ Technical details and specification ----------------------------------- You can find the technical details, and a specification for calculating the hashes, in -the documentation of :py:meth:`imaspy.util.calc_hash`. +the documentation of :py:meth:`imas.util.calc_hash`. diff --git a/docs/source/courses/advanced/imaspy_snippets/alternative_coordinates.py b/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py similarity index 91% rename from docs/source/courses/advanced/imaspy_snippets/alternative_coordinates.py rename to docs/source/courses/advanced/imas_snippets/alternative_coordinates.py index 11816362..e4adfceb 100644 --- a/docs/source/courses/advanced/imaspy_snippets/alternative_coordinates.py +++ b/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py @@ -1,7 +1,7 @@ -import imaspy +import imas # 1. 
Create an empty distributions IDS -distributions = imaspy.IDSFactory().distributions() +distributions = imas.IDSFactory().distributions() # 2. Use the metadata attribute to find the coordinates of # distribution/profiles_2d/density @@ -21,7 +21,7 @@ # What do you notice: in both dimensions there are multiple options for the coordinate. # 3. Retrieve the coordinate values through the ``coordinates`` attribute. -# This will raise a coordinate lookup error because IMASPy cannot choose which of the +# This will raise a coordinate lookup error because imas-python cannot choose which of the # coordinates to use: try: print(p2d.density.coordinates[0]) diff --git a/docs/source/courses/advanced/imaspy_snippets/autoconvert_get.py b/docs/source/courses/advanced/imas_snippets/autoconvert_get.py similarity index 83% rename from docs/source/courses/advanced/imaspy_snippets/autoconvert_get.py rename to docs/source/courses/advanced/imas_snippets/autoconvert_get.py index f2b03aee..76ee8e90 100644 --- a/docs/source/courses/advanced/imaspy_snippets/autoconvert_get.py +++ b/docs/source/courses/advanced/imas_snippets/autoconvert_get.py @@ -1,17 +1,17 @@ -import imaspy -from imaspy.ids_defs import ASCII_BACKEND, IDS_TIME_MODE_HOMOGENEOUS -from imaspy.util import get_data_dictionary_version +import imas +from imas.ids_defs import ASCII_BACKEND, IDS_TIME_MODE_HOMOGENEOUS +from imas.util import get_data_dictionary_version # 1. 
Create test data # Create an IDSFactory for DD 3.25.0 -factory = imaspy.IDSFactory("3.25.0") +factory = imas.IDSFactory("3.25.0") # Create a pulse_schedule IDS pulse_schedule = factory.new("pulse_schedule") # Fill the IDS with some test data pulse_schedule.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS -pulse_schedule.ids_properties.comment = "Testing renamed IDS nodes with IMASPy" +pulse_schedule.ids_properties.comment = "Testing renamed IDS nodes with imas-python" pulse_schedule.time = [1.0, 1.1, 1.2] pulse_schedule.ec.antenna.resize(1) @@ -28,13 +28,13 @@ antenna.phase.reference_name = "Phase reference name" # And store the IDS in a DBEntry using DD 3.25.0 -entry = imaspy.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.25.0") +entry = imas.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.25.0") entry.create() entry.put(pulse_schedule) entry.close() # 2. Reopen the DBEntry with DD 3.42.0: -entry = imaspy.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.42.0") +entry = imas.DBEntry(ASCII_BACKEND, "autoconvert", 1, 1, dd_version="3.42.0") entry.open() # 3. Get the pulse schedule IDS @@ -47,7 +47,7 @@ # get_data_dictionary_version: 3.40.0 -> the IDS was automatically converted # 4. Print the data in the loaded IDS -imaspy.util.print_tree(ps_autoconvert) +imas.util.print_tree(ps_autoconvert) # What do you notice? # 1. The antenna AoS was renamed # 2. Several nodes no longer exist! @@ -65,6 +65,6 @@ # get_data_dictionary_version: 3.25.0 -> the IDS was not converted! # Print the data in the loaded IDS -imaspy.util.print_tree(ps_noconvert) +imas.util.print_tree(ps_noconvert) # What do you notice? # All data is here exactly as it was put at the beginnning of this exercise. 
diff --git a/docs/source/courses/advanced/imaspy_snippets/autoconvert_put.py b/docs/source/courses/advanced/imas_snippets/autoconvert_put.py similarity index 71% rename from docs/source/courses/advanced/imaspy_snippets/autoconvert_put.py rename to docs/source/courses/advanced/imas_snippets/autoconvert_put.py index 0ab5d121..ba9d2b48 100644 --- a/docs/source/courses/advanced/imaspy_snippets/autoconvert_put.py +++ b/docs/source/courses/advanced/imas_snippets/autoconvert_put.py @@ -1,17 +1,17 @@ -import imaspy -import imaspy.training -from imaspy.util import get_data_dictionary_version +import imas +import imas.training +from imas.util import get_data_dictionary_version # 1. Load the training data for the ``core_profiles`` IDS -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() core_profiles = entry.get("core_profiles") # 2. Print the DD version: print(get_data_dictionary_version(core_profiles)) # 3. Create a new DBEntry with DD version 3.37.0 -new_entry = imaspy.DBEntry( - imaspy.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" +new_entry = imas.DBEntry( + imas.ids_defs.MEMORY_BACKEND, "test", 0, 0, dd_version="3.37.0" ) new_entry.create() diff --git a/docs/source/courses/advanced/imaspy_snippets/calc_with_units.py b/docs/source/courses/advanced/imas_snippets/calc_with_units.py similarity index 93% rename from docs/source/courses/advanced/imaspy_snippets/calc_with_units.py rename to docs/source/courses/advanced/imas_snippets/calc_with_units.py index 6bc131f4..fd2253ed 100644 --- a/docs/source/courses/advanced/imaspy_snippets/calc_with_units.py +++ b/docs/source/courses/advanced/imas_snippets/calc_with_units.py @@ -1,11 +1,11 @@ import itertools # python standard library iteration tools -import imaspy -import imaspy.training +import imas +import imas.training import pint # 1. 
Load core_profiles IDS from training DBEntry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") # 2. Select the first time slice of profiles_1d diff --git a/docs/source/courses/advanced/imaspy_snippets/coordinates.py b/docs/source/courses/advanced/imas_snippets/coordinates.py similarity index 88% rename from docs/source/courses/advanced/imaspy_snippets/coordinates.py rename to docs/source/courses/advanced/imas_snippets/coordinates.py index db56f844..8b9b67fe 100644 --- a/docs/source/courses/advanced/imaspy_snippets/coordinates.py +++ b/docs/source/courses/advanced/imas_snippets/coordinates.py @@ -1,7 +1,7 @@ -import imaspy.training +import imas.training # 1. Load the training data for the core_profiles IDS: -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() core_profiles = entry.get("core_profiles") # 1a. Print the coordinate of profiles_1d[0].electrons.temperature @@ -17,13 +17,13 @@ # 1c. Change the time mode and print again core_profiles.ids_properties.homogeneous_time = \ - imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS + imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS print(core_profiles.profiles_1d.coordinates[0]) # What has changed? Now we get a numpy array with values -9e+40: # [-9.e+40 -9.e+40 -9.e+40] # # In heterogeneous time, the coordinate of profiles_1d is profiles_1d/time, which is a -# scalar. IMASPy will construct a numpy array for you where +# scalar. imas-python will construct a numpy array for you where # array[i] := profiles_1d[i]/time # Since we didn't set these values, they are set to the default EMPTY_FLOAT, which is # -9e+40. @@ -37,7 +37,7 @@ # This will output: # (IDSCoordinate('1...N'),) # The coordinate of profiles_2d is an index. 
When requesting the coordinate values, -# IMASPy will generate an index array for you: +# imas-python will generate an index array for you: print(slice0.profiles_2d.coordinates[0]) # -> array([0]) diff --git a/docs/source/courses/advanced/imaspy_snippets/dd_versions.py b/docs/source/courses/advanced/imas_snippets/dd_versions.py similarity index 76% rename from docs/source/courses/advanced/imaspy_snippets/dd_versions.py rename to docs/source/courses/advanced/imas_snippets/dd_versions.py index 24046bf2..1f9b3a69 100644 --- a/docs/source/courses/advanced/imaspy_snippets/dd_versions.py +++ b/docs/source/courses/advanced/imas_snippets/dd_versions.py @@ -1,8 +1,8 @@ -import imaspy -from imaspy.util import get_data_dictionary_version +import imas +from imas.util import get_data_dictionary_version # 1. Create an IDSFactory -default_factory = imaspy.IDSFactory() +default_factory = imas.IDSFactory() # 2. Print the DD version used by the IDSFactory # @@ -17,9 +17,9 @@ # it. # 4. Create a new DBEntry -default_entry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "test", 0, 0) +default_entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 0, 0) default_entry.create() # Alternative URI syntax when using AL5.0.0: -# default_entry = imaspy.DBEntry("imas:memory?path=.") +# default_entry = imas.DBEntry("imas:memory?path=.") print("DD version used for the DBEntry:", get_data_dictionary_version(default_entry)) # What do you notice? It is the same default version again. 
diff --git a/docs/source/courses/advanced/imaspy_snippets/explore_data.py b/docs/source/courses/advanced/imas_snippets/explore_data.py similarity index 68% rename from docs/source/courses/advanced/imaspy_snippets/explore_data.py rename to docs/source/courses/advanced/imas_snippets/explore_data.py index e79f5415..5a9f824c 100644 --- a/docs/source/courses/advanced/imaspy_snippets/explore_data.py +++ b/docs/source/courses/advanced/imas_snippets/explore_data.py @@ -1,9 +1,9 @@ -import imaspy -import imaspy.training -from imaspy.util import get_full_path +import imas +import imas.training +from imas.util import get_full_path # 1. Load the training data equilibrium IDS -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() equilibrium = entry.get("equilibrium") @@ -13,7 +13,7 @@ def print_path_shape_size(node): # 3. Apply to equilibrium IDS -imaspy.util.visit_children(print_path_shape_size, equilibrium) +imas.util.visit_children(print_path_shape_size, equilibrium) print() @@ -25,4 +25,4 @@ def print_path_shape_size_not0d(node): # And apply to the equilibrium IDS -imaspy.util.visit_children(print_path_shape_size_not0d, equilibrium) +imas.util.visit_children(print_path_shape_size_not0d, equilibrium) diff --git a/docs/source/courses/advanced/imaspy_snippets/explore_structures.py b/docs/source/courses/advanced/imas_snippets/explore_structures.py similarity index 85% rename from docs/source/courses/advanced/imaspy_snippets/explore_structures.py rename to docs/source/courses/advanced/imas_snippets/explore_structures.py index de4691b3..8cc9730a 100644 --- a/docs/source/courses/advanced/imaspy_snippets/explore_structures.py +++ b/docs/source/courses/advanced/imas_snippets/explore_structures.py @@ -1,8 +1,8 @@ -import imaspy -import imaspy.training +import imas +import imas.training # 1. 
Load the equilibrium IDS from the training data -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() equilibrium = entry.get("equilibrium") # 2. Print non-empty child nodes diff --git a/docs/source/courses/advanced/imaspy_snippets/hashing.py b/docs/source/courses/advanced/imas_snippets/hashing.py similarity index 59% rename from docs/source/courses/advanced/imaspy_snippets/hashing.py rename to docs/source/courses/advanced/imas_snippets/hashing.py index 2a5d5bca..74e12be6 100644 --- a/docs/source/courses/advanced/imaspy_snippets/hashing.py +++ b/docs/source/courses/advanced/imas_snippets/hashing.py @@ -1,19 +1,19 @@ -import imaspy +import imas # 1. Create IDS -eq = imaspy.IDSFactory().equilibrium() -print(imaspy.util.calc_hash(eq).hex(' ', 2)) # 2d06 8005 38d3 94c2 +eq = imas.IDSFactory().equilibrium() +print(imas.util.calc_hash(eq).hex(' ', 2)) # 2d06 8005 38d3 94c2 # 2. Update homogeneous_time eq.ids_properties.homogeneous_time = 0 -print(imaspy.util.calc_hash(eq).hex(' ', 2)) # 3b9b 9297 56a2 42fd +print(imas.util.calc_hash(eq).hex(' ', 2)) # 3b9b 9297 56a2 42fd # Yes: the hash changed (significantly!). This was expected, because the data is no # longer the same # 3. Resize time_slice eq.time_slice.resize(2) -print(imaspy.util.calc_hash(eq.time_slice[0]).hex(' ', 2)) # 2d06 8005 38d3 94c2 -print(imaspy.util.calc_hash(eq.time_slice[1]).hex(' ', 2)) # 2d06 8005 38d3 94c2 +print(imas.util.calc_hash(eq.time_slice[0]).hex(' ', 2)) # 2d06 8005 38d3 94c2 +print(imas.util.calc_hash(eq.time_slice[1]).hex(' ', 2)) # 2d06 8005 38d3 94c2 # What do you notice? # # The hashes of both time_slice[0] and time_slice[1] are identical, because both @@ -29,15 +29,15 @@ # 5. 
Fill data p2d.r = [[1., 2.]] p2d.z = p2d.r -print(imaspy.util.calc_hash(p2d.r).hex(' ', 2)) # 352b a6a6 b40c 708d -print(imaspy.util.calc_hash(p2d.z).hex(' ', 2)) # 352b a6a6 b40c 708d +print(imas.util.calc_hash(p2d.r).hex(' ', 2)) # 352b a6a6 b40c 708d +print(imas.util.calc_hash(p2d.z).hex(' ', 2)) # 352b a6a6 b40c 708d # These hashes are identical, because they contain the same data # 6. Only r or z del p2d.z -print(imaspy.util.calc_hash(p2d).hex(' ', 2)) # 0dcb ddaa 78ea 83a3 +print(imas.util.calc_hash(p2d).hex(' ', 2)) # 0dcb ddaa 78ea 83a3 p2d.z = p2d.r del p2d.r -print(imaspy.util.calc_hash(p2d).hex(' ', 2)) # f86b 8ea8 9652 3768 +print(imas.util.calc_hash(p2d).hex(' ', 2)) # f86b 8ea8 9652 3768 # Although the data inside `r` and `z` is identical, we get different hashes because the # data is in a different attribute. diff --git a/docs/source/courses/advanced/imaspy_snippets/ids_convert.py b/docs/source/courses/advanced/imas_snippets/ids_convert.py similarity index 83% rename from docs/source/courses/advanced/imaspy_snippets/ids_convert.py rename to docs/source/courses/advanced/imas_snippets/ids_convert.py index 70f1892d..77ea422c 100644 --- a/docs/source/courses/advanced/imaspy_snippets/ids_convert.py +++ b/docs/source/courses/advanced/imas_snippets/ids_convert.py @@ -1,8 +1,8 @@ -import imaspy -from imaspy.util import get_data_dictionary_version +import imas +from imas.util import get_data_dictionary_version # 1. Create an IDSFactory for DD 3.25.0 -factory = imaspy.IDSFactory("3.25.0") +factory = imas.IDSFactory("3.25.0") # 2. Create a pulse_schedule IDS pulse_schedule = factory.new("pulse_schedule") @@ -10,9 +10,9 @@ # 3. 
Fill the IDS with some test data pulse_schedule.ids_properties.homogeneous_time = \ - imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS pulse_schedule.ids_properties.comment = \ - "Testing renamed IDS nodes with IMASPy" + "Testing renamed IDS nodes with imas-python" pulse_schedule.time = [1., 1.1, 1.2] pulse_schedule.ec.antenna.resize(1) @@ -26,10 +26,10 @@ antenna.launching_angle_tor.reference.data = [3.1, 3.2, 3.3] # 4. Convert the IDS from version 3.25.0 to 3.39.0 -pulse_schedule_3_39 = imaspy.convert_ids(pulse_schedule, "3.39.0") +pulse_schedule_3_39 = imas.convert_ids(pulse_schedule, "3.39.0") # Check that the data is converted -imaspy.util.print_tree(pulse_schedule_3_39) +imas.util.print_tree(pulse_schedule_3_39) # 5. Update time data pulse_schedule.time[1] = 3 @@ -41,7 +41,7 @@ print(pulse_schedule_3_39.ids_properties.comment) # What do you notice? # This prints the original value of the comment ("Testing renamed IDS -# nodes with IMASPy"). +# nodes with imas-python"). # This is actually the same that you get when creating a shallow copy # with ``copy.copy`` of a regular Python dictionary: import copy @@ -60,7 +60,7 @@ # 7. Set phase.reference_name: pulse_schedule.ec.antenna[0].phase.reference_name = "Test refname" # And convert again -pulse_schedule_3_39 = imaspy.convert_ids(pulse_schedule, "3.39.0") -imaspy.util.print_tree(pulse_schedule_3_39) +pulse_schedule_3_39 = imas.convert_ids(pulse_schedule, "3.39.0") +imas.util.print_tree(pulse_schedule_3_39) # What do you notice? # Element 'ec/antenna/phase' does not exist in the target IDS. Data is not copied. 
diff --git a/docs/source/courses/advanced/imaspy_snippets/ids_to_xarray.py b/docs/source/courses/advanced/imas_snippets/ids_to_xarray.py similarity index 92% rename from docs/source/courses/advanced/imaspy_snippets/ids_to_xarray.py rename to docs/source/courses/advanced/imas_snippets/ids_to_xarray.py index 717db8dd..89a94b01 100644 --- a/docs/source/courses/advanced/imaspy_snippets/ids_to_xarray.py +++ b/docs/source/courses/advanced/imas_snippets/ids_to_xarray.py @@ -9,12 +9,12 @@ import matplotlib.pyplot as plt import numpy -import imaspy -import imaspy.training +import imas +import imas.training import xarray # 1. Load core_profiles IDS from training DBEntry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") # 2. Store the temperature of the first time slice diff --git a/docs/source/courses/advanced/imaspy_snippets/tensorized_ids_to_xarray.py b/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py similarity index 94% rename from docs/source/courses/advanced/imaspy_snippets/tensorized_ids_to_xarray.py rename to docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py index fe7bbb9c..ff4f4e28 100644 --- a/docs/source/courses/advanced/imaspy_snippets/tensorized_ids_to_xarray.py +++ b/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py @@ -9,12 +9,12 @@ import matplotlib.pyplot as plt import numpy -import imaspy -import imaspy.training +import imas +import imas.training import xarray # 1. Load core_profiles IDS from training DBEntry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") # 2. 
Store the temperature of the first time slice diff --git a/docs/source/courses/advanced/metadata.rst b/docs/source/courses/advanced/metadata.rst index c8eeba2e..42cb6abc 100644 --- a/docs/source/courses/advanced/metadata.rst +++ b/docs/source/courses/advanced/metadata.rst @@ -3,13 +3,13 @@ Using Data Dictionary metadata ============================== -IMASPy provides convenient access to Data Dictionary metadata of any IDS node through +imas-python provides convenient access to Data Dictionary metadata of any IDS node through the ``metadata`` attribute: .. code-block:: python - >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.metadata >>> core_profiles.time.metadata @@ -23,21 +23,21 @@ cases. Overview of available metadata ------------------------------ -The data dictionary metadata that is parsed by IMASPy is listed in the API -documentation for :py:class:`~imaspy.ids_metadata.IDSMetadata`. +The data dictionary metadata that is parsed by imas-python is listed in the API +documentation for :py:class:`~imas.ids_metadata.IDSMetadata`. -Note that not all metadata from the IMAS Data Dictionary is parsed by IMASPy. +Note that not all metadata from the IMAS Data Dictionary is parsed by imas-python. This metadata is still accessible on the :code:`metadata` attribute. You can use -:py:func:`imaspy.util.inspect` to get an overview of all metadata associated to an +:py:func:`imas.util.inspect` to get an overview of all metadata associated to an element in an IDS. .. code-block:: python :caption: Example showing all metadata for some ``core_profiles`` elements. 
- >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() - >>> imaspy.util.inspect(core_profiles.metadata) - ╭---- -----╮ + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() + >>> imas.util.inspect(core_profiles.metadata) + ╭---- -----╮ │ Container for IDS Metadata │ │ │ │ ╭------------------------------------------------╮ │ @@ -63,8 +63,8 @@ element in an IDS. │ type = │ │ units = '' │ ╰----------------------------------------------------╯ - >>> imaspy.util.inspect(core_profiles.time.metadata) - ╭------ -------╮ + >>> imas.util.inspect(core_profiles.time.metadata) + ╭------ -------╮ │ Container for IDS Metadata │ │ │ │ ╭----------------------------------------------------╮ │ @@ -108,7 +108,7 @@ quite complicated, but summarized they come in two categories: values per pixel, and another variable storing some processed quantities per pixel. In this case, the coordinates are indices (line / column index of the pixel), but these must be the same for both quantities. This information is stored in the - :py:attr:`~imaspy.ids_metadata.IDSMetadata.coordinates_same_as` metadata. + :py:attr:`~imas.ids_metadata.IDSMetadata.coordinates_same_as` metadata. 2. Coordinates are other quantities in the Data Dictionary. @@ -140,7 +140,7 @@ Exercise 1: Using coordinates do you notice? c. Change the time mode of the IDS from homogeneous time to heterogeneous time. You do this by setting - ``ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS``. + ``ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS``. Print the coordinate of the ``profiles_1d`` array of structure again. What has changed? @@ -151,7 +151,7 @@ Exercise 1: Using coordinates .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/coordinates.py + .. literalinclude:: imas_snippets/coordinates.py Exercise 2: Alternative coordinates @@ -176,7 +176,7 @@ Exercise 2: Alternative coordinates 1. 
Resize the array of structures so you can access the metadata of the elements. 2. Use the indexing operator on - :py:class:`~imaspy.ids_metadata.IDSMetadata`. For example, + :py:class:`~imas.ids_metadata.IDSMetadata`. For example, ``distributions.metadata["distribution/wave"]`` to get the metadata of the ``distribution[]/wave`` array of structures. 3. Resize the ``distribution`` and ``distribution[0].profiles_2d`` arrays of @@ -185,17 +185,17 @@ Exercise 2: Alternative coordinates you notice? 4. You can still use the metadata to go to the coordinate node options: - a. Use the :py:attr:`~imaspy.ids_coordinates.IDSCoordinate.references` - attribute of the :py:class:`~imaspy.ids_coordinates.IDSCoordinate` + a. Use the :py:attr:`~imas.ids_coordinates.IDSCoordinate.references` + attribute of the :py:class:`~imas.ids_coordinates.IDSCoordinate` objects in the ``metadata`` to get the paths to each of the coordinate - options. This will give you the :py:class:`~imaspy.ids_path.IDSPath` + options. This will give you the :py:class:`~imas.ids_path.IDSPath` objects for each coordinate option. - b. Then, use :py:meth:`IDSPath.goto ` to go + b. Then, use :py:meth:`IDSPath.goto ` to go to the corresponding IDS node. .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/alternative_coordinates.py + .. literalinclude:: imas_snippets/alternative_coordinates.py Units and dimensional analysis with Pint @@ -208,8 +208,8 @@ Units and dimensional analysis with Pint `_. The Data Dictionary specifies the units of stored quantities. This metadata is -accessible in IMASPy via :py:attr:`metadata.units -`. In most cases, these units are in a format +accessible in imas-python via :py:attr:`metadata.units +`. In most cases, these units are in a format that ``pint`` can understand (for example ``T``, ``Wb``, ``m^-3``, ``m.s^-1``). There are some exceptions to that, with the main ones ``-`` (indicating a quantity is @@ -220,7 +220,7 @@ go into that in this lesson. 
For conversion of units from the Data Dictionary format to pint units, we recommend creating a custom function, such as the following: -.. literalinclude:: imaspy_snippets/calc_with_units.py +.. literalinclude:: imas_snippets/calc_with_units.py :caption: Convert DD units to Pint Units :start-at: # Create pint UnitRegistry :end-before: # End @@ -253,4 +253,4 @@ Exercise 3: Calculate the mass density from ``core_profiles/profiles_1d`` .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/calc_with_units.py + .. literalinclude:: imas_snippets/calc_with_units.py diff --git a/docs/source/courses/advanced/xarray.rst b/docs/source/courses/advanced/xarray.rst index a8ab68c2..e1fb5498 100644 --- a/docs/source/courses/advanced/xarray.rst +++ b/docs/source/courses/advanced/xarray.rst @@ -4,7 +4,7 @@ Create ``xarray.DataArray`` from an IDS .. info:: In this lesson you will create a ``DataArray`` manually. In a future version of - IMASPy we plan to include functionality that will automatically do this for you. + imas-python we plan to include functionality that will automatically do this for you. That should further simplify working with data inside IDSs. Let's start with an introduction of Xarray. According to `their website @@ -61,7 +61,7 @@ Exercise 1: create a ``DataArray`` for ``profiles_1d/temperature`` .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/ids_to_xarray.py + .. literalinclude:: imas_snippets/ids_to_xarray.py Exercise 2: include the ``time`` axis in the ``DataArray`` @@ -96,4 +96,4 @@ the ``profiles_1d`` array of structures. When the grid is not changing in the ID .. md-tab-item:: Solution - .. literalinclude:: imaspy_snippets/tensorized_ids_to_xarray.py + .. 
literalinclude:: imas_snippets/tensorized_ids_to_xarray.py diff --git a/docs/source/courses/advanced_user_training.rst b/docs/source/courses/advanced_user_training.rst index 8f11558a..c91be432 100644 --- a/docs/source/courses/advanced_user_training.rst +++ b/docs/source/courses/advanced_user_training.rst @@ -1,9 +1,9 @@ -Advanced IMASPy -=============== +Advanced imas-python +==================== -In this IMASPy training, we dive into more advanced features of IMASPy. It is assumed -you are familiar with the basic features of IMASPy, which are introduced in the -:ref:`IMASPy 101` training. +In this imas-python training, we dive into more advanced features of imas-python. It is assumed +you are familiar with the basic features of imas-python, which are introduced in the +:ref:`imas-python 101` training. .. note:: diff --git a/docs/source/courses/basic/al4_snippets/create_core_profiles.py b/docs/source/courses/basic/al4_snippets/create_core_profiles.py deleted file mode 100644 index 39c8aac5..00000000 --- a/docs/source/courses/basic/al4_snippets/create_core_profiles.py +++ /dev/null @@ -1,40 +0,0 @@ -import datetime - -import imas -import numpy as np - - -cp = imas.core_profiles() - -# Set properties -cp.ids_properties.homogeneous_time = imas.imasdef.IDS_TIME_MODE_HOMOGENEOUS -cp.ids_properties.comment = "Synthetic IDS created for the IMASPy course" -cp.ids_properties.creation_date = datetime.date.today().isoformat() - -# Set a time array -cp.time = np.array([1.0, 2.5, 4.0]) - -# Main coordinate -rho_tor_norm = np.linspace(0, 1, num=64) - -# Generate some 1D profiles -cp.profiles_1d.resize(len(cp.time)) -for index, t in enumerate(cp.time): - t_e = np.exp(-16 * rho_tor_norm**2) + (1 - np.exp(4 * rho_tor_norm - 3)) * t / 8 - t_e *= t * 500 - # Store the generated t_e as electron temperature - cp.profiles_1d[index].electrons.temperature = t_e - -# Validate the IDS for consistency -# cp.validate() # <-- not available in AL4 - -# Fill in the missing rho_tor_norm 
coordinate -for index in range(3): - cp.profiles_1d[index].grid.rho_tor_norm = rho_tor_norm - -# Create a new data entry for storing the IDS -pulse, run, database = 1, 1, "imaspy-course" -entry = imas.DBEntry(imas.imasdef.ASCII_BACKEND, database, pulse, run) -entry.create() - -entry.put(cp) diff --git a/docs/source/courses/basic/al4_snippets/iterate_core_profiles.py b/docs/source/courses/basic/al4_snippets/iterate_core_profiles.py deleted file mode 100644 index dad52da1..00000000 --- a/docs/source/courses/basic/al4_snippets/iterate_core_profiles.py +++ /dev/null @@ -1,12 +0,0 @@ -import imas -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -cp = entry.get("core_profiles") -for el in ["profiles_1d", "global_quantities", "code"]: - try: - print(getattr(cp, el)) - except NameError: - print(f"Could not print {el}, internal IMAS error") diff --git a/docs/source/courses/basic/al4_snippets/plot_core_profiles_ne_timeslice.py b/docs/source/courses/basic/al4_snippets/plot_core_profiles_ne_timeslice.py deleted file mode 100644 index 9da796f8..00000000 --- a/docs/source/courses/basic/al4_snippets/plot_core_profiles_ne_timeslice.py +++ /dev/null @@ -1,35 +0,0 @@ -import os - -import matplotlib -import imas -import imaspy.training - -# To avoid possible display issues when Matplotlib uses a non-GUI backend -if "DISPLAY" not in os.environ: - matplotlib.use("agg") -else: - matplotlib.use("TKagg") - -import matplotlib.pyplot as plt - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Read n_e profile and the associated normalised toroidal flux coordinate at -t = 443 # seconds - -cp = entry.get_slice("core_profiles", t, imas.imasdef.CLOSEST_INTERP) - -# profiles_1d should only contain the requested slice -assert len(cp.profiles_1d) == 1 - -ne = cp.profiles_1d[0].electrons.density -rho = cp.profiles_1d[0].grid.rho_tor_norm - -# Plot the figure -fig, ax = plt.subplots() -ax.plot(rho, ne) 
-ax.set_ylabel(r"$n_e$") -ax.set_xlabel(r"$\rho_{tor, norm}$") -ax.ticklabel_format(axis="y", scilimits=(-1, 1)) -plt.show() diff --git a/docs/source/courses/basic/al4_snippets/print_idss.py b/docs/source/courses/basic/al4_snippets/print_idss.py deleted file mode 100644 index 05de3094..00000000 --- a/docs/source/courses/basic/al4_snippets/print_idss.py +++ /dev/null @@ -1,5 +0,0 @@ -from imas.ids_names import IDSName - -# As each imas module is compiled with a specific DD version, we can load the -# names from the module itself -print([name.value for name in IDSName]) diff --git a/docs/source/courses/basic/al4_snippets/read_core_profiles_ne_timeslice.py b/docs/source/courses/basic/al4_snippets/read_core_profiles_ne_timeslice.py deleted file mode 100644 index 6d7b0fda..00000000 --- a/docs/source/courses/basic/al4_snippets/read_core_profiles_ne_timeslice.py +++ /dev/null @@ -1,21 +0,0 @@ -import imas -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Read n_e profile and the associated normalised toroidal flux coordinate at -t = 443 # seconds - -cp = entry.get_slice("core_profiles", t, imas.imasdef.CLOSEST_INTERP) - -# profiles_1d should only contain the requested slice -assert len(cp.profiles_1d) == 1 - -ne = cp.profiles_1d[0].electrons.density -rho = cp.profiles_1d[0].grid.rho_tor_norm -print("ne =", ne) -print("rho =", rho) - -# Close the datafile -entry.close() diff --git a/docs/source/courses/basic/al4_snippets/read_equilibrium_time_array.py b/docs/source/courses/basic/al4_snippets/read_equilibrium_time_array.py deleted file mode 100644 index 2347e44f..00000000 --- a/docs/source/courses/basic/al4_snippets/read_equilibrium_time_array.py +++ /dev/null @@ -1,26 +0,0 @@ -import numpy as np -import imas -import imaspy.training - - -# Find nearest value and index in an array -def find_nearest(a, a0): - "Element in nd array `a` closest to the scalar value `a0`" - idx = np.abs(a - a0).argmin() - return a[idx], 
idx - - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Read the time array from the equilibrium IDS -equilibrium = entry.get("equilibrium") # All time slices -time_array = equilibrium.time - -# Find the index of the desired time slice in the time array -t_closest, t_index = find_nearest(time_array, 433) -print("Time index = ", t_index) -print("Time value = ", t_closest) - -# Close input data entry -entry.close() diff --git a/docs/source/courses/basic/al4_snippets/read_whole_equilibrium.py b/docs/source/courses/basic/al4_snippets/read_whole_equilibrium.py deleted file mode 100644 index d7df1b22..00000000 --- a/docs/source/courses/basic/al4_snippets/read_whole_equilibrium.py +++ /dev/null @@ -1,20 +0,0 @@ -import imas -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# 1. Read and print the time of the equilibrium IDS for the whole scenario -equilibrium = entry.get("equilibrium") # All time slices -# 2. Print the time array: -print(equilibrium.time) - -# 3. Load the core_profiles IDS -core_profiles = entry.get("core_profiles") -# 4. When you inspect the core_profiles.time array, you'll find that item [1] -# corresponds to t ~ 433s. -# 5. 
Print the electron temperature -print(core_profiles.profiles_1d[1].electrons.temperature) - -# Close input data entry -entry.close() diff --git a/docs/source/courses/basic/al4_snippets/transform_grid.py b/docs/source/courses/basic/al4_snippets/transform_grid.py deleted file mode 100644 index 08a572f4..00000000 --- a/docs/source/courses/basic/al4_snippets/transform_grid.py +++ /dev/null @@ -1,105 +0,0 @@ -import os - -import imas -import imaspy.training -import matplotlib -import numpy as np -from scipy.interpolate import RegularGridInterpolator - -if "DISPLAY" not in os.environ: - matplotlib.use("agg") -else: - matplotlib.use("TKagg") - -import matplotlib.pyplot as plt - -# Open input data entry -entry = imaspy.training.get_training_imas_db_entry() - -# Use a partial get to retrieve the full time vector -input_times = entry.partial_get("equilibrium", "time") - -# Create output data entry -output_entry = imas.DBEntry( - imas.imasdef.MEMORY_BACKEND, "imaspy-course", 2, 1) -output_entry.create() - -# Loop over each time slice -for time in input_times: - eq = entry.get_slice("equilibrium", time, imas.imasdef.CLOSEST_INTERP) - - # Update comment - eq.ids_properties.comment = "IMASPy training: transform coordinate system" - - p2d = eq.time_slice[0].profiles_2d[0] - r, z = p2d.grid.dim1, p2d.grid.dim2 - r_axis = eq.time_slice[0].global_quantities.magnetic_axis.r - z_axis = eq.time_slice[0].global_quantities.magnetic_axis.z - - # Create new rho/theta coordinates - theta = np.linspace(-np.pi, np.pi, num=64, endpoint=False) - max_rho = min( - r_axis - r[0], - r[-1] - r_axis, - z_axis - z[0], - z[-1] - z_axis, - ) - rho = np.linspace(0, max_rho, num=64) - - # Calculate corresponding R/Z for interpolating the original values - rho_grid, theta_grid = np.meshgrid(rho, theta, indexing="ij", sparse=True) - grid_r = r_axis + rho_grid * np.cos(theta_grid) - grid_z = z_axis + rho_grid * np.sin(theta_grid) - interpolation_points = np.dstack((grid_r.flatten(), grid_z.flatten())) - - # 
Interpolate all data nodes on the new grid - for data_node in ["b_field_r", "b_field_z", "b_field_tor", "psi"]: - data = getattr(p2d, data_node) - interp = RegularGridInterpolator((r, z), data) - new_data = interp(interpolation_points).reshape(grid_r.shape) - setattr(p2d, data_node, new_data) - - # Update coordinate identifier - p2d.grid_type.index = 2 - p2d.grid_type.name = "inverse" - p2d.grid_type.description = "Rhopolar_polar 2D polar coordinates (rho=dim1, theta=dim2) with magnetic axis as centre of grid; theta and values following the COCOS=11 convention; the polar angle is theta=atan2(z-zaxis,r-raxis)" # noqa: E501 - - # Update coordinates - p2d.grid.dim1 = rho - p2d.grid.dim2 = theta - p2d.r = grid_r - p2d.z = grid_z - - # Finally, put the slice to disk - output_entry.put_slice(eq) - -# Create a plot to verify the transformation is correct -fig, (ax1, ax2, ax3) = plt.subplots(1, 3) - -vmin, vmax = np.min(data), np.max(data) -contour_levels = np.linspace(vmin, vmax, 32) - -rzmesh = np.meshgrid(r, z, indexing="ij") -mesh = ax1.pcolormesh(*rzmesh, data, vmin=vmin, vmax=vmax) -ax1.contour(*rzmesh, data, contour_levels, colors='black') - -ax2.pcolormesh(grid_r, grid_z, new_data, vmin=vmin, vmax=vmax) -ax2.contour(grid_r, grid_z, new_data, contour_levels, colors='black') - -rho_theta_mesh = np.meshgrid(rho, theta, indexing="ij") -ax3.pcolormesh(*rho_theta_mesh, new_data, vmin=vmin, vmax=vmax) -ax3.contour(*rho_theta_mesh, new_data, contour_levels, colors='black') - -ax1.set_xlabel("r [m]") -ax2.set_xlabel("r [m]") -ax1.set_ylabel("z [m]") -ax2.set_xlim(ax1.get_xlim()) -ax2.set_ylim(ax1.get_ylim()) -ax3.set_xlabel(r"$\rho$ [m]") -ax3.set_ylabel(r"$\theta$ [rad]") - -fig.suptitle(r"$\psi$ in ($r,z$) and ($\rho,\theta$) coordinates.") -fig.colorbar(mesh, ax=ax3) -fig.tight_layout() - -plt.show() diff --git a/docs/source/courses/basic/analyze.rst b/docs/source/courses/basic/analyze.rst index eb761580..a17fa20b 100644 --- a/docs/source/courses/basic/analyze.rst +++ 
b/docs/source/courses/basic/analyze.rst @@ -1,5 +1,5 @@ -Analyze with IMASPy -=================== +Analyze with imas-python +======================== For this part of the training we will learn to open an IMAS database entry, and plot some basic data in it using `matplotlib `_. @@ -12,26 +12,26 @@ Open an IMAS database entry IMAS explicitly separates the data on disk from the data in memory. To get started we load an existing IMAS data file from disk. The on-disk file -is represented by an :class:`imaspy.DBEntry `, which we have to -:meth:`~imaspy.db_entry.DBEntry.open()` to get a reference to the data file we +is represented by an :class:`imas.DBEntry `, which we have to +:meth:`~imas.db_entry.DBEntry.open()` to get a reference to the data file we will manipulate. The connection to the data file is kept intact until we -:meth:`~imaspy.db_entry.DBEntry.close()` the file. Note that the on-disk file -will not be changed until an explicit :meth:`~imaspy.db_entry.DBEntry.put()` or -:meth:`~imaspy.db_entry.DBEntry.put_slice()` is called. -We load data in memory with the :meth:`~imaspy.db_entry.DBEntry.get()` and -:meth:`~imaspy.db_entry.DBEntry.get_slice()` methods, after which we +:meth:`~imas.db_entry.DBEntry.close()` the file. Note that the on-disk file +will not be changed until an explicit :meth:`~imas.db_entry.DBEntry.put()` or +:meth:`~imas.db_entry.DBEntry.put_slice()` is called. +We load data in memory with the :meth:`~imas.db_entry.DBEntry.get()` and +:meth:`~imas.db_entry.DBEntry.get_slice()` methods, after which we can use the data. .. hint:: - Use the ASCII data supplied with IMASPy for all exercises. It contains two + Use the ASCII data supplied with imas-python for all exercises. It contains two IDSs (``equilibrium`` and ``core_profiles``) filled with data from three time slices of ITER reference data. Two convenience methods are available in the - :mod:`imaspy.training` module to open the DBEntry for this training data. 
+ :mod:`imas.training` module to open the DBEntry for this training data. - 1. :meth:`imaspy.training.get_training_db_entry()` returns an opened - ``imaspy.DBEntry`` object. Use this method if you want to use the IMASPy + 1. :meth:`imas.training.get_training_db_entry()` returns an opened + ``imas.DBEntry`` object. Use this method if you want to use the imas-python interface. - 2. :meth:`imaspy.training.get_training_imas_db_entry()` returns an opened + 2. :meth:`imas.training.get_training_imas_db_entry()` returns an opened ``imas.DBEntry`` object. Use this method if you want to use the Python Access Layer interface. @@ -42,10 +42,10 @@ Exercise 1 .. md-tab-item:: Exercise - Open the training database entry: ``entry = imaspy.training.get_training_db_entry()`` + Open the training database entry: ``entry = imas.training.get_training_db_entry()`` 1. Load the ``equilibrium`` IDS into memory using the - :meth:`entry.get ` method + :meth:`entry.get ` method 2. Read and print the ``time`` array of the ``equilibrium`` IDS 3. Load the ``core_profiles`` IDS into memory 4. Explore the ``core_profiles.profiles_1d`` property and try to match @@ -69,13 +69,9 @@ Exercise 1 ``core_profiles.profiles_1d[i].electrons.temperature``) from the ``core_profiles`` IDS at time slice :math:`t\approx 433\,\mathrm{s}` - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/read_whole_equilibrium.py - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/read_whole_equilibrium.py + .. literalinclude:: imas_snippets/read_whole_equilibrium.py .. caution:: When dealing with unknown data, you shouldn't blindly ``get()`` all data: @@ -84,7 +80,7 @@ Exercise 1 The recommendations for larger data files are: - Only load the time slice(s) that you are interested in. - - Alternatively, IMASPy allows to load data on-demand, see + - Alternatively, imas-python allows to load data on-demand, see :ref:`Lazy loading` for more details. 
@@ -108,28 +104,25 @@ Exercise 2 Now the index of the closest time slice can be found with :external:func:`numpy.argmin`. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/read_equilibrium_time_array.py - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/read_equilibrium_time_array.py + .. literalinclude:: imas_snippets/read_equilibrium_time_array.py .. attention:: - IMASPy objects mostly behave the same way as numpy arrays. However, in some cases - functions explicitly expect a pure numpy array and supplying an IMASPy object raises + imas-python objects mostly behave the same way as numpy arrays. However, in some cases + functions explicitly expect a pure numpy array and supplying an imas-python object raises an exception. When this is the case, the ``.value`` attribute can be used to obtain the underlying data. .. note:: - IMASPy has two main ways of accessing IDSs. In the exercises above, we used + imas-python has two main ways of accessing IDSs. In the exercises above, we used the "attribute-like" access. This is the main way of navigating the IDS tree. - However, IMASPy also provides a "dict-like" interface to access data, which + However, imas-python also provides a "dict-like" interface to access data, which might be more convenient in some cases. For example: - .. literalinclude:: imaspy_snippets/iterate_core_profiles.py + .. literalinclude:: imas_snippets/iterate_core_profiles.py Retreiving part of an IDS @@ -148,7 +141,7 @@ Retrieve a single time slice When we are interested in quantities at a single time slice (or a low number of time slices), we can decide to only load the data at specified times. This can be -accomplished with the aforementioned :meth:`~imaspy.db_entry.DBEntry.get_slice()` +accomplished with the aforementioned :meth:`~imas.db_entry.DBEntry.get_slice()` method. @@ -159,23 +152,19 @@ Exercise 3 .. 
md-tab-item:: Exercise - Use the :meth:`~imaspy.db_entry.DBEntry.get_slice()` method to obtain the electron density + Use the :meth:`~imas.db_entry.DBEntry.get_slice()` method to obtain the electron density :math:`n_e` at :math:`t\approx 433\,\mathrm{s}`. .. hint:: :collapsible: - :meth:`~imaspy.db_entry.DBEntry.get_slice()` requires an ``interpolation_method`` as one - of its arguments, here you can use ``imas.imasdef.CLOSEST_INTERP``. Alternatively, - if you use IMASPy, you can use ``imaspy.ids_defs.CLOSEST_INTERP``. + :meth:`~imas.db_entry.DBEntry.get_slice()` requires an ``interpolation_method`` as one + of its arguments, here you can use ``imas.ids_defs.CLOSEST_INTERP``. - .. md-tab-item:: AL4 - .. literalinclude:: al4_snippets/read_core_profiles_ne_timeslice.py + .. md-tab-item:: imas-python - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/read_core_profiles_ne_timeslice.py + .. literalinclude:: imas_snippets/read_core_profiles_ne_timeslice.py .. attention:: @@ -196,13 +185,9 @@ Exercise 4 Using ``matplotlib``, create a plot of :math:`n_e` on the y-axis and :math:`\rho_{tor, norm}` on the x-axis at :math:`t=433\mathrm{s}` - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/plot_core_profiles_ne_timeslice.py - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/plot_core_profiles_ne_timeslice.py + .. literalinclude:: imas_snippets/plot_core_profiles_ne_timeslice.py .. md-tab-item:: Plot @@ -220,9 +205,9 @@ When you are interested in the time evolution of a quantity, using ``get_slice`` impractical. It gets around the limitation of the data not fitting in memory, but will still need to read all of the data from disk (just not at once). -IMASPy has a `lazy loading` mode, where it will only read the requested data from disk +imas-python has a `lazy loading` mode, where it will only read the requested data from disk when you try to access it. 
You can enable it by supplying ``lazy=True`` to a call to -:meth:`~imaspy.db_entry.DBEntry.get()` or :meth:`~imaspy.db_entry.DBEntry.get_slice()`. +:meth:`~imas.db_entry.DBEntry.get()` or :meth:`~imas.db_entry.DBEntry.get_slice()`. Exercise 5 @@ -241,16 +226,16 @@ Exercise 5 the ITER cluster, you can load the following data entry with much more data, to better notice the difference that lazy loading can make:: - import imaspy - from imaspy.ids_defs import MDSPLUS_BACKEND + import imas + from imas.ids_defs import MDSPLUS_BACKEND database, pulse, run, user = "ITER", 134173, 106, "public" - data_entry = imaspy.DBEntry(MDSPLUS_BACKEND, database, pulse, run, user) + data_entry = imas.DBEntry(MDSPLUS_BACKEND, database, pulse, run, user) data_entry.open() - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/plot_core_profiles_te.py + .. literalinclude:: imas_snippets/plot_core_profiles_te.py .. md-tab-item:: Plot diff --git a/docs/source/courses/basic/create.rst b/docs/source/courses/basic/create.rst index 8fb438f0..2db00104 100644 --- a/docs/source/courses/basic/create.rst +++ b/docs/source/courses/basic/create.rst @@ -1,5 +1,5 @@ -Create with IMASPy -================== +Create with imas-python +======================= In this section of the training, we will have a look at creating (and filling) IDSs from scratch. @@ -7,8 +7,8 @@ scratch. Create an empty IDS ------------------- -Empty IDSs in IMASPy are created by the :py:meth:`~imaspy.ids_factory.IDSFactory.new` -method of an :py:class:`~imaspy.ids_factory.IDSFactory`. +Empty IDSs in imas-python are created by the :py:meth:`~imas.ids_factory.IDSFactory.new` +method of an :py:class:`~imas.ids_factory.IDSFactory`. .. note:: New IDSs can also be created by calling :code:`IDSFactory().()`, similar @@ -24,14 +24,9 @@ Exercise 1 Create an empty ``core_profiles`` IDS. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. 
literalinclude:: al4_snippets/create_core_profiles.py - :end-before: # Set properties - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :end-before: # Set properties @@ -42,7 +37,7 @@ Now we have an empty IDS, we can start filling fields. For this exercise we will populate the following fields: - ``ids_properties.homogeneous_time``, which we will set to the constant - :py:const:`~imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS`. This flags that this IDS is in + :py:const:`~imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS`. This flags that this IDS is in homogeneous time mode, meaning that all time-dependent quantities use the root ``time`` as their coordinate. - ``ids_properties.comment``, where we can describe this IDS. @@ -63,27 +58,16 @@ Exercise 2 Fill the ``core_profiles`` IDS with the fields as described above. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Set properties - :end-before: # Validate the IDS for consistency - - .. note:: - - When using the IMAS access layer instead of IMASPy, we must always create - numpy arrays (of the correct data type). IMASPy will - automatically convert your data to the appropriate numpy array. - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Set properties :end-before: # Validate the IDS for consistency .. note:: - Observe that we can assign a Python list to ``cp.time``. IMASPy will + Observe that we can assign a Python list to ``cp.time``. imas-python will automatically convert it to a numpy array. @@ -105,17 +89,9 @@ Exercise 3 Validate the just-filled IDS. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - IDS validation is an Access Layer 5 feature, and cannot be done in Al4. - - .. 
literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Validate the IDS for consistency - :end-before: # Fill in the missing rho_tor_norm coordinate - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Validate the IDS for consistency :end-before: # Fill in the missing rho_tor_norm coordinate @@ -139,15 +115,9 @@ Exercise 4 Fix the coordinate consistency error. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Fill in the missing rho_tor_norm coordinate - :end-before: # Create a new data entry for storing the IDS - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Fill in the missing rho_tor_norm coordinate :end-before: # Create a new data entry for storing the IDS @@ -157,9 +127,9 @@ Store the IDS on disk Now we have created, filled and validated an IDS, the only thing left is to store it to disk. Like loading IDSs, storing IDSs is achieved through the -:py:class:`~imaspy.db_entry.DBEntry` class. After constructing a ``DBEntry`` object, you -need to :py:meth:`~imaspy.db_entry.DBEntry.create` the data entry on-disk before you can -:py:meth:`~imaspy.db_entry.DBEntry.put` the IDS to disk. +:py:class:`~imas.db_entry.DBEntry` class. After constructing a ``DBEntry`` object, you +need to :py:meth:`~imas.db_entry.DBEntry.create` the data entry on-disk before you can +:py:meth:`~imas.db_entry.DBEntry.put` the IDS to disk. .. note:: For this exercise we will use the ASCII backend. 
Although it doesn't have the best @@ -178,30 +148,23 @@ Exercise 5 The recommended parameters for this exercise are:: - backend = imaspy.ids_defs.ASCII_BACKEND - database = "imaspy-course" + backend = imas.ids_defs.ASCII_BACKEND + database = "imas-course" pulse = 1 run = 1 - After a successful ``put``, the ids file will be created. If you are using - AL4, you should see a file ``imaspy-course_1_1_core_profiles.ids`` in your - working directory with the contents of the IDS. - If you are using AL5, this file can be found under - ``~/public/imasdb/imaspy-course/3/1/1/core_profiles.ids`` + After a successful ``put``, the ids file will be created. + this file can be found under + ``~/public/imasdb/imas-course/3/1/1/core_profiles.ids`` .. hint:: :collapsible: - The signature of :meth:`~imaspy.db_entry.DBEntry()` is: ``DBEntry(backend, database, pulse, run)`` + The signature of :meth:`~imas.db_entry.DBEntry()` is: ``DBEntry(backend, database, pulse, run)`` - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/create_core_profiles.py - :start-at: # Create a new data entry for storing the IDS - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Create a new data entry for storing the IDS Summary @@ -220,10 +183,6 @@ Congratulations for completing this section of the course. You have: Click on the tabs to see the complete source, combining all exercises. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/create_core_profiles.py - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/create_core_profiles.py + .. 
literalinclude:: imas_snippets/create_core_profiles.py diff --git a/docs/source/courses/basic/explore.rst b/docs/source/courses/basic/explore.rst index 39e2e3a8..f01e23d8 100644 --- a/docs/source/courses/basic/explore.rst +++ b/docs/source/courses/basic/explore.rst @@ -1,7 +1,7 @@ .. _`basic/explore`: -Explore with IMASPy -=================== +Explore with imas-python +======================== In this part of the training, we will learn how to use Python to explore data saved in IDSs. @@ -30,25 +30,20 @@ Exercise 1 Find out the names of the available IDSs. .. hint:: - The module ``imas.ids_names`` contains information on the available IDSs in - AL4. + The module ``imas.ids_names`` contains information on the available IDSs. - In IMASPy, you can use :py:class:`~imaspy.ids_factory.IDSFactory` to figure + In imas-python, you can use :py:class:`~imas.ids_factory.IDSFactory` to figure out which IDSs are avaible. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/print_idss.py - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/print_idss.py + .. literalinclude:: imas_snippets/print_idss.py Explore the structure and contents of an IDS -------------------------------------------- -IMASPy has several features and utilities for exploring an IDS. These are best used in +imas-python has several features and utilities for exploring an IDS. These are best used in an interactive Python console, such as the default python console or the `IPython `_ console. @@ -56,22 +51,22 @@ an interactive Python console, such as the default python console or the `IPytho Tab completion '''''''''''''' -As with most Python objects, you can use :kbd:`Tab` completion on IMASPy objects. +As with most Python objects, you can use :kbd:`Tab` completion on imas-python objects. .. note:: In the python console, you need to press :kbd:`Tab` twice to show suggestions. 
-- :py:class:`~imaspy.ids_factory.IDSFactory` has tab completion for IDS names: +- :py:class:`~imas.ids_factory.IDSFactory` has tab completion for IDS names: .. code-block:: pycon - >>> factory = imaspy.IDSFactory() + >>> factory = imas.IDSFactory() >>> factory.core_ factory.core_instant_changes( factory.core_sources( factory.core_profiles( factory.core_transport( -- :py:class:`~imaspy.ids_toplevel.IDSToplevel` and - :py:class:`~imaspy.ids_structure.IDSStructure` have tab completion for child nodes: +- :py:class:`~imas.ids_toplevel.IDSToplevel` and + :py:class:`~imas.ids_structure.IDSStructure` have tab completion for child nodes: .. image:: interactive_tab_core_profiles_toplevel.png @@ -79,25 +74,25 @@ As with most Python objects, you can use :kbd:`Tab` completion on IMASPy objects Interactive help '''''''''''''''' -Use the built-in :external:py:func:`help()` function to get more information on IMASPy +Use the built-in :external:py:func:`help()` function to get more information on imas-python functions, objects, etc. .. code-block:: pycon - >>> import imaspy - >>> help(imaspy.DBEntry) - Help on class DBEntry in module imaspy.db_entry: + >>> import imas + >>> help(imas.DBEntry) + Help on class DBEntry in module imas.db_entry: class DBEntry(builtins.object) [...] -Inspecting IMASPy objects -''''''''''''''''''''''''' +Inspecting imas-python objects +'''''''''''''''''''''''''''''' :kbd:`Tab` completion is nice when you already know more or less what attribute you are -looking for. For a more comprehensive overview of any IMASPy node, you can use -:py:meth:`imaspy.util.inspect` to show: +looking for. For a more comprehensive overview of any imas-python node, you can use +:py:meth:`imas.util.inspect` to show: 1. The path to the node (relative to the IDS it is contained in) 2. The Data Dictionary version @@ -108,7 +103,7 @@ looking for. For a more comprehensive overview of any IMASPy node, you can use .. 
hint:: - The output of :py:meth:`imaspy.util.inspect` is colored when your terminal supports + The output of :py:meth:`imas.util.inspect` is colored when your terminal supports it. You may use the environment variable ``NO_COLOR`` to disable colored output or ``FORCE_COLOR`` to force colored output. See ``_. @@ -118,13 +113,13 @@ looking for. For a more comprehensive overview of any IMASPy node, you can use .. rubric:: Examples -.. image:: imaspy_inspect.png +.. image:: imas_inspect.png Printing an IDS tree '''''''''''''''''''' -Another useful utility function in IMASPy is :py:meth:`imaspy.util.print_tree`. This +Another useful utility function in imas-python is :py:meth:`imas.util.print_tree`. This will print a complete tree structure of all non-empty quantities in the provided node. As an argument you can give a complete IDS, or any structure in the IDS such as ``ids_properties``: @@ -135,26 +130,26 @@ As an argument you can give a complete IDS, or any structure in the IDS such as Depending on the size of the IDS (structure) you print, this may generate a lot of output. For interactive exploration of large IDSs we recommend to use - :py:meth:`imaspy.util.inspect` (optionally with the parameter ``hide_empty_nodes`` - set to :code:`True`) and only use :py:meth:`imaspy.util.print_tree` for smaller + :py:meth:`imas.util.inspect` (optionally with the parameter ``hide_empty_nodes`` + set to :code:`True`) and only use :py:meth:`imas.util.print_tree` for smaller sub-structures. Find paths in an IDS '''''''''''''''''''' -In IMASPy you can also search for paths inside an IDS: -:py:meth:`imaspy.util.find_paths`. This can be useful when you know what quantity you +In imas-python you can also search for paths inside an IDS: +:py:meth:`imas.util.find_paths`. This can be useful when you know what quantity you are looking for, but aren't sure exactly in which (sub)structure of the IDS it is located. 
-:py:meth:`imaspy.util.find_paths` accepts any Python regular expression (see +:py:meth:`imas.util.find_paths` accepts any Python regular expression (see :external:py:mod:`re`) as input. This allows for anything from basic to advanced searches. .. rubric:: Examples -.. literalinclude:: imaspy_snippets/find_paths.py +.. literalinclude:: imas_snippets/find_paths.py Exercise 2 @@ -188,12 +183,12 @@ Exercise 2 .. md-tab-item:: Training data - .. literalinclude:: imaspy_snippets/explore_training_data.py + .. literalinclude:: imas_snippets/explore_training_data.py .. md-tab-item:: `pf_active` data - .. literalinclude:: imaspy_snippets/explore_public_pf_active.py + .. literalinclude:: imas_snippets/explore_public_pf_active.py .. md-tab-item:: `ec_launchers` data - .. literalinclude:: imaspy_snippets/explore_public_ec_launchers.py \ No newline at end of file + .. literalinclude:: imas_snippets/explore_public_ec_launchers.py \ No newline at end of file diff --git a/docs/source/courses/basic/imaspy_inspect.png b/docs/source/courses/basic/imas_inspect.png similarity index 100% rename from docs/source/courses/basic/imaspy_inspect.png rename to docs/source/courses/basic/imas_inspect.png diff --git a/docs/source/courses/basic/imaspy_snippets/create_core_profiles.py b/docs/source/courses/basic/imas_snippets/create_core_profiles.py similarity index 73% rename from docs/source/courses/basic/imaspy_snippets/create_core_profiles.py rename to docs/source/courses/basic/imas_snippets/create_core_profiles.py index 277e0c5e..b263299c 100644 --- a/docs/source/courses/basic/imaspy_snippets/create_core_profiles.py +++ b/docs/source/courses/basic/imas_snippets/create_core_profiles.py @@ -1,17 +1,17 @@ import datetime -import imaspy +import imas import numpy as np -factory = imaspy.IDSFactory() +factory = imas.IDSFactory() cp = factory.new("core_profiles") # Alternative cp = factory.core_profiles() # Set properties -cp.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS 
-cp.ids_properties.comment = "Synthetic IDS created for the IMASPy course" +cp.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS +cp.ids_properties.comment = "Synthetic IDS created for the imas-python course" cp.ids_properties.creation_date = datetime.date.today().isoformat() # Set a time array @@ -32,7 +32,7 @@ try: cp.validate() print("IDS is valid!") -except imaspy.exception.ValidationError as exc: +except imas.exception.ValidationError as exc: print("Oops, the IDS is not valid: ", exc) # Fill in the missing rho_tor_norm coordinate @@ -42,8 +42,8 @@ cp.validate() # Create a new data entry for storing the IDS -pulse, run, database = 1, 1, "imaspy-course" -entry = imaspy.DBEntry(imaspy.ids_defs.ASCII_BACKEND, database, pulse, run) +pulse, run, database = 1, 1, "imas-course" +entry = imas.DBEntry(imas.ids_defs.ASCII_BACKEND, database, pulse, run) entry.create() entry.put(cp) diff --git a/docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py b/docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py new file mode 100644 index 00000000..e5b61553 --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/explore_public_ec_launchers.py @@ -0,0 +1,15 @@ +import imas.util + +# Open input data entry +entry = imas.DBEntry( + imas.ids_defs.HDF5_BACKEND, "ITER_MD", 120000, 204, "public", data_version="3" +) +entry.open() + +# Get the ec_launchers IDS +pf = entry.get("ec_launchers") + +# Inspect the IDS +imas.util.inspect(pf, hide_empty_nodes=True) + +entry.close() diff --git a/docs/source/courses/basic/imas_snippets/explore_public_pf_active.py b/docs/source/courses/basic/imas_snippets/explore_public_pf_active.py new file mode 100644 index 00000000..65b2fc21 --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/explore_public_pf_active.py @@ -0,0 +1,15 @@ +import imas.util + +# Open input data entry +entry = imas.DBEntry( + imas.ids_defs.HDF5_BACKEND, "ITER_MD", 111001, 103, "public", data_version="3" +) 
+entry.open() + +# Get the pf_active IDS +pf = entry.get("pf_active") + +# Inspect the IDS +imas.util.inspect(pf, hide_empty_nodes=True) + +entry.close() diff --git a/docs/source/courses/basic/imas_snippets/explore_training_data.py b/docs/source/courses/basic/imas_snippets/explore_training_data.py new file mode 100644 index 00000000..e60279af --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/explore_training_data.py @@ -0,0 +1,13 @@ +import imas.util +import imas.training + +# Open input data entry +entry = imas.training.get_training_db_entry() + +# Get the core_profiles IDS +cp = entry.get("core_profiles") + +# Inspect the IDS +imas.util.inspect(cp, hide_empty_nodes=True) + +entry.close() \ No newline at end of file diff --git a/docs/source/courses/basic/imas_snippets/find_paths.py b/docs/source/courses/basic/imas_snippets/find_paths.py new file mode 100644 index 00000000..e70c6c6d --- /dev/null +++ b/docs/source/courses/basic/imas_snippets/find_paths.py @@ -0,0 +1,16 @@ +import imas.util + +factory = imas.IDSFactory() +core_profiles = factory.core_profiles() + +print("Paths containing `rho`:") +print(imas.util.find_paths(core_profiles, "rho")) +print() + +print("Paths containing `rho`, not followed by `error`:") +print(imas.util.find_paths(core_profiles, "rho(?!.*error)")) +print() + +print("All paths ending with `time`:") +print(imas.util.find_paths(core_profiles, "time$")) +print() diff --git a/docs/source/courses/basic/imaspy_snippets/iterate_core_profiles.py b/docs/source/courses/basic/imas_snippets/iterate_core_profiles.py similarity index 77% rename from docs/source/courses/basic/imaspy_snippets/iterate_core_profiles.py rename to docs/source/courses/basic/imas_snippets/iterate_core_profiles.py index bfbd1860..9d315027 100644 --- a/docs/source/courses/basic/imaspy_snippets/iterate_core_profiles.py +++ b/docs/source/courses/basic/imas_snippets/iterate_core_profiles.py @@ -1,7 +1,7 @@ -import imaspy.training +import imas.training # Open input data 
entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") for el in ["profiles_1d", "global_quantities", "code"]: diff --git a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_ne_timeslice.py b/docs/source/courses/basic/imas_snippets/plot_core_profiles_ne_timeslice.py similarity index 83% rename from docs/source/courses/basic/imaspy_snippets/plot_core_profiles_ne_timeslice.py rename to docs/source/courses/basic/imas_snippets/plot_core_profiles_ne_timeslice.py index 2df9ef8e..8c2beee5 100644 --- a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_ne_timeslice.py +++ b/docs/source/courses/basic/imas_snippets/plot_core_profiles_ne_timeslice.py @@ -1,7 +1,7 @@ import os import matplotlib -import imaspy.training +import imas.training # To avoid possible display issues when Matplotlib uses a non-GUI backend if "DISPLAY" not in os.environ: @@ -12,12 +12,12 @@ import matplotlib.pyplot as plt # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Read n_e profile and the associated normalised toroidal flux coordinate at t = 443 # seconds -cp = entry.get_slice("core_profiles", t, imaspy.ids_defs.CLOSEST_INTERP) +cp = entry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) # profiles_1d should only contain the requested slice assert len(cp.profiles_1d) == 1 diff --git a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_te.py b/docs/source/courses/basic/imas_snippets/plot_core_profiles_te.py similarity index 92% rename from docs/source/courses/basic/imaspy_snippets/plot_core_profiles_te.py rename to docs/source/courses/basic/imas_snippets/plot_core_profiles_te.py index f620158e..44ac96c3 100644 --- a/docs/source/courses/basic/imaspy_snippets/plot_core_profiles_te.py +++ b/docs/source/courses/basic/imas_snippets/plot_core_profiles_te.py @@ -11,11 +11,11 @@ from matplotlib import pyplot as plt 
-import imaspy -from imaspy.ids_defs import MDSPLUS_BACKEND +import imas +from imas.ids_defs import MDSPLUS_BACKEND database, pulse, run, user = "ITER", 134173, 106, "public" -data_entry = imaspy.DBEntry( +data_entry = imas.DBEntry( MDSPLUS_BACKEND, database, pulse, run, user, data_version="3" ) data_entry.open() diff --git a/docs/source/courses/basic/imaspy_snippets/print_idss.py b/docs/source/courses/basic/imas_snippets/print_idss.py similarity index 71% rename from docs/source/courses/basic/imaspy_snippets/print_idss.py rename to docs/source/courses/basic/imas_snippets/print_idss.py index 1811398f..5cb3f1cc 100644 --- a/docs/source/courses/basic/imaspy_snippets/print_idss.py +++ b/docs/source/courses/basic/imas_snippets/print_idss.py @@ -1,10 +1,10 @@ -import imaspy +import imas -# IMASPy has multiple DD versions inside, which makes this exercise harder. +# imas-python has multiple DD versions inside, which makes this exercise harder. # We provide possible solutions here # Option 1: Print the IDSs in the default-selected DD version -factory = imaspy.IDSFactory() +factory = imas.IDSFactory() print("IDSs available in DD version", factory.version) print(factory.ids_names()) @@ -14,6 +14,6 @@ print() # Option 2: Print the IDSs in a specific DD version -factory = imaspy.IDSFactory("3.39.0") +factory = imas.IDSFactory("3.39.0") print("IDSs available in DD version", factory.version) print(list(factory)) diff --git a/docs/source/courses/basic/imaspy_snippets/read_core_profiles_ne_timeslice.py b/docs/source/courses/basic/imas_snippets/read_core_profiles_ne_timeslice.py similarity index 71% rename from docs/source/courses/basic/imaspy_snippets/read_core_profiles_ne_timeslice.py rename to docs/source/courses/basic/imas_snippets/read_core_profiles_ne_timeslice.py index 4818f841..d54ac922 100644 --- a/docs/source/courses/basic/imaspy_snippets/read_core_profiles_ne_timeslice.py +++ b/docs/source/courses/basic/imas_snippets/read_core_profiles_ne_timeslice.py @@ -1,12 +1,12 @@ 
-import imaspy.training +import imas.training # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Read n_e profile and the associated normalised toroidal flux coordinate at t = 443 # seconds -cp = entry.get_slice("core_profiles", t, imaspy.ids_defs.CLOSEST_INTERP) +cp = entry.get_slice("core_profiles", t, imas.ids_defs.CLOSEST_INTERP) # profiles_1d should only contain the requested slice assert len(cp.profiles_1d) == 1 diff --git a/docs/source/courses/basic/imaspy_snippets/read_equilibrium_time_array.py b/docs/source/courses/basic/imas_snippets/read_equilibrium_time_array.py similarity index 88% rename from docs/source/courses/basic/imaspy_snippets/read_equilibrium_time_array.py rename to docs/source/courses/basic/imas_snippets/read_equilibrium_time_array.py index eb1edae3..8fe482e9 100644 --- a/docs/source/courses/basic/imaspy_snippets/read_equilibrium_time_array.py +++ b/docs/source/courses/basic/imas_snippets/read_equilibrium_time_array.py @@ -1,5 +1,5 @@ import numpy as np -import imaspy.training +import imas.training # Find nearest value and index in an array @@ -10,7 +10,7 @@ def find_nearest(a, a0): # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Read the time array from the equilibrium IDS eq = entry.get("equilibrium") diff --git a/docs/source/courses/basic/imaspy_snippets/read_whole_equilibrium.py b/docs/source/courses/basic/imas_snippets/read_whole_equilibrium.py similarity index 90% rename from docs/source/courses/basic/imaspy_snippets/read_whole_equilibrium.py rename to docs/source/courses/basic/imas_snippets/read_whole_equilibrium.py index 3d1a1f3a..c70c73cc 100644 --- a/docs/source/courses/basic/imaspy_snippets/read_whole_equilibrium.py +++ b/docs/source/courses/basic/imas_snippets/read_whole_equilibrium.py @@ -1,7 +1,7 @@ -import imaspy.training +import imas.training # Open input data entry -entry = 
imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # 1. Read and print the time of the equilibrium IDS for the whole scenario # This explicitly converts the data from the old DD version on disk, to the diff --git a/docs/source/courses/basic/imaspy_snippets/transform_grid.py b/docs/source/courses/basic/imas_snippets/transform_grid.py similarity index 90% rename from docs/source/courses/basic/imaspy_snippets/transform_grid.py rename to docs/source/courses/basic/imas_snippets/transform_grid.py index a2a56a20..f4cef866 100644 --- a/docs/source/courses/basic/imaspy_snippets/transform_grid.py +++ b/docs/source/courses/basic/imas_snippets/transform_grid.py @@ -4,7 +4,7 @@ import numpy as np from scipy.interpolate import RegularGridInterpolator -import imaspy.training +import imas.training if "DISPLAY" not in os.environ: matplotlib.use("agg") @@ -14,22 +14,22 @@ import matplotlib.pyplot as plt # Open input data entry -entry = imaspy.training.get_training_db_entry() +entry = imas.training.get_training_db_entry() # Lazy-loaded input equilibrium eq_in = entry.get("equilibrium", lazy=True) input_times = eq_in.time # Create output data entry -output_entry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "imaspy-course", 2, 1) +output_entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "imas-course", 2, 1) output_entry.create() # Loop over each time slice for time in input_times: - eq = entry.get_slice("equilibrium", time, imaspy.ids_defs.CLOSEST_INTERP) + eq = entry.get_slice("equilibrium", time, imas.ids_defs.CLOSEST_INTERP) # Update comment - eq.ids_properties.comment = "IMASPy training: transform coordinate system" + eq.ids_properties.comment = "imas-python training: transform coordinate system" p2d = eq.time_slice[0].profiles_2d[0] # Get `.value` so we can plot the original values after the IDS node is overwritten diff --git a/docs/source/courses/basic/imaspy_snippets/explore_public_ec_launchers.py 
b/docs/source/courses/basic/imaspy_snippets/explore_public_ec_launchers.py deleted file mode 100644 index 425e2260..00000000 --- a/docs/source/courses/basic/imaspy_snippets/explore_public_ec_launchers.py +++ /dev/null @@ -1,15 +0,0 @@ -import imaspy.util - -# Open input data entry -entry = imaspy.DBEntry( - imaspy.ids_defs.HDF5_BACKEND, "ITER_MD", 120000, 204, "public", data_version="3" -) -entry.open() - -# Get the ec_launchers IDS -pf = entry.get("ec_launchers") - -# Inspect the IDS -imaspy.util.inspect(pf, hide_empty_nodes=True) - -entry.close() diff --git a/docs/source/courses/basic/imaspy_snippets/explore_public_pf_active.py b/docs/source/courses/basic/imaspy_snippets/explore_public_pf_active.py deleted file mode 100644 index c8acbb39..00000000 --- a/docs/source/courses/basic/imaspy_snippets/explore_public_pf_active.py +++ /dev/null @@ -1,15 +0,0 @@ -import imaspy.util - -# Open input data entry -entry = imaspy.DBEntry( - imaspy.ids_defs.HDF5_BACKEND, "ITER_MD", 111001, 103, "public", data_version="3" -) -entry.open() - -# Get the pf_active IDS -pf = entry.get("pf_active") - -# Inspect the IDS -imaspy.util.inspect(pf, hide_empty_nodes=True) - -entry.close() diff --git a/docs/source/courses/basic/imaspy_snippets/explore_training_data.py b/docs/source/courses/basic/imaspy_snippets/explore_training_data.py deleted file mode 100644 index 68a16141..00000000 --- a/docs/source/courses/basic/imaspy_snippets/explore_training_data.py +++ /dev/null @@ -1,13 +0,0 @@ -import imaspy.util -import imaspy.training - -# Open input data entry -entry = imaspy.training.get_training_db_entry() - -# Get the core_profiles IDS -cp = entry.get("core_profiles") - -# Inspect the IDS -imaspy.util.inspect(cp, hide_empty_nodes=True) - -entry.close() \ No newline at end of file diff --git a/docs/source/courses/basic/imaspy_snippets/find_paths.py b/docs/source/courses/basic/imaspy_snippets/find_paths.py deleted file mode 100644 index 937ebd09..00000000 --- 
a/docs/source/courses/basic/imaspy_snippets/find_paths.py +++ /dev/null @@ -1,16 +0,0 @@ -import imaspy.util - -factory = imaspy.IDSFactory() -core_profiles = factory.core_profiles() - -print("Paths containing `rho`:") -print(imaspy.util.find_paths(core_profiles, "rho")) -print() - -print("Paths containing `rho`, not followed by `error`:") -print(imaspy.util.find_paths(core_profiles, "rho(?!.*error)")) -print() - -print("All paths ending with `time`:") -print(imaspy.util.find_paths(core_profiles, "time$")) -print() diff --git a/docs/source/courses/basic/setup.rst b/docs/source/courses/basic/setup.rst index f37bf4ce..3034cf76 100644 --- a/docs/source/courses/basic/setup.rst +++ b/docs/source/courses/basic/setup.rst @@ -1,13 +1,12 @@ -IMASPy 101: setup IMASPy -======================== +imas-python 101: setup imas-python +================================== -This course was written for IMASPy version 0.8.0 and requires an IMAS installation to -load IMAS data. IMASPy may be installed on your cluster, in which case you can do +This course was written for imas-python version 0.8.0 and requires an IMAS installation to +load IMAS data. imas-python may be installed on your cluster, in which case you can do .. code-block:: console - $ module load IMASPy IMAS - $ python -c 'import imaspy; print(imaspy.__version__)' - 0.8.0 + $ module load imas-python IMAS + $ python -c 'import imas; print(imas.__version__)' -Have a look at the :ref:`Installing IMASPy` page for more details on installing IMASPy. +Have a look at the :ref:`Installing imas-python` page for more details on installing imas-python. 
diff --git a/docs/source/courses/basic/transform.rst b/docs/source/courses/basic/transform.rst index 8ff0a647..6f208443 100644 --- a/docs/source/courses/basic/transform.rst +++ b/docs/source/courses/basic/transform.rst @@ -1,5 +1,5 @@ -Transform with IMASPy -===================== +Transform with imas-python +========================== In this part of the course we'll perform a coordinate transformation. Our input data is in rectilinear :math:`R, Z` coordinates, which we will transform into poloidal polar @@ -27,15 +27,9 @@ Exercise 1: Check which time slices exist .. hint:: You can use :ref:`lazy loading` to avoid loading all data in memory. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Open input data entry - :end-before: # Create output data entry - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Open input data entry :end-before: # Create output data entry @@ -50,15 +44,9 @@ Exercise 2: Load a time slice Loop over each available time in the IDS and load the time slice inside the loop. - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Loop over each time slice - :end-before: # Update comment - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Loop over each time slice :end-before: # Update comment @@ -86,15 +74,9 @@ We will apply the transformation of the data as follows: .. md-tab-set:: - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Loop over each time slice - :end-before: # Finally, put the slice to disk - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. 
literalinclude:: imas_snippets/transform_grid.py :start-at: # Loop over each time slice :end-before: # Finally, put the slice to disk @@ -108,26 +90,14 @@ Exercise 4: Store a time slice Store the time slice after the transformation. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Create output data entry :end-at: output_entry.create() :caption: The data entry is created once, outside the time slice loop - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Finally, put the slice to disk - :end-at: output_entry.put_slice - :caption: Store the time slice inside the loop - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py - :start-at: # Create output data entry - :end-at: output_entry.create() - :caption: The data entry is created once, outside the time slice loop - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Finally, put the slice to disk :end-at: output_entry.put_slice :caption: Store the time slice inside the loop @@ -144,14 +114,9 @@ Exercise 5: Plotting data before and after the transformation :math:`\rho,\theta` plane (transformed data) to verify that the transformation is correct. - .. md-tab-item:: AL4 + .. md-tab-item:: imas-python - .. literalinclude:: al4_snippets/transform_grid.py - :start-at: # Create a plot - - .. md-tab-item:: IMASPy - - .. literalinclude:: imaspy_snippets/transform_grid.py + .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Create a plot @@ -160,12 +125,7 @@ Bringing it all together .. md-tab-set:: - .. md-tab-item:: AL4 - - .. literalinclude:: al4_snippets/transform_grid.py - :caption: Source code for the complete exercise - - .. md-tab-item:: IMASPy + .. md-tab-item:: imas-python - .. literalinclude:: imaspy_snippets/transform_grid.py + .. 
literalinclude:: imas_snippets/transform_grid.py :caption: Source code for the complete exercise diff --git a/docs/source/courses/basic_user_training.rst b/docs/source/courses/basic_user_training.rst index f70be9ee..a3703462 100644 --- a/docs/source/courses/basic_user_training.rst +++ b/docs/source/courses/basic_user_training.rst @@ -1,18 +1,12 @@ -.. _`IMASPy 101`: +.. _`imas-python 101`: -IMASPy 101 -========== +imas-python 101 +=============== -In this IMASPy training, we introduce you to the basic concepts and features of -IMASPy. You will need some basic familiarity with Python. For a refresher, see +In this imas-python training, we introduce you to the basic concepts and features of +imas-python. You will need some basic familiarity with Python. For a refresher, see the `Python tutorial `_. We also assume -some basic knowledge of the ITER IMAS infrastructure, see -https://imas.iter.org. To get access to the nessecary knowledge and tooling -to start using IMASPy, including an already installed IMASPy, follow the excellent -`Getting Started `_ -until "How to read / write IMAS data", after which you can follow the IMASPy -specific guide below. - +some basic knowledge of the ITER IMAS infrastructure. .. toctree:: :caption: Training contents diff --git a/docs/source/identifiers.rst b/docs/source/identifiers.rst index 661e7fb2..de885a3d 100644 --- a/docs/source/identifiers.rst +++ b/docs/source/identifiers.rst @@ -20,25 +20,25 @@ representations: 3. A description (long string) -Identifiers in IMASPy ---------------------- +Identifiers in imas-python +-------------------------- -IMASPy implements identifiers as an :py:class:`enum.Enum`. Identifiers are +imas-python implements identifiers as an :py:class:`enum.Enum`. Identifiers are constructed on-demand from the loaded Data Dictionary definitions. -All identifier enums can be accessed through ``imaspy.identifiers``. A list of -the available identifiers is stored as ``imaspy.identifiers.identifiers``. 
+All identifier enums can be accessed through ``imas.identifiers``. A list of +the available identifiers is stored as ``imas.identifiers.identifiers``. .. code-block:: python :caption: Accessing identifiers - import imaspy + import imas # List all identifier names - for identifier_name in imaspy.identifiers.identifiers: + for identifier_name in imas.identifiers.identifiers: print(identifier_name) # Get a specific identifier - csid = imaspy.identifiers.core_source_identifier + csid = imas.identifiers.core_source_identifier # Get and print information of an identifier value print(csid.total) print(csid.total.index) @@ -47,23 +47,23 @@ the available identifiers is stored as ``imaspy.identifiers.identifiers``. # Item access is also possible print(identifiers["edge_source_identifier"]) - # You can use imaspy.util.inspect to list all options - imaspy.util.inspect(identifiers.ggd_identifier) + # You can use imas.util.inspect to list all options + imas.util.inspect(identifiers.ggd_identifier) # And also to get more details of a specific option - imaspy.util.inspect(identifiers.ggd_identifier.SN) + imas.util.inspect(identifiers.ggd_identifier.SN) # When an IDS node is an identifier, you can use # metadata.identifier_enum to get the identifier - core_sources = imaspy.IDSFactory().core_sources() + core_sources = imas.IDSFactory().core_sources() core_sources.source.resize(1) print(core_sources.source[0].identifier.metadata.identifier_enum) -Assigning identifiers in IMASPy -------------------------------- +Assigning identifiers in imas-python +------------------------------------ -IMASPy implements smart assignment of identifiers. You may assign an identifier -enum value (for example ``imaspy.identifiers.core_source_identifier.total``), a +imas-python implements smart assignment of identifiers. 
You may assign an identifier +enum value (for example ``imas.identifiers.core_source_identifier.total``), a string (for example ``"total"``) or an integer (for example ``"1"``) to an identifier structure (for example ``core_profiles.source[0].identifier``) to set all three child nodes ``name``, ``index`` and ``description`` in one go. See @@ -72,12 +72,12 @@ below example: .. code-block:: python :caption: Assigning identifiers - import imaspy + import imas - core_sources = imaspy.IDSFactory().core_sources() + core_sources = imas.IDSFactory().core_sources() core_sources.source.resize(2) - csid = imaspy.identifiers.core_source_identifier + csid = imas.identifiers.core_source_identifier # We can set the identifier in three ways: # 1. Assign an instance of the identifier enum: core_sources.source[0].identifier = csid.total @@ -87,18 +87,18 @@ below example: core_sources.source[0].identifier = 1 # Inspect the contents of the structure - imaspy.util.inspect(core_sources.source[0].identifier) + imas.util.inspect(core_sources.source[0].identifier) # You can still assign any value to the individual name / index / # description nodes: core_sources.source[1].identifier.name = "total" # Only name is set, index and description are empty - imaspy.util.inspect(core_sources.source[1].identifier) + imas.util.inspect(core_sources.source[1].identifier) # This also allows to use not-yet-standardized identifier values core_sources.source[1].identifier.name = "my_custom_identifier" core_sources.source[1].identifier.index = -1 core_sources.source[1].identifier.description = "My custom identifier" - imaspy.util.inspect(core_sources.source[1].identifier) + imas.util.inspect(core_sources.source[1].identifier) Compare identifiers @@ -117,9 +117,9 @@ the Data Dictionary description: .. 
code-block:: python :caption: Comparing identifiers - >>> import imaspy - >>> csid = imaspy.identifiers.core_source_identifier - >>> core_sources = imaspy.IDSFactory().core_sources() + >>> import imas + >>> csid = imas.identifiers.core_source_identifier + >>> core_sources = imas.IDSFactory().core_sources() >>> core_sources.source.resize(1) >>> core_sources.source[0].identifier.index = 1 >>> # Compares equal to csid.total, though name and description are empty @@ -143,8 +143,8 @@ the Data Dictionary description: .. seealso:: - - :py:class:`imaspy.ids_identifiers.IDSIdentifier`: which is the base class + - :py:class:`imas.ids_identifiers.IDSIdentifier`: which is the base class of all identifier enumerations. - - :py:data:`imaspy.ids_identifiers.identifiers`: identifier accessor. - - :py:attr:`imaspy.ids_metadata.IDSMetadata.identifier_enum`: get the + - :py:data:`imas.ids_identifiers.identifiers`: identifier accessor. + - :py:attr:`imas.ids_metadata.IDSMetadata.identifier_enum`: get the identifier enum from an IDS node. diff --git a/docs/source/imaspy_architecture.rst b/docs/source/imas_architecture.rst similarity index 58% rename from docs/source/imaspy_architecture.rst rename to docs/source/imas_architecture.rst index 90c0f0ed..bfcab45d 100644 --- a/docs/source/imaspy_architecture.rst +++ b/docs/source/imas_architecture.rst @@ -1,7 +1,7 @@ -IMASPy Architecture -=================== +imas-python architecture +======================== -This document provides a brief overview of the components of IMASPy, grouped into +This document provides a brief overview of the components of imas-python, grouped into different functional areas. We don't aim to give detailed explanations of the code or the algorithms in it. These @@ -15,53 +15,53 @@ These classes are used to parse and represent IDS metadata from the Data Diction Metadata objects are generated from a Data Dictionary XML and are (supposed to be) immutable. 
-- :py:mod:`imaspy.ids_metadata` contains the main metadata class - :py:class:`~imaspy.ids_metadata.IDSMetadata`. This class is generated from an +- :py:mod:`imas.ids_metadata` contains the main metadata class + :py:class:`~imas.ids_metadata.IDSMetadata`. This class is generated from an ```` or ```` element in the Data Dictionary XML and contains all (parsed) data belonging to that ```` or ````. Most of the (Python) attributes correspond directly to an attribute of the XML element. - This module also contains the :py:class:`~imaspy.ids_metadata.IDSType` enum. This + This module also contains the :py:class:`~imas.ids_metadata.IDSType` enum. This enum corresponds to the Data Dictionary notion of ``type`` which can be ``dynamic``, ``constant``, ``static`` or unavailable on a Data Dictionary element. -- :py:mod:`imaspy.ids_coordinates` contains two classes: - :py:class:`~imaspy.ids_coordinates.IDSCoordinate`, which handles the parsing of +- :py:mod:`imas.ids_coordinates` contains two classes: + :py:class:`~imas.ids_coordinates.IDSCoordinate`, which handles the parsing of coordinate identifiers from the Data Dictionary, and - :py:class:`~imaspy.ids_coordinates.IDSCoordinates`, which handles coordinate + :py:class:`~imas.ids_coordinates.IDSCoordinates`, which handles coordinate retrieval and validation of IDS nodes. - :py:class:`~imaspy.ids_coordinates.IDSCoordinate`\ s are created for each coordinate + :py:class:`~imas.ids_coordinates.IDSCoordinate`\ s are created for each coordinate attribute of a Data Dictionary element: ``coordinate1``, ``coordinate2``, ... ``coordinate1_same_as``, etc. - :py:class:`~imaspy.ids_coordinates.IDSCoordinates` is created and assigned as - ``coordinates`` attribute of :py:class:`~imaspy.ids_struct_array.IDSStructArray` and - :py:class:`~imaspy.ids_primitive.IDSPrimitive` objects. 
This class is responsible + :py:class:`~imas.ids_coordinates.IDSCoordinates` is created and assigned as + ``coordinates`` attribute of :py:class:`~imas.ids_struct_array.IDSStructArray` and + :py:class:`~imas.ids_primitive.IDSPrimitive` objects. This class is responsible for retrieving coordinate values and for checking the coordinate consistency in - :py:func:`~imaspy.ids_toplevel.IDSToplevel.validate`. + :py:func:`~imas.ids_toplevel.IDSToplevel.validate`. -- :py:mod:`imaspy.ids_data_type` handles parsing Data Dictionary ``data_type`` - attributes (see method :py:meth:`~imaspy.ids_data_type.IDSDataType.parse`) to an - :py:class:`~imaspy.ids_data_type.IDSDataType` and number of dimensions. +- :py:mod:`imas.ids_data_type` handles parsing Data Dictionary ``data_type`` + attributes (see method :py:meth:`~imas.ids_data_type.IDSDataType.parse`) to an + :py:class:`~imas.ids_data_type.IDSDataType` and number of dimensions. - :py:class:`~imaspy.ids_data_type.IDSDataType` also has attributes for default values + :py:class:`~imas.ids_data_type.IDSDataType` also has attributes for default values and mappings to Python / Numpy / Access Layer type identifiers. -- :py:mod:`imaspy.ids_path` handles parsing of IDS paths to - :py:class:`~imaspy.ids_path.IDSPath` objects. Paths can occur as the ``path`` +- :py:mod:`imas.ids_path` handles parsing of IDS paths to + :py:class:`~imas.ids_path.IDSPath` objects. Paths can occur as the ``path`` attribute of Data Dictionary elements, and inside coordinate identifiers. .. caution:: - Although an :py:class:`~imaspy.ids_path.IDSPath` in IMASPy implements roughly + Although an :py:class:`~imas.ids_path.IDSPath` in imas-python implements roughly the same concept as `the "IDS Path syntax" in the Data Dictionary - `__, + `__, they are not necessarily the same thing! At the moment of writing this (January 2024), the IDS path definition in the Data Dictionary is not yet finalized. 
- Be aware that the syntax of IMASPy's :py:class:`~imaspy.ids_path.IDSPath` may + Be aware that the syntax of imas-python's :py:class:`~imas.ids_path.IDSPath` may differ slightly and might be incompatible with the definition from the Data Dictionary. @@ -72,22 +72,22 @@ Data Dictionary building and loading The following submodules are responsible for building the Data Dictionary and loading DD definitions at runtime. -- :py:mod:`imaspy.dd_helpers` handles building the ``IDSDef.zip`` file, containing all +- :py:mod:`imas.dd_helpers` handles building the ``IDSDef.zip`` file, containing all versions of the Data Dictionary since ``3.22.0``. -- :py:mod:`imaspy.dd_zip` handles loading the Data Dictionary definitions at run time. +- :py:mod:`imas.dd_zip` handles loading the Data Dictionary definitions at run time. These definitions can be loaded from an ``IDSDef.zip`` or from a custom XML file. -.. _imaspy_architecture/IDS_nodes: +.. _imas_architecture/IDS_nodes: IDS nodes --------- The following submodules and classes represent IDS nodes. -- :py:mod:`imaspy.ids_base` defines the base class for all IDS nodes: - :py:class:`~imaspy.ids_base.IDSBase`. This class is an abstract class and shouldn't +- :py:mod:`imas.ids_base` defines the base class for all IDS nodes: + :py:class:`~imas.ids_base.IDSBase`. This class is an abstract class and shouldn't be instantiated directly. Several useful properties are defined in this class, which are therefore available @@ -98,9 +98,9 @@ The following submodules and classes represent IDS nodes. .. code-block:: python - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles._parent - + >>> core_profiles.ids_properties._parent >>> core_profiles.ids_properties.homogeneous_time._parent @@ -116,9 +116,9 @@ The following submodules and classes represent IDS nodes. .. 
code-block:: python - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles._dd_parent - + >>> core_profiles.ids_properties._dd_parent >>> core_profiles.ids_properties.homogeneous_time._dd_parent @@ -134,52 +134,52 @@ The following submodules and classes represent IDS nodes. indices. - ``_lazy`` indicates if the IDS is lazy loaded. - ``_version`` is the Data Dictionary version of this node. - - ``_toplevel`` is a shortcut to the :py:class:`~imaspy.ids_toplevel.IDSToplevel` + - ``_toplevel`` is a shortcut to the :py:class:`~imas.ids_toplevel.IDSToplevel` element that this node is a decendent of. -- :py:mod:`imaspy.ids_primitive` contains all data node classes, which are child - classes of :py:class:`~imaspy.ids_primitive.IDSPrimitive`. ``IDSPrimitive`` +- :py:mod:`imas.ids_primitive` contains all data node classes, which are child + classes of :py:class:`~imas.ids_primitive.IDSPrimitive`. ``IDSPrimitive`` implements all functionality that is common for every data type, whereas the classes in below list are specific per data type. Assignment-time data type checking is handled by the setter of the - :py:attr:`~imaspy.ids_primitive.IDSPrimitive.value` property and the ``_cast_value`` + :py:attr:`~imas.ids_primitive.IDSPrimitive.value` property and the ``_cast_value`` methods on each of the type specialization classes. - - :py:class:`~imaspy.ids_primitive.IDSString0D` is the type specialization for 0D + - :py:class:`~imas.ids_primitive.IDSString0D` is the type specialization for 0D strings. It can be used as if it is a python :external:py:class:`str` object. - - :py:class:`~imaspy.ids_primitive.IDSString1D` is the type specialization for 1D + - :py:class:`~imas.ids_primitive.IDSString1D` is the type specialization for 1D strings. It behaves as if it is a python :external:py:class:`list` of :external:py:class:`str`. 
- - :py:class:`~imaspy.ids_primitive.IDSNumeric0D` is the base class for 0D + - :py:class:`~imas.ids_primitive.IDSNumeric0D` is the base class for 0D numerical types: - - :py:class:`~imaspy.ids_primitive.IDSComplex0D` is the type specialization + - :py:class:`~imas.ids_primitive.IDSComplex0D` is the type specialization for 0D complex numbers. It can be used as if it is a python :external:py:class:`complex`. - - :py:class:`~imaspy.ids_primitive.IDSFloat0D` is the type specialization + - :py:class:`~imas.ids_primitive.IDSFloat0D` is the type specialization for 0D floating point numbers. It can be used as if it is a python :external:py:class:`float`. - - :py:class:`~imaspy.ids_primitive.IDSInt0D` is the type specialization + - :py:class:`~imas.ids_primitive.IDSInt0D` is the type specialization for 0D whole numbers. It can be used as if it is a python :external:py:class:`int`. - - :py:class:`~imaspy.ids_primitive.IDSNumericArray` is the type specialization for + - :py:class:`~imas.ids_primitive.IDSNumericArray` is the type specialization for any numeric type with at least one dimension. It can be used as if it is a :external:py:class:`numpy.ndarray`. -- :py:mod:`imaspy.ids_struct_array` contains the - :py:class:`~imaspy.ids_struct_array.IDSStructArray` class, which models Arrays of +- :py:mod:`imas.ids_struct_array` contains the + :py:class:`~imas.ids_struct_array.IDSStructArray` class, which models Arrays of Structures. It also contains some :ref:`dev lazy loading` logic. -- :py:mod:`imaspy.ids_structure` contains the - :py:class:`~imaspy.ids_structure.IDSStructure` class, which models Structures. It +- :py:mod:`imas.ids_structure` contains the + :py:class:`~imas.ids_structure.IDSStructure` class, which models Structures. It contains the :ref:`lazy instantiation` logic and some of the :ref:`dev lazy loading` logic. 
-- :py:mod:`imaspy.ids_toplevel` contains the - :py:class:`~imaspy.ids_toplevel.IDSToplevel` class, which is a subclass of - :py:class:`~imaspy.ids_structure.IDSStructure` and models toplevel IDSs. +- :py:mod:`imas.ids_toplevel` contains the + :py:class:`~imas.ids_toplevel.IDSToplevel` class, which is a subclass of + :py:class:`~imas.ids_structure.IDSStructure` and models toplevel IDSs. It implements some API methods that are only available on IDSs, such as ``validate`` and ``(de)serialize``, and overwrites implementations of some @@ -197,9 +197,9 @@ have any IDS child nodes instantiated: .. code-block:: python - >>> import imaspy + >>> import imas >>> # Create an empty IDS - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> # Show which elements are already created: >>> list(cp.__dict__) ['_lazy', '_children', '_parent', 'metadata', '__doc__', '_lazy_context'] @@ -221,12 +221,12 @@ Lazy loading '''''''''''' :ref:`lazy loading` defers reading the data from the backend in a -:py:meth:`~imaspy.db_entry.DBEntry.get` or :py:meth:`~imaspy.db_entry.DBEntry.get_slice` +:py:meth:`~imas.db_entry.DBEntry.get` or :py:meth:`~imas.db_entry.DBEntry.get_slice` until the data is requested. This is handled in two places: 1. ``IDSStructure.__getattr__`` implements the lazy loading alongside the lazy instantiation. When a new element is created by lazy instantiation, it will call - ``imaspy.db_entry_helpers._get_child`` to lazy load this element: + ``imas.db_entry_helpers._get_child`` to lazy load this element: - When the element is a data node (``IDSPrimitive`` subclass), the data for this element is loaded from the backend. @@ -244,18 +244,18 @@ until the data is requested. This is handled in two places: Creating and loading IDSs ------------------------- -- :py:mod:`imaspy.db_entry` contains the :py:class:`~imaspy.db_entry.DBEntry` class. +- :py:mod:`imas.db_entry` contains the :py:class:`~imas.db_entry.DBEntry` class. 
This class represents an on-disk Data Entry and can be used to store - (:py:meth:`~imaspy.db_entry.DBEntry.put`, - :py:meth:`~imaspy.db_entry.DBEntry.put_slice`) or load - (:py:meth:`~imaspy.db_entry.DBEntry.get`, - :py:meth:`~imaspy.db_entry.DBEntry.get_slice`) IDSs. The actual implementation of + (:py:meth:`~imas.db_entry.DBEntry.put`, + :py:meth:`~imas.db_entry.DBEntry.put_slice`) or load + (:py:meth:`~imas.db_entry.DBEntry.get`, + :py:meth:`~imas.db_entry.DBEntry.get_slice`) IDSs. The actual implementation of data storage and retrieval is handled by the backends in the - ``imaspy.backends.*`` subpackages. + ``imas.backends.*`` subpackages. - :py:class:`~imaspy.db_entry.DBEntry` handles the autoconversion between IDS versions + :py:class:`~imas.db_entry.DBEntry` handles the autoconversion between IDS versions as described in :ref:`Automatic conversion between DD versions`. -- :py:mod:`imaspy.ids_factory` contains the :py:class:`~imaspy.ids_factory.IDSFactory` +- :py:mod:`imas.ids_factory` contains the :py:class:`~imas.ids_factory.IDSFactory` class. This class is responsible for creating IDS toplevels from a given Data Dictionary definition, and can list all IDS names inside a DD definition. @@ -263,20 +263,19 @@ Creating and loading IDSs Access Layer interfaces ----------------------- -- :py:mod:`imaspy.backends.imas_core.al_context` provides an object-oriented interface when working with +- :py:mod:`imas.backends.imas_core.al_context` provides an object-oriented interface when working with Lowlevel contexts. The contexts returned by the lowlevel are an integer identifier and need to be provided to several LL methods (e.g. ``read_data``), some of which may create new contexts. - The :py:class:`~imaspy.backends.imas_core.al_context.ALContext` class implements this object oriented + The :py:class:`~imas.backends.imas_core.al_context.ALContext` class implements this object oriented interface. 
- A second class (:py:class:`~imaspy.backends.imas_core.al_context.LazyALContext`) implements the same + A second class (:py:class:`~imas.backends.imas_core.al_context.LazyALContext`) implements the same interface, but is used when :ref:`dev lazy loading`. -- :py:mod:`imaspy.ids_defs` provides access to Access Layer constants (mostly defined - in ``imas.imasdef``). -- :py:mod:`imaspy.backends.imas_core.imas_interface` provides a version-independent interface to the - Access Layer through :py:class:`~imaspy.backends.imas_core.imas_interface.LowlevelInterface`. It +- :py:mod:`imas.ids_defs` provides access to Access Layer constants +- :py:mod:`imas.backends.imas_core.imas_interface` provides a version-independent interface to the + Access Layer through :py:class:`~imas.backends.imas_core.imas_interface.LowlevelInterface`. It defines all known methods of the Access Layer and defers to the correct implementation if it is available in the loaded AL version (and raises a descriptive exception if the function is not available). @@ -285,40 +284,35 @@ Access Layer interfaces MDSplus support --------------- -- :py:mod:`imaspy.backends.imas_core.mdsplus_model` is responsible for creating MDSplus `models`. These +- :py:mod:`imas.backends.imas_core.mdsplus_model` is responsible for creating MDSplus `models`. These models are specific to a DD version and are required when using the MDSplus backend for creating new Data Entries. - .. seealso:: :ref:`MDSplus in IMASPy` + .. seealso:: :ref:`MDSplus in imas-python` Versioning ---------- -IMASPy uses `versioneer `_ for -versioning. An IMASPy release has a corresponding tag (which sets the version), e.g. -`this is the tag -`_ for -version ``0.8.0``. Development builds are versioned based on the ``git describe`` of the -repository. - -The ``imaspy._version`` module is generated by ``versioneer`` and implements this logic -for editable installs. 
This module is replaced by ``versioneer`` when building python -packages (this is handled in ``setup.py``). +imas-python uses `setuptools-scm <https://pypi.org/project/setuptools-scm/>`_ for +versioning. An imas-python release has a corresponding tag (which sets the version). +The ``imas._version`` module is generated by ``setuptools-scm`` and implements the version logic +for editable installs. It is regenerated by ``setuptools-scm`` when building python +packages. Conversion between Data Dictionary versions ------------------------------------------- -:py:mod:`imaspy.ids_convert` contains logic for converting an IDS between DD versions. +:py:mod:`imas.ids_convert` contains logic for converting an IDS between DD versions. -The :py:class:`~imaspy.ids_convert.DDVersionMap` class creates and contains mappings for +The :py:class:`~imas.ids_convert.DDVersionMap` class creates and contains mappings for an IDS between two Data Dictionary versions. It creates two mappings: one to be used when converting from the newer version of the two to the older version (``new_to_old``) and a map for the reverse (``old_to_new``). These mappings are of type -:py:class:`~imaspy.ids_convert.NBCPathMap`. See its API documentation for more details. +:py:class:`~imas.ids_convert.NBCPathMap`. See its API documentation for more details. -:py:func:`~imaspy.ids_convert.convert_ids` is the main API method for converting IDSs +:py:func:`~imas.ids_convert.convert_ids` is the main API method for converting IDSs between versions. It works as follows: - It builds a ``DDVersionMap`` between the two DD versions version and selects the @@ -326,11 +320,11 @@ - If needed, it creates a target IDS of the destination DD version. - It then uses the ``NBCPathMap`` to convert data and store it in the target IDS. -:py:class:`~imaspy.db_entry.DBEntry` can also handle automatic DD version conversion. It +:py:class:`~imas.db_entry.DBEntry` can also handle automatic DD version conversion. 
It uses the same ``DDVersionMap`` and ``NBCPathMap`` as -:py:func:`~imaspy.ids_convert.convert_ids`. When reading data from the backends, the +:py:func:`~imas.ids_convert.convert_ids`. When reading data from the backends, the ``NBCPathMap`` is used to translate between the old and the new DD version. See the -implementation in :py:mod:`imaspy.backends.imas_core.db_entry_helpers`. +implementation in :py:mod:`imas.backends.imas_core.db_entry_helpers`. Miscelleneous @@ -339,13 +333,13 @@ Miscelleneous The following is a list of miscelleneous modules, which don't belong to any of the other categories on this page. -- :py:mod:`imaspy.exception` contains all Exception classes that IMASPy may raise. -- :py:mod:`imaspy.setup_logging` initializes a logging handler for IMASPy. -- :py:mod:`imaspy.training` contains helper methods for making training data +- :py:mod:`imas.exception` contains all Exception classes that imas-python may raise. +- :py:mod:`imas.setup_logging` initializes a logging handler for imas-python. +- :py:mod:`imas.training` contains helper methods for making training data available. -- :py:mod:`imaspy.util` contains useful utility methods. It is imported automatically. +- :py:mod:`imas.util` contains useful utility methods. It is imported automatically. All methods requiring third party libraries (``rich`` and ``scipy``) are implemented - in ``imaspy._util``. This avoids importing these libraries immediately when a - user imports ``imaspy`` (which can take a couple hundred milliseconds). Instead, + in ``imas._util``. This avoids importing these libraries immediately when a + user imports ``imas`` (which can take a couple hundred milliseconds). Instead, this module is only loaded when a user needs this functionality. 
diff --git a/docs/source/imaspy_structure.png b/docs/source/imas_structure.png similarity index 100% rename from docs/source/imaspy_structure.png rename to docs/source/imas_structure.png diff --git a/docs/source/index.rst b/docs/source/index.rst index c5a3f24c..20a5d80a 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -2,12 +2,12 @@ Master "index". This will be converted to a landing index.html by sphinx. We define TOC here, but it'll be put in the sidebar by the theme -============= -IMASPy Manual -============= +================== +imas-python manual +================== -IMASPy is a pure-python library to handle arbitrarily nested -data structures. IMASPy is designed for, but not necessarily bound to, +imas-python is a pure-python library to handle arbitrarily nested +data structures. imas-python is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -20,9 +20,9 @@ It provides: - Checking of correctness at assign time, instead of at database write time - Dynamically created in-memory pre-filled data trees from DD XML specifications -The README is best read on :src:`#imaspy`. +The README is best read on :src:`#imas`. -Read what's new in the current version of IMASPy in our :ref:`changelog`! +Read what's new in the current version of imas-python in our :ref:`changelog`! Manual @@ -48,7 +48,7 @@ Manual changelog .. toctree:: - :caption: IMASPy training courses + :caption: imas-python training courses :maxdepth: 1 courses/basic_user_training @@ -64,14 +64,14 @@ Manual .. 
toctree:: - :caption: IMASPy development + :caption: imas-python development :maxdepth: 1 - imaspy_architecture + imas_architecture code_style ci_config benchmarking - release_imaspy + release_imas LICENSE diff --git a/docs/source/installing.rst b/docs/source/installing.rst index 3f596aaa..a454d0aa 100644 --- a/docs/source/installing.rst +++ b/docs/source/installing.rst @@ -1,46 +1,33 @@ -.. _`Installing IMASPy`: +.. _`Installing imas-python`: -Installing IMASPy -================= +Installing imas-python +====================== -IMASPy is a pure Python package. For full functionality of the package you need -an installation of `the IMAS Access Layer `_. See -:ref:`IMASPy 5 minute introduction` for an overview of functionality which does -(not) require the IMAS Access Layer available. +imas-python is a pure Python package. For full functionality of the package you need +an installation of `the IMAS Core library `_. See +:ref:`imas-python 5 minute introduction` for an overview of functionality which does +(not) require the IMAS Core library available. - -IMASPy modules on the ITER cluster and EuroFusion gateway ---------------------------------------------------------- - -There is a `module` available on the ITER and Eurofusion Gateway clusters, so -you can run: +To get started, you can install it from `pypi.org `_: .. code-block:: bash - module load IMASPy - -Additionally, if you wish to use the MDSPlus backend, you should load: - -.. code-block:: bash + pip install imas-python - module load MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11 -If you're using a different cluster, please contact your system administrator to see -if IMASPy is available (or can be made available) on the system. +Local installation from sources +------------------------------- - -Local installation ------------------- - -We recommend using a :external:py:mod:`venv`. Then, clone the IMASPy repository +We recommend using a :external:py:mod:`venv`. 
Then, clone the imas-python repository
and run `pip install`:

.. code-block:: bash

    python3 -m venv ./venv
    . venv/bin/activate
-    git clone ssh://git@git.iter.org/imas/imaspy.git
-    cd imaspy
+
+    git clone git@github.com:iterorganization/imas-python.git
+    cd imas-python
     pip install --upgrade pip
     pip install --upgrade wheel setuptools
     pip install .
@@ -61,32 +48,22 @@ Test your installation by trying

.. code-block:: bash

     cd ~
-    python -c "import imaspy; print(imaspy.__version__)"
+    python -c "import imas; print(imas.__version__)"

-This is how to run the IMASPy test suite:
+This is how to run the imas-python test suite:

.. code-block:: bash

-    # inside the IMASPy git repository
-    pytest imaspy --mini
+    # inside the imas-python git repository
+    pytest imas --mini

     # run with a specific backend
-    pytest imaspy --ascii --mini
+    pytest imas --ascii --mini

-And to build the IMASPy documentation, execute:
+And to build the imas-python documentation, execute:

.. code-block:: bash

     make -C docs html

-Installation without ITER access
--------------------------------
-
-The installation script tries to access the `ITER IMAS Core Data Dictionary
-repository `_
-to fetch the latest versions. If you do not have git+ssh access there, you can
-try to find this repository elsewhere, and do a ``git fetch --tags``.
-
-Alternatively you could try to obtain an ``IDSDef.zip`` and place it in
-``~/.config/imaspy/``.
diff --git a/docs/source/intro.rst b/docs/source/intro.rst
index a32799e6..30c75e1e 100644
--- a/docs/source/intro.rst
+++ b/docs/source/intro.rst
@@ -1,7 +1,7 @@
-.. _`IMASPy 5 minute introduction`:
+.. _`imas-python 5 minute introduction`:

-IMASPy 5 minute introduction
-----------------------------
+imas-python 5 minute introduction
+---------------------------------

.. contents:: Contents
    :local:

@@ -11,39 +11,39 @@ IMASPy 5 minute introduction
Verify your IMAS installation
'''''''''''''''''''''''''''''

-Before continuing, verify that your imaspy install is working. 
Check the -:ref:`Installing IMASPy` page for installation instructions if below fails for -you. Start python and import imaspy. Note that the version in below output may +Before continuing, verify that your imas install is working. Check the +:ref:`Installing imas-python` page for installation instructions if below fails for +you. Start python and import imas. Note that the version in below output may be outdated. .. code-block:: python - >>> import imaspy - >>> print(imaspy.__version__) + >>> import imas + >>> print(imas.__version__) 1.0.0 .. note:: - If you have an IMASPy install without the IMAS Access Layer, importing - IMASPy will display an error message. You can still use IMASPy, but not all + If you have an imas-python install without the IMAS Access Layer, importing + imas-python will display an error message. You can still use imas-python, but not all functionalities are available. Create and use an IDS ''''''''''''''''''''' -To create an IDS, you must first make an :py:class:`~imaspy.ids_factory.IDSFactory` +To create an IDS, you must first make an :py:class:`~imas.ids_factory.IDSFactory` object. The IDS factory is necessary for specifying which version of the IMAS Data -Dictionary you want to use. If you don't specify anything, IMASPy uses the same Data +Dictionary you want to use. If you don't specify anything, imas-python uses the same Data Dictionary version as the loaded IMAS environment, or the latest available version. See :ref:`Using multiple DD versions in the same environment` for more information on different Data Dictionary versions. .. code-block:: python - >>> import imaspy + >>> import imas >>> import numpy as np - >>> ids_factory = imaspy.IDSFactory() + >>> ids_factory = imas.IDSFactory() 13:26:47 [INFO] Parsing data dictionary version 3.38.1 @dd_zip.py:127 >>> # Create an empty core_profiles IDS >>> core_profiles = ids_factory.core_profiles() @@ -52,8 +52,8 @@ We can now use this ``core_profiles`` IDS and assign some data to it: .. 
code-block:: python - >>> core_profiles.ids_properties.comment = "Testing IMASPy" - >>> core_profiles.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + >>> core_profiles.ids_properties.comment = "Testing imas-python" + >>> core_profiles.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> # array quantities are automatically converted to the appropriate numpy arrays >>> core_profiles.time = [1, 2, 3] >>> # the python list of ints is converted to a 1D array of floats @@ -68,7 +68,7 @@ We can now use this ``core_profiles`` IDS and assign some data to it: >>> core_profiles.profiles_1d[0].grid.rho_tor_norm = [0, 0.5, 1.0] >>> core_profiles.profiles_1d[0].j_tor = [0, 0, 0] -As you can see in the example above, IMASPy automatically checks the data you try to +As you can see in the example above, imas-python automatically checks the data you try to assign to an IDS with the data type specified in the Data Dictionary. When possible, your data is automatically converted to the expected type. You will get an error message if this is not possible: @@ -90,7 +90,7 @@ Store an IDS to disk .. note:: - This functionality requires the IMAS Access Layer. - - This API will change when IMASPy is moving to Access Layer 5 (expected Q2 + - This API will change when imas-python is moving to Access Layer 5 (expected Q2 2023). To store an IDS to disk, we need to indicate the following information to the @@ -102,18 +102,18 @@ IMAS Access Layer. Please check the `IMAS Access Layer documentation - ``pulse`` - ``run`` -In IMASPy you do this as follows: +In imas-python you do this as follows: .. 
code-block:: python >>> # Create a new IMAS data entry for storing the core_profiles IDS we created earlier >>> # Here we specify the backend, database, pulse and run - >>> dbentry = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "TEST", 10, 2) + >>> dbentry = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2) >>> dbentry.create() >>> # now store the core_profiles IDS we just populated >>> dbentry.put(core_profiles) -.. image:: imaspy_structure.png +.. image:: imas_structure.png Load an IDS from disk @@ -122,7 +122,7 @@ Load an IDS from disk .. note:: - This functionality requires the IMAS Access Layer. - - This API will change when IMASPy is moving to Access Layer 5 (expected Q2 + - This API will change when imas-python is moving to Access Layer 5 (expected Q2 2023). To load an IDS from disk, you need to specify the same information as @@ -132,8 +132,8 @@ can use ``.get()`` to load IDS data from disk: .. code-block:: python >>> # Now load the core_profiles IDS back from disk - >>> dbentry2 = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "TEST", 10, 2) + >>> dbentry2 = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2) >>> dbentry2.open() >>> core_profiles2 = dbentry2.get("core_profiles") >>> print(core_profiles2.ids_properties.comment.value) - Testing IMASPy + Testing imas-python diff --git a/docs/source/lazy_loading.rst b/docs/source/lazy_loading.rst index cfea365e..a4317d5d 100644 --- a/docs/source/lazy_loading.rst +++ b/docs/source/lazy_loading.rst @@ -4,12 +4,12 @@ Lazy loading ============ When reading data from a data entry (using :meth:`DBEntry.get -`, or :meth:`DBEntry.get_slice -`), by default all data is read immediately from the +`, or :meth:`DBEntry.get_slice +`), by default all data is read immediately from the lowlevel Access Layer backend. This may take a long time to complete if the data entry has a lot of data stored for the requested IDS. 
-Instead of reading data immediately, IMASPy can also `lazy load` the data when you need +Instead of reading data immediately, imas-python can also `lazy load` the data when you need it. This will speed up your program in cases where you are interested in a subset of all the data stored in an IDS. @@ -18,12 +18,12 @@ Enable lazy loading of data --------------------------- You can enable lazy loading of data by supplying the keyword argument :code:`lazy=True` -to :meth:`DBEntry.get `, or :meth:`DBEntry.get_slice -`. The returned IDS +to :meth:`DBEntry.get `, or :meth:`DBEntry.get_slice +`. The returned IDS object will fetch the data from the backend at the moment that you want to access it. See below example: -.. literalinclude:: courses/basic/imaspy_snippets/plot_core_profiles_te.py +.. literalinclude:: courses/basic/imas_snippets/plot_core_profiles_te.py :caption: Example with lazy loading of data In this example, using lazy loading with the MDSPLUS backend is about 12 times @@ -39,39 +39,39 @@ Lazy loading of data may speed up your programs, but also comes with some limita 1. Some functionality is not implemented or works differently for lazy-loaded IDSs: - Iterating over non-empty nodes works differently, see API documentation: - :py:meth:`imaspy.ids_structure.IDSStructure.iter_nonempty_`. - - :py:meth:`~imaspy.ids_structure.IDSStructure.has_value` is not implemented for + :py:meth:`imas.ids_structure.IDSStructure.iter_nonempty_`. + - :py:meth:`~imas.ids_structure.IDSStructure.has_value` is not implemented for lazy-loaded structure elements. - - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate` will only validate loaded + - :py:meth:`~imas.ids_toplevel.IDSToplevel.validate` will only validate loaded data. Additional data might be loaded from the backend to validate coordinate sizes. - - :py:meth:`imaspy.util.print_tree` will only print data that is loaded when - :py:param:`~imaspy.util.print_tree.hide_empty_nodes` is ``True``. 
- - :py:meth:`imaspy.util.visit_children`: + - :py:meth:`imas.util.print_tree` will only print data that is loaded when + :py:param:`~imas.util.print_tree.hide_empty_nodes` is ``True``. + - :py:meth:`imas.util.visit_children`: - - When :py:param:`~imaspy.util.visit_children.visit_empty` is ``False`` + - When :py:param:`~imas.util.visit_children.visit_empty` is ``False`` (default), this method uses - :py:meth:`~imaspy.ids_structure.IDSStructure.iter_nonempty_`. This raises an + :py:meth:`~imas.ids_structure.IDSStructure.iter_nonempty_`. This raises an error for lazy-loaded IDSs, unless you set - :py:param:`~imaspy.util.visit_children.accept_lazy` to ``True``. - - When :py:param:`~imaspy.util.visit_children.visit_empty` is ``True``, this + :py:param:`~imas.util.visit_children.accept_lazy` to ``True``. + - When :py:param:`~imas.util.visit_children.visit_empty` is ``True``, this will iteratively load `all` data from the backend. This is effectively a full, but less efficient, ``get()``\ /\ ``get_slice()``. It will be faster if you don't use lazy loading in this case. - - IDS conversion through :py:meth:`imaspy.convert_ids - ` is not implemented for lazy loaded IDSs. Note + - IDS conversion through :py:meth:`imas.convert_ids + ` is not implemented for lazy loaded IDSs. Note that :ref:`Automatic conversion between DD versions` also applies when lazy loading. - Lazy loaded IDSs are read-only, setting or changing values, resizing arrays of structures, etc. is not allowed. - - You cannot :py:meth:`~imaspy.db_entry.DBEntry.put`, - :py:meth:`~imaspy.db_entry.DBEntry.put_slice` or - :py:meth:`~imaspy.ids_toplevel.IDSToplevel.serialize` lazy-loaded IDSs. + - You cannot :py:meth:`~imas.db_entry.DBEntry.put`, + :py:meth:`~imas.db_entry.DBEntry.put_slice` or + :py:meth:`~imas.ids_toplevel.IDSToplevel.serialize` lazy-loaded IDSs. - Copying lazy-loaded IDSs (through :external:py:func:`copy.deepcopy`) is not implemented. -2. 
IMASPy **assumes** that the underlying data entry is not modified. +2. imas-python **assumes** that the underlying data entry is not modified. When you (or another user) overwrite or add data to the same data entry, you may end up with a mix of old and new data in the lazy loaded IDS. @@ -89,5 +89,5 @@ Lazy loading of data may speed up your programs, but also comes with some limita 4. Lazy loading has more overhead for reading data from the lowlevel: it is therefore more efficient to do a full :code:`get()` or :code:`get_slice()` when you intend to use most of the data stored in an IDS. -5. When using IMASPy with remote data access (i.e. the UDA backend), a full +5. When using imas-python with remote data access (i.e. the UDA backend), a full :code:`get()` or :code:`get_slice()` is more efficient than lazy loading. diff --git a/docs/source/mdsplus.rst b/docs/source/mdsplus.rst index b097df43..1ff6e74e 100644 --- a/docs/source/mdsplus.rst +++ b/docs/source/mdsplus.rst @@ -1,15 +1,15 @@ -.. _`MDSplus in IMASPy`: +.. _`MDSplus in imas-python`: -MDSplus in IMASPy -================= +MDSplus in imas-python +====================== `MDSplus `_ is a set of software tools for data acquisition and storage and a methodology for management of complex -scientific data. IMASPy uses the IMAS LowLevel interface to interact +scientific data. imas-python uses the IMAS LowLevel interface to interact with MDSplus data. The model files required to read IMAS IDS-structured data are generated on demand, whenever a specific DD version is used by the user. As this generation might take a while, MDSplus models are -cached to disk, generally in ``$HOME/.cache/imaspy``. As multiple +cached to disk, generally in ``$HOME/.cache/imas``. As multiple processes can write to this location, especially during testing, special care is taken to avoid write collisions. 
``$MDSPLUS_MODEL_TIMEOUT`` can be used to specify the amount of seconds diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index fcbd87d9..32545c35 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -5,21 +5,21 @@ IDS metadata Besides the data structure, the IMAS Data Dictionary also defines metadata associated with elements in the IDS, such as coordinate information, units, etc. -IMASPy provides the :py:class:`~imaspy.ids_metadata.IDSMetadata` API for +imas-python provides the :py:class:`~imas.ids_metadata.IDSMetadata` API for interacting with this metadata. On this page you find several examples for querying and using the metadata of IDS elements. .. seealso:: - IMASPy advanced training: :ref:`Using metadata` + imas-python advanced training: :ref:`Using metadata` Overview of available metadata ------------------------------ An overview of available metadata is given in the API documentation for -:py:class:`~imaspy.ids_metadata.IDSMetadata`. +:py:class:`~imas.ids_metadata.IDSMetadata`. The documented attributes are always available, but additional metadata from the data dictionary may be available as well. For example, the data dictionary indicates a ``lifecycle_last_change`` on all IDS @@ -28,8 +28,8 @@ metadata documentation, but you can still access it. See the following code samp .. code-block:: pycon - >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.metadata.lifecycle_last_change '3.39.0' @@ -60,7 +60,7 @@ looked up. See below example. .. 
code-block:: python :caption: Example getting coordinate values belonging to a 1D quantity - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.profiles_1d.resize(1) >>> profile = core_profiles.profiles_1d[0] >>> profile.grid.rho_tor_norm = [0, 0.15, 0.3, 0.45, 0.6] @@ -68,14 +68,14 @@ looked up. See below example. >>> profile.electrons.temperature.coordinates[0] IDSNumericArray("/core_profiles/profiles_1d/1/grid/rho_tor_norm", array([0. , 0.15, 0.3 , 0.45, 0.6 ])) -When a coordinate is just an index, IMASPy generates a +When a coordinate is just an index, imas-python generates a :external:py:func:`numpy.arange` with the same length as the data. See below example. .. code-block:: python :caption: Example getting index coordinate values belonging to an array of structures - >>> pf_active = imaspy.IDSFactory().pf_active() + >>> pf_active = imas.IDSFactory().pf_active() >>> pf_active.coil.resize(10) >>> # Coordinate1 of coil is an index 1...N >>> pf_active.coil.coordinates[0] @@ -84,18 +84,18 @@ example. .. rubric:: Time coordinates Time coordinates are a special case: the coordinates depend on whether the IDS -is in homogeneous time mode or not. IMASPy handles this transparently. +is in homogeneous time mode or not. imas-python handles this transparently. .. code-block:: python :caption: Example getting time coordinate values - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> # profiles_1d is a time-dependent array of structures: >>> core_profiles.profiles_1d.coordinates[0] [...] ValueError: Invalid IDS time mode: ids_properties/homogeneous_time is , was expecting 0 or 1. >>> core_profiles.ids_properties.homogeneous_time = \\ - ... imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + ... 
imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> # In homogeneous time mode, the root /time array is used >>> core_profiles.time = [0, 1] >>> core_profiles.profiles_1d.resize(2) @@ -103,7 +103,7 @@ is in homogeneous time mode or not. IMASPy handles this transparently. IDSNumericArray("/core_profiles/time", array([0., 1.])) >>> # But in heterogeneous time mode, profiles_1d/time is used instead >>> core_profiles.ids_properties.homogeneous_time = \\ - ... imaspy.ids_defs.IDS_TIME_MODE_HETEROGENEOUS + ... imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS >>> core_profiles.profiles_1d.coordinates[0] array([-9.e+40, -9.e+40]) @@ -116,36 +116,36 @@ used as a coordinate. For example, the ``distribution(i1)/profiles_2d(itime)/grid/r OR distribution(i1)/profiles_2d(itime)/grid/rho_tor_norm``. This means that either ``r`` or ``rho_tor_norm`` can be used as coordinate. When requesting such a -coordinate from IMASPy, four things may happen: +coordinate from imas-python, four things may happen: 1. When ``r`` is empty and ``rho_tor_norm`` not, ``coordinates[0]`` will return ``rho_tor_norm``. 2. When ``rho_tor_norm`` is empty and ``r`` not, ``coordinates[0]`` will return ``r``. -3. When both ``r`` and ``rho_tor_norm`` are not empty, IMASPy raises an error +3. When both ``r`` and ``rho_tor_norm`` are not empty, imas-python raises an error because it cannot determine which of the two coordinates should be used. -4. Similarly, an error is raised by IMASPy when neither ``r`` nor +4. Similarly, an error is raised by imas-python when neither ``r`` nor ``rho_tor_norm`` are set. .. seealso:: - API documentation for :py:class:`~imaspy.ids_coordinates.IDSCoordinates` + API documentation for :py:class:`~imas.ids_coordinates.IDSCoordinates` Query coordinate information '''''''''''''''''''''''''''' -In IMASPy you can query coordinate information in two ways: +In imas-python you can query coordinate information in two ways: 1. 
Directly query the coordinate attribute on the metadata: :code:`.metadata.coordinate2` gives you the coordinate information for the second dimension of the quantity. -2. Use the :py:attr:`~imaspy.ids_metadata.IDSMetadata.coordinates` attribute: +2. Use the :py:attr:`~imas.ids_metadata.IDSMetadata.coordinates` attribute: :code:`.metadata.coordinates` is a tuple containing all coordinate information for the quantity. The coordinate information from the Data Dictionary is parsed and stored in an -:py:class:`~imaspy.ids_coordinates.IDSCoordinate`. The Data Dictionary has +:py:class:`~imas.ids_coordinates.IDSCoordinate`. The Data Dictionary has several types of coordinate information: 1. When the coordinate is an index, the Data Dictionary indicates this via @@ -163,7 +163,7 @@ several types of coordinate information: .. code-block:: python :caption: Examples querying coordinate information - >>> pf_active = imaspy.IDSFactory().pf_active() + >>> pf_active = imas.IDSFactory().pf_active() >>> # coordinate1 of pf_active/coil is an index (the number of the coil) >>> pf_active.coil.metadata.coordinate1 IDSCoordinate('1...N') @@ -175,7 +175,7 @@ several types of coordinate information: .. seealso:: - API documentation for :py:class:`~imaspy.ids_coordinates.IDSCoordinate`. + API documentation for :py:class:`~imas.ids_coordinates.IDSCoordinate`. Query alternative coordinates @@ -183,7 +183,7 @@ Query alternative coordinates Starting in Data Dictionary 4.0, a coordinate quantity may indicate alternatives for itself. These alternatives are stored in the metadata attribute -:py:attr:`~imaspy.ids_metadata.IDSMetadata.alternative_coordinates`. +:py:attr:`~imas.ids_metadata.IDSMetadata.alternative_coordinates`. For example, most quantities in ``profiles_1d`` of the ``core_profiles`` IDS have ``profiles_1d/grid/rho_tor_norm`` as coordinate. However, there are alternatives @@ -193,9 +193,9 @@ the metadata of ``rho_tor_norm``: .. 
code-block:: python :caption: Showing alternative coordinates in Data Dictionary version 4.0.0 - >>> import imaspy + >>> import imas >>> import rich - >>> dd4 = imaspy.IDSFactory("4.0.0") + >>> dd4 = imas.IDSFactory("4.0.0") >>> core_profiles = dd4.core_profiles() >>> rich.print(cp.profiles_1d[0].grid.rho_tor_norm.metadata.alternative_coordinates) ( diff --git a/docs/source/multi-dd.rst b/docs/source/multi-dd.rst index 19d51b41..372aaae0 100644 --- a/docs/source/multi-dd.rst +++ b/docs/source/multi-dd.rst @@ -4,26 +4,26 @@ Using multiple DD versions in the same environment ================================================== Whereas the default IMAS High Level Interface is built for a single Data Dictionary -version, IMASPy can transparently handle multiple DD versions. +version, imas-python can transparently handle multiple DD versions. -By default, IMASPy uses the same Data Dictionary version as the loaded IMAS environment +By default, imas-python uses the same Data Dictionary version as the loaded IMAS environment is using, as specified by the environment variable ``IMAS_VERSION``. If no IMAS environment is loaded, the last available DD version is used. You can also explicitly specify which IMAS version you want to use when constructing a -:py:class:`~imaspy.db_entry.DBEntry` or :py:class:`~imaspy.ids_factory.IDSFactory`. For +:py:class:`~imas.db_entry.DBEntry` or :py:class:`~imas.ids_factory.IDSFactory`. For example: .. code-block:: python :caption: Using non-default IMAS versions. 
- import imaspy + import imas - factory_default = imaspy.IDSFactory() # Use default DD version - factory_3_32_0 = imaspy.IDSFactory("3.32.0") # Use DD version 3.32.0 + factory_default = imas.IDSFactory() # Use default DD version + factory_3_32_0 = imas.IDSFactory("3.32.0") # Use DD version 3.32.0 # Will write IDSs to the backend in DD version 3.32.0 - dbentry = imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "TEST", 10, 2, version="3.32.0") + dbentry = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2, version="3.32.0") dbentry.create() .. seealso:: :ref:`multi-dd training` @@ -34,13 +34,13 @@ example: Conversion of IDSs between DD versions -------------------------------------- -IMASPy can convert IDSs between different versions of the data dictionary. This uses the +imas-python can convert IDSs between different versions of the data dictionary. This uses the "non-backwards compatible changes" metadata from the DD definitions. There are two conversion modes: 1. Automatic conversion: this is handled when reading or writing data - (:py:meth:`~imaspy.db_entry.DBEntry.get`/:py:meth:`~imaspy.db_entry.DBEntry.get_slice`, - :py:meth:`~imaspy.db_entry.DBEntry.put`/:py:meth:`~imaspy.db_entry.DBEntry.put_slice`). + (:py:meth:`~imas.db_entry.DBEntry.get`/:py:meth:`~imas.db_entry.DBEntry.get_slice`, + :py:meth:`~imas.db_entry.DBEntry.put`/:py:meth:`~imas.db_entry.DBEntry.put_slice`). The DBEntry class automatically converts IDSs to the requested version: @@ -57,7 +57,7 @@ two conversion modes: are silently ignored. 2. Explicit conversion: this is achieved with a call to - :py:func:`imaspy.convert_ids `. + :py:func:`imas.convert_ids `. Automatic conversion is faster when reading data (up to a factor 2, depending on the backend and the stored data), but it doesn't support all conversion logic @@ -75,8 +75,8 @@ the backend and the stored data), but it doesn't support all conversion logic be more efficient to convert the data to your DD version, store it and then use it. 
This avoids conversion every time you read the data. - Converting an entire Data Entry can also be done with the IMASPy command - line interface. See :ref:`IMASPy Command Line tool`. + Converting an entire Data Entry can also be done with the imas-python command + line interface. See :ref:`imas-python Command Line tool`. Explicit conversion @@ -85,28 +85,28 @@ Explicit conversion .. code-block:: python :caption: Explicitly convert data when reading from disk - import imaspy + import imas - entry = imaspy.DBEntry("", "r") + entry = imas.DBEntry("", "r") # Disable automatic conversion when reading the IDS with autoconvert=False ids = entry.get("", autoconvert=False) # Explicitly convert the IDS to the target version - ids = imaspy.convert_ids(ids, "") + ids = imas.convert_ids(ids, "") .. code-block:: python :caption: Convert an IDS to a different DD version - import imaspy + import imas # Create a pulse_schedule IDS in version 3.23.0 - ps = imaspy.IDSFactory("3.25.0").new("pulse_schedule") + ps = imas.IDSFactory("3.25.0").new("pulse_schedule") ps.ec.antenna.resize(1) ps.ec.antenna[0].name = "IDS conversion test" # Convert the IDS to version 3.30.0 - ps330 = imaspy.convert_ids(ps, "3.30.0") + ps330 = imas.convert_ids(ps, "3.30.0") # ec.antenna was renamed to ec.launcher between 3.23.0 and 3.30.0 print(len(ps330.ec.launcher)) # 1 print(ps330.ec.launcher[0].name.value) # IDS conversion test @@ -114,7 +114,7 @@ Explicit conversion .. note:: Not all data may be converted. For example, when an IDS node is removed between DD - versions, the corresponding data is not copied. IMASPy provides logging to indicate + versions, the corresponding data is not copied. imas-python provides logging to indicate when this happens. @@ -181,16 +181,16 @@ explicit conversion mechanisms. 
Background information ---------------------- -Since IMASPy needs to have access to multiple DD versions it was chosen to +Since imas-python needs to have access to multiple DD versions it was chosen to bundle these with the code at build-time, in setup.py. If a git clone of the Data Dictionary succeeds, the setup tools automatically download saxon and generate ``IDSDef.xml`` for each of the tagged versions in the DD git repository. These are then gathered into ``IDSDef.zip``, which is -distributed inside the IMASPy package. +distributed inside the imas-python package. To update the set of data dictionaries new versions can be added to the zipfile. A reinstall of the package will ensure that all available versions are included -in IMASPy. Additionally an explicit path to an XML file can be specified, which +in imas-python. Additionally an explicit path to an XML file can be specified, which is useful for development. Automated tests have been provided that check the loading of all of the DD @@ -203,14 +203,14 @@ Extending the DD set Use the command ``python setup.py build_DD`` to build a new ``IDSDef.zip``. This fetches all tags from the data dictionary git repository and builds the ``IDSDef.zip``. -IMASPy searches for an ``IDSDef.zip`` in the following locations: +imas-python searches for an ``IDSDef.zip`` in the following locations: -1. The environment variable ``$IMASPY_DDZIP`` (path to a zip file) +1. The environment variable ``$IMAS_DDZIP`` (path to a zip file) 2. The file ``./IDSDef.zip`` in the current working directory -3. In the local configuration folder: ``~/.config/imaspy/IDSDef.zip``, or - ``$XDG_CONFIG_DIR/imaspy/IDSDef.zip`` (if the environment variable +3. In the local configuration folder: ``~/.config/imas/IDSDef.zip``, or + ``$XDG_CONFIG_DIR/imas/IDSDef.zip`` (if the environment variable ``$XDG_CONFIG_DIR`` is set) -4. The zipfile bundled with the IMASPy installation: ``assets/IDSDef.zip`` +4. 
The zipfile bundled with the imas-python installation: ``assets/IDSDef.zip`` All paths are searched in order when loading the definitions of a specific data dictionary version: the first zip file that contains the definitions of the requested diff --git a/docs/source/netcdf.rst b/docs/source/netcdf.rst index 7a7593e6..4ef62a2a 100644 --- a/docs/source/netcdf.rst +++ b/docs/source/netcdf.rst @@ -9,60 +9,60 @@ IMAS netCDF files netcdf/conventions -IMASPy supports reading IDSs from and writing IDSs to IMAS netCDF files. This +imas-python supports reading IDSs from and writing IDSs to IMAS netCDF files. This feature is currently in alpha status, and its functionality may change in -upcoming minor releases of IMASPy. +upcoming minor releases of imas-python. A detailed description of the IMAS netCDF format and conventions can be found on the :ref:`IMAS conventions for the netCDF data format` page. -Reading from and writing to netCDF files uses the same :py:class:`imaspy.DBEntry -` API as reading and writing to Access Layer backends. +Reading from and writing to netCDF files uses the same :py:class:`imas.DBEntry +` API as reading and writing to Access Layer backends. If you provide a path to a netCDF file (ending with ``.nc``) the netCDF backend -will be used for :py:meth:`~imaspy.db_entry.DBEntry.get` and -:py:meth:`~imaspy.db_entry.DBEntry.put` calls. See the below example: +will be used for :py:meth:`~imas.db_entry.DBEntry.get` and +:py:meth:`~imas.db_entry.DBEntry.put` calls. See the below example: .. 
code-block:: python :caption: Use DBEntry to write and read IMAS netCDF files - import imaspy + import imas - cp = imaspy.IDSFactory().core_profiles() - cp.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_INDEPENDENT + cp = imas.IDSFactory().core_profiles() + cp.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_INDEPENDENT cp.ids_properties.comment = "Test IDS" # This will create the `test.nc` file and stores the core_profiles IDS in it - with imaspy.DBEntry("test.nc", "w") as netcdf_entry: + with imas.DBEntry("test.nc", "w") as netcdf_entry: netcdf_entry.put(cp) # Reading back: - with imaspy.DBEntry("test.nc", "r") as netcdf_entry: + with imas.DBEntry("test.nc", "r") as netcdf_entry: cp2 = netcdf_entry.get("core_profiles") - imaspy.util.print_tree(cp2) + imas.util.print_tree(cp2) Using IMAS netCDF files with 3rd-party tools -------------------------------------------- -The netCDF files produces by IMASPy can be read with external tools. In this +The netCDF files produced by imas-python can be read with external tools. In this section we will show how to load data with the `xarray `__ package. Let's first create a small netCDF file in the current working directory based on -the IMASPy training data: +the imas-python training data: .. code-block:: python :caption: Store ``core_profiles`` training data in a netCDF file - import imaspy.training + import imas.training # Open the training entry - with imaspy.training.get_training_db_entry() as training_entry: + with imas.training.get_training_db_entry() as training_entry: # Load the core_profiles IDS core_profiles = training_entry.get("core_profiles") # Open a netCDF entry to store this IDS in: - with imaspy.DBEntry("core_profiles.nc", "w") as nc: + with imas.DBEntry("core_profiles.nc", "w") as nc: nc.put(core_profiles) If you execute this code snippet, you will find a file ``core_profiles.nc`` in @@ -107,6 +107,6 @@ your directory. 
Let's open this file with ``xarray.load_dataset``: Validating an IMAS netCDF file ------------------------------ -IMAS netCDF files can be validated with IMASPy through the command line ``imaspy -validate_nc ``. See also :ref:`IMASPy Command Line tool` or type -``imaspy validate_nc --help`` in a command line. +IMAS netCDF files can be validated with imas-python through the command line ``imas +validate_nc ``. See also :ref:`imas-python Command Line tool` or type +``imas validate_nc --help`` in a command line. diff --git a/docs/source/netcdf/conventions.rst b/docs/source/netcdf/conventions.rst index 99718b47..2dbbacb4 100644 --- a/docs/source/netcdf/conventions.rst +++ b/docs/source/netcdf/conventions.rst @@ -263,7 +263,7 @@ IMAS netCDF writers are recommended to overwrite the following metadata: - ``ids_properties.version_put.access_layer``: fill with ``"N/A"``, since this IDS is not written by the IMAS Access Layer. - ``ids_properties.version_put.access_layer_language``: fill with the name and - version of the netCDF writer, for example ``IMASPy 1.1.0``. + version of the netCDF writer, for example ``imas-python 1.1.0``. All other IDS metadata and provenance should be filled by the user or software that provides the IDS data. diff --git a/docs/source/release_imaspy.rst b/docs/source/release_imas.rst similarity index 72% rename from docs/source/release_imaspy.rst rename to docs/source/release_imas.rst index cc946ea4..4606118d 100644 --- a/docs/source/release_imaspy.rst +++ b/docs/source/release_imas.rst @@ -1,33 +1,26 @@ -IMASPy development and release process -====================================== +imas-python development and release process +=========================================== -IMASPy development follows the `Gitflow workflow -`_: +imas-python development follows a fork-based model described in +`the contributing guidelines +`_. -1. New features, bug fixes, etc. are developed in a separate branch. 
Typically named - ``feature/``, ``bugfix/IMAS-XXXX``, etc. -2. When the feature is finished, a Pull Request to the ``develop`` branch is created. -3. The PR is reviewed and, after approval, changes are merged to ``develop``. -4. The ``main`` branch is updated only on releases, see below. +Creating an imas-python release +------------------------------- -Creating an IMASPy release --------------------------- - -1. Create a Pull Request from ``develop`` to ``main``. +1. Create a Pull Request using a fork-based workflow from ``develop`` to ``main``. 2. Add a change log to the Pull Request, briefly describing new features, bug fixes, - etc. See, for example, `this PR for version 0.8.0 - `_. -3. The PR is reviewed and merged by IO (currently Olivier Hoenen, who also creates the - release tags). + and update the :ref:`changelog` accordingly. +3. The PR is reviewed and merged by the maintainers who also create the release tags. 4. After the release PR is merged, update the Easybuild configurations for SDCC modules in the `easybuild-easyconfigs repository - `_. + `_. See the next section for more details on how to do this. -Updating and testing the IMASPy Easybuild configuration ------------------------------------------------------- +Updating and testing the imas-python Easybuild configuration ------------------------------------------------------------ The following steps can be taken on an SDCC login node. @@ -77,7 +70,7 @@ The following steps must be performed for each of the tool chains (currently a. Copy the ``.eb`` file from the previous release. b. Update the ``version`` to reflect the just-released version tag. - c. If any of the IMASPy dependencies in ``pyproject.toml`` where updated or changed + c. 
If any of the imas-python dependencies in ``pyproject.toml`` were updated or changed since the previous release, update the easybuild dependencies: - ``builddependencies`` contains build-time dependencies which are available @@ -86,7 +79,7 @@ The following steps must be performed for each of the tool chains (currently .. note:: The IMAS module is a build-time dependency only and not a runtime - dependency. This allows IMASPy users to load the IMASPy module and + dependency. This allows imas-python users to load the imas-python module and **any** supported IMAS module. - ``dependencies`` contains run-time dependencies which are available as a @@ -95,13 +88,13 @@ The following steps must be performed for each of the tool chains (currently dependencies of dependencies) which are not available in any of the Python modules on SDCC. - d. Update the checksum of imaspy: download an archive of the IMASPy repository from + d. Update the checksum of imas: download an archive of the imas-python repository from bitbucket. This is easiest to do by copying the following URL, replace ```` with the version tag, and paste it in a web browser: .. code-block:: text - https://git.iter.org/rest/api/latest/projects/IMAS/repos/imaspy/archive?at=refs/tags/&format=tar.gz + https://github.com/iterorganization/imas-python/archive/refs/tags/.tar.gz Then, calculate the hash of the downloaded archive with ``sha256sum`` and update it in the ``.eb`` file. @@ -127,7 +120,7 @@ The following steps must be performed for each of the tool chains (currently module purge module use ~/.local/easybuild/modules/all/ - module load IMASPy/- + module load imas-python/- module laod IMAS c. Sanity check the module, for example by running the ``pytest`` unit tests. diff --git a/docs/source/resampling.rst b/docs/source/resampling.rst index d788d5ad..22acb798 100644 --- a/docs/source/resampling.rst +++ b/docs/source/resampling.rst @@ -14,14 +14,14 @@ find the value of new points. This can be used like so: .. 
code-block:: python - pulse_schedule = imaspy.IDSFactory().new("pulse_schedule") + pulse_schedule = imas.IDSFactory().new("pulse_schedule") f = scipy.interpolate.interp1d(pulse_schedule.time, pulse_schedule_some_1d_var) ids.pulse_schedule.some_1d_var = f(pulse_schedule.some_1d_var) A more general approach would work on the basis of scanning the tree for shared coordinates, and resampling those in the same manner (by creating a -local interpolator and applying it). The :py:meth:`imaspy.util.visit_children` +local interpolator and applying it). The :py:meth:`imas.util.visit_children` method can be used for this. For a proof-of-concept it is recommended to only resample in the time direction. @@ -31,15 +31,15 @@ For example, a proposal implementation included in 0.4.0 can be used as such .. code-block:: python - import imaspy - nbi = imaspy.IDSFactory().new("nbi") - nbi.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + import imas + nbi = imas.IDSFactory().new("nbi") + nbi.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS nbi.time = [1, 2, 3] nbi.unit.resize(1) nbi.unit[0].energy.data = 2 * nbi.time old_id = id(nbi.unit[0].energy.data) - imaspy.util.resample( + imas.util.resample( nbi.unit[0].energy.data, nbi.time, [0.5, 1.5], @@ -56,14 +56,14 @@ Or as such (explicit in-memory copy + interpolation, producing a new data leaf/c .. 
code-block:: python - nbi = imaspy.IDSFactory().new("nbi") - nbi.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + nbi = imas.IDSFactory().new("nbi") + nbi.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS nbi.time = [1, 2, 3] nbi.unit.resize(1) nbi.unit[0].energy.data = 2 * nbi.time old_id = id(nbi.unit[0].energy.data) - new_data = imaspy.util.resample( + new_data = imas.util.resample( nbi.unit[0].energy.data, nbi.time, [0.5, 1.5], diff --git a/docs/source/validation.rst b/docs/source/validation.rst index 880c3402..472c70ca 100644 --- a/docs/source/validation.rst +++ b/docs/source/validation.rst @@ -3,30 +3,30 @@ IDS validation ============== -The IDSs you fill should be consistent. To help you in validating that, IMASPy has a -:py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate` method that executes the following +The IDSs you fill should be consistent. To help you in validating that, imas-python has a +:py:meth:`~imas.ids_toplevel.IDSToplevel.validate` method that executes the following checks. .. contents:: Validation checks :local: :depth: 1 -If you call this method and your IDS fails validation, IMASPy raises an error explaining +If you call this method and your IDS fails validation, imas-python raises an error explaining the problem. See the following example: ->>> import imaspy ->>> core_profiles = imaspy.IDSFactory().core_profiles() +>>> import imas +>>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.validate() -imaspy.exception.ValidationError: Invalid value for ids_properties.homogeneous_time: -999999999 +imas.exception.ValidationError: Invalid value for ids_properties.homogeneous_time: -999999999 -IMASPy also automatically validates an IDS every time you do a -:py:meth:`~imaspy.db_entry.DBEntry.put` or -:py:meth:`~imaspy.db_entry.DBEntry.put_slice`. 
To disable this feature, you must set the +imas-python also automatically validates an IDS every time you do a +:py:meth:`~imas.db_entry.DBEntry.put` or +:py:meth:`~imas.db_entry.DBEntry.put_slice`. To disable this feature, you must set the environment variable ``IMAS_AL_DISABLE_VALIDATE`` to ``1``. .. seealso:: - API documentation: :py:meth:`IDSToplevel.validate() ` + API documentation: :py:meth:`IDSToplevel.validate() ` Validate the time mode diff --git a/imas/__init__.py b/imas/__init__.py new file mode 100644 index 00000000..b0b8f567 --- /dev/null +++ b/imas/__init__.py @@ -0,0 +1,31 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. + +# isort: skip_file + +from packaging.version import Version as _V + +from ._version import version as __version__ # noqa: F401 +from ._version import version_tuple # noqa: F401 + +# Import logging _first_ +from . import setup_logging + +# Import main user API objects in the imas module +from .db_entry import DBEntry +from .ids_factory import IDSFactory +from .ids_convert import convert_ids +from .ids_identifiers import identifiers + +# Load the imas-python IMAS AL/DD core +from . import ( + db_entry, + dd_helpers, + dd_zip, + util, +) + +PUBLISHED_DOCUMENTATION_ROOT = "https://imas-python.readthedocs.io/en/latest/" +"""URL to the published documentation.""" +OLDEST_SUPPORTED_VERSION = _V("3.22.0") +"""Oldest Data Dictionary version that is supported by imas-python.""" diff --git a/imas/__main__.py b/imas/__main__.py new file mode 100644 index 00000000..0b7834e5 --- /dev/null +++ b/imas/__main__.py @@ -0,0 +1,17 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Support module to run imas as a module: + +.. 
code-block:: bash + :caption: Options to run imas CLI interface + + # Run as a module (implemented in imas/__main__.py) + python -m imas + + # Run as "program" (see project.scripts in pyproject.toml) + imas +""" + +from imas.command.cli import cli + +cli() diff --git a/imaspy/_util.py b/imas/_util.py similarity index 94% rename from imaspy/_util.py rename to imas/_util.py index 44aee89e..82199275 100644 --- a/imaspy/_util.py +++ b/imas/_util.py @@ -1,8 +1,8 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """This file contains the implementation of all utility functions that need external modules. Implementation has been removed from util.py to improve the performance of -``import imaspy``. +``import imas``. """ import copy @@ -21,15 +21,15 @@ from rich.text import Text from rich.tree import Tree -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_primitive import IDSPrimitive -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel -from imaspy.util import idsdiffgen, visit_children +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS +from imas.ids_metadata import IDSMetadata +from imas.ids_primitive import IDSPrimitive +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel +from imas.util import idsdiffgen, visit_children logger = logging.getLogger(__name__) diff --git a/imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml 
b/imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml new file mode 100644 index 00000000..2cb8a799 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml @@ -0,0 +1,15 @@ + + +
+Translation table for types of instant changes to the plasma state. +
+ + + +0 +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml b/imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml new file mode 100644 index 00000000..e737a204 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml @@ -0,0 +1,76 @@ + + +
+Translation table for sources of particles, momentum and heat. +
+ + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 + +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 + +200 +201 +202 +203 + +303 +304 +305 + +400 +401 +402 +403 + +501 + +601 +602 +603 + +801 +802 + +901 +902 +903 +904 +905 +906 +907 +908 +909 + + + +
diff --git a/imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml b/imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml new file mode 100644 index 00000000..1723b17d --- /dev/null +++ b/imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml @@ -0,0 +1,25 @@ + + +
+ Translation table for different types of transport coefficients. +
+ + + + +0 +1 +2 +3 +4 +5 +6 +19 +20 +21 +22 +23 +24 +25 + +
diff --git a/imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml b/imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml new file mode 100644 index 00000000..aee46091 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml @@ -0,0 +1,31 @@ + + +
+Translation table for sources of particles, momentum and heat. +
+ + + +0 + +1 +701 +702 +703 +801 +705 +706 +707 +708 +709 +710 + +305 +11 +7 +200 + +715 +716 + +
diff --git a/imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml b/imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml new file mode 100644 index 00000000..7c208d2d --- /dev/null +++ b/imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml @@ -0,0 +1,25 @@ + + +
+ Translation table for different types of transport coefficients. +
+ + + + +0 +1 + +100 +101 +102 +103 + +200 +201 +202 +203 +204 +205 + +
diff --git a/imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml b/imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml new file mode 100644 index 00000000..8a53209c --- /dev/null +++ b/imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml @@ -0,0 +1,27 @@ + + +
+ Physical quantity described in the user-defined em_coupling matrix +
+ + + +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +
diff --git a/imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml b/imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml new file mode 100644 index 00000000..9c52b5b4 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml @@ -0,0 +1,21 @@ + + +
Various contributions to the B, j, and psi 2D maps
+ + +0 +1 +2 +3 +4 +
diff --git a/imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml b/imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml new file mode 100644 index 00000000..0b5bd928 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml @@ -0,0 +1,14 @@ + + +
+Type of flux loop +
+ + +1 +2 +3 +4 +5 +6 +
diff --git a/imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml b/imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml new file mode 100644 index 00000000..ab59dcaa --- /dev/null +++ b/imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml @@ -0,0 +1,16 @@ + + +
+Type of magnetic field probe +
+ + + + +1 +2 +3 +4 +5 +6 +
diff --git a/imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml b/imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml new file mode 100644 index 00000000..dcadbf7a --- /dev/null +++ b/imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml @@ -0,0 +1,13 @@ + + +
+Quantity measured by the Rogowski coil +
+ + +1 +2 +3 +4 +5 +
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml new file mode 100644 index 00000000..d5a9793c --- /dev/null +++ b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml @@ -0,0 +1,13 @@ + + +
+Balooning type of the MHD mode +
+ + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml new file mode 100644 index 00000000..5c41868f --- /dev/null +++ b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml @@ -0,0 +1,15 @@ + + +
+Type of the MHD model used +
+ + + + +1 +11 +2 +21 + +
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml new file mode 100644 index 00000000..c184635a --- /dev/null +++ b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml @@ -0,0 +1,14 @@ + + +
+Type of the MHD model used +
+ + + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml new file mode 100644 index 00000000..e23ca4fb --- /dev/null +++ b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml @@ -0,0 +1,24 @@ + + +
+Type of the perturbation +
+ + + +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 + +
diff --git a/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml b/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml new file mode 100644 index 00000000..304864b3 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml @@ -0,0 +1,18 @@ + + +
+Translation table for type of events measured in the neutron detector +
+ + + + +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml b/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml new file mode 100644 index 00000000..b9d4c3a7 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml @@ -0,0 +1,15 @@ + + +
+Translation table for counting mode in the neutron detector +
+ + + +1 +2 +3 +4 +5 + +
diff --git a/imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml b/imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml new file mode 100644 index 00000000..a80425d6 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml @@ -0,0 +1,15 @@ + + +
+Type of mechanics sensor +
+ + + +0 +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml b/imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml new file mode 100644 index 00000000..37d974ef --- /dev/null +++ b/imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml @@ -0,0 +1,10 @@ + + +
Functions of PF coils
+ + +0 +1 +2 + +
\ No newline at end of file diff --git a/imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml b/imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml new file mode 100644 index 00000000..910a7907 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml @@ -0,0 +1,70 @@ + + +
+Translation table for sources of particles, momentum and heat. +
+ + + +0 + +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 + +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 + +200 +201 +202 +203 + +303 +304 +305 + +400 +401 +402 +403 + +501 + +603 + +701 +702 +703 +705 +706 +707 +708 +709 +710 +715 +716 + +801 +802 + +
diff --git a/imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml b/imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml new file mode 100644 index 00000000..4e229087 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml @@ -0,0 +1,38 @@ + + +
+ Translation table for different types of transport coefficients. +
+ + + + +0 +1 + +2 +3 +4 +5 +6 +19 +20 +21 +22 +23 +24 +25 + +100 +101 +102 +103 + +200 +201 +202 +203 +204 +205 + +
diff --git a/imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml b/imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml new file mode 100644 index 00000000..1c5a713d --- /dev/null +++ b/imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml @@ -0,0 +1,23 @@ + + +
+Translation table for radiation processes +
+ + 0 + 6 + 8 + 9 + 10 + 11 + 501 + 901 + 902 + 903 + 904 + 905 + 906 + 907 + 908 + 909 +
diff --git a/imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml b/imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml new file mode 100644 index 00000000..25b8e077 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml @@ -0,0 +1,12 @@ + + +
+Translation table for analytical formulas used by refractometer post-processing +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml b/imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml new file mode 100644 index 00000000..7fdabea5 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml @@ -0,0 +1,12 @@ + + +
+Definition of e_field_critical +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml b/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml new file mode 100644 index 00000000..2d76f750 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml @@ -0,0 +1,11 @@ + + +
+Definition of momentum_critical_avalanche +
+ + + +1 + +
diff --git a/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml b/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml new file mode 100644 index 00000000..9804a1b5 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml @@ -0,0 +1,11 @@ + + +
+Definition of momentum_critical_hot_tail +
+ + + +1 + +
diff --git a/imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml b/imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml new file mode 100644 index 00000000..88c97626 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml @@ -0,0 +1,13 @@ + + +
+Fitting method used to calculate isotope ratios +
+ + + + +1 +2 + +
diff --git a/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml b/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml new file mode 100644 index 00000000..3d3d97c0 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml @@ -0,0 +1,12 @@ + + +
+Crystal mesh type +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml b/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml new file mode 100644 index 00000000..66acb45c --- /dev/null +++ b/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml @@ -0,0 +1,14 @@ + + +
+Translation table for instrument function for X ray crystal spectrometer +
+ + + +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml b/imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml new file mode 100644 index 00000000..5273b103 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml @@ -0,0 +1,11 @@ + + +
+Definition of the shatter cone +
+ + + +1 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml new file mode 100644 index 00000000..dbaa3853 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml @@ -0,0 +1,51 @@ + + +
+Translation table for coordinate_identifier_definitions. +
+ + + +0 +1 +2 +3 +4 +5 + +10 +11 +12 +13 +14 + +20 +21 +22 + +100 +101 +102 +103 +104 +105 +106 +107 +108 +200 +201 +202 +203 + +300 +301 +302 + +400 +402 +403 +404 + +500 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml new file mode 100644 index 00000000..261243c9 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml @@ -0,0 +1,16 @@ + + +
+Curvature of a curved object +
+ + + + +1 +2 +3 +4 +5 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml new file mode 100644 index 00000000..5117019b --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml @@ -0,0 +1,14 @@ + + +
+Geometry of the contour of a planar or curved object +
+ + + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml new file mode 100644 index 00000000..ab94762b --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml @@ -0,0 +1,12 @@ + + +
+ Dataset type table +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml new file mode 100644 index 00000000..0814b580 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml @@ -0,0 +1,36 @@ + + +
+Translation table for Heating and Current Drive (HCD) distsource types, i.e. types particles source in Fokker-Planck equation (from NBI and nuclear reactions). +
+ + + + +0 +1 + +100 + +101 +102 +103 +104 + +105 +106 +107 +108 + +109 +110 + +111 +112 + +113 +114 + +1000 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml new file mode 100644 index 00000000..d1002573 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml @@ -0,0 +1,9 @@ + + +
List of coordinate systems for describing the poloidal plane
+ + + +1 +
diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml new file mode 100644 index 00000000..32a039e2 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml @@ -0,0 +1,21 @@ + + +
Translation table for ggd_space_identifier_definitions.
+ + + + + + + + + + + +0 +1 +11 +21 +31 +32 +
\ No newline at end of file diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml new file mode 100644 index 00000000..b3389e68 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml @@ -0,0 +1,29 @@ + + +
Translation table for ggd_identifier_definitions.
+ + + + + + + + + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +100 +
diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml new file mode 100644 index 00000000..dee0346c --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml @@ -0,0 +1,18 @@ + + +
Translation table for ggd_space_identifier_definitions.
+ + + + + + + + + + +0 +1 +2 +3 +
\ No newline at end of file diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml new file mode 100644 index 00000000..3e3a59c6 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml @@ -0,0 +1,70 @@ + + +
Translation table for ggd_subset_identifier_definitions.
+ + + + + + + + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +100 +101 +102 +103 +104 +105 +106 +
diff --git a/imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml new file mode 100644 index 00000000..4494f03b --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml @@ -0,0 +1,43 @@ + + +
+Materials used in the device mechanical structures +
+ + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +17 +9 +10 +11 +12 +13 +14 +15 +16 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 + + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml new file mode 100644 index 00000000..3039e263 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml @@ -0,0 +1,14 @@ + + +
+ Translation table for identifying different midplane definitions +
+ + + +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml new file mode 100644 index 00000000..e29024ee --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml @@ -0,0 +1,20 @@ + + +
+ Translation table for identifying different types of neutral. + The neutrals are characterised by their energy and source of the neutrals. +
+ + + + + + + + +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml new file mode 100644 index 00000000..e6554554 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml @@ -0,0 +1,11 @@ + + +
List of possible occurrence types
+ + + +1 +2 +3 +4 +
diff --git a/imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml new file mode 100644 index 00000000..f560fdec --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml @@ -0,0 +1,13 @@ + + +
+ Translation table for identifying optical element types +
+ + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml new file mode 100644 index 00000000..9cd99c1f --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml @@ -0,0 +1,12 @@ + + +
+ Translation table for identifying optical element types +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml new file mode 100644 index 00000000..2c55a7cb --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml @@ -0,0 +1,20 @@ + + +
+Translation table for orbit_type_identifier definitions. +
+ + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml new file mode 100644 index 00000000..9e3c42f6 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml @@ -0,0 +1,119 @@ + + +
List of coordinate systems for describing the poloidal plane
+ + + + +1 +2 +11 +12 +13 + +14 +15 +16 + +21 +22 +23 + +24 +25 +26 + + +31 +32 +33 + +34 +35 +36 + +41 +42 +43 + +44 +45 +46 + +51 +52 +53 + +54 +55 +56 + +91 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml new file mode 100644 index 00000000..449b89cf --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml @@ -0,0 +1,19 @@ + + +
+Translation table for species_reference_identifier_definition. +
+ + + + +0 +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml new file mode 100644 index 00000000..e1f891df --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml @@ -0,0 +1,17 @@ + + +
+ Translation table for statistics types +
+ + + +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml new file mode 100644 index 00000000..3cbdaf62 --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml @@ -0,0 +1,16 @@ + + +
+Geometry of the contour of surface in a local coordinate system +
+ + + +1 +2 +3 +4 +5 + + +
diff --git a/imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml new file mode 100644 index 00000000..8d122a7e --- /dev/null +++ b/imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml @@ -0,0 +1,15 @@ + + +
+Translation table for wave field types. +
+ + + + +0 +1 +2 +3 + +
diff --git a/imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml b/imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml new file mode 100644 index 00000000..23e6450e --- /dev/null +++ b/imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml @@ -0,0 +1,18 @@ + + +
+Type of wall component +
+ + + +0 +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml b/imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml new file mode 100644 index 00000000..4dcde3ce --- /dev/null +++ b/imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml @@ -0,0 +1,13 @@ + + +
+Type of wall component +
+ + + +0 +1 +2 + +
diff --git a/imaspy/assets/IDSDef2MDSpreTree.xsl b/imas/assets/IDSDef2MDSpreTree.xsl similarity index 100% rename from imaspy/assets/IDSDef2MDSpreTree.xsl rename to imas/assets/IDSDef2MDSpreTree.xsl diff --git a/imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml b/imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml new file mode 100644 index 00000000..2cb8a799 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml @@ -0,0 +1,15 @@ + + +
+Translation table for types of instant changes to the plasma state. +
+ + + +0 +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml new file mode 100644 index 00000000..e737a204 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml @@ -0,0 +1,76 @@ + + +
+Translation table for sources of particles, momentum and heat. +
+ + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 + +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 + +200 +201 +202 +203 + +303 +304 +305 + +400 +401 +402 +403 + +501 + +601 +602 +603 + +801 +802 + +901 +902 +903 +904 +905 +906 +907 +908 +909 + + + +
diff --git a/imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml b/imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml new file mode 100644 index 00000000..1723b17d --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml @@ -0,0 +1,25 @@ + + +
+ Translation table for different types of transport coefficients. +
+ + + + +0 +1 +2 +3 +4 +5 +6 +19 +20 +21 +22 +23 +24 +25 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml new file mode 100644 index 00000000..aee46091 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml @@ -0,0 +1,31 @@ + + +
+Translation table for sources of particles, momentum and heat. +
+ + + +0 + +1 +701 +702 +703 +801 +705 +706 +707 +708 +709 +710 + +305 +11 +7 +200 + +715 +716 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml b/imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml new file mode 100644 index 00000000..7c208d2d --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml @@ -0,0 +1,25 @@ + + +
+ Translation table for different types of transport coefficients. +
+ + + + +0 +1 + +100 +101 +102 +103 + +200 +201 +202 +203 +204 +205 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml b/imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml new file mode 100644 index 00000000..8a53209c --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml @@ -0,0 +1,27 @@ + + +
+ Physical quantity described in the user-defined em_coupling matrix +
+ + + +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +
diff --git a/imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml b/imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml new file mode 100644 index 00000000..9c52b5b4 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml @@ -0,0 +1,21 @@ + + +
Various contributions to the B, j, and psi 2D maps
+ + +0 +1 +2 +3 +4 +
diff --git a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml new file mode 100644 index 00000000..0b5bd928 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml @@ -0,0 +1,14 @@ + + +
+Type of flux loop +
+ + +1 +2 +3 +4 +5 +6 +
diff --git a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml new file mode 100644 index 00000000..ab59dcaa --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml @@ -0,0 +1,16 @@ + + +
+Type of magnetic field probe +
+ + + + +1 +2 +3 +4 +5 +6 +
diff --git a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml new file mode 100644 index 00000000..dcadbf7a --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml @@ -0,0 +1,13 @@ + + +
+Quantity measured by the Rogowski coil +
+ + +1 +2 +3 +4 +5 +
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml new file mode 100644 index 00000000..d5a9793c --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml @@ -0,0 +1,13 @@ + + +
+Balooning type of the MHD mode +
+ + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml new file mode 100644 index 00000000..5c41868f --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml @@ -0,0 +1,15 @@ + + +
+Type of the MHD model used +
+ + + + +1 +11 +2 +21 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml new file mode 100644 index 00000000..c184635a --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml @@ -0,0 +1,14 @@ + + +
+Type of the MHD model used +
+ + + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml new file mode 100644 index 00000000..e23ca4fb --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml @@ -0,0 +1,24 @@ + + +
+Type of the perturbation +
+ + + +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml b/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml new file mode 100644 index 00000000..304864b3 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml @@ -0,0 +1,18 @@ + + +
+Translation table for type of events measured in the neutron detector +
+ + + + +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml b/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml new file mode 100644 index 00000000..b9d4c3a7 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml @@ -0,0 +1,15 @@ + + +
+Translation table for counting mode in the neutron detector +
+ + + +1 +2 +3 +4 +5 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml new file mode 100644 index 00000000..a80425d6 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml @@ -0,0 +1,15 @@ + + +
+Type of mechanics sensor +
+ + + +0 +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml b/imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml new file mode 100644 index 00000000..37d974ef --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml @@ -0,0 +1,10 @@ + + +
Functions of PF coils
+ + +0 +1 +2 + +
\ No newline at end of file diff --git a/imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml new file mode 100644 index 00000000..910a7907 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml @@ -0,0 +1,70 @@ + + +
+Translation table for sources of particles, momentum and heat. +
+ + + +0 + +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 + +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 + +200 +201 +202 +203 + +303 +304 +305 + +400 +401 +402 +403 + +501 + +603 + +701 +702 +703 +705 +706 +707 +708 +709 +710 +715 +716 + +801 +802 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml b/imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml new file mode 100644 index 00000000..4e229087 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml @@ -0,0 +1,38 @@ + + +
+ Translation table for different types of transport coefficients. +
+ + + + +0 +1 + +2 +3 +4 +5 +6 +19 +20 +21 +22 +23 +24 +25 + +100 +101 +102 +103 + +200 +201 +202 +203 +204 +205 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml b/imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml new file mode 100644 index 00000000..1c5a713d --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml @@ -0,0 +1,23 @@ + + +
+Translation table for radiation processes +
+ + 0 + 6 + 8 + 9 + 10 + 11 + 501 + 901 + 902 + 903 + 904 + 905 + 906 + 907 + 908 + 909 +
diff --git a/imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml b/imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml new file mode 100644 index 00000000..25b8e077 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml @@ -0,0 +1,12 @@ + + +
+Translation table for analytical formulas used by refractometer post-processing +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml new file mode 100644 index 00000000..7fdabea5 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml @@ -0,0 +1,12 @@ + + +
+Definition of e_field_critical +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml new file mode 100644 index 00000000..2d76f750 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml @@ -0,0 +1,11 @@ + + +
+Definition of momentum_critical_avalanche +
+ + + +1 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml new file mode 100644 index 00000000..9804a1b5 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml @@ -0,0 +1,11 @@ + + +
+Definition of momentum_critical_hot_tail +
+ + + +1 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml new file mode 100644 index 00000000..88c97626 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml @@ -0,0 +1,13 @@ + + +
+Fitting method used to calculate isotope ratios +
+ + + + +1 +2 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml new file mode 100644 index 00000000..3d3d97c0 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml @@ -0,0 +1,12 @@ + + +
+Crystal mesh type +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml new file mode 100644 index 00000000..66acb45c --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml @@ -0,0 +1,14 @@ + + +
+Translation table for instrument function for X ray crystal spectrometer +
+ + + +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml new file mode 100644 index 00000000..5273b103 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml @@ -0,0 +1,11 @@ + + +
+Definition of the shatter cone +
+ + + +1 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml new file mode 100644 index 00000000..dbaa3853 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml @@ -0,0 +1,51 @@ + + +
+Translation table for coordinate_identifier_definitions. +
+ + + +0 +1 +2 +3 +4 +5 + +10 +11 +12 +13 +14 + +20 +21 +22 + +100 +101 +102 +103 +104 +105 +106 +107 +108 +200 +201 +202 +203 + +300 +301 +302 + +400 +402 +403 +404 + +500 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml new file mode 100644 index 00000000..261243c9 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml @@ -0,0 +1,16 @@ + + +
+Curvature of a curved object +
+ + + + +1 +2 +3 +4 +5 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml new file mode 100644 index 00000000..5117019b --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml @@ -0,0 +1,14 @@ + + +
+Geometry of the contour of a planar or curved object +
+ + + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml new file mode 100644 index 00000000..ab94762b --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml @@ -0,0 +1,12 @@ + + +
+ Dataset type table +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml new file mode 100644 index 00000000..0814b580 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml @@ -0,0 +1,36 @@ + + +
+Translation table for Heating and Current Drive (HCD) distsource types, i.e. types particles source in Fokker-Planck equation (from NBI and nuclear reactions). +
+ + + + +0 +1 + +100 + +101 +102 +103 +104 + +105 +106 +107 +108 + +109 +110 + +111 +112 + +113 +114 + +1000 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml new file mode 100644 index 00000000..d1002573 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml @@ -0,0 +1,9 @@ + + +
List of coordinate systems for describing the poloidal plane
+ + + +1 +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml new file mode 100644 index 00000000..32a039e2 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml @@ -0,0 +1,21 @@ + + +
Translation table for ggd_space_identifier_definitions.
+ + + + + + + + + + + +0 +1 +11 +21 +31 +32 +
\ No newline at end of file diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml new file mode 100644 index 00000000..b3389e68 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml @@ -0,0 +1,29 @@ + + +
Translation table for ggd_identifier_definitions.
+ + + + + + + + + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +100 +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml new file mode 100644 index 00000000..dee0346c --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml @@ -0,0 +1,18 @@ + + +
Translation table for ggd_space_identifier_definitions.
+ + + + + + + + + + +0 +1 +2 +3 +
\ No newline at end of file diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml new file mode 100644 index 00000000..3e3a59c6 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml @@ -0,0 +1,70 @@ + + +
Translation table for ggd_subset_identifier_definitions.
+ + + + + + + + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +100 +101 +102 +103 +104 +105 +106 +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml new file mode 100644 index 00000000..4494f03b --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml @@ -0,0 +1,43 @@ + + +
+Materials used in the device mechanical structures +
+ + + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +17 +9 +10 +11 +12 +13 +14 +15 +16 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 + + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml new file mode 100644 index 00000000..3039e263 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml @@ -0,0 +1,14 @@ + + +
+ Translation table for identifying different midplane definitions +
+ + + +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml new file mode 100644 index 00000000..e29024ee --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml @@ -0,0 +1,20 @@ + + +
+ Translation table for identifying different types of neutral. + The neutrals are characterised by their energy and source of the neutrals. +
+ + + + + + + + +1 +2 +3 +4 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml new file mode 100644 index 00000000..e6554554 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml @@ -0,0 +1,11 @@ + + +
List of possible occurrence types
+ + + +1 +2 +3 +4 +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml new file mode 100644 index 00000000..f560fdec --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml @@ -0,0 +1,13 @@ + + +
+ Translation table for identifying optical element types +
+ + + +1 +2 +3 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml new file mode 100644 index 00000000..9cd99c1f --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml @@ -0,0 +1,12 @@ + + +
+ Translation table for identifying optical element types +
+ + + +1 +2 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml new file mode 100644 index 00000000..2c55a7cb --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml @@ -0,0 +1,20 @@ + + +
+Translation table for orbit_type_identifier definitions. +
+ + + +0 +1 +2 +3 +4 +5 +6 +7 +8 +9 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml new file mode 100644 index 00000000..9e3c42f6 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml @@ -0,0 +1,119 @@ + + +
List of coordinate systems for describing the poloidal plane
+ + + + +1 +2 +11 +12 +13 + +14 +15 +16 + +21 +22 +23 + +24 +25 +26 + + +31 +32 +33 + +34 +35 +36 + +41 +42 +43 + +44 +45 +46 + +51 +52 +53 + +54 +55 +56 + +91 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml new file mode 100644 index 00000000..449b89cf --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml @@ -0,0 +1,19 @@ + + +
+Translation table for species_reference_identifier_definition. +
+ + + + +0 +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml new file mode 100644 index 00000000..e1f891df --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml @@ -0,0 +1,17 @@ + + +
+ Translation table for statistics types +
+ + + +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml new file mode 100644 index 00000000..3cbdaf62 --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml @@ -0,0 +1,16 @@ + + +
+Geometry of the contour of surface in a local coordinate system +
+ + + +1 +2 +3 +4 +5 + + +
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml new file mode 100644 index 00000000..8d122a7e --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml @@ -0,0 +1,15 @@ + + +
+Translation table for wave field types. +
+ + + + +0 +1 +2 +3 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml b/imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml new file mode 100644 index 00000000..23e6450e --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml @@ -0,0 +1,18 @@ + + +
+Type of wall component +
+ + + +0 +1 +2 +3 +4 +5 +6 +7 + +
diff --git a/imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml new file mode 100644 index 00000000..4dcde3ce --- /dev/null +++ b/imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml @@ -0,0 +1,13 @@ + + +
+Type of wall component +
+ + + +0 +1 +2 + +
diff --git a/imaspy/assets/IDS_fake_toplevel.xml b/imas/assets/IDS_fake_toplevel.xml similarity index 100% rename from imaspy/assets/IDS_fake_toplevel.xml rename to imas/assets/IDS_fake_toplevel.xml diff --git a/imaspy/assets/IDS_minimal.xml b/imas/assets/IDS_minimal.xml similarity index 95% rename from imaspy/assets/IDS_minimal.xml rename to imas/assets/IDS_minimal.xml index 32d94e34..01764e95 100644 --- a/imaspy/assets/IDS_minimal.xml +++ b/imas/assets/IDS_minimal.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imaspy/assets/IDS_minimal_2.xml b/imas/assets/IDS_minimal_2.xml similarity index 95% rename from imaspy/assets/IDS_minimal_2.xml rename to imas/assets/IDS_minimal_2.xml index 9f38f5a4..57a90d23 100644 --- a/imaspy/assets/IDS_minimal_2.xml +++ b/imas/assets/IDS_minimal_2.xml @@ -3,7 +3,7 @@ 0.0.2 diff --git a/imaspy/assets/IDS_minimal_struct_array.xml b/imas/assets/IDS_minimal_struct_array.xml similarity index 96% rename from imaspy/assets/IDS_minimal_struct_array.xml rename to imas/assets/IDS_minimal_struct_array.xml index 5d644ee0..72845315 100644 --- a/imaspy/assets/IDS_minimal_struct_array.xml +++ b/imas/assets/IDS_minimal_struct_array.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imaspy/assets/IDS_minimal_types.xml b/imas/assets/IDS_minimal_types.xml similarity index 98% rename from imaspy/assets/IDS_minimal_types.xml rename to imas/assets/IDS_minimal_types.xml index 56cbbc57..d939aa32 100644 --- a/imaspy/assets/IDS_minimal_types.xml +++ b/imas/assets/IDS_minimal_types.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imaspy/assets/ITER_134173_106_core_profiles.ids b/imas/assets/ITER_134173_106_core_profiles.ids similarity index 100% rename from imaspy/assets/ITER_134173_106_core_profiles.ids rename to imas/assets/ITER_134173_106_core_profiles.ids diff --git a/imaspy/assets/ITER_134173_106_equilibrium.ids b/imas/assets/ITER_134173_106_equilibrium.ids similarity index 100% rename from imaspy/assets/ITER_134173_106_equilibrium.ids rename to 
imas/assets/ITER_134173_106_equilibrium.ids diff --git a/imaspy/assets/README.md b/imas/assets/README.md similarity index 100% rename from imaspy/assets/README.md rename to imas/assets/README.md diff --git a/imaspy/assets/core_profiles.ids b/imas/assets/core_profiles.ids similarity index 100% rename from imaspy/assets/core_profiles.ids rename to imas/assets/core_profiles.ids diff --git a/imaspy/assets/equilibrium.ids b/imas/assets/equilibrium.ids similarity index 100% rename from imaspy/assets/equilibrium.ids rename to imas/assets/equilibrium.ids diff --git a/imaspy/backends/__init__.py b/imas/backends/__init__.py similarity index 76% rename from imaspy/backends/__init__.py rename to imas/backends/__init__.py index 5fa32445..78cdd3f5 100644 --- a/imaspy/backends/__init__.py +++ b/imas/backends/__init__.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interacting with all data backends. Currently supported backends are: diff --git a/imaspy/backends/db_entry_impl.py b/imas/backends/db_entry_impl.py similarity index 83% rename from imaspy/backends/db_entry_impl.py rename to imas/backends/db_entry_impl.py index bc8ca10d..7e5dddef 100644 --- a/imaspy/backends/db_entry_impl.py +++ b/imas/backends/db_entry_impl.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. 
from abc import ABC, abstractmethod from dataclasses import dataclass @@ -7,9 +7,9 @@ import numpy -from imaspy.ids_convert import NBCPathMap -from imaspy.ids_factory import IDSFactory -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_convert import NBCPathMap +from imas.ids_factory import IDSFactory +from imas.ids_toplevel import IDSToplevel @dataclass @@ -17,9 +17,9 @@ class GetSliceParameters: """Helper class to store parameters to get_slice.""" time_requested: float - """See :param:`imaspy.db_entry.DBEntry.get_slice.time_requested`.""" + """See :param:`imas.db_entry.DBEntry.get_slice.time_requested`.""" interpolation_method: int - """See :param:`imaspy.db_entry.DBEntry.get_slice.interpolation_method`.""" + """See :param:`imas.db_entry.DBEntry.get_slice.interpolation_method`.""" @dataclass @@ -27,13 +27,13 @@ class GetSampleParameters: """Helper class to store parameters to get_sample.""" tmin: float - """See :param:`imaspy.db_entry.DBEntry.get_sample.tmin`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.tmin`.""" tmax: float - """See :param:`imaspy.db_entry.DBEntry.get_sample.tmax`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.tmax`.""" dtime: Optional[numpy.ndarray] - """See :param:`imaspy.db_entry.DBEntry.get_sample.dtime`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.dtime`.""" interpolation_method: Optional[int] - """See :param:`imaspy.db_entry.DBEntry.get_sample.interpolation_method`.""" + """See :param:`imas.db_entry.DBEntry.get_sample.interpolation_method`.""" class DBEntryImpl(ABC): diff --git a/imas/backends/imas_core/__init__.py b/imas/backends/imas_core/__init__.py new file mode 100644 index 00000000..5e7812f4 --- /dev/null +++ b/imas/backends/imas_core/__init__.py @@ -0,0 +1,4 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Subpackage implementing data access through the IMAS Access Layer Core. 
+""" diff --git a/imaspy/backends/imas_core/al_context.py b/imas/backends/imas_core/al_context.py similarity index 95% rename from imaspy/backends/imas_core/al_context.py rename to imas/backends/imas_core/al_context.py index 5d782fda..323cdd5d 100644 --- a/imaspy/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Object-oriented interface to the IMAS lowlevel. """ @@ -10,9 +10,9 @@ import numpy -from imaspy.backends.imas_core.imas_interface import ll_interface -from imaspy.exception import LowlevelError -from imaspy.ids_defs import ( +from imas.backends.imas_core.imas_interface import ll_interface +from imas.exception import LowlevelError +from imas.ids_defs import ( CLOSEST_INTERP, LINEAR_INTERP, PREVIOUS_INTERP, @@ -27,8 +27,8 @@ ) if TYPE_CHECKING: - from imaspy.backends.imas_core.db_entry_al import ALDBEntryImpl - from imaspy.ids_convert import NBCPathMap + from imas.backends.imas_core.db_entry_al import ALDBEntryImpl + from imas.ids_convert import NBCPathMap logger = logging.getLogger(__name__) @@ -221,7 +221,7 @@ class LazyALContext: """Replacement for ALContext that is used during lazy loading. This class implements ``global_action``, ``slice_action`` and ``read_data``, such - that it can be used as a drop-in replacement in ``imaspy.db_entry._get_children`` + that it can be used as a drop-in replacement in ``imas.db_entry._get_children`` and only custom logic is needed for IDSStructArray there. This class tracks: @@ -233,7 +233,7 @@ class LazyALContext: arraystruct_action!). - The ALContext method and arguments that we need to call on the ALContext we obtain from our parent, to obtain the actual ALContext we should use for loading data. 
- - The NBC map that ``imaspy.db_entry._get_children`` needs when lazy loading + - The NBC map that ``imas.db_entry._get_children`` needs when lazy loading children of an IDSStructArray. When constructing a LazyALContext, you need to supply either the ``dbentry`` and @@ -298,7 +298,7 @@ def get_context(self) -> ALContext: if not cache or cache[-1] is not ctx: logger.warning( "Found an empty AL context cache: This should not happen, please " - "report this bug to the IMASPy developers." + "report this bug to the imas-python developers." ) else: return ctx diff --git a/imaspy/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py similarity index 96% rename from imaspy/backends/imas_core/db_entry_al.py rename to imas/backends/imas_core/db_entry_al.py index 34a3ab32..e1d711a5 100644 --- a/imaspy/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -8,11 +8,11 @@ from typing import Any, Deque, List, Optional, Union from urllib.parse import urlparse -from imaspy.backends.db_entry_impl import GetSampleParameters, GetSliceParameters -from imaspy.db_entry import DBEntryImpl -from imaspy.exception import DataEntryException, LowlevelError -from imaspy.ids_convert import NBCPathMap, dd_version_map_from_factories -from imaspy.ids_defs import ( +from imas.backends.db_entry_impl import GetSampleParameters, GetSliceParameters +from imas.db_entry import DBEntryImpl +from imas.exception import DataEntryException, LowlevelError +from imas.ids_convert import NBCPathMap, dd_version_map_from_factories +from imas.ids_defs import ( ASCII_BACKEND, CHAR_DATA, CLOSE_PULSE, @@ -33,9 +33,9 @@ UNDEFINED_TIME, WRITE_OP, ) -from imaspy.ids_factory import IDSFactory -from imaspy.ids_metadata import IDSType -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_factory import IDSFactory +from imas.ids_metadata import IDSType +from imas.ids_toplevel import IDSToplevel from .al_context import ALContext, LazyALContext from .db_entry_helpers import 
delete_children, get_children, put_children @@ -206,7 +206,8 @@ def _setup_backend( os.environ["IDSDEF_PATH"] = idsdef_path logger.warning( - "The UDA backend is not tested with IMASPy and may not work properly. " + "The UDA backend is not tested with " + "imas-python and may not work properly. " "Please raise any issues you find." ) @@ -214,7 +215,7 @@ def _setup_backend( pass # nothing to set up else: - logger.warning("Backend %s is unknown to IMASPy", backend) + logger.warning("Backend %s is unknown to imas-python", backend) def close(self, *, erase: bool = False) -> None: if self._db_ctx is None: diff --git a/imaspy/backends/imas_core/db_entry_helpers.py b/imas/backends/imas_core/db_entry_helpers.py similarity index 94% rename from imaspy/backends/imas_core/db_entry_helpers.py rename to imas/backends/imas_core/db_entry_helpers.py index f69eafd3..d6e3a596 100644 --- a/imaspy/backends/imas_core/db_entry_helpers.py +++ b/imas/backends/imas_core/db_entry_helpers.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Helper methods for loading data from and storing data to Data Entries. 
""" @@ -7,13 +7,13 @@ import numpy as np -from imaspy.ids_base import IDSBase -from imaspy.ids_convert import NBCPathMap -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS, IDS_TIME_MODE_INDEPENDENT -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure +from imas.ids_base import IDSBase +from imas.ids_convert import NBCPathMap +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS, IDS_TIME_MODE_INDEPENDENT +from imas.ids_metadata import IDSMetadata +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure from .al_context import ALContext, LazyALContext diff --git a/imaspy/backends/imas_core/imas_interface.py b/imas/backends/imas_core/imas_interface.py similarity index 87% rename from imaspy/backends/imas_core/imas_interface.py rename to imas/backends/imas_core/imas_interface.py index b92438b1..05634dfb 100644 --- a/imaspy/backends/imas_core/imas_interface.py +++ b/imas/backends/imas_core/imas_interface.py @@ -1,15 +1,13 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """ Helper module for providing a version-independent interface to the Access Layer. This module tries to abstract away most API incompatibilities between the supported Access Layer versions (for example the rename of _ual_lowlevel to _al_lowlevel). 
""" -import importlib import inspect import logging -import time from packaging.version import Version @@ -28,30 +26,15 @@ if enable_exceptions: enable_exceptions() -except ImportError: - # Fallback for AL 4.x or 5.0/5.1 - try: - tic = time.time() - # Don't directly `import imas`: code analyzers will break on the huge code base - imas = importlib.import_module("imas") - logger.info( - "Successfully imported `imas` (took %.3f seconds)", time.time() - tic - ) - try: - lowlevel = imas._al_lowlevel # AL 5.0/5.1 - except AttributeError: - lowlevel = imas._ual_lowlevel # AL 4.x - imasdef = imas.imasdef - - except ImportError as exc: - imas = None - has_imas = False - imasdef = None - lowlevel = None - logger.critical( - "Could not import 'imas': %s. Some functionality is not available.", - exc, - ) +except ImportError as exc: + imas = None + has_imas = False + imasdef = None + lowlevel = None + logger.critical( + "Could not import 'al_core': %s. Some functionality is not available.", + exc, + ) class LLInterfaceError(RuntimeError): @@ -61,8 +44,8 @@ class LLInterfaceError(RuntimeError): class LowlevelInterface: """Compatibility object. - Provides a stable API for the rest of IMASPy even when the `imas.lowlevel` interface - changes. + Provides a stable API for the rest of imas-python even when the + `imas.lowlevel` interface changes. .. 
rubric:: Developer notes @@ -230,4 +213,4 @@ def begin_timerange_action( func.__doc__ = f"Wrapper function for AL lowlevel method ``{funcname}``" ll_interface = LowlevelInterface(lowlevel) -"""IMASPy <-> IMAS lowlevel interface""" +"""imas-python <-> IMAS lowlevel interface""" diff --git a/imaspy/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py similarity index 83% rename from imaspy/backends/imas_core/mdsplus_model.py rename to imas/backends/imas_core/mdsplus_model.py index 4d96bf6c..9b00f34c 100644 --- a/imaspy/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -1,5 +1,5 @@ # Helper functions to create MDSPlus reference models -# and store them in a cache directory (.cache/imaspy/MDSPlus/name-HASH/) +# and store them in a cache directory (.cache/imas/MDSPlus/name-HASH/) """Module for generating and working with MDSplus models. """ @@ -13,6 +13,7 @@ import time import uuid from pathlib import Path +from saxonche import PySaxonProcessor from subprocess import CalledProcessError, check_output from zlib import crc32 @@ -21,11 +22,9 @@ except ImportError: # Python 3.8 support from importlib_resources import as_file, files -import imaspy -from imaspy.dd_helpers import get_saxon -from imaspy.dd_zip import get_dd_xml, get_dd_xml_crc -from imaspy.exception import MDSPlusModelError -from imaspy.ids_factory import IDSFactory +from imas.dd_zip import get_dd_xml, get_dd_xml_crc +from imas.exception import MDSPlusModelError +from imas.ids_factory import IDSFactory logger = logging.getLogger(__name__) @@ -86,12 +85,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: Given a filename and xml contents create an xml - document for the mdsplus model by running a command like the below: - - java net.sf.saxon.Transform -s:- -xsl: -o:${OUTPUT_FILE} - - with ENV: - env={"CLASSPATH": saxon_jar_path, "PATH": os.environ.get("PATH", "")} + document for the mdsplus model by rusing saxonche Args: factory: IDSFactory 
indicating the DD version / XML to build models for. @@ -117,14 +111,14 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: crc = crc32(file.read()) cache_dir_name = "%s-%08x" % (xml_name, crc) - cache_dir_path = Path(_get_xdg_cache_dir()) / "imaspy" / "mdsplus" / cache_dir_name + cache_dir_path = Path(_get_xdg_cache_dir()) / "imas" / "mdsplus" / cache_dir_name # TODO: include hash or version of "IDSDef2MDSpreTree.xsl", which we should fetch # from the access layer instead of provide ourselves, if we wish to be resilient to # upgrades there (has happened early 2021 already once). of course, upgrades to the # on-disk formats should be versioned and documented properly, so this should never # happen again. - # There are multiple possible cases for the IMASPy cache + # There are multiple possible cases for the imas-python cache # 1. The cache exist and can be used # 2. The cache folder exists, and another process is creating it # 3. The cache folder exists, but the process creating it has stopped @@ -138,7 +132,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: tmp_cache_dir_path = ( Path(tempfile.gettempdir()) / getpass.getuser() - / "imaspy" + / "imas" / "mdsplus" / f"{cache_dir_name}_{fuuid}" ) @@ -164,7 +158,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: os.listdir(cache_dir_path), ) raise MDSPlusModelError( - "The IMASPy cache directory is corrupted. Please clean the" + "The imas-python cache directory is corrupted. Please clean the" f" cache directory ({cache_dir_path}) and try again." 
) elif not cache_dir_path.is_dir() and not model_exists(cache_dir_path): @@ -242,30 +236,50 @@ def model_exists(path: Path) -> bool: def create_model_ids_xml(cache_dir_path, fname, version): - """Use saxon to compile an ids.xml suitable for creating an mdsplus model.""" - + """Use Saxon/C to compile an ids.xml suitable for creating an MDSplus model.""" try: - # we have to be careful to have the same version of this file as in the access - # layer: - with as_file(files(imaspy) / "assets" / "IDSDef2MDSpreTree.xsl") as xslfile: - check_output( - [ - "java", - "net.sf.saxon.Transform", - "-s:" + str(fname), - "-o:" + str(Path(cache_dir_path) / "ids.xml"), - "DD_GIT_DESCRIBE=" + str(version or fname), - # if this is expected as git describe it might break - # if we just pass a filename - "AL_GIT_DESCRIBE=" + os.environ.get("AL_VERSION", "0.0.0"), - "-xsl:" + str(xslfile), - ], - input=get_dd_xml(version) if version else None, - env={"CLASSPATH": get_saxon(), "PATH": os.environ.get("PATH", "")}, - ) - except CalledProcessError as e: + with as_file(files("imas") / "assets" / "IDSDef2MDSpreTree.xsl") as xslfile: + output_file = Path(cache_dir_path) / "ids.xml" + + with PySaxonProcessor(license=False) as proc: + xslt_processor = proc.new_xslt30_processor() + + xslt_processor.compile_stylesheet(stylesheet_file=str(xslfile)) + + input_xml = get_dd_xml(version) if version else None + if fname: + source_file = str(fname) + elif input_xml: + source_file = input_xml # Use standard input for the XML string + else: + raise ValueError( + "Either 'fname' or 'version' must be provided to generate XML." 
+ ) + + # xdm_ddgit = proc.make_string_value(str(version or fname)) + # xsltproc.set_parameter("DD_GIT_DESCRIBE", xdm_ddgit) + # xdm_algit = proc.make_string_value(os.environ.get + # ("AL_VERSION", "0.0.0")) + # xsltproc.set_parameter("AL_GIT_DESCRIBE", xdm_algit) + # Transform XML + result = xslt_processor.transform_to_file( + source_file=source_file, + output_file=str(output_file), + initial_template_params={ + "DD_GIT_DESCRIBE": str(version or fname), + "AL_GIT_DESCRIBE": os.environ.get("AL_VERSION", "0.0.0"), + }, + ) + + if result is False: + logger.error( + "Transformation failed: Check Saxon/C logs for details." + ) + raise RuntimeError("Saxon/C XSLT transformation failed.") + + except Exception as e: if fname: - logger.error("Error making MDSPlus model IDS.xml for %s", fname) + logger.error("Error making MDSplus model IDS.xml for %s", fname) else: logger.error("Error making MDSplus model IDS.xml for %s", version) raise e diff --git a/imaspy/backends/imas_core/uda_support.py b/imas/backends/imas_core/uda_support.py similarity index 86% rename from imaspy/backends/imas_core/uda_support.py rename to imas/backends/imas_core/uda_support.py index 8b599faa..f051f549 100644 --- a/imaspy/backends/imas_core/uda_support.py +++ b/imas/backends/imas_core/uda_support.py @@ -3,7 +3,7 @@ from typing import Union from xml.etree import ElementTree as ET -from imaspy import dd_zip +from imas import dd_zip from .mdsplus_model import _get_xdg_cache_dir @@ -31,11 +31,11 @@ def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> str: def extract_idsdef(dd_version: str) -> str: """Extract the IDSDef.xml for the given version and return its path. 
- The IDSDef.xml is extracted to the imaspy cache folder: + The IDSDef.xml is extracted to the imas cache folder: - - If the file imaspy/uda/.xml already exists, we assume it is correct + - If the file imas/uda/.xml already exists, we assume it is correct """ - cache_dir_path = Path(_get_xdg_cache_dir()) / "imaspy" / "uda" + cache_dir_path = Path(_get_xdg_cache_dir()) / "imas" / "uda" cache_dir_path.mkdir(parents=True, exist_ok=True) # ensure cache folder exists idsdef_path = cache_dir_path / (dd_version + ".xml") diff --git a/imas/backends/netcdf/__init__.py b/imas/backends/netcdf/__init__.py new file mode 100644 index 00000000..86cc929e --- /dev/null +++ b/imas/backends/netcdf/__init__.py @@ -0,0 +1,4 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""NetCDF IO support for imas-python. Requires [netcdf] extra dependencies. +""" diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imas/backends/netcdf/db_entry_nc.py similarity index 93% rename from imaspy/backends/netcdf/db_entry_nc.py rename to imas/backends/netcdf/db_entry_nc.py index 732eb97d..8e37c464 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imas/backends/netcdf/db_entry_nc.py @@ -3,17 +3,17 @@ import logging from typing import List, Optional, Union -from imaspy.backends.db_entry_impl import ( +from imas.backends.db_entry_impl import ( DBEntryImpl, GetSampleParameters, GetSliceParameters, ) -from imaspy.backends.netcdf.ids2nc import IDS2NC -from imaspy.backends.netcdf.nc2ids import NC2IDS -from imaspy.exception import DataEntryException, InvalidNetCDFEntry -from imaspy.ids_convert import NBCPathMap, convert_ids -from imaspy.ids_factory import IDSFactory -from imaspy.ids_toplevel import IDSToplevel +from imas.backends.netcdf.ids2nc import IDS2NC +from imas.backends.netcdf.nc2ids import NC2IDS +from imas.exception import DataEntryException, InvalidNetCDFEntry +from imas.ids_convert import NBCPathMap, convert_ids +from 
imas.ids_factory import IDSFactory +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -31,7 +31,7 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: if netCDF4 is None: raise RuntimeError( "The `netCDF4` python module is not available. Please install this " - "module to read/write IMAS netCDF files with IMASPy." + "module to read/write IMAS netCDF files with imas-python." ) self._dataset = netCDF4.Dataset( diff --git a/imaspy/backends/netcdf/ids2nc.py b/imas/backends/netcdf/ids2nc.py similarity index 96% rename from imaspy/backends/netcdf/ids2nc.py rename to imas/backends/netcdf/ids2nc.py index 34e63101..e56c32a0 100644 --- a/imaspy/backends/netcdf/ids2nc.py +++ b/imas/backends/netcdf/ids2nc.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""NetCDF IO support for IMASPy. Requires [netcdf] extra dependencies. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""NetCDF IO support for imas-python. Requires [netcdf] extra dependencies. 
""" from typing import Iterator, Tuple @@ -8,13 +8,13 @@ import netCDF4 import numpy -from imaspy.backends.netcdf.nc_metadata import NCMetadata -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel default_fillvals = { IDSDataType.INT: netCDF4.default_fillvals["i4"], diff --git a/imaspy/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py similarity index 95% rename from imaspy/backends/netcdf/nc2ids.py rename to imas/backends/netcdf/nc2ids.py index 50905ba8..50668dfb 100644 --- a/imaspy/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -4,15 +4,15 @@ import netCDF4 -from imaspy.backends.netcdf import ids2nc -from imaspy.backends.netcdf.nc_metadata import NCMetadata -from imaspy.exception import InvalidNetCDFEntry -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.backends.netcdf import ids2nc +from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.exception import InvalidNetCDFEntry +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS +from imas.ids_metadata import IDSMetadata +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel logger = 
logging.getLogger(__name__) @@ -164,7 +164,7 @@ def run(self) -> None: def validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" - disable_validate = os.environ.get("IMASPY_DISABLE_NC_VALIDATE") + disable_validate = os.environ.get("IMAS_DISABLE_NC_VALIDATE") if disable_validate and disable_validate != "0": logger.info( "NetCDF file validation disabled: " diff --git a/imaspy/backends/netcdf/nc_metadata.py b/imas/backends/netcdf/nc_metadata.py similarity index 98% rename from imaspy/backends/netcdf/nc_metadata.py rename to imas/backends/netcdf/nc_metadata.py index 06b71e3e..50545f8c 100644 --- a/imaspy/backends/netcdf/nc_metadata.py +++ b/imas/backends/netcdf/nc_metadata.py @@ -1,14 +1,14 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """NetCDF metadata for dimensions and tensorization of IDSs. """ from functools import lru_cache from typing import Dict, List, Optional, Set, Tuple -from imaspy.ids_coordinates import IDSCoordinate -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_metadata import IDSMetadata +from imas.ids_coordinates import IDSCoordinate +from imas.ids_data_type import IDSDataType +from imas.ids_metadata import IDSMetadata def _get_aos_label_coordinates(metadata: IDSMetadata) -> List[str]: @@ -153,7 +153,7 @@ def _parse_dimensions(self, metadata: IDSMetadata, aos_level: int) -> None: """Parse dimensions and auxiliary coordinates from DD coordinate metadata. DD coordinates come in different flavours (see also - :mod:`imaspy.ids_coordinates`), which we handle in this function: + :mod:`imas.ids_coordinates`), which we handle in this function: 1. Coordinate is an index. 
diff --git a/imaspy/backends/netcdf/nc_validate.py b/imas/backends/netcdf/nc_validate.py similarity index 91% rename from imaspy/backends/netcdf/nc_validate.py rename to imas/backends/netcdf/nc_validate.py index 49a14283..55dbbf2b 100644 --- a/imaspy/backends/netcdf/nc_validate.py +++ b/imas/backends/netcdf/nc_validate.py @@ -1,7 +1,7 @@ -from imaspy.backends.netcdf.db_entry_nc import NCDBEntryImpl -from imaspy.backends.netcdf.nc2ids import NC2IDS -from imaspy.db_entry import DBEntry -from imaspy.exception import InvalidNetCDFEntry +from imas.backends.netcdf.db_entry_nc import NCDBEntryImpl +from imas.backends.netcdf.nc2ids import NC2IDS +from imas.db_entry import DBEntry +from imas.exception import InvalidNetCDFEntry def validate_netcdf_file(filename: str) -> None: diff --git a/imaspy/command/cli.py b/imas/command/cli.py similarity index 83% rename from imaspy/command/cli.py rename to imas/command/cli.py index f894f02d..565262ee 100644 --- a/imaspy/command/cli.py +++ b/imas/command/cli.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. 
""" Main CLI entry point """ import logging @@ -20,14 +20,14 @@ ) from rich.table import Table -import imaspy -import imaspy.backends.imas_core.imas_interface -from imaspy import DBEntry, dd_zip -from imaspy.backends.imas_core.imas_interface import ll_interface -from imaspy.command.db_analysis import analyze_db, process_db_analysis -from imaspy.command.helpers import min_version_guard, setup_rich_log_handler -from imaspy.command.timer import Timer -from imaspy.exception import UnknownDDVersion +import imas +import imas.backends.imas_core.imas_interface +from imas import DBEntry, dd_zip +from imas.backends.imas_core.imas_interface import ll_interface +from imas.command.db_analysis import analyze_db, process_db_analysis +from imas.command.helpers import min_version_guard, setup_rich_log_handler +from imas.command.timer import Timer +from imas.exception import UnknownDDVersion logger = logging.getLogger(__name__) @@ -42,14 +42,14 @@ def _excepthook(type_, value, tb): console.Console(stderr=True).print(rich_tb) -@click.group("imaspy", invoke_without_command=True, no_args_is_help=True) +@click.group("imas", invoke_without_command=True, no_args_is_help=True) def cli(): - """IMASPy command line interface. + """imas-python command line interface. Please use one of the available commands listed below. 
You can get help for each command by executing: - imaspy --help + imas --help """ # Limit the traceback to 1 item: avoid scaring CLI users with long traceback prints # and let them focus on the actual error message @@ -62,19 +62,24 @@ def cli(): @cli.command("version") def print_version(): - """Print version information of IMASPy.""" + """Print version information of imas-python.""" cons = console.Console() - grid = Table(title="IMASPy version info", show_header=False, title_style="bold") + grid = Table( + title="imas-python version info", show_header=False, title_style="bold" + ) grid.box = box.HORIZONTALS if cons.size.width > 120: grid.width = 120 - grid.add_row("IMASPy version:", imaspy.__version__) + grid.add_row("imas-python version:", imas.__version__) grid.add_section() - grid.add_row("Default data dictionary version:", imaspy.IDSFactory().dd_version) - dd_versions = ", ".join(imaspy.dd_zip.dd_xml_versions()) + grid.add_row("Default data dictionary version:", imas.IDSFactory().dd_version) + dd_versions = ", ".join(imas.dd_zip.dd_xml_versions()) grid.add_row("Available data dictionary versions:", dd_versions) grid.add_section() - grid.add_row("Access Layer core version:", ll_interface.get_al_version() or "N/A") + try: + grid.add_row("Access Layer core version:", ll_interface.get_al_version()) + except Exception: + grid.add_row("Access Layer core version:", "N/A") console.Console().print(grid) @@ -102,7 +107,7 @@ def print_ids(uri, ids, occurrence, print_all): with DBEntry(uri, "r") as dbentry: ids_obj = dbentry.get(ids, occurrence, autoconvert=False) - imaspy.util.print_tree(ids_obj, not print_all) + imas.util.print_tree(ids_obj, not print_all) @cli.command("convert", no_args_is_help=True) @@ -131,7 +136,7 @@ def convert_ids( Provide a different backend to URI_OUT than URI_IN to convert between backends. 
For example: - imaspy convert imas:mdsplus?path=db-in 3.41.0 imas:hdf5?path=db-out + imas convert imas:mdsplus?path=db-in 3.41.0 imas:hdf5?path=db-out \b uri_in URI of the input Data Entry. @@ -198,7 +203,7 @@ def convert_ids( ids2 = ids else: with timer("Convert", name): - ids2 = imaspy.convert_ids( + ids2 = imas.convert_ids( ids, None, factory=entry_out.factory, @@ -222,7 +227,7 @@ def convert_ids( @click.argument("filename", type=click.Path(exists=True, dir_okay=False)) def validate_nc(filename): """Validate if the provided netCDF file adheres to the IMAS conventions.""" - from imaspy.backends.netcdf.nc_validate import validate_netcdf_file + from imas.backends.netcdf.nc_validate import validate_netcdf_file try: validate_netcdf_file(filename) diff --git a/imaspy/command/db_analysis.py b/imas/command/db_analysis.py similarity index 96% rename from imaspy/command/db_analysis.py rename to imas/command/db_analysis.py index e687f94e..5cc946dc 100644 --- a/imaspy/command/db_analysis.py +++ b/imas/command/db_analysis.py @@ -1,4 +1,4 @@ -"""IMASPy-based command line tool for analysing fields in a database.""" +"""imas-python-based command line tool for analysing fields in a database.""" import gzip import json @@ -19,9 +19,9 @@ import rich.text import rich.tree -import imaspy -from imaspy.command.helpers import setup_rich_log_handler -from imaspy.ids_metadata import IDSMetadata +import imas +from imas.command.helpers import setup_rich_log_handler +from imas.ids_metadata import IDSMetadata directory_path = click.Path(exists=True, file_okay=False, path_type=Path) outfile_path = click.Path(dir_okay=False, writable=True, path_type=Path) @@ -36,7 +36,7 @@ "--output", "-o", type=outfile_path, - default="imaspy-db-analysis.json.gz", + default="imas-db-analysis.json.gz", help="Output file", ) def analyze_db(dbentry: Iterable[Path], output: Path) -> None: @@ -60,7 +60,7 @@ def analyze_db(dbentry: Iterable[Path], output: Path) -> None: data, the IDSs are inspected by looking at 
the HDF5 files directly. 2. This tool uses the optional `h5py` dependency. An error is raised when this package is not available. - 3. If your data is stored in another format than HDF5, you may use `imaspy convert` + 3. If your data is stored in another format than HDF5, you may use `imas convert` to convert the data into the HDF5 backend format first. """ # Test if h5py is available @@ -149,11 +149,11 @@ def process_db_analysis(infiles, show_empty_ids): \b Arguments: - INPUT_FILES File(s) produced by `imaspy analyze-db` to process. + INPUT_FILES File(s) produced by `imas analyze-db` to process. """ setup_rich_log_handler(False) - factory = imaspy.IDSFactory() + factory = imas.IDSFactory() filled_per_ids = {ids_name: set() for ids_name in factory.ids_names()} logger.info("Using Data Dictionary version %s.", factory.dd_version) logger.info("Reading %d input files...", len(infiles)) diff --git a/imaspy/command/helpers.py b/imas/command/helpers.py similarity index 69% rename from imaspy/command/helpers.py rename to imas/command/helpers.py index d3009ce4..f43a47e1 100644 --- a/imaspy/command/helpers.py +++ b/imas/command/helpers.py @@ -5,28 +5,28 @@ from packaging.version import Version from rich.logging import RichHandler -from imaspy.backends.imas_core.imas_interface import ll_interface +from imas.backends.imas_core.imas_interface import ll_interface def setup_rich_log_handler(quiet: bool): """Setup rich.logging.RichHandler on the root logger. Args: - quiet: When True: set log level of the `imaspy` logger to WARNING or higher. + quiet: When True: set log level of the `imas` logger to WARNING or higher. 
""" - # Disable default imaspy log handler - imaspy_logger = logging.getLogger("imaspy") - for handler in imaspy_logger.handlers: - imaspy_logger.removeHandler(handler) + # Disable default imas log handler + imas_logger = logging.getLogger("imas") + for handler in imas_logger.handlers: + imas_logger.removeHandler(handler) # Disable any root log handlers root_logger = logging.getLogger() for handler in root_logger.handlers: root_logger.removeHandler(handler) # Install rich handler on the root logger: root_logger.addHandler(RichHandler()) - if quiet: # Silence IMASPy INFO messages + if quiet: # Silence imas-python INFO messages # If loglevel is less than WARNING, set it to WARNING: - imaspy_logger.setLevel(max(logging.WARNING, imaspy_logger.getEffectiveLevel())) + imas_logger.setLevel(max(logging.WARNING, imas_logger.getEffectiveLevel())) def min_version_guard(al_version: Version): diff --git a/imaspy/command/timer.py b/imas/command/timer.py similarity index 95% rename from imaspy/command/timer.py rename to imas/command/timer.py index be9b21f7..9f43ee55 100644 --- a/imaspy/command/timer.py +++ b/imas/command/timer.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Utility class to time different sections of a CLI app.""" import time diff --git a/imaspy/db_entry.py b/imas/db_entry.py similarity index 91% rename from imaspy/db_entry.py rename to imas/db_entry.py index 3834655d..899dfd83 100644 --- a/imaspy/db_entry.py +++ b/imas/db_entry.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interacting with IMAS Data Entries. 
""" @@ -9,17 +9,17 @@ import numpy -import imaspy -from imaspy.backends.db_entry_impl import ( +import imas +from imas.backends.db_entry_impl import ( DBEntryImpl, GetSampleParameters, GetSliceParameters, ) -from imaspy.dd_zip import dd_xml_versions -from imaspy.exception import IDSNameError, UnknownDDVersion, ValidationError -from imaspy.ids_base import IDSBase -from imaspy.ids_convert import dd_version_map_from_factories -from imaspy.ids_defs import ( +from imas.dd_zip import dd_xml_versions +from imas.exception import IDSNameError, UnknownDDVersion, ValidationError +from imas.ids_base import IDSBase +from imas.ids_convert import dd_version_map_from_factories +from imas.ids_defs import ( CREATE_PULSE, FORCE_CREATE_PULSE, FORCE_OPEN_PULSE, @@ -27,9 +27,9 @@ IDS_TIME_MODES, OPEN_PULSE, ) -from imaspy.ids_factory import IDSFactory -from imaspy.ids_metadata import IDSType -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_factory import IDSFactory +from imas.ids_metadata import IDSType +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -53,17 +53,17 @@ class DBEntry: .. code-block:: python - import imaspy + import imas - # AL4-style constructor: - with imaspy.DBEntry(imaspy.ids_defs.HDF5_BACKEND, "test", 1, 12) as dbentry: + # old constructor: + with imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "test", 1, 12) as dbentry: # dbentry is now opened and can be used for reading data: ids = dbentry.get(...) 
# The dbentry is now closed - # AL5-style constructor also allows creating the Data Entry with the mode + # new constructor also allows creating the Data Entry with the mode # argument - with imaspy.DBEntry("imas:hdf5?path=testdb", "w") as dbentry: + with imas.DBEntry("imas:hdf5?path=testdb", "w") as dbentry: # dbentry is now created and can be used for writing data: dbentry.put(ids) # The dbentry is now closed @@ -190,9 +190,9 @@ def __init__( def _select_implementation(uri: Optional[str]) -> Type[DBEntryImpl]: """Select which DBEntry implementation to use based on the URI.""" if uri and uri.endswith(".nc") and not uri.startswith("imas:"): - from imaspy.backends.netcdf.db_entry_nc import NCDBEntryImpl as impl + from imas.backends.netcdf.db_entry_nc import NCDBEntryImpl as impl else: - from imaspy.backends.imas_core.db_entry_al import ALDBEntryImpl as impl + from imas.backends.imas_core.db_entry_al import ALDBEntryImpl as impl return impl def __enter__(self): @@ -243,10 +243,10 @@ def create(self, *, options=None, force=True) -> None: Example: .. code-block:: python - import imaspy - from imaspy.ids_defs import HDF5_BACKEND + import imas + from imas.ids_defs import HDF5_BACKEND - imas_entry = imaspy.DBEntry(HDF5_BACKEND, "test", 1, 1234) + imas_entry = imas.DBEntry(HDF5_BACKEND, "test", 1, 1234) imas_entry.create() """ self._open_pulse(FORCE_CREATE_PULSE if force else CREATE_PULSE, options) @@ -263,10 +263,10 @@ def open(self, mode=OPEN_PULSE, *, options=None, force=False) -> None: Example: .. code-block:: python - import imaspy - from imaspy.ids_defs import HDF5_BACKEND + import imas + from imas.ids_defs import HDF5_BACKEND - imas_entry = imaspy.DBEntry(HDF5_BACKEND, "test", 1, 1234) + imas_entry = imas.DBEntry(HDF5_BACKEND, "test", 1, 1234) imas_entry.open() """ if force: @@ -345,9 +345,9 @@ def get( Example: .. 
code-block:: python - import imaspy + import imas - imas_entry = imaspy.DBEntry(imaspy.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") + imas_entry = imas.DBEntry(imas.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") imas_entry.open() core_profiles = imas_entry.get("core_profiles") """ # noqa @@ -384,9 +384,9 @@ def get_slice( time_requested: Requested time slice interpolation_method: Interpolation method to use. Available options: - - :const:`~imaspy.ids_defs.CLOSEST_INTERP` - - :const:`~imaspy.ids_defs.PREVIOUS_INTERP` - - :const:`~imaspy.ids_defs.LINEAR_INTERP` + - :const:`~imas.ids_defs.CLOSEST_INTERP` + - :const:`~imas.ids_defs.PREVIOUS_INTERP` + - :const:`~imas.ids_defs.LINEAR_INTERP` occurrence: Which occurrence of the IDS to read. @@ -414,11 +414,11 @@ def get_slice( Example: .. code-block:: python - import imaspy + import imas - imas_entry = imaspy.DBEntry(imaspy.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") + imas_entry = imas.DBEntry(imas.ids_defs.MDSPLUS_BACKEND, "ITER", 131024, 41, "public") imas_entry.open() - core_profiles = imas_entry.get_slice("core_profiles", 370, imaspy.ids_defs.PREVIOUS_INTERP) + core_profiles = imas_entry.get_slice("core_profiles", 370, imas.ids_defs.PREVIOUS_INTERP) """ # noqa return self._get( ids_name, @@ -483,9 +483,9 @@ def get_sample( containing an explicit time base to interpolate. interpolation_method: Interpolation method to use. Available options: - - :const:`~imaspy.ids_defs.CLOSEST_INTERP` - - :const:`~imaspy.ids_defs.PREVIOUS_INTERP` - - :const:`~imaspy.ids_defs.LINEAR_INTERP` + - :const:`~imas.ids_defs.CLOSEST_INTERP` + - :const:`~imas.ids_defs.PREVIOUS_INTERP` + - :const:`~imas.ids_defs.LINEAR_INTERP` occurrence: Which occurrence of the IDS to read. @@ -513,11 +513,11 @@ def get_sample( Example: .. 
code-block:: python - import imaspy + import imas import numpy - from imaspy import ids_defs + from imas import ids_defs - imas_entry = imaspy.DBEntry( + imas_entry = imas.DBEntry( "imas:mdsplus?user=public;pulse=131024;run=41;database=ITER", "r") # All time slices between t=200 and t=370 @@ -605,12 +605,13 @@ def _get( if dd_version.split(".")[0] != destination._dd_version.split(".")[0]: logger.warning( "On-disk data is stored in DD %s which has a different major " - "version than the requested DD version (%s). IMASPy will convert " - "the data automatically, but this does not cover all changes. See " - "%s/multi-dd.html#conversion-of-idss-between-dd-versions", + "version than the requested DD version (%s). imas-python will " + "convert the data automatically, but this does not cover all " + "changes. " + "See %s/multi-dd.html#conversion-of-idss-between-dd-versions", dd_version, destination._dd_version, - imaspy.PUBLISHED_DOCUMENTATION_ROOT, + imas.PUBLISHED_DOCUMENTATION_ROOT, ) ddmap, source_is_older = dd_version_map_from_factories( ids_name, IDSFactory(version=dd_version), self._ids_factory @@ -643,7 +644,7 @@ def put(self, ids: IDSToplevel, occurrence: int = 0) -> None: Example: .. code-block:: python - ids = imaspy.IDSFactory().pf_active() + ids = imas.IDSFactory().pf_active() ... # fill the pf_active IDS here imas_entry.put(ids) """ @@ -682,7 +683,7 @@ def put_slice(self, ids: IDSToplevel, occurrence: int = 0) -> None: .. code-block:: python - ids = imaspy.IDSFactory().pf_active() ... # fill the static data of the + ids = imas.IDSFactory().pf_active() ... # fill the static data of the pf_active IDS here for i in range(N): ... 
# fill time slice of the pf_active IDS imas_entry.put_slice(ids) """ @@ -733,7 +734,7 @@ def _put(self, ids: IDSToplevel, occurrence: int, is_slice: bool): version_put = ids.ids_properties.version_put version_put.data_dictionary = self._ids_factory._version version_put.access_layer = self._dbe_impl.access_layer_version() - version_put.access_layer_language = f"imaspy {imaspy.__version__}" + version_put.access_layer_language = f"imas {imas.__version__}" self._dbe_impl.put(ids, occurrence, is_slice) diff --git a/imas/dd_helpers.py b/imas/dd_helpers.py new file mode 100644 index 00000000..f5fd5070 --- /dev/null +++ b/imas/dd_helpers.py @@ -0,0 +1,168 @@ +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Helper functions to build IDSDef.xml""" + +import logging +import os +import shutil +from pathlib import Path +from typing import Tuple +from zipfile import ZIP_DEFLATED, ZipFile + +from packaging.version import Version as V +from saxonche import PySaxonProcessor + +logger = logging.getLogger(__name__) + +_idsdef_zip_relpath = Path("imas/assets/IDSDef.zip") +_build_dir = Path("build") + + +def prepare_data_dictionaries(): + """Build IMAS IDSDef.xml files for each tagged version in the DD repository + 1. Use saxonche for transformations + 2. Clone the DD repository (ask for user/pass unless ssh key access is available) + 3. Generate IDSDef.xml and rename to IDSDef_${version}.xml + 4. 
Zip all these IDSDefs together and include in wheel + """ + from git import Repo + + repo: Repo = get_data_dictionary_repo() + if repo: + newest_version_and_tag = (V("0"), None) + for tag in repo.tags: + version_and_tag = (V(str(tag)), tag) + if V(str(tag)) > V("3.21.1"): + newest_version_and_tag = max(newest_version_and_tag, version_and_tag) + logger.debug("Building data dictionary version %s", tag) + build_data_dictionary(repo, tag) + + logger.info("Creating zip file of DD versions") + + if _idsdef_zip_relpath.is_file(): + logger.warning("Overwriting '%s'", _idsdef_zip_relpath) + + with ZipFile( + _idsdef_zip_relpath, + mode="w", # this needs w, since zip can have multiple same entries + compression=ZIP_DEFLATED, + ) as dd_zip: + for filename in _build_dir.glob("[0-9]*.xml"): + arcname = Path("data-dictionary").joinpath(*filename.parts[1:]) + dd_zip.write(filename, arcname=arcname) + # Include identifiers from latest tag in zip file + repo.git.checkout(newest_version_and_tag[1], force=True) + # DD layout <= 4.0.0 + for filename in Path("data-dictionary").glob("*/*identifier.xml"): + arcname = Path("identifiers").joinpath(*filename.parts[1:]) + dd_zip.write(filename, arcname=arcname) + # DD layout > 4.0.0 + for filename in Path("data-dictionary").glob("schemas/*/*identifier.xml"): + arcname = Path("identifiers").joinpath(*filename.parts[2:]) + dd_zip.write(filename, arcname=arcname) + + +def get_data_dictionary_repo() -> "Repo": + try: + import git # Import git here, the user might not have it! + except ModuleNotFoundError: + raise RuntimeError( + "Could not find 'git' module, try 'pip install gitpython'. \ + Will not build Data Dictionaries!" 
+ ) + + # We need the actual source code (for now) so grab it from ITER + dd_repo_path = "data-dictionary" + + if "DD_DIRECTORY" in os.environ: + logger.info("Found DD_DIRECTORY, copying") + try: + shutil.copytree(os.environ["DD_DIRECTORY"], dd_repo_path) + except FileExistsError: + pass + else: + logger.info("Trying to pull data dictionary git repo from ITER") + + # Set up a bare repo and fetch the data-dictionary repository in it + os.makedirs(dd_repo_path, exist_ok=True) + try: + repo = git.Repo(dd_repo_path) + except git.exc.InvalidGitRepositoryError: + repo = git.Repo.init(dd_repo_path) + logger.info("Set up local git repository {!s}".format(repo)) + + try: + origin = repo.remote() + except ValueError: + dd_repo_url = "https://github.com/iterorganization/imas-data-dictionary.git" + origin = repo.create_remote("origin", url=dd_repo_url) + logger.info("Set up remote '{!s}' linking to '{!s}'".format(origin, origin.url)) + + try: + origin.fetch(tags=True) + except git.exc.GitCommandError as ee: + logger.warning( + "Could not fetch tags from %s. Git reports:\n %s." "\nTrying to continue", + list(origin.urls), + ee, + ) + else: + logger.info("Remote tags fetched") + return repo + + +def _run_xsl_transformation( + xsd_file: Path, xsl_file: Path, tag: str, output_file: Path +) -> None: + """ + This function performs an XSL transformation using Saxon-HE (saxonche) + with the provided XSD file, XSL file, tag, and output file. 
+ + Args: + xsd_file (Path): XML Schema Definition (XSD) file + xsl_file (Path): The `xsl_file` parameter + tag (str): tag name to provide to 'DD_GIT_DESCRIBE' parameter + output_file (Path): The `output_file` parameter for resulting xml + """ + with PySaxonProcessor(license=False) as proc: + logger.debug("Initializing Saxon Processor") + xsltproc = proc.new_xslt30_processor() + xdm_ddgit = proc.make_string_value(tag) + xsltproc.set_parameter("DD_GIT_DESCRIBE", xdm_ddgit) + xsltproc.transform_to_file( + source_file=str(xsd_file), + stylesheet_file=str(xsl_file), + output_file=str(output_file), + ) + logger.info("Transformation complete: %s -> %s", xsd_file, output_file) + + +def build_data_dictionary(repo, tag: str, rebuild=False) -> None: + """Build a single version of the data dictionary given by the tag argument + if the IDS does not already exist. + + In the data-dictionary repository sometimes IDSDef.xml is stored + directly, in which case we do not call make. + + Args: + repo: Repository object containing the DD source code + tag: The DD version tag that will be built + rebuild: If true, overwrites existing pre-built tagged DD version + """ + _build_dir.mkdir(exist_ok=True) + result_xml = _build_dir / f"{tag}.xml" + + if result_xml.exists() and not rebuild: + logger.debug(f"XML for tag '{tag}' already exists, skipping") + return + + repo.git.checkout(tag, force=True) + + # Perform the XSL transformation with saxonche + dd_xsd = Path("data-dictionary/dd_data_dictionary.xml.xsd") + dd_xsl = Path("data-dictionary/dd_data_dictionary.xml.xsl") + _run_xsl_transformation(dd_xsd, dd_xsl, str(tag), result_xml) + + +if __name__ == "__main__": + prepare_data_dictionaries() diff --git a/imaspy/dd_zip.py b/imas/dd_zip.py similarity index 89% rename from imaspy/dd_zip.py rename to imas/dd_zip.py index 15354eb6..cd6fa1cd 100644 --- a/imaspy/dd_zip.py +++ b/imas/dd_zip.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. 
-# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """ Extract DD versions from a zip file. The zip file contains files as @@ -7,27 +7,27 @@ * `data-dictionary/3.29.0.xml` multiple paths are checked. See `ZIPFILE_LOCATIONS`. -First the environment variable IMASPY_DDZIP is checked. +First the environment variable IMAS_DDZIP is checked. If that exists and points to a file we will attempt to open it. Then, IDSDef.zip is searched in site-packages, the current folder, -in .config/imaspy/ (`$$XDG_CONFIG_HOME`) and in -the assets/ folder within the IMASPy package. +in .config/imas/ (`$$XDG_CONFIG_HOME`) and in +the assets/ folder within the imas-python package. -1. `$$IMASPY_DDZIP` +1. `$$IMAS_DDZIP` 2. The virtual environment -3. USER_BASE`imaspy/IDSDef.zip` -4. All `site-packages/imaspy/IDSDef.zip` +3. USER_BASE`imas/IDSDef.zip` +4. All `site-packages/imas/IDSDef.zip` 5. `./IDSDef.zip` -6. `~/.config/imaspy/IDSDef.zip` -7. `__file__/../../imaspy/assets/IDSDef.zip` +6. `~/.config/imas/IDSDef.zip` +7. `__file__/../../imas/assets/IDSDef.zip` -All files are checked, i.e. if your .config/imaspy/IDSDef.zip is outdated -the IMASPy-packaged version will be used. +All files are checked, i.e. if your .config/imas/IDSDef.zip is outdated +the imas-python-packaged version will be used. The `assets/IDSDef.zip` provided with the package can be updated with the `python setup.py build_DD` command, which is also performed on install if you have access to the ITER data-dictionary git repo. -Reinstalling imaspy thus also will give you access to the latest DD versions. +Reinstalling imas thus also will give you access to the latest DD versions. 
""" import logging import os @@ -53,8 +53,8 @@ from packaging.version import InvalidVersion, Version -import imaspy -from imaspy.exception import UnknownDDVersion +import imas +from imas.exception import UnknownDDVersion logger = logging.getLogger(__name__) @@ -70,20 +70,20 @@ def _get_xdg_config_dir(): def _generate_zipfile_locations() -> Iterator[Union[Path, Traversable]]: """Build a list of potential data dictionary locations. - We start with the path (if any) of the IMASPY_DDZIP env var. + We start with the path (if any) of the IMAS_DDZIP env var. Then we look for IDSDef.zip in the current folder, in the - default XDG config dir (~/.config/imaspy/IDSDef.zip) and + default XDG config dir (~/.config/imas/IDSDef.zip) and finally in the assets distributed with this package. """ zip_name = "IDSDef.zip" - environ = os.environ.get("IMASPY_DDZIP") + environ = os.environ.get("IMAS_DDZIP") if environ: yield Path(environ).resolve() yield Path(zip_name).resolve() - yield Path(_get_xdg_config_dir()).resolve() / "imaspy" / zip_name - yield files(imaspy) / "assets" / zip_name + yield Path(_get_xdg_config_dir()).resolve() / "imas" / zip_name + yield files(imas) / "assets" / zip_name def parse_dd_version(version: str) -> Version: @@ -281,12 +281,12 @@ def get_identifier_xml(identifier_name): def print_supported_version_warning(version): try: - if parse_dd_version(version) < imaspy.OLDEST_SUPPORTED_VERSION: + if parse_dd_version(version) < imas.OLDEST_SUPPORTED_VERSION: logger.warning( "Version %s is below lowest supported version of %s.\ Proceed at your own risk.", version, - imaspy.OLDEST_SUPPORTED_VERSION, + imas.OLDEST_SUPPORTED_VERSION, ) except InvalidVersion: logging.warning("Ignoring version parsing error.", exc_info=1) diff --git a/imaspy/exception.py b/imas/exception.py similarity index 92% rename from imaspy/exception.py rename to imas/exception.py index 550ce2ed..fa89c326 100644 --- a/imaspy/exception.py +++ b/imas/exception.py @@ -1,16 +1,16 @@ -# This file is part 
of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Exception classes used in IMASPy. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Exception classes used in imas-python. """ import difflib import logging from typing import TYPE_CHECKING, List -from imaspy.backends.imas_core import imas_interface as _imas_interface +from imas.backends.imas_core import imas_interface as _imas_interface if TYPE_CHECKING: - from imaspy.ids_factory import IDSFactory + from imas.ids_factory import IDSFactory logger = logging.getLogger(__name__) diff --git a/imaspy/ids_base.py b/imas/ids_base.py similarity index 90% rename from imaspy/ids_base.py rename to imas/ids_base.py index 1d299c39..3e182772 100644 --- a/imaspy/ids_base.py +++ b/imas/ids_base.py @@ -1,17 +1,17 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Base class for all IDS nodes. """ import logging from typing import TYPE_CHECKING, Optional, Type -from imaspy.exception import ValidationError -from imaspy.ids_defs import IDS_TIME_MODE_INDEPENDENT -from imaspy.ids_metadata import IDSMetadata +from imas.exception import ValidationError +from imas.ids_defs import IDS_TIME_MODE_INDEPENDENT +from imas.ids_metadata import IDSMetadata if TYPE_CHECKING: - from imaspy.ids_toplevel import IDSToplevel + from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -62,7 +62,7 @@ def _path(self) -> str: AoS. Usage of _path is (and should remain) limited to "interactive" use cases - (like in :mod:`imaspy.util` and ``__repr__``) or when reporting errors. + (like in :mod:`imas.util` and ``__repr__``) or when reporting errors. 
Examples: - ``ids.ids_properties.creation_data._path`` is @@ -70,7 +70,7 @@ def _path(self) -> str: - ``gyrokinetics.wavevector[0].radial_component_norm._path`` is ``"wavevector[0]/radial_component_norm"`` """ - from imaspy.ids_struct_array import IDSStructArray + from imas.ids_struct_array import IDSStructArray parent_path = self._parent._path my_path = self.metadata.name @@ -117,7 +117,7 @@ def _validate(self) -> None: """Actual implementation of validation logic. See also: - :py:meth:`imaspy.ids_toplevel.IDSToplevel.validate`. + :py:meth:`imas.ids_toplevel.IDSToplevel.validate`. Args: aos_indices: index_name -> index, e.g. {"i1": 1, "itime": 0}, for all parent diff --git a/imaspy/ids_convert.py b/imas/ids_convert.py similarity index 98% rename from imaspy/ids_convert.py rename to imas/ids_convert.py index f31d6676..95ccd92c 100644 --- a/imaspy/ids_convert.py +++ b/imas/ids_convert.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Functionality for converting IDSToplevels between DD versions. 
""" @@ -14,21 +14,21 @@ import numpy from packaging.version import InvalidVersion, Version -import imaspy -from imaspy.dd_zip import parse_dd_version -from imaspy.ids_base import IDSBase -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_factory import IDSFactory -from imaspy.ids_path import IDSPath -from imaspy.ids_primitive import ( +import imas +from imas.dd_zip import parse_dd_version +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_factory import IDSFactory +from imas.ids_path import IDSPath +from imas.ids_primitive import ( IDSNumeric0D, IDSNumericArray, IDSPrimitive, IDSString0D, ) -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -183,7 +183,7 @@ def _check_data_type(self, old_item: Element, new_item: Element): else: logger.debug( "Data type of %s changed from %s to %s. This change is not " - "supported by IMASPy: no conversion will be done.", + "supported by imas-python: no conversion will be done.", new_item.get("path"), old_item.get("data_type"), new_item.get("data_type"), @@ -527,7 +527,7 @@ def _add_provenance_entry( source_txt = ( f"{provenance_origin_uri}; " f"This IDS has been converted from DD {source_version} to " - f"DD {target_ids._dd_version} by IMASPy {imaspy.__version__}." + f"DD {target_ids._dd_version} by imas-python {imas.__version__}." ) if hasattr(node, "reference"): # DD version after IMAS-5304 diff --git a/imaspy/ids_coordinates.py b/imas/ids_coordinates.py similarity index 94% rename from imaspy/ids_coordinates.py rename to imas/ids_coordinates.py index 831b3fd3..8e3a2b70 100644 --- a/imaspy/ids_coordinates.py +++ b/imas/ids_coordinates.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. 
-# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interpreting coordinates in an IDS. """ @@ -9,16 +9,16 @@ import numpy as np -from imaspy.exception import CoordinateError, CoordinateLookupError, ValidationError -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_defs import EMPTY_FLOAT -from imaspy.ids_defs import IDS_TIME_MODE_HETEROGENEOUS as HETEROGENEOUS_TIME -from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS as HOMOGENEOUS_TIME -from imaspy.ids_path import IDSPath +from imas.exception import CoordinateError, CoordinateLookupError, ValidationError +from imas.ids_data_type import IDSDataType +from imas.ids_defs import EMPTY_FLOAT +from imas.ids_defs import IDS_TIME_MODE_HETEROGENEOUS as HETEROGENEOUS_TIME +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS as HOMOGENEOUS_TIME +from imas.ids_path import IDSPath if TYPE_CHECKING: # Prevent circular imports - from imaspy.ids_base import IDSBase - from imaspy.ids_primitive import IDSPrimitive + from imas.ids_base import IDSBase + from imas.ids_primitive import IDSPrimitive logger = logging.getLogger(__name__) @@ -142,10 +142,10 @@ class IDSCoordinates: Can be used to automatically retrieve coordinate values via the indexing operator. Example: - >>> import imaspy - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> import imas + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.ids_properties.homogeneous_time = \\ - ... imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + ... imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> core_profiles.profiles_1d.coordinates[0] IDSNumericArray("/core_profiles/time", array([], dtype=float64)) """ @@ -281,7 +281,7 @@ def _validate(self): """Coordinate validation checks. See also: - :py:meth:`imaspy.ids_toplevel.IDSToplevel.validate`. + :py:meth:`imas.ids_toplevel.IDSToplevel.validate`. 
""" node = self._node shape = node.shape @@ -385,7 +385,9 @@ def _capture_goto_errors(self, dim, coordinate): "some coordinate metadata is incorrect." ) else: - version_error = "Please report this issue to the IMASPy developers." + version_error = ( + "Please report this issue to the imas-python developers." + ) logger.warning( "An error occurred while finding coordinate `%s` of dimension %s, " "which is ignored. %s", diff --git a/imaspy/ids_data_type.py b/imas/ids_data_type.py similarity index 95% rename from imaspy/ids_data_type.py rename to imas/ids_data_type.py index 794ccf3a..69a3a201 100644 --- a/imaspy/ids_data_type.py +++ b/imas/ids_data_type.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Data Dictionary type handling functionality. """ @@ -9,7 +9,7 @@ import numpy as np -from imaspy.ids_defs import ( +from imas.ids_defs import ( CHAR_DATA, COMPLEX_DATA, DOUBLE_DATA, diff --git a/imaspy/ids_defs.py b/imas/ids_defs.py similarity index 95% rename from imaspy/ids_defs.py rename to imas/ids_defs.py index b35352cb..26ac10c3 100644 --- a/imaspy/ids_defs.py +++ b/imas/ids_defs.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -""" Load IMASPy libs to provide constants +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +""" Load imas-python libs to provide constants .. 
_`Backend identifiers`: @@ -89,7 +89,7 @@ import functools import logging -from imaspy.backends.imas_core.imas_interface import has_imas, imasdef +from imas.backends.imas_core.imas_interface import has_imas, imasdef logger = logging.getLogger(__name__) diff --git a/imaspy/ids_factory.py b/imas/ids_factory.py similarity index 88% rename from imaspy/ids_factory.py rename to imas/ids_factory.py index e1bde19d..2173985b 100644 --- a/imaspy/ids_factory.py +++ b/imas/ids_factory.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Tools for generating IDSs from a Data Dictionary version. """ @@ -7,9 +7,9 @@ from functools import partial from typing import Any, Iterable, Iterator, List, Optional -from imaspy import dd_zip -from imaspy.exception import IDSNameError -from imaspy.ids_toplevel import IDSToplevel +from imas import dd_zip +from imas.exception import IDSNameError +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -21,9 +21,9 @@ class IDSFactory: >>> factory = IDSFactory() >>> factory.core_profiles() - + >>> factory.new("core_profiles") - + """ def __init__( @@ -31,7 +31,7 @@ def __init__( ) -> None: """Create a new IDS Factory - See :meth:`imaspy.dd_zip.dd_etree` for further details on the ``version`` and + See :meth:`imas.dd_zip.dd_etree` for further details on the ``version`` and ``xml_path`` arguments. Args: diff --git a/imaspy/ids_identifiers.py b/imas/ids_identifiers.py similarity index 94% rename from imaspy/ids_identifiers.py rename to imas/ids_identifiers.py index 397975d0..df72be32 100644 --- a/imaspy/ids_identifiers.py +++ b/imas/ids_identifiers.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""IMASPy module to support Data Dictionary identifiers. 
+# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""imas-python module to support Data Dictionary identifiers. """ import logging @@ -8,7 +8,7 @@ from typing import Iterable, List, Type from xml.etree.ElementTree import fromstring -from imaspy import dd_zip +from imas import dd_zip logger = logging.getLogger(__name__) @@ -107,7 +107,7 @@ def identifiers(self) -> List[str]: Example: .. code-block:: python - from imaspy import identifiers + from imas import identifiers # List all identifier names for identifier_name in identifiers.identifiers: print(identifier_name) diff --git a/imaspy/ids_metadata.py b/imas/ids_metadata.py similarity index 92% rename from imaspy/ids_metadata.py rename to imas/ids_metadata.py index 05172627..2cd0e224 100644 --- a/imaspy/ids_metadata.py +++ b/imas/ids_metadata.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Core of the IMASPy interpreted IDS metadata +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Core of the imas-python interpreted IDS metadata """ import re import types @@ -10,10 +10,10 @@ from typing import Any, Dict, Iterator, Optional, Tuple, Type from xml.etree.ElementTree import Element -from imaspy.ids_coordinates import IDSCoordinate -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_identifiers import IDSIdentifier, identifiers -from imaspy.ids_path import IDSPath +from imas.ids_coordinates import IDSCoordinate +from imas.ids_data_type import IDSDataType +from imas.ids_identifiers import IDSIdentifier, identifiers +from imas.ids_path import IDSPath class IDSType(Enum): @@ -26,7 +26,7 @@ class IDSType(Enum): operation); ``dynamic`` data are those which vary in time within the context of the data. 
- As in the Python HLI, IMASPy only distinguishes between dynamic and non-dynamic + As in the Python HLI, imas-python only distinguishes between dynamic and non-dynamic nodes. """ @@ -86,7 +86,7 @@ def _build_type_map(): This must be done in a separate function to avoid circular imports. """ - from imaspy.ids_primitive import ( + from imas.ids_primitive import ( IDSComplex0D, IDSFloat0D, IDSInt0D, @@ -94,9 +94,9 @@ def _build_type_map(): IDSString0D, IDSString1D, ) - from imaspy.ids_struct_array import IDSStructArray - from imaspy.ids_structure import IDSStructure - from imaspy.ids_toplevel import IDSToplevel + from imas.ids_struct_array import IDSStructArray + from imas.ids_structure import IDSStructure + from imas.ids_toplevel import IDSToplevel _type_map[(None, 0)] = IDSToplevel _type_map[(IDSDataType.STRUCTURE, 0)] = IDSStructure @@ -123,7 +123,7 @@ class IDSMetadata: .. code-block:: python - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # Get the metadata of the time child of the profiles_1d array of structures p1d_time_meta = core_profiles.metadata["profiles_1d/time"] @@ -154,7 +154,8 @@ def __init__( else: self._ctx_path = self.name - # These are special and used in IMASPy logic, so we need to ensure proper values + # These are special and used in imas-python logic, + # so we need to ensure proper values maxoccur = attrib.get("maxoccur", "unbounded") self.maxoccur: Optional[int] = ( None if maxoccur == "unbounded" else int(maxoccur) @@ -291,7 +292,7 @@ def identifier_enum(self) -> Optional[Type[IDSIdentifier]]: """The identifier enum for this IDS node (if available). This property is an identifier enum (a subclass of - :py:class:`imaspy.ids_identifiers.IDSIdentifier`) if this node represents an + :py:class:`imas.ids_identifiers.IDSIdentifier`) if this node represents an identifier, and the Data Dictionary defines the allowed identifier values. 
This property is ``None`` when this node is not an identifier, or the Data diff --git a/imaspy/ids_path.py b/imas/ids_path.py similarity index 97% rename from imaspy/ids_path.py rename to imas/ids_path.py index a0a90dbc..75fb6694 100644 --- a/imaspy/ids_path.py +++ b/imas/ids_path.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Logic for interpreting paths to elements in an IDS """ @@ -8,8 +8,8 @@ from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Tuple, Union if TYPE_CHECKING: # Prevent circular imports - from imaspy.ids_base import IDSBase - from imaspy.ids_metadata import IDSMetadata + from imas.ids_base import IDSBase + from imas.ids_metadata import IDSMetadata logger = logging.getLogger(__name__) @@ -181,7 +181,7 @@ def goto(self, from_element: "IDSBase", *, from_root: bool = True) -> "IDSBase": Example: .. code-block:: python - cp = imaspy.IDSFactory().core_profiles() + cp = imas.IDSFactory().core_profiles() cp.profiles_1d.resize(1) element = cp.profiles_1d[0] path1 = IDSPath("ids_properties/homogeneous_time") @@ -236,7 +236,7 @@ def goto_metadata(self, from_metadata: "IDSMetadata") -> "IDSMetadata": Example: .. code-block:: python - es = imaspy.IDSFactory().edge_sources() + es = imas.IDSFactory().edge_sources() path = IDSPath("source/ggd/ion/energy") energy_metadata = path.goto_metadata(es.metadata) """ diff --git a/imaspy/ids_primitive.py b/imas/ids_primitive.py similarity index 97% rename from imaspy/ids_primitive.py rename to imas/ids_primitive.py index 94f865b6..816ebac6 100644 --- a/imaspy/ids_primitive.py +++ b/imas/ids_primitive.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. 
+# You should have received the imas-python LICENSE file with this project. """Provides the classes for IDS data nodes """ import logging @@ -13,10 +13,10 @@ import numpy as np from xxhash import xxh3_64, xxh3_64_digest -from imaspy.ids_base import IDSBase, IDSDoc -from imaspy.ids_coordinates import IDSCoordinates -from imaspy.ids_data_type import IDSDataType -from imaspy.ids_metadata import IDSMetadata +from imas.ids_base import IDSBase, IDSDoc +from imas.ids_coordinates import IDSCoordinates +from imas.ids_data_type import IDSDataType +from imas.ids_metadata import IDSMetadata logger = logging.getLogger(__name__) @@ -241,7 +241,7 @@ def _cast_value(self, value): @property def data_type(self): - """Combine imaspy ids_type and ndims to AL data_type""" + """Combine imas ids_type and ndims to AL data_type""" return "{!s}_{!s}D".format(self.metadata.data_type.value, self.metadata.ndim) def _validate(self) -> None: diff --git a/imaspy/ids_struct_array.py b/imas/ids_struct_array.py similarity index 93% rename from imaspy/ids_struct_array.py rename to imas/ids_struct_array.py index bc7dd92c..9f79a130 100644 --- a/imaspy/ids_struct_array.py +++ b/imas/ids_struct_array.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """IDS StructArray represents an Array of Structures in the IDS tree. 
""" @@ -9,11 +9,11 @@ from xxhash import xxh3_64 -from imaspy.backends.imas_core.al_context import LazyALArrayStructContext -from imaspy.ids_base import IDSBase, IDSDoc -from imaspy.ids_coordinates import IDSCoordinates -from imaspy.ids_identifiers import IDSIdentifier -from imaspy.ids_metadata import IDSMetadata +from imas.backends.imas_core.al_context import LazyALArrayStructContext +from imas.ids_base import IDSBase, IDSDoc +from imas.ids_coordinates import IDSCoordinates +from imas.ids_identifiers import IDSIdentifier +from imas.ids_metadata import IDSMetadata logger = logging.getLogger(__name__) @@ -107,7 +107,7 @@ def _load(self, item: Optional[int]) -> None: if item < 0 or item >= len(self): raise IndexError("list index out of range") # Create the requested item - from imaspy.ids_structure import IDSStructure + from imas.ids_structure import IDSStructure element = self.value[item] = IDSStructure(self, self.metadata) element._set_lazy_context(self._lazy_ctx.iterate_to_index(item)) @@ -115,7 +115,7 @@ def _load(self, item: Optional[int]) -> None: @property def _element_structure(self): """Prepare an element structure JIT""" - from imaspy.ids_structure import IDSStructure + from imas.ids_structure import IDSStructure struct = IDSStructure(self, self.metadata) return struct @@ -194,7 +194,7 @@ def resize(self, nbelt: int, keep: bool = False): cur = len(self.value) if nbelt > cur: # Create new structures to fill this AoS with - from imaspy.ids_structure import IDSStructure + from imas.ids_structure import IDSStructure new_els = [IDSStructure(self, self.metadata) for _ in range(nbelt - cur)] if cur: diff --git a/imaspy/ids_structure.py b/imas/ids_structure.py similarity index 91% rename from imaspy/ids_structure.py rename to imas/ids_structure.py index dd4d42ff..f55755fc 100644 --- a/imaspy/ids_structure.py +++ b/imas/ids_structure.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. 
+# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """A structure in an IDS """ @@ -10,13 +10,13 @@ from xxhash import xxh3_64 -from imaspy.backends.imas_core.al_context import LazyALContext -from imaspy.ids_base import IDSBase, IDSDoc -from imaspy.ids_identifiers import IDSIdentifier -from imaspy.ids_metadata import IDSDataType, IDSMetadata -from imaspy.ids_path import IDSPath -from imaspy.ids_primitive import IDSPrimitive -from imaspy.ids_struct_array import IDSStructArray +from imas.backends.imas_core.al_context import LazyALContext +from imas.ids_base import IDSBase, IDSDoc +from imas.ids_identifiers import IDSIdentifier +from imas.ids_metadata import IDSDataType, IDSMetadata +from imas.ids_path import IDSPath +from imas.ids_primitive import IDSPrimitive +from imas.ids_struct_array import IDSStructArray logger = logging.getLogger(__name__) @@ -63,7 +63,7 @@ def __getattr__(self, name): child = child_meta._node_type(self, child_meta) self.__dict__[name] = child # bypass setattr logic below: avoid recursion if self._lazy: # lazy load the child - from imaspy.backends.imas_core.db_entry_helpers import _get_child + from imas.backends.imas_core.db_entry_helpers import _get_child _get_child(child, self._lazy_context) return child @@ -98,7 +98,7 @@ def _assign_identifier(self, value: Union[IDSIdentifier, str, int]) -> None: def __setattr__(self, key, value): """ - 'Smart' setting of attributes. To be able to warn the user on imaspy + 'Smart' setting of attributes. To be able to warn the user on imas IDS interaction time, instead of on database put time Only try to cast user-facing attributes, as core developers might want to always bypass this mechanism (I know I do!) 
@@ -162,7 +162,7 @@ def __eq__(self, other) -> bool: return True if not isinstance(other, IDSStructure): return False if isinstance(other, IDSBase) else NotImplemented - from imaspy.util import idsdiffgen # local import to avoid circular import + from imas.util import idsdiffgen # local import to avoid circular import for _ in idsdiffgen(self, other): return False # Not equal if there is any difference @@ -211,8 +211,8 @@ def iter_nonempty_(self, *, accept_lazy=False) -> Generator[IDSBase, None, None] .. code-block:: python :caption: ``iter_nonempty_`` for fully loaded IDSs - >>> import imaspy.training - >>> entry = imaspy.training.get_training_db_entry() + >>> import imas.training + >>> entry = imas.training.get_training_db_entry() >>> cp = entry.get("core_profiles") >>> list(cp.iter_nonempty_()) [ @@ -225,8 +225,8 @@ def iter_nonempty_(self, *, accept_lazy=False) -> Generator[IDSBase, None, None] .. code-block:: python :caption: ``iter_nonempty_`` for lazy-loaded IDSs - >>> import imaspy.training - >>> entry = imaspy.training.get_training_db_entry() + >>> import imas.training + >>> entry = imas.training.get_training_db_entry() >>> cp = entry.get("core_profiles", lazy=True) >>> list(cp.iter_nonempty_()) RuntimeError: Iterating over non-empty nodes of a lazy loaded IDS will @@ -252,9 +252,9 @@ def iter_nonempty_(self, *, accept_lazy=False) -> Generator[IDSBase, None, None] "Iterating over non-empty nodes of a lazy loaded IDS will skip nodes " "that are not loaded. Set accept_lazy=True to continue. " "See the documentation for more information: " - "https://sharepoint.iter.org/departments/POP/CM/IMDesign/" - "Code%20Documentation/IMASPy-doc/generated/imaspy.ids_structure." - "IDSStructure.html#imaspy.ids_structure.IDSStructure.iter_nonempty_" + "https://imas-python.readthedocs.io/en/latest" + "/generated/imas.ids_structure." 
+ "IDSStructure.html#imas.ids_structure.IDSStructure.iter_nonempty_" ) for child in self._children: if child in self.__dict__: diff --git a/imaspy/ids_toplevel.py b/imas/ids_toplevel.py similarity index 87% rename from imaspy/ids_toplevel.py rename to imas/ids_toplevel.py index faeb6366..d0fa8e0b 100644 --- a/imaspy/ids_toplevel.py +++ b/imas/ids_toplevel.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """Represents a Top-level IDS (like ``core_profiles``, ``equilibrium``, etc) """ @@ -11,11 +11,11 @@ import numpy -import imaspy -from imaspy.backends.imas_core.imas_interface import ll_interface, lowlevel -from imaspy.exception import ValidationError -from imaspy.ids_base import IDSDoc -from imaspy.ids_defs import ( +import imas +from imas.backends.imas_core.imas_interface import ll_interface, lowlevel +from imas.exception import ValidationError +from imas.ids_base import IDSDoc +from imas.ids_defs import ( ASCII_BACKEND, ASCII_SERIALIZER_PROTOCOL, CHAR_DATA, @@ -26,12 +26,12 @@ IDS_TIME_MODES, needs_imas, ) -from imaspy.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata -from imaspy.ids_structure import IDSStructure +from imas.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata +from imas.ids_structure import IDSStructure if TYPE_CHECKING: - from imaspy.db_entry import DBEntry - from imaspy.ids_factory import IDSFactory + from imas.db_entry import DBEntry + from imas.ids_factory import IDSFactory _FLEXBUFFERS_URI = "imas:flexbuffers?path=/" @@ -48,14 +48,14 @@ def _serializer_tmpdir() -> str: def _create_serialization_dbentry(filepath: str, dd_version: str) -> "DBEntry": """Create a temporary DBEntry for use in the ASCII serialization protocol.""" if ll_interface._al_version.major == 4: # AL4 compatibility - dbentry = imaspy.DBEntry( + dbentry = 
imas.DBEntry( ASCII_BACKEND, "serialize", 1, 1, "serialize", dd_version=dd_version ) dbentry.create(options=f"-fullpath {filepath}") return dbentry else: # AL5 path = Path(filepath) - return imaspy.DBEntry( + return imas.DBEntry( f"imas:ascii?path={path.parent};filename={path.name}", "w", dd_version=dd_version, @@ -119,7 +119,7 @@ def serialize(self, protocol=None) -> bytes: .. code-block: python - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # fill core_profiles with data ... @@ -128,7 +128,7 @@ def serialize(self, protocol=None) -> bytes: # For example, send `data` to another program with libmuscle. # Then deserialize on the receiving side: - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() core_profiles.deserialize(data) # Use core_profiles: ... @@ -137,9 +137,9 @@ def serialize(self, protocol=None) -> bytes: protocol: Which serialization protocol to use. Uses ``DEFAULT_SERIALIZER_PROTOCOL`` when none specified. One of: - - :const:`~imaspy.ids_defs.ASCII_SERIALIZER_PROTOCOL` - - :const:`~imaspy.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` - - :const:`~imaspy.ids_defs.DEFAULT_SERIALIZER_PROTOCOL` + - :const:`~imas.ids_defs.ASCII_SERIALIZER_PROTOCOL` + - :const:`~imas.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` + - :const:`~imas.ids_defs.DEFAULT_SERIALIZER_PROTOCOL` The flexbuffers serializer protocol is only available when using ``imas_core >= 5.3``. It's the default protocol when it is available. 
@@ -169,7 +169,7 @@ def serialize(self, protocol=None) -> bytes: if protocol == FLEXBUFFERS_SERIALIZER_PROTOCOL: # Note: FLEXBUFFERS_SERIALIZER_PROTOCOL is None when imas_core doesn't # support this format - with imaspy.DBEntry(_FLEXBUFFERS_URI, "w", dd_version=dd_version) as entry: + with imas.DBEntry(_FLEXBUFFERS_URI, "w", dd_version=dd_version) as entry: entry.put(self) # Read serialized buffer status, buffer = lowlevel.al_read_data_array( @@ -207,7 +207,7 @@ def deserialize(self, data: bytes) -> None: if os.path.exists(filepath): os.unlink(filepath) elif protocol == FLEXBUFFERS_SERIALIZER_PROTOCOL: - with imaspy.DBEntry(_FLEXBUFFERS_URI, "r", dd_version=dd_version) as entry: + with imas.DBEntry(_FLEXBUFFERS_URI, "r", dd_version=dd_version) as entry: # Write serialized buffer to the flexbuffers backend buffer = numpy.frombuffer(data, dtype=numpy.int8) lowlevel._al_write_data_array( @@ -253,16 +253,16 @@ def validate(self): Example: - >>> core_profiles = imaspy.IDSFactory().core_profiles() + >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.validate() # Did not set homogeneous_time [...] - imaspy.exception.ValidationError: Invalid value for ids_properties/homogeneous_time: IDSPrimitive("/core_profiles/ids_properties/homogeneous_time", -999999999) - >>> core_profiles.ids_properties.homogeneous_time = imaspy.ids_defs.IDS_TIME_MODE_HOMOGENEOUS + imas.exception.ValidationError: Invalid value for ids_properties/homogeneous_time: IDSPrimitive("/core_profiles/ids_properties/homogeneous_time", -999999999) + >>> core_profiles.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> core_profiles.validate() # No error: IDS is valid >>> core_profiles.profiles_1d.resize(1) >>> core_profiles.validate() [...] - imaspy.exception.CoordinateError: Dimension 1 of element profiles_1d has incorrect size 1. Expected size is 0 (size of coordinate time). + imas.exception.CoordinateError: Dimension 1 of element profiles_1d has incorrect size 1. 
Expected size is 0 (size of coordinate time). >>> core_profiles.time = [1] >>> core_profiles.validate() # No error: IDS is valid @@ -303,7 +303,7 @@ def get(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None """Get data from AL backend storage format. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.get `. + See :py:meth:`DBEntry.get `. """ if db_entry is None: raise NotImplementedError() @@ -320,7 +320,7 @@ def getSlice( """Get a slice from the backend. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.get_slice `. + See :py:meth:`DBEntry.get_slice `. """ if db_entry is None: raise NotImplementedError() @@ -339,7 +339,7 @@ def putSlice( """Put a single slice into the backend. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.put_slice `. + See :py:meth:`DBEntry.put_slice `. """ if db_entry is None: raise NotImplementedError() @@ -352,7 +352,7 @@ def deleteData( """Delete AL backend storage data. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.delete_data `. + See :py:meth:`DBEntry.delete_data `. """ if db_entry is None: raise NotImplementedError() @@ -363,7 +363,7 @@ def put(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None """Put this IDS to the backend. This method exists for API compatibility with the IMAS python HLI. - See :py:meth:`DBEntry.put `. + See :py:meth:`DBEntry.put `. """ if db_entry is None: raise NotImplementedError() diff --git a/imaspy/setup_logging.py b/imas/setup_logging.py similarity index 83% rename from imaspy/setup_logging.py rename to imas/setup_logging.py index 94d49058..e7cfcd5f 100644 --- a/imaspy/setup_logging.py +++ b/imas/setup_logging.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Create a default log handler when IMASPy is imported. 
+# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Create a default log handler when imas-python is imported. """ import logging @@ -37,7 +37,7 @@ def format(self, record): def test_messages(): """Print out a message on each logging level""" - logger = logging.getLogger("imaspy.testlogger") + logger = logging.getLogger("imas.testlogger") logger.debug("Debug message") logger.info("Info message") logger.warning("Warning message") @@ -54,10 +54,10 @@ def connect_formatter(logger): # Log to console by default, and output it all -logger = logging.getLogger("imaspy") +logger = logging.getLogger("imas") connect_formatter(logger) -loglevel = os.getenv("IMASPY_LOGLEVEL") or "INFO" +loglevel = os.getenv("IMAS_LOGLEVEL") or "INFO" logger.setLevel(loglevel) if __name__ == "__main__": diff --git a/imaspy/test/test_all_dd_versions.py b/imas/test/test_all_dd_versions.py similarity index 89% rename from imaspy/test/test_all_dd_versions.py rename to imas/test/test_all_dd_versions.py index 25514eac..c30a7bca 100644 --- a/imaspy/test/test_all_dd_versions.py +++ b/imas/test/test_all_dd_versions.py @@ -1,7 +1,7 @@ import pytest -from imaspy import dd_zip, ids_metadata -from imaspy.ids_factory import IDSFactory +from imas import dd_zip, ids_metadata +from imas.ids_factory import IDSFactory @pytest.fixture diff --git a/imaspy/test/test_cli.py b/imas/test/test_cli.py similarity index 77% rename from imaspy/test/test_cli.py rename to imas/test/test_cli.py index 604a7f7e..8ee95e27 100644 --- a/imaspy/test/test_cli.py +++ b/imas/test/test_cli.py @@ -4,15 +4,15 @@ from click.testing import CliRunner from packaging.version import Version -from imaspy.backends.imas_core.imas_interface import ll_interface -from imaspy.command.cli import print_version -from imaspy.command.db_analysis import analyze_db, process_db_analysis -from imaspy.db_entry import DBEntry -from imaspy.test.test_helpers import fill_with_random_data +from 
imas.backends.imas_core.imas_interface import ll_interface +from imas.command.cli import print_version +from imas.command.db_analysis import analyze_db, process_db_analysis +from imas.db_entry import DBEntry +from imas.test.test_helpers import fill_with_random_data @pytest.mark.cli -def test_imaspy_version(): +def test_imas_version(): runner = CliRunner() result = runner.invoke(print_version) assert result.exit_code == 0 @@ -33,7 +33,7 @@ def test_db_analysis(tmp_path): analyze_result = runner.invoke(analyze_db, [str(db_path)]) assert analyze_result.exit_code == 0, analyze_result.output - outfile = Path(td) / "imaspy-db-analysis.json.gz" + outfile = Path(td) / "imas-db-analysis.json.gz" assert outfile.exists() # Show detailed output for core_profiles, and then an empty input to exit cleanly: diff --git a/imaspy/test/test_dbentry.py b/imas/test/test_dbentry.py similarity index 66% rename from imaspy/test/test_dbentry.py rename to imas/test/test_dbentry.py index 2d82af36..9ba43e00 100644 --- a/imaspy/test/test_dbentry.py +++ b/imas/test/test_dbentry.py @@ -1,21 +1,21 @@ import pytest -import imaspy -import imaspy.ids_defs -from imaspy.backends.imas_core.imas_interface import has_imas, ll_interface -from imaspy.exception import UnknownDDVersion -from imaspy.test.test_helpers import compare_children, open_dbentry +import imas +import imas.ids_defs +from imas.backends.imas_core.imas_interface import has_imas, ll_interface +from imas.exception import UnknownDDVersion +from imas.test.test_helpers import compare_children, open_dbentry def test_dbentry_contextmanager(requires_imas): - entry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "test", 1, 1) + entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) entry.create() ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 ids.ids_properties.comment = "test context manager" entry.put(ids) - with imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "test", 1, 1) as entry2: + with 
imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) as entry2: ids2 = entry2.get("core_profiles") assert ids2.ids_properties.comment == ids.ids_properties.comment @@ -28,13 +28,13 @@ def test_dbentry_contextmanager(requires_imas): reason="URI API not available", ) def test_dbentry_contextmanager_uri(tmp_path): - entry = imaspy.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "w") + entry = imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "w") ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 ids.ids_properties.comment = "test context manager" entry.put(ids) - with imaspy.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "r") as entry2: + with imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "r") as entry2: ids2 = entry2.get("core_profiles") assert ids2.ids_properties.comment == ids.ids_properties.comment @@ -42,7 +42,7 @@ def test_dbentry_contextmanager_uri(tmp_path): assert entry2._dbe_impl is None -def get_entry_attrs(entry: imaspy.DBEntry): +def get_entry_attrs(entry: imas.DBEntry): return ( entry.backend_id, entry.db_name, @@ -55,35 +55,35 @@ def get_entry_attrs(entry: imaspy.DBEntry): def test_dbentry_constructor(): with pytest.raises(TypeError): - imaspy.DBEntry() # no arguments + imas.DBEntry() # no arguments with pytest.raises(TypeError): - imaspy.DBEntry(1) # not enough arguments + imas.DBEntry(1) # not enough arguments with pytest.raises(TypeError): - imaspy.DBEntry(1, 2, 3) # not enough arguments + imas.DBEntry(1, 2, 3) # not enough arguments with pytest.raises(TypeError): - imaspy.DBEntry(1, 2, 3, 4, 5, 6, 7) # too many arguments + imas.DBEntry(1, 2, 3, 4, 5, 6, 7) # too many arguments with pytest.raises(TypeError): - imaspy.DBEntry("test", uri="test") # Double URI argument + imas.DBEntry("test", uri="test") # Double URI argument with pytest.raises(TypeError): - imaspy.DBEntry(1, 2, 3, 4, shot=5) # Multiple values for argument pulse + imas.DBEntry(1, 2, 3, 4, shot=5) # Multiple values for argument pulse with 
pytest.raises(ValueError): - imaspy.DBEntry(1, 2, pulse=3, run=4, shot=5) # Both shot and pulse + imas.DBEntry(1, 2, pulse=3, run=4, shot=5) # Both shot and pulse - entry = imaspy.DBEntry(1, 2, 3, 4) + entry = imas.DBEntry(1, 2, 3, 4) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) - entry = imaspy.DBEntry(backend_id=1, db_name=2, pulse=3, run=4) + entry = imas.DBEntry(backend_id=1, db_name=2, pulse=3, run=4) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) # Shot behaves as alias of pulse - entry = imaspy.DBEntry(backend_id=1, db_name=2, shot=3, run=4) + entry = imas.DBEntry(backend_id=1, db_name=2, shot=3, run=4) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, None) - entry = imaspy.DBEntry(1, 2, 3, 4, 5, 6) + entry = imas.DBEntry(1, 2, 3, 4, 5, 6) assert get_entry_attrs(entry) == (1, 2, 3, 4, 5, 6) - entry = imaspy.DBEntry(1, 2, 3, 4, data_version=6) + entry = imas.DBEntry(1, 2, 3, 4, data_version=6) assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): - entry = open_dbentry(imaspy.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) + entry = open_dbentry(imas.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 ids.ids_properties.comment = "Test unknown DD version" diff --git a/imas/test/test_dd_helpers.py b/imas/test/test_dd_helpers.py new file mode 100644 index 00000000..bb9d0d11 --- /dev/null +++ b/imas/test/test_dd_helpers.py @@ -0,0 +1,56 @@ +from pathlib import Path +import shutil +import pytest +import os +import zipfile + +from imas.dd_helpers import prepare_data_dictionaries, _idsdef_zip_relpath, _build_dir + +_idsdef_unzipped_relpath = Path("idsdef_unzipped") + + +def test_prepare_data_dictionaries(): + prepare_data_dictionaries() + assert os.path.exists( + _idsdef_zip_relpath + ), f"IDSDef.zip file does not exist at path: {_idsdef_zip_relpath}" + + expected_xml_files = [ + _build_dir / 
"3.40.0.xml", + _build_dir / "3.41.0.xml", + _build_dir / "3.42.0.xml", + _build_dir / "4.0.0.xml", + ] + + for xml_file in expected_xml_files: + assert os.path.exists(xml_file), f"{xml_file} does not exist" + + with zipfile.ZipFile(_idsdef_zip_relpath, "r") as zip_ref: + zip_ref.extractall(_idsdef_unzipped_relpath) + + expected_ids_directories = [ + _idsdef_unzipped_relpath / "data-dictionary" / "3.40.0.xml", + _idsdef_unzipped_relpath / "data-dictionary" / "3.41.0.xml", + _idsdef_unzipped_relpath / "data-dictionary" / "3.42.0.xml", + _idsdef_unzipped_relpath / "data-dictionary" / "4.0.0.xml", + _idsdef_unzipped_relpath + / "identifiers" + / "core_sources" + / "core_source_identifier.xml", + _idsdef_unzipped_relpath + / "identifiers" + / "equilibrium" + / "equilibrium_profiles_2d_identifier.xml", + ] + + for file_path in expected_ids_directories: + assert os.path.exists( + file_path + ), f"Expected_ids_directories {file_path} does not exist" + + if _build_dir.exists(): + shutil.rmtree(_idsdef_unzipped_relpath) + + +if __name__ == "__main__": + pytest.main() diff --git a/imas/test/test_dd_helpers_old.py b/imas/test/test_dd_helpers_old.py new file mode 100644 index 00000000..1c3d3772 --- /dev/null +++ b/imas/test/test_dd_helpers_old.py @@ -0,0 +1,92 @@ +import pytest +from pathlib import Path +from unittest.mock import patch, MagicMock +from imas.dd_helpers import transform_with_saxonche, build_data_dictionary +import logging + + +def test_transform_with_saxonche_success(tmp_path): + """Test that transform_with_saxonche performs transformation successfully.""" + input_xml = tmp_path / "input.xml" + xsl_file = tmp_path / "transform.xsl" + output_file = tmp_path / "output.xml" + + # Create dummy input files + input_xml.write_text("Test") + xsl_file.write_text( + """ + + + + + + """ + ) + + transform_with_saxonche(input_xml, xsl_file, output_file) + + # Assert output + assert output_file.exists() + assert ( + output_file.read_text() + == "Test" + ) + + +def 
test_transform_with_saxonche_failure(tmp_path): + """Test that transform_with_saxonche raises an error for invalid input.""" + input_xml = tmp_path / "input.xml" + xsl_file = tmp_path / "transform.xsl" + output_file = tmp_path / "output.xml" + + # Create invalid input files + input_xml.write_text("Test") + xsl_file.write_text("INVALID XSLT") + + with pytest.raises(Exception): + transform_with_saxonche(input_xml, xsl_file, output_file) + + +@patch("imas.dd_helpers.repo") +@patch("imas.dd_helpers.transform_with_saxonche") +def test_build_data_dictionary(mock_transform, mock_repo, tmp_path): + """Test build_data_dictionary function.""" + mock_repo.git.checkout = MagicMock() + + tag = "v1.0.0" + result_xml = tmp_path / f"{tag}.xml" + + with patch("imas.dd_helpers._build_dir", tmp_path): + build_data_dictionary(mock_repo, tag) + + # Verify the repo was checked out to the correct tag + mock_repo.git.checkout.assert_called_once_with(tag, force=True) + + # Verify the transform_with_saxonche function was called + mock_transform.assert_called_once() + + +def test_prepare_data_dictionaries(monkeypatch, tmp_path): + """Integration test for prepare_data_dictionaries.""" + from imas.dd_helpers import prepare_data_dictionaries + + class MockRepo: + tags = ["v3.21.2", "v3.22.0"] + + def git(self): + return MagicMock() + + mock_repo = MockRepo() + + def mock_get_data_dictionary_repo(): + return mock_repo + + monkeypatch.setattr( + "imas.dd_helpers.get_data_dictionary_repo", mock_get_data_dictionary_repo + ) + + with patch("imas.dd_helpers._build_dir", tmp_path): + prepare_data_dictionaries() + + # Check that the expected output files are created + assert len(list(tmp_path.glob("*.xml"))) == len(mock_repo.tags) diff --git a/imaspy/test/test_dd_zip.py b/imas/test/test_dd_zip.py similarity index 87% rename from imaspy/test/test_dd_zip.py rename to imas/test/test_dd_zip.py index 243256e5..88b5c420 100644 --- a/imaspy/test/test_dd_zip.py +++ b/imas/test/test_dd_zip.py @@ -1,8 +1,8 @@ 
import pytest from packaging.version import InvalidVersion -from imaspy.dd_zip import get_dd_xml, parse_dd_version -from imaspy.exception import UnknownDDVersion +from imas.dd_zip import get_dd_xml, parse_dd_version +from imas.exception import UnknownDDVersion def test_known_version(): diff --git a/imaspy/test/test_deepcopy.py b/imas/test/test_deepcopy.py similarity index 63% rename from imaspy/test/test_deepcopy.py rename to imas/test/test_deepcopy.py index 5e889d8e..07cd5645 100644 --- a/imaspy/test/test_deepcopy.py +++ b/imas/test/test_deepcopy.py @@ -1,9 +1,9 @@ import copy -import imaspy -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.test.test_helpers import compare_children, fill_with_random_data +import imas +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.test.test_helpers import compare_children, fill_with_random_data def validate_parent(node): @@ -14,7 +14,7 @@ def validate_parent(node): def test_deepcopy(): - factory = imaspy.IDSFactory() + factory = imas.IDSFactory() cp = factory.core_profiles() fill_with_random_data(cp) diff --git a/imaspy/test/test_exception.py b/imas/test/test_exception.py similarity index 67% rename from imaspy/test/test_exception.py rename to imas/test/test_exception.py index e27d21ef..37bebfce 100644 --- a/imaspy/test/test_exception.py +++ b/imas/test/test_exception.py @@ -1,12 +1,12 @@ import pytest -import imaspy -from imaspy.backends.imas_core.imas_interface import ll_interface +import imas +from imas.backends.imas_core.imas_interface import ll_interface def test_catch_al_exception(requires_imas): # Do something which lets the lowlevel Cython interface throw an ALException # Ensure we can catch it: - with pytest.raises(imaspy.exception.ALException): + with pytest.raises(imas.exception.ALException): # Try to write an unknown data type (object) ll_interface.write_data(-1, "X", "", object()) diff --git 
a/imaspy/test/test_get_sample.py b/imas/test/test_get_sample.py similarity index 98% rename from imaspy/test/test_get_sample.py rename to imas/test/test_get_sample.py index 0f5fed3e..c91ad356 100644 --- a/imaspy/test/test_get_sample.py +++ b/imas/test/test_get_sample.py @@ -1,10 +1,10 @@ import numpy as np import pytest -import imaspy -from imaspy.backends.imas_core.imas_interface import lowlevel -from imaspy.exception import DataEntryException -from imaspy.ids_defs import ( +import imas +from imas.backends.imas_core.imas_interface import lowlevel +from imas.exception import DataEntryException +from imas.ids_defs import ( CLOSEST_INTERP, HDF5_BACKEND, IDS_TIME_MODE_HETEROGENEOUS, @@ -28,7 +28,7 @@ def test_db_uri(backend, worker_id, tmp_path_factory): tmp_path = tmp_path_factory.mktemp(f"testdb.{worker_id}") backend_str = {HDF5_BACKEND: "hdf5", MDSPLUS_BACKEND: "mdsplus"}[backend] uri = f"imas:{backend_str}?path={tmp_path}" - entry = imaspy.DBEntry(uri, "x", dd_version="4.0.0") + entry = imas.DBEntry(uri, "x", dd_version="4.0.0") # Homogeneous core profiles: cp = entry.factory.core_profiles() @@ -102,7 +102,7 @@ def test_db_uri(backend, worker_id, tmp_path_factory): @pytest.fixture() def entry(test_db_uri): - return imaspy.DBEntry(test_db_uri, "r", dd_version="4.0.0") + return imas.DBEntry(test_db_uri, "r", dd_version="4.0.0") def test_invalid_arguments(entry): diff --git a/imaspy/test/test_hash.py b/imas/test/test_hash.py similarity index 69% rename from imaspy/test/test_hash.py rename to imas/test/test_hash.py index 1c82ecd3..903135ff 100644 --- a/imaspy/test/test_hash.py +++ b/imas/test/test_hash.py @@ -4,12 +4,12 @@ import pytest from xxhash import xxh3_64_digest -import imaspy +import imas @pytest.fixture def minimal(ids_minimal_types): - return imaspy.IDSFactory(xml_path=ids_minimal_types).new("minimal") + return imas.IDSFactory(xml_path=ids_minimal_types).new("minimal") def test_hash_str0d(minimal): @@ -17,7 +17,7 @@ def test_hash_str0d(minimal): 
minimal.str_0d = "Test str_0d hash" expected = xxh3_64_digest(s.encode("utf-8")) assert expected == b"r\x9d\x8dC.JN\x0e" - assert imaspy.util.calc_hash(minimal.str_0d) == expected + assert imas.util.calc_hash(minimal.str_0d) == expected def test_hash_str1d(minimal): @@ -30,7 +30,7 @@ def test_hash_str1d(minimal): hashes = list(map(xxh3_64_digest, string_list)) expected = xxh3_64_digest(struct.pack(" imaspy.DBEntry: - """Open and return an ``imaspy.DBEntry`` pointing to the training data.""" - return _initialize_training_db(imaspy.DBEntry) +def get_training_db_entry() -> imas.DBEntry: + """Open and return an ``imas.DBEntry`` pointing to the training data.""" + return _initialize_training_db(imas.DBEntry) def get_training_imas_db_entry(): diff --git a/imaspy/util.py b/imas/util.py similarity index 85% rename from imaspy/util.py rename to imas/util.py index 24af400b..5ab69349 100644 --- a/imaspy/util.py +++ b/imas/util.py @@ -1,6 +1,6 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Collection of useful helper methods when working with IMASPy. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. +"""Collection of useful helper methods when working with imas-python. 
""" @@ -10,14 +10,14 @@ import numpy -from imaspy.db_entry import DBEntry -from imaspy.ids_base import IDSBase -from imaspy.ids_factory import IDSFactory -from imaspy.ids_metadata import IDSMetadata -from imaspy.ids_primitive import IDSInt0D, IDSPrimitive -from imaspy.ids_struct_array import IDSStructArray -from imaspy.ids_structure import IDSStructure -from imaspy.ids_toplevel import IDSToplevel +from imas.db_entry import DBEntry +from imas.ids_base import IDSBase +from imas.ids_factory import IDSFactory +from imas.ids_metadata import IDSMetadata +from imas.ids_primitive import IDSInt0D, IDSPrimitive +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -32,7 +32,7 @@ def visit_children( ) -> None: """Apply a function to node and its children - IMASPy objects generally live in a tree structure. Similar to Pythons + imas-python objects generally live in a tree structure. Similar to Pythons :py:func:`map`, this method can be used to apply a function to objects within this tree structure. @@ -50,13 +50,13 @@ def visit_children( visit_empty: When set to True, also apply the function to empty nodes. accept_lazy: See documentation of :py:param:`iter_nonempty_() - `. Only + `. Only relevant when :param:`visit_empty` is False. Example: .. code-block:: python - # Print all filled leaf nodes in a given IMASPy IDSToplevel + # Print all filled leaf nodes in a given imas-python IDSToplevel visit_children(print, toplevel) See Also: @@ -80,9 +80,9 @@ def tree_iter( accept_lazy: bool = False, include_node: bool = False, ) -> Iterator[IDSBase]: - """Tree iterator for IMASPy structures. + """Tree iterator for imas-python structures. - Iterate (depth-first) through the whole subtree of an IMASPy structure. + Iterate (depth-first) through the whole subtree of an imas-python structure. Args: node: Node to start iterating from. 
@@ -95,7 +95,7 @@ def tree_iter( visit_empty: When set to True, iterate over empty nodes. accept_lazy: See documentation of :py:param:`iter_nonempty_() - `. Only + `. Only relevant when :param:`visit_empty` is False. include_node: When set to True the iterator will include the provided node (if the node is not a leaf node, it is included only when :param:`leaf_only` is @@ -104,7 +104,7 @@ def tree_iter( Example: .. code-block:: python - # Iterate over all filled leaf nodes in a given IMASPy IDSToplevel + # Iterate over all filled leaf nodes in a given imas-python IDSToplevel for node in tree_iter(toplevel): print(node) @@ -142,7 +142,7 @@ def idsdiff(struct1: IDSStructure, struct2: IDSStructure) -> None: struct1: IDS or structure within an IDS. struct2: IDS or structure within an IDS to compare against :param:`struct1`. """ - import imaspy._util as _util + import imas._util as _util _util.idsdiff_impl(struct1, struct2) @@ -161,7 +161,7 @@ def idsdiffgen( Keyword Args: accept_lazy: See documentation of :py:param:`iter_nonempty_() - `. + `. Yields: (description_or_path, node1, node2): tuple describing a difference: @@ -250,7 +250,7 @@ def _idsdiffgen( def resample(node, old_time, new_time, homogeneousTime=None, inplace=False, **kwargs): """Resample all primitives in their time dimension to a new time array""" - import imaspy._util as _util + import imas._util as _util return _util.resample_impl( node, old_time, new_time, homogeneousTime, inplace, **kwargs @@ -268,7 +268,7 @@ def print_tree(structure, hide_empty_nodes=True): structure: IDS structure to print hide_empty_nodes: Show or hide nodes without value. """ - import imaspy._util as _util + import imas._util as _util return _util.print_tree_impl(structure, hide_empty_nodes) @@ -289,16 +289,16 @@ def print_metadata_tree( Examples: .. 
code-block:: python - core_profiles = imaspy.IDSFactory().core_profiles() + core_profiles = imas.IDSFactory().core_profiles() # Print tree of the core_profiles IDS - imaspy.util.print_metadata_tree(core_profiles) + imas.util.print_metadata_tree(core_profiles) # Print descendants of the profiles_1d array of structure only: - imaspy.util.print_metadata_tree(core_profiles.metadata["profiles_1d"]) + imas.util.print_metadata_tree(core_profiles.metadata["profiles_1d"]) # Print descendants of the profiles_1d/electrons structure only: electrons_metadata = core_profiles.metadata["profiles_1d/electrons"] - imaspy.util.print_metadata_tree(electrons_metadata) + imas.util.print_metadata_tree(electrons_metadata) """ - import imaspy._util as _util + import imas._util as _util return _util.print_metadata_tree_impl(structure, maxdepth) @@ -308,7 +308,7 @@ def inspect(ids_node, hide_empty_nodes=False): Inspired by `rich.inspect`, but customized for IDS specifics. """ - import imaspy._util as _util + import imas._util as _util return _util.inspect_impl(ids_node, hide_empty_nodes) @@ -327,9 +327,9 @@ def find_paths(node: IDSBase, query: str) -> List[str]: A list of matching paths. Example: - >>> factory = imaspy.IDSFactory() + >>> factory = imas.IDSFactory() >>> core_profiles = factory.new("core_profiles") - >>> imaspy.util.find_paths(core_profiles, "(^|/)time$") + >>> imas.util.find_paths(core_profiles, "(^|/)time$") ['profiles_1d/time', 'profiles_2d/time', 'time'] """ dd_element = node.metadata._structure_xml @@ -401,10 +401,10 @@ def calc_hash(node: IDSBase) -> bytes: Example: .. code-block:: python - cp = imaspy.IDSFactory().core_profiles() + cp = imas.IDSFactory().core_profiles() cp.ids_properties.homogeneous_time = 0 - print(imaspy.util.calc_hash(cp).hex()) # 3b9b929756a242fd + print(imas.util.calc_hash(cp).hex()) # 3b9b929756a242fd """ return node._xxhash() @@ -421,17 +421,17 @@ def get_parent(node: IDSBase) -> Optional[IDSBase]: Example: .. 
code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_parent(cp.profiles_1d[0].electrons.temperature) + >>> imas.util.get_parent(cp.profiles_1d[0].electrons.temperature) - >>> imaspy.util.get_parent(cp.profiles_1d[0].electrons) + >>> imas.util.get_parent(cp.profiles_1d[0].electrons) - >>> imaspy.util.get_parent(cp.profiles_1d[0]) + >>> imas.util.get_parent(cp.profiles_1d[0]) - >>> imaspy.util.get_parent(cp.profiles_1d) + >>> imas.util.get_parent(cp.profiles_1d) - >>> imaspy.util.get_parent(cp) + >>> imas.util.get_parent(cp) >>> """ if isinstance(node, IDSToplevel): @@ -451,10 +451,10 @@ def get_time_mode(node: IDSBase) -> IDSInt0D: Example: .. code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.ids_properties.homogeneous_time = 0 >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_time_mode(cp.profiles_1d[0].electrons.temperature) + >>> imas.util.get_time_mode(cp.profiles_1d[0].electrons.temperature) int(0) """ @@ -473,9 +473,9 @@ def get_toplevel(node: IDSBase) -> IDSToplevel: Example: .. code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_toplevel(cp.profiles_1d[0].electrons.temperature) + >>> imas.util.get_toplevel(cp.profiles_1d[0].electrons.temperature) """ return node._toplevel @@ -502,9 +502,9 @@ def get_full_path(node: IDSBase) -> str: Example: .. 
code-block:: python - >>> cp = imaspy.IDSFactory().core_profiles() + >>> cp = imas.IDSFactory().core_profiles() >>> cp.profiles_1d.resize(2) - >>> imaspy.util.get_full_path(cp.profiles_1d[1].electrons.temperature) + >>> imas.util.get_full_path(cp.profiles_1d[1].electrons.temperature) 'profiles_1d[1]/electrons/temperature' """ return node._path @@ -514,7 +514,7 @@ def get_data_dictionary_version(obj: Union[IDSBase, DBEntry, IDSFactory]) -> str """Find out the version of the data dictionary definitions that this object uses. Args: - obj: Any IMASPy object that is data-dictionary dependent. + obj: Any imas-python object that is data-dictionary dependent. Returns: The data dictionary version, e.g. ``"3.38.1"``. diff --git a/imaspy/__init__.py b/imaspy/__init__.py deleted file mode 100644 index a1947718..00000000 --- a/imaspy/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. - -# isort: skip_file - -from packaging.version import Version as _V - -from . import _version - -__version__ = _version.get_versions()["version"] - -version = __version__ - -# Import logging _first_ -from . import setup_logging - -# Import main user API objects in the imaspy module -from .db_entry import DBEntry -from .ids_factory import IDSFactory -from .ids_convert import convert_ids -from .ids_identifiers import identifiers - -# Load the IMASPy IMAS AL/DD core -from . import ( - db_entry, - dd_helpers, - dd_zip, - util, -) - -PUBLISHED_DOCUMENTATION_ROOT = ( - "https://sharepoint.iter.org/departments/POP/CM/IMDesign/" - "Code%20Documentation/IMASPy-doc" -) -"""URL to the published documentation.""" -OLDEST_SUPPORTED_VERSION = _V("3.22.0") -"""Oldest Data Dictionary version that is supported by IMASPy.""" diff --git a/imaspy/__main__.py b/imaspy/__main__.py deleted file mode 100644 index 6e82a92d..00000000 --- a/imaspy/__main__.py +++ /dev/null @@ -1,17 +0,0 @@ -# This file is part of IMASPy. 
-# You should have received the IMASPy LICENSE file with this project. -"""Support module to run imaspy as a module: - -.. code-block:: bash - :caption: Options to run imaspy CLI interface - - # Run as a module (implemented in imaspy/__main__.py) - python -m imaspy - - # Run as "program" (see project.scripts in pyproject.toml) - imaspy -""" - -from imaspy.command.cli import cli - -cli() diff --git a/imaspy/_version.py b/imaspy/_version.py deleted file mode 100644 index 581df976..00000000 --- a/imaspy/_version.py +++ /dev/null @@ -1,658 +0,0 @@ - -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. -# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "" - cfg.versionfile_source = "imaspy/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % 
dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. 
- # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 
0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. 
- - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} diff --git a/imaspy/backends/imas_core/__init__.py b/imaspy/backends/imas_core/__init__.py deleted file mode 100644 index 8d266ac2..00000000 --- a/imaspy/backends/imas_core/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Subpackage implementing data access through the IMAS Access Layer Core. -""" diff --git a/imaspy/backends/netcdf/__init__.py b/imaspy/backends/netcdf/__init__.py deleted file mode 100644 index 93cc2a6f..00000000 --- a/imaspy/backends/netcdf/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""NetCDF IO support for IMASPy. Requires [netcdf] extra dependencies. 
-""" diff --git a/imaspy/dd_helpers.py b/imaspy/dd_helpers.py deleted file mode 100644 index 21a7775f..00000000 --- a/imaspy/dd_helpers.py +++ /dev/null @@ -1,300 +0,0 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. -"""Helper functions to build IDSDef.xml""" - -import logging -import os -import re -import shutil -import subprocess -from io import BytesIO -from pathlib import Path -from typing import Sequence, Tuple, Union -from urllib.request import urlopen -from zipfile import ZIP_DEFLATED, ZipFile - -from packaging.version import Version as V - -logger = logging.getLogger(__name__) - -_idsdef_zip_relpath = Path("imaspy/assets/IDSDef.zip") -_build_dir = Path("build") -_saxon_local_default_name = "saxon9he.jar" # For pre-3.30.0 builds -_saxon_regex = "saxon((.(?!test|xqj))*).jar" # Can be used in re.match - - -def prepare_data_dictionaries(): - """Build IMAS IDSDef.xml files for each tagged version in the DD repository - 1. Search for saxon or download it - 2. Clone the DD repository (ask for user/pass unless ssh key access is available) - 3. Generate IDSDef.xml and rename to IDSDef_${version}.xml - 4. 
Zip all these IDSDefs together and include in wheel - """ - from git import Repo - - saxon_jar_path = get_saxon() - repo: Repo = get_data_dictionary_repo() - if repo: - newest_version_and_tag = (V("0"), None) - for tag in repo.tags: - version_and_tag = (V(str(tag)), tag) - if V(str(tag)) > V("3.21.1"): - newest_version_and_tag = max(newest_version_and_tag, version_and_tag) - logger.debug("Building data dictionary version %s", tag) - build_data_dictionary(repo, tag, saxon_jar_path) - - logger.info("Creating zip file of DD versions") - - if _idsdef_zip_relpath.is_file(): - logger.warning("Overwriting '%s'", _idsdef_zip_relpath) - - with ZipFile( - _idsdef_zip_relpath, - mode="w", # this needs w, since zip can have multiple same entries - compression=ZIP_DEFLATED, - ) as dd_zip: - for filename in _build_dir.glob("[0-9]*.xml"): - arcname = Path("data-dictionary").joinpath(*filename.parts[1:]) - dd_zip.write(filename, arcname=arcname) - # Include identifiers from latest tag in zip file - repo.git.checkout(newest_version_and_tag[1], force=True) - # DD layout <= 4.0.0 - for filename in Path("data-dictionary").glob("*/*identifier.xml"): - arcname = Path("identifiers").joinpath(*filename.parts[1:]) - dd_zip.write(filename, arcname=arcname) - # DD layout > 4.0.0 - for filename in Path("data-dictionary").glob("schemas/*/*identifier.xml"): - arcname = Path("identifiers").joinpath(*filename.parts[2:]) - dd_zip.write(filename, arcname=arcname) - - -# pre 3.30.0 versions of the DD have the `saxon9he.jar` file path hardcoded -# in their makefiles. To be sure we can build everything, we link whatever -# saxon we can find to a local file called saxon9he.jar -def get_saxon() -> Path: - """Search for saxon*.jar and return the path or download it. - The DD build works by having Saxon in the CLASSPATH, called saxon9he.jar - until DD version 3.30.0. After 3.30.0 Saxon is found by the SAXONJARFILE env - variable. 
We will 'cheat' a little bit later by symlinking saxon9he.jar to - any version of saxon we found. - - Check: - 1. CLASSPATH - 2. `which saxon` - 3. /usr/share/java/* - 4. or download it - """ - - local_saxon_path = Path.cwd() / _saxon_local_default_name - if local_saxon_path.exists(): - logger.debug("Something already at '%s' not creating anew", local_saxon_path) - return local_saxon_path - - saxon_jar_origin = Path( - find_saxon_classpath() - or find_saxon_bin() - or find_saxon_jar() - or download_saxon() - ) - logger.info("Found Saxon JAR '%s'", saxon_jar_origin) - if saxon_jar_origin.name != _saxon_local_default_name: - try: - os.symlink(saxon_jar_origin, local_saxon_path) - except FileExistsError: - # Another process could have created the symlink while we were searching - logger.debug( - "Link '%s' exists, parallel process might've created it", - local_saxon_path, - ) - return local_saxon_path - return saxon_jar_origin - - -def find_saxon_jar(): - # This finds multiple versions on my system, but they are symlinked together. - # take the shortest one. - jars = [ - path - for path in Path("/usr/share/java").rglob("*") - if re.match(_saxon_regex, path.name, flags=re.IGNORECASE) - ] - - if jars: - saxon_jar_path = min(jars, key=lambda x: len(x.parts)) - return saxon_jar_path - - -def find_saxon_classpath(): - """Search JAVAs CLASSPATH for a Saxon .jar""" - classpath = os.environ.get("CLASSPATH", "") - for part in re.split(";|:", classpath): - if ( - part.endswith(".jar") - and part.split("/")[-1].startswith("saxon") - and "test" not in part - and "xqj" not in part - ): - return part - - -def find_saxon_bin(): - """Search for a saxon executable""" - saxon_bin = shutil.which("saxon") - if saxon_bin: - with open(saxon_bin, "r") as file: - for line in file: - saxon_jar_path = re.search("[^ ]*saxon[^ ]*jar", line) - if saxon_jar_path: - return saxon_jar_path.group(0) - - -def download_saxon(): - """Downloads a zipfile containing Saxon and extract it to the current dir. 
- Return the full path to Saxon. This can be any Saxon version. Scripts that - wrap this should probably manipulate either the name of this file, and/or - the CLASSPATH""" - - SAXON_PATH = "https://github.com/Saxonica/Saxon-HE/releases/download/SaxonHE10-9/SaxonHE10-9J.zip" # noqa: E501 - - resp = urlopen(SAXON_PATH, timeout=120.0) - zipfile = ZipFile(BytesIO(resp.read())) - # Zipfile has a list of the ZipInfos. Look inside for a Saxon jar - for file in zipfile.filelist: - if re.match(_saxon_regex, file.filename, flags=re.IGNORECASE): - path = zipfile.extract(file) - del zipfile - return path - raise FileNotFoundError(f"No Saxon jar found in given zipfile '{SAXON_PATH}'") - - -def get_data_dictionary_repo() -> Tuple[bool, bool]: - try: - import git # Import git here, the user might not have it! - except ModuleNotFoundError: - raise RuntimeError( - "Could not find 'git' module, try 'pip install gitpython'. \ - Will not build Data Dictionaries!" - ) - - # We need the actual source code (for now) so grab it from ITER - dd_repo_path = "data-dictionary" - - if "DD_DIRECTORY" in os.environ: - logger.info("Found DD_DIRECTORY, copying") - try: - shutil.copytree(os.environ["DD_DIRECTORY"], dd_repo_path) - except FileExistsError: - pass - else: - logger.info("Trying to pull data dictionary git repo from ITER") - - # Set up a bare repo and fetch the access-layer repository in it - os.makedirs(dd_repo_path, exist_ok=True) - try: - repo = git.Repo(dd_repo_path) - except git.exc.InvalidGitRepositoryError: - repo = git.Repo.init(dd_repo_path) - logger.info("Set up local git repository {!s}".format(repo)) - - try: - origin = repo.remote() - except ValueError: - dd_repo_url = "ssh://git@git.iter.org/imas/data-dictionary.git" - origin = repo.create_remote("origin", url=dd_repo_url) - logger.info("Set up remote '{!s}' linking to '{!s}'".format(origin, origin.url)) - - try: - origin.fetch(tags=True) - except git.exc.GitCommandError as ee: - logger.warning( - "Could not fetch tags from 
%s. Git reports:\n %s." "\nTrying to continue", - list(origin.urls), - ee, - ) - else: - logger.info("Remote tags fetched") - return repo - - -def _run_data_dictionary( - args: Union[Sequence, str], tag: str, saxon_jar_path: str -) -> int: - """Run in a Data Dictionary environment. Used e.g. to run the DD Makefile - - Args: - args: The "args" argument directly passed to :func:`subprocess.run`, - e.g. ``["make", "clean"]`` - tag: The DD version tag that will be printed on error - saxon_jar_path: The path to the saxon jar; Added to CLASSPATH and used - to generate the DD - """ - env = os.environ.copy() - env["CLASSPATH"] = f"{saxon_jar_path}:{env.get('CLASSPATH', '')}" - result = subprocess.run( - args, - bufsize=0, - capture_output=True, - cwd=os.getcwd() + "/data-dictionary", - env=env, - text=True, - ) - - if result.returncode != 0: - logger.warning("Error making DD version %s, make reported:", tag) - logger.warning("CLASSPATH ='%s'", saxon_jar_path) - logger.warning("PATH = '%s'", os.environ.get("PATH", "")) - logger.warning("stdout = '%s'", result.stdout.strip()) - logger.warning("stderr = '%s'", result.stderr.strip()) - logger.warning("continuing without DD version %s", tag) - else: - logger.debug( - "Successful make for DD %s.\n-- Make stdout --\n%s\n-- Make stderr --\n%s", - tag, - result.stdout, - result.stderr, - ) - return result.returncode - - -def build_data_dictionary(repo, tag: str, saxon_jar_path: str, rebuild=False) -> None: - """Build a single version of the data dictionary given by the tag argument - if the IDS does not already exist. - - In the data-dictionary repository sometimes IDSDef.xml is stored - directly, in which case we do not call make. 
- - Args: - repo: Repository object containing the DD source code - tag: The DD version tag that will be build - saxon_jar_path: The path to the saxon jar; Added to CLASSPATH and used - to generate the DD - rebuild: If true, overwrites existing pre-build tagged DD version - """ - _build_dir.mkdir(exist_ok=True) - result_xml = _build_dir / f"{tag}.xml" - - if result_xml.exists() and not rebuild: - logger.debug(f"XML for tag '{tag}' already exists, skipping") - return - - repo.git.checkout(tag, force=True) - if _run_data_dictionary(["make", "clean"], tag, saxon_jar_path) != 0: - return - if _run_data_dictionary(["make", "IDSDef.xml"], tag, saxon_jar_path) != 0: - return - - # copy and delete original instead of move (to follow symlink) - IDSDef = Path("data-dictionary/IDSDef.xml") - try: - shutil.copy( - IDSDef, # Hardcoded in access-layer makefile - result_xml, - follow_symlinks=True, - ) - except shutil.SameFileError: - pass - IDSDef.unlink(missing_ok=True) - - -if __name__ == "__main__": - prepare_data_dictionaries() diff --git a/imaspy/test/test_dd_helpers.py b/imaspy/test/test_dd_helpers.py deleted file mode 100644 index 324e789d..00000000 --- a/imaspy/test/test_dd_helpers.py +++ /dev/null @@ -1,134 +0,0 @@ -import pytest - -from imaspy.dd_helpers import find_saxon_classpath - -# TODO: Write tests! 
-# def prepare_data_dictionaries(): -# def get_saxon(): -# def find_saxon_jar(): - -# Quadruplets of (cluster, module, real path, name) -saxon_binary_quadruplets = ( - ( - "SDCC", - "Saxon-HE/10.3-Java-1.8", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-1.8/saxon-he-10.3.jar", - "saxon-he-10.3.jar", - ), - ( - "SDCC", - "Saxon-HE/10.3-Java-11", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar", - "saxon-he-10.3.jar", - ), - ( - "HPC", - "Saxon-HE/9.7.0.14-Java-1.6.0_45", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.7.0.14-Java-1.6.0_45/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/9.7.0.4-Java-1.7.0_79", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.7.0.4-Java-1.7.0_79/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/9.7.0.21-Java-1.8.0_162", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.7.0.21-Java-1.8.0_162/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/9.9.1.7-Java-13", - "/work/imas/opt/EasyBuild/software/Saxon-HE/9.9.1.7-Java-13/saxon9he.jar", - "saxon9he.jar", - ), - ( - "HPC", - "Saxon-HE/10.3-Java-11", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar", - "saxon-he-10.3.jar", - ), -) - -saxon_nonmatches = ( - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-test-10.3.jar", -) - - -# find_saxon_bin tries to find saxon in the CLASSPATH env variable -# It is thus per definition environment dependent -def test_empty_classpath(monkeypatch): - monkeypatch.setenv("CLASSPATH", "") - saxon_jar_path = find_saxon_classpath() - assert saxon_jar_path is None - - -@pytest.mark.parametrize("cluster,module,path,name", saxon_binary_quadruplets) -def test_classpath(monkeypatch, cluster, module, path, name): - monkeypatch.setenv("CLASSPATH", path) - saxon_jar_path = find_saxon_classpath() - assert saxon_jar_path == path - - -@pytest.mark.parametrize("path", saxon_nonmatches) -def test_classpath_do_not_match(monkeypatch, path): - 
monkeypatch.setenv("CLASSPATH", path) - saxon_jar_path = find_saxon_classpath() - assert saxon_jar_path is None - - -# ITER SDCC login01 20210617 -# module load GCCcore/10.2.0 -# module load Python/3.8.6-GCCcore-10.2.0 -# module load MDSplus/7.96.17-GCCcore-10.2.0 -# module load HDF5/1.10.7-iimpi-2020b # todo: Intel MPI version? -# module load Boost/1.74.0-GCCcore-10.2.0 -# module load MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11 -# module load Saxon-HE/10.3-Java-11 -def test_classpath_sdcc(monkeypatch): - monkeypatch.setenv( - "CLASSPATH", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-xqj-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-test-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/jline-2.9.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar:" - "/work/imas/opt/EasyBuild/software/MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11/java/classes/*", - ) - saxon_jar_path = find_saxon_classpath() - assert ( - saxon_jar_path - == "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar" - ) - - -# ITER HPC login01 20210617 -# module load GCCcore/10.2.0 -# module load Python/3.8.6-GCCcore-10.2.0 -# module load MDSplus/7.96.17-GCCcore-10.2.0 -# module load HDF5/1.10.7-iimpi-2020b # todo: Intel MPI version? 
-# module load Boost/1.74.0-GCCcore-10.2.0 -# module load MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11 -# module load Saxon-HE/10.3-Java-11 -def test_classpath_hpc(monkeypatch): - monkeypatch.setenv( - "CLASSPATH", - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-xqj-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-test-10.3.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/jline-2.9.jar:" - "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar:" - "/work/imas/opt/EasyBuild/software/MDSplus-Java/7.96.17-GCCcore-10.2.0-Java-11/java/classes/*", - ) - saxon_jar_path = find_saxon_classpath() - assert ( - saxon_jar_path - == "/work/imas/opt/EasyBuild/software/Saxon-HE/10.3-Java-11/saxon-he-10.3.jar" - ) - - -# TODO: Write tests! -# def find_saxon_bin(): -# def get_data_dictionary_repo(): -# def build_data_dictionary(): diff --git a/pyproject.toml b/pyproject.toml index dccd6912..87f92363 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,23 +1,25 @@ [build-system] # Minimum requirements for the build system to execute. 
# Keep this on a single line for the grep magic of build scripts to work -requires = ["setuptools>=61", "wheel", "numpy", "gitpython", "packaging", "tomli;python_version<'3.11'", "versioneer[toml]"] -# needs saxon9he.jar in CLASSPATH +requires = ["setuptools>=61", "wheel", "numpy", "gitpython", "saxonche","packaging", "tomli;python_version<'3.11'", "setuptools_scm>8"] build-backend = "setuptools.build_meta" [project] -name = "imaspy" +name = "imas-python" authors = [ - {name = "IMASPy Developers"}, - {name = "Olivier Hoenen", email = "olivier.hoenen@iter.org"}, + {name = "Karel Lucas van de Plassche", email = "karelvandeplassche@gmail.com"}, + {name = "Daan van Vugt", email = "dvanvugt@ignitioncomputing.com"}, + {name = "Maarten Sebregts", email = "msebregts@ignitioncomputing.com"}, + {name = "ITER Organization"}, + {email = "imas-support@iter.org"}, ] description = "Pythonic wrappers for the IMAS Access Layer" readme = {file = "README.md", content-type = "text/markdown"} requires-python = ">=3.7" license = {file = "LICENSE.txt"} classifiers = [ - "Development Status :: 3 - Alpha", + "Development Status :: 5 - Production/Stable", "Environment :: Console", "Environment :: Plugins", "Environment :: X11 Applications", @@ -32,9 +34,7 @@ classifiers = [ "Intended Audience :: Developers", "Intended Audience :: Science/Research", "Intended Audience :: System Administrators", -# The license is prepended by private which makes it a valid trove classifier, -# it will also prevent uploading to package indices such as PyPI - "Private :: License :: ITER License", + "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)", "Natural Language :: English", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", @@ -64,12 +64,14 @@ dependencies = [ "importlib_resources;python_version<'3.9'", "packaging", "xxhash >= 2", + "saxonche", + "gitpython" ] [project.optional-dependencies] # these self-dependencies are available since pip 21.2 all = [ - 
"imaspy[test,docs,imas-core,netcdf,h5py]" + "imas[test,docs,imas-core,netcdf,h5py]" ] docs = [ "sphinx>=6.0.0,<7.0.0", @@ -77,7 +79,10 @@ docs = [ "sphinx-immaterial>=0.11.0,<0.12", "sphinx-click", ] -imas-core = [ "imas-core@git+ssh://git@git.iter.org/imas/al-core.git@main" ] + +# TODO enable when imas-core is available on pypi +# imas-core = [ "imas-core@git+https://github.com/iterorganization/imas-core.git@main" ] + netcdf = [ "netCDF4>=1.7.0", ] @@ -99,15 +104,15 @@ test = [ ] [project.scripts] -build_DD = "imaspy.dd_helpers:prepare_data_dictionaries" -imaspy = "imaspy.command.cli:cli" +build_DD = "imas.dd_helpers:prepare_data_dictionaries" +imas = "imas.command.cli:cli" [project.urls] -homepage = "https://git.iter.org/projects/IMAS/repos/imaspy" +homepage = "https://github.com/iterorganization/imas-python" [tool.setuptools.packages.find] where = ["."] -include = ["imaspy*"] +include = ["imas*"] [tool.pytest.ini_options] minversion = "6.0" @@ -116,19 +121,11 @@ markers = [ "cli: Tests for the command line interface.", "slow: Slow tests.", ] -testpaths = "imaspy" - -[tool.versioneer] -VCS = "git" -style = "pep440" -versionfile_source = "imaspy/_version.py" -versionfile_build = "imaspy/_version.py" -tag_prefix = "" -parentdir_prefix = "" +testpaths = "imas" -[tool.black] -# Exclude versioneer-generated file -extend-exclude = "imaspy/_version.py" +[tool.setuptools_scm] +write_to = "imas/_version.py" +local_scheme = "no-local-version" [tool.isort] profile = "black" diff --git a/setup.cfg b/setup.cfg index f8fff4f2..8e5dd292 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,7 +13,7 @@ max-line-length = 88 per-file-ignores= # Ignore import errors in __init__.py (import not at top of file; imported but # unused) - imaspy/__init__.py:E402,F401 + imas/__init__.py:E402,F401 # Lots of CLASSPATHS in this test file: adhering to line length would be less # readable - imaspy/test/test_dd_helpers.py:E501 + imas/test/test_dd_helpers.py:E501 diff --git a/setup.py b/setup.py index 
a859c164..692fb5cf 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,11 @@ # pylint: disable=wrong-import-position -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. """ Packaging settings. Inspired by a minimal setup.py file, the Pandas cython build and the access-layer setup template. -The installable IMASPy package tries to follow in the following order: +The installable imas-python package tries to follow in the following order: - The style guide for Python code [PEP8](https://www.python.org/dev/peps/pep-0008/) - The [PyPA guide on packaging projects]( https://packaging.python.org/guides/distributing-packages-using-setuptools/#distributing-packages) @@ -25,11 +25,9 @@ import importlib.util import site import traceback - # Allow importing local files, see https://snarky.ca/what-the-heck-is-pyproject-toml/ import sys import warnings - # Import other stdlib packages from pathlib import Path @@ -49,7 +47,6 @@ # Ensure the current folder is on the import path: sys.path.append(str(Path(__file__).parent.resolve())) -import versioneer # noqa cannonical_python_command = "module load Python/3.8.6-GCCcore-10.2.0" @@ -80,13 +77,13 @@ this_dir = this_file.parent.resolve() # Start: Load dd_helpers -dd_helpers_file = this_dir / "imaspy/dd_helpers.py" +dd_helpers_file = this_dir / "imas/dd_helpers.py" assert dd_helpers_file.is_file() spec = importlib.util.spec_from_file_location("dd_helpers", dd_helpers_file) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) -sys.modules["imaspy.dd_helpers"] = module -from imaspy.dd_helpers import prepare_data_dictionaries # noqa +sys.modules["imas.dd_helpers"] = module +from imas.dd_helpers import prepare_data_dictionaries # noqa # End: Load dd_helpers @@ -114,9 +111,8 @@ def run(self): # - `pip install -e .`` (from git clone) # - `python -m build`` # - 
Source tarball from git-archive. Note: version only picked up when doing git-archive -# from a tagged release, otherwise version will be "0+unknown" (expected versioneer -# behaviour). -# `git archive HEAD -v -o imaspy.tar.gz && pip install imaspy.tar.gz` +# from a tagged release, +# `git archive HEAD -v -o imas.tar.gz && pip install imas.tar.gz` cmd_class = {} build_overrides = {"build_ext": build_ext, "build_py": build_py, "sdist": sdist} if bdist_wheel: @@ -139,7 +135,6 @@ def run(self): if __name__ == "__main__": setup( - version=versioneer.get_version(), zip_safe=False, # https://mypy.readthedocs.io/en/latest/installed_packages.html - cmdclass=versioneer.get_cmdclass({"build_DD": BuildDDCommand, **cmd_class}), - ) + cmdclass={"build_DD": BuildDDCommand, **cmd_class} + ) \ No newline at end of file diff --git a/tools/compare_lowlevel_access_patterns.py b/tools/compare_lowlevel_access_patterns.py index 03b3e6d2..88a5f2aa 100644 --- a/tools/compare_lowlevel_access_patterns.py +++ b/tools/compare_lowlevel_access_patterns.py @@ -1,4 +1,4 @@ -"""Compare the access patterns of the lowlevel AL API between IMASPy and the HLI. +"""Compare the access patterns of the lowlevel AL API between imas-python and the HLI. 
""" from functools import wraps @@ -9,9 +9,9 @@ import click import imas -import imaspy -from imaspy.test.test_helpers import fill_with_random_data -from imaspy.ids_defs import IDS_TIME_MODE_HETEROGENEOUS +import imas +from imas.test.test_helpers import fill_with_random_data +from imas.ids_defs import IDS_TIME_MODE_HETEROGENEOUS class ALWrapper: @@ -46,10 +46,10 @@ def wrapper(*args, **kwargs): setattr(sys.modules[item], alias, wrapper) -def compare_ids_put(imaspy_ids, hli_ids): +def compare_ids_put(imas_ids, hli_ids): imas._al_lowlevel._log.clear() # Start with hli IDS - dbentry = imas.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") dbentry.create() try: dbentry.put(hli_ids) @@ -59,49 +59,49 @@ def compare_ids_put(imaspy_ids, hli_ids): dbentry.close() hli_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] - # And then the imaspy IDS - dbentry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + # And then the imas IDS + dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") dbentry.create() try: - dbentry.put(imaspy_ids) + dbentry.put(imas_ids) except Exception as exc: - print("Caught error while putting imaspy ids:", exc) + print("Caught error while putting imas ids:", exc) traceback.print_exc() dbentry.close() - imaspy_log = imas._al_lowlevel._log + imas_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] hli_log_text = "\n".join("\t".join(item) for item in hli_log) - imaspy_log_text = "\n".join("\t".join(item) for item in imaspy_log) + imas_log_text = "\n".join("\t".join(item) for item in imas_log) Path("/tmp/hli.log").write_text(hli_log_text) - Path("/tmp/imaspy.log").write_text(imaspy_log_text) - print("Logs stored in /tmp/hli.log and /tmp/imaspy.log") + Path("/tmp/imas.log").write_text(imas_log_text) + print("Logs stored in /tmp/hli.log and /tmp/imas.log") -def compare_ids_get(imaspy_ids): +def 
compare_ids_get(imas_ids): # First put the ids - idbentry = imaspy.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + idbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") idbentry.create() - idbentry.put(imaspy_ids) + idbentry.put(imas_ids) - dbentry = imas.DBEntry(imaspy.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") + dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "ITER", 1, 1, "test") dbentry.open() # Start with hli IDS imas._al_lowlevel._log.clear() - dbentry.get(imaspy_ids.metadata.name) + dbentry.get(imas_ids.metadata.name) hli_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] - # And then the imaspy IDS - idbentry.get(imaspy_ids.metadata.name) - imaspy_log = imas._al_lowlevel._log + # And then the imas IDS + idbentry.get(imas_ids.metadata.name) + imas_log = imas._al_lowlevel._log imas._al_lowlevel._log = [] # Cleanup dbentry.close() idbentry.close() hli_log_text = "\n".join("\t".join(item) for item in hli_log) - imaspy_log_text = "\n".join("\t".join(item) for item in imaspy_log) + imas_log_text = "\n".join("\t".join(item) for item in imas_log) Path("/tmp/hli.log").write_text(hli_log_text) - Path("/tmp/imaspy.log").write_text(imaspy_log_text) - print("Logs stored in /tmp/hli.log and /tmp/imaspy.log") + Path("/tmp/imas.log").write_text(imas_log_text) + print("Logs stored in /tmp/hli.log and /tmp/imas.log") @click.command() @@ -113,33 +113,33 @@ def compare_ids_get(imaspy_ids): help="Use heterogeneous time mode instead of homogeneous time.", ) def main(ids_name, method, heterogeneous): - """Compare lowlevel calls done by IMASPy vs. the Python HLI + """Compare lowlevel calls done by imas-python vs. the Python HLI This program fills the provided IDS with random data, then does I/O with it using - both the Python HLI and the IMASPy APIs. The resulting calls to the lowlevel Access - Layer are logged to respectively /tmp/hli.log and /tmp/imaspy.log. + both the Python HLI and the imas-python APIs. 
The resulting calls to the lowlevel Access + Layer are logged to respectively /tmp/hli.log and /tmp/imas.log. You may use your favorite diff tool to compare the two files. \b IDS_NAME: The name of the IDS to use for testing, for example "core_profiles". """ - imaspy_ids = imaspy.IDSFactory().new(ids_name) + imas_ids = imas.IDSFactory().new(ids_name) hli_ids = getattr(imas, ids_name)() - fill_with_random_data(imaspy_ids) - hli_ids.deserialize(imaspy_ids.serialize()) + fill_with_random_data(imas_ids) + hli_ids.deserialize(imas_ids.serialize()) if heterogeneous: # Change time mode time_mode = IDS_TIME_MODE_HETEROGENEOUS - imaspy_ids.ids_properties.homogeneous_time = time_mode + imas_ids.ids_properties.homogeneous_time = time_mode hli_ids.ids_properties.homogeneous_time = time_mode if method == "put": - compare_ids_put(imaspy_ids, hli_ids) + compare_ids_put(imas_ids, hli_ids) elif method == "get": - compare_ids_get(imaspy_ids) + compare_ids_get(imas_ids) if __name__ == "__main__": diff --git a/tools/extract_test_data.py b/tools/extract_test_data.py index 60e61116..d64c4f51 100644 --- a/tools/extract_test_data.py +++ b/tools/extract_test_data.py @@ -1,5 +1,5 @@ -# This file is part of IMASPy. -# You should have received the IMASPy LICENSE file with this project. +# This file is part of imas-python. +# You should have received the imas-python LICENSE file with this project. 
import os import imas From 6e9d47f1ba514119f9907574d2168a176ad04877 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Wed, 22 Jan 2025 09:51:57 +0100 Subject: [PATCH 32/97] added github actions and readthedocs conf --- .github/workflows/linting.yml | 31 ++++++++++++++++ .github/workflows/publish.yml | 67 +++++++++++++++++++++++++++++++++++ .readthedocs.yml | 23 ++++++++++++ 3 files changed, 121 insertions(+) create mode 100644 .github/workflows/linting.yml create mode 100644 .github/workflows/publish.yml create mode 100644 .readthedocs.yml diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml new file mode 100644 index 00000000..2bb329f6 --- /dev/null +++ b/.github/workflows/linting.yml @@ -0,0 +1,31 @@ +name: imas-python + +on: push + +jobs: + build: + + runs-on: ubuntu-latest + + steps: + - name: Checkout imas-python sources + uses: actions/checkout@v4 + with: + ref: rename-imaspy-to-imas + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Display Python version + run: python -c "import sys; print(sys.version)" + + - name: Install the code linting and formatting tools + run: pip install --upgrade 'black >=24,<25' flake8 + + - name: Check formatting of code with black + run: black --check imas + + - name: Check linting with flake8 + run: flake8 imas diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..3623ebab --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,67 @@ +name: build-wheel-and-publish-test-pypi + +on: push + +jobs: + build: + name: Build distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.x" + - name: Install pypa/build + run: >- + python3 -m pip install pip setuptools wheel build + - name: Build a binary wheel and a source tarball + run: python3 -m build . 
+ - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: python-package-distributions + path: dist/ + + publish-to-pypi: + name: Publish imas-python distribution to PyPI + needs: + - build + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/imas-python + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + + publish-to-testpypi: + name: Publish imas-python distribution to TestPyPI + needs: + - build + runs-on: ubuntu-latest + environment: + name: testpypi + url: https://test.pypi.org/p/imas-python + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution to TestPyPI + uses: pypa/gh-action-pypi-publish@unstable/v1 + with: + repository-url: https://test.pypi.org/legacy/ + verbose: true \ No newline at end of file diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..426920c7 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,23 @@ +version: 2 + +build: + os: "ubuntu-22.04" + tools: + python: "3.11" + jobs: + post_checkout: + - git fetch --unshallow || true + +python: + install: + - method: pip + path: . 
+ extra_requirements: + - docs + - netcdf + - h5py + +sphinx: + builder: html + configuration: docs/source/conf.py + fail_on_warning: false \ No newline at end of file From 36a7d8b2d8754495d394631400206084aa9aa8b6 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Wed, 22 Jan 2025 09:53:10 +0100 Subject: [PATCH 33/97] configures setuptools_scm --- .git_archival.txt | 3 + .gitattributes | 2 +- versioneer.py | 2205 --------------------------------------------- 3 files changed, 4 insertions(+), 2206 deletions(-) create mode 100644 .git_archival.txt delete mode 100644 versioneer.py diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 00000000..b423033f --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,3 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$ \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 88136b1b..a94cb2f8 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1 @@ -imaspy/_version.py export-subst +.git_archival.txt export-subst diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index 18e34c2f..00000000 --- a/versioneer.py +++ /dev/null @@ -1,2205 +0,0 @@ - -# Version: 0.28 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain (Unlicense) -* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in setuptools-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. 
- - -## Quick Install - -Versioneer provides two installation modes. The "classic" vendored mode installs -a copy of versioneer into your repository. The experimental build-time dependency mode -is intended to allow you to skip this step and simplify the process of upgrading. - -### Vendored mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) - * Note that you will need to add `tomli; python_version < "3.11"` to your - build-time dependencies if you use `pyproject.toml` -* run `versioneer install --vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -### Build-time dependency mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) -* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) - to the `requires` key of the `build-system` table in `pyproject.toml`: - ```toml - [build-system] - requires = ["setuptools", "versioneer[toml]"] - build-backend = "setuptools.build_meta" - ``` -* run `versioneer install --no-vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - 
"tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. 
- -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. 
Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. 
- -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. 
The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg` and `pyproject.toml`, if necessary, - to include any new configuration settings indicated by the release notes. 
- See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install --[no-]vendor` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to make it easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - -## Similar projects - -* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time - dependency -* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of - versioneer -* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools - plugin - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the "Unlicense", as described in -https://unlicense.org/. 
- -[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg -[pypi-url]: https://pypi.python.org/pypi/versioneer/ -[travis-image]: -https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg -[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer - -""" -# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring -# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements -# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error -# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with -# pylint:disable=attribute-defined-outside-init,too-many-arguments - -import configparser -import errno -import json -import os -import re -import subprocess -import sys -from pathlib import Path -from typing import Callable, Dict -import functools - -have_tomllib = True -if sys.version_info >= (3, 11): - import tomllib -else: - try: - import tomli as tomllib - except ImportError: - have_tomllib = False - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to run the project root directory. 
" - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - my_path = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(my_path)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals(): - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise OSError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg . 
- root = Path(root) - pyproject_toml = root / "pyproject.toml" - setup_cfg = root / "setup.cfg" - section = None - if pyproject_toml.exists() and have_tomllib: - try: - with open(pyproject_toml, 'rb') as fobj: - pp = tomllib.load(fobj) - section = pp['tool']['versioneer'] - except (tomllib.TOMLDecodeError, KeyError): - pass - if not section: - parser = configparser.ConfigParser() - with open(setup_cfg) as cfg_file: - parser.read_file(cfg_file) - parser.get("versioneer", "VCS") # raise error if missing - - section = parser["versioneer"] - - cfg = VersioneerConfig() - cfg.VCS = section['VCS'] - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""', None): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember 
shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY['git'] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. -# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = 
sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. 
- # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 
0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%%d" %% (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. 
- - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. 
The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. 
- # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. 
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [versionfile_source] - if ipy: - files.append(ipy) - if "VERSIONEER_PEP518" not in globals(): - try: - my_path = __file__ - if my_path.endswith((".pyc", ".pyo")): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except OSError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.28) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. 
- -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except OSError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 
0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. 
- """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. - - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - 
print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(cmdclass=None): - """Get the custom setuptools subclasses used by Versioneer. - - If the package uses a different cmdclass (e.g. one from numpy), it - should be provide as an argument. - """ - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to it's pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. 
- # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to setuptools - from setuptools import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # pip install -e . and setuptool/editable_wheel will invoke build_py - # but the build_py command is not expected to copy any files. 
- - # we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] - else: - from setuptools.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - if getattr(self, "editable_mode", False): - # During editable installs `.py` and data files are - # not copied to build_lib - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] - else: - from setuptools.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if not cfg.versionfile_build: - return - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - if not os.path.exists(target_versionfile): - print(f"Warning: {target_versionfile} does not exist, skipping " - "version update. This can happen if you are running build_ext " - "without first running build_py.") - return - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? 
- from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? 
- try: - from py2exe.setuptools_buildexe import py2exe as _py2exe - except ImportError: - from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # sdist farms its file list building out to egg_info - if 'egg_info' in cmds: - _egg_info = cmds['egg_info'] - else: - from setuptools.command.egg_info import egg_info as _egg_info - - class cmd_egg_info(_egg_info): - def find_sources(self): - # egg_info.find_sources builds the manifest list and writes it - # in one shot - super().find_sources() - - # Modify the filelist and normalize it - root = get_root() - cfg = get_config_from_root(root) - self.filelist.append('versioneer.py') - if cfg.versionfile_source: - # There are rare cases where versionfile_source might not be - # included by default, so we must be explicit - self.filelist.append(cfg.versionfile_source) - self.filelist.sort() - self.filelist.remove_duplicates() - - # The write method is hidden in the manifest_maker instance that - # generated the filelist and was thrown away - # We will instead replicate their final normalization (to unicode, - # and POSIX-style paths) - from setuptools import unicode_utils - normalized = [unicode_utils.filesys_decode(f).replace(os.sep, '/') - for f in self.filelist.files] - - manifest_filename = os.path.join(self.egg_info, 'SOURCES.txt') - with open(manifest_filename, 'w') as fobj: - 
fobj.write('\n'.join(normalized)) - - cmds['egg_info'] = cmd_egg_info - - # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] - else: - from setuptools.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. 
- -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except OSError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
- do_vcs_install(cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . 
This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -def setup_command(): - """Set up Versioneer and exit with appropriate error code.""" - errors = do_setup() - errors += scan_setup_py() - sys.exit(1 if errors else 0) - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - setup_command() From 42c3f9160564753400e4147dc9dc77d22eb0bcbb Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Wed, 22 Jan 2025 14:30:35 +0100 Subject: [PATCH 34/97] removed dd_helpers_old --- .github/workflows/linting.yml | 2 - imas/test/test_dd_helpers_old.py | 92 -------------------------------- 2 files changed, 94 deletions(-) delete mode 100644 imas/test/test_dd_helpers_old.py diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 2bb329f6..5c54081b 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -10,8 +10,6 @@ jobs: steps: - name: Checkout imas-python sources uses: actions/checkout@v4 - with: - ref: rename-imaspy-to-imas - name: Set up Python uses: actions/setup-python@v5 diff --git a/imas/test/test_dd_helpers_old.py b/imas/test/test_dd_helpers_old.py deleted file mode 100644 index 1c3d3772..00000000 --- a/imas/test/test_dd_helpers_old.py +++ /dev/null @@ -1,92 +0,0 @@ -import pytest -from pathlib import Path -from unittest.mock import patch, MagicMock -from imas.dd_helpers import transform_with_saxonche, build_data_dictionary -import logging - - -def test_transform_with_saxonche_success(tmp_path): - """Test that transform_with_saxonche performs transformation successfully.""" - input_xml = tmp_path / "input.xml" - xsl_file = tmp_path / "transform.xsl" - output_file = tmp_path / "output.xml" - - # Create dummy input files - input_xml.write_text("Test") - xsl_file.write_text( - """ - - - - - - """ - ) - - transform_with_saxonche(input_xml, xsl_file, output_file) - - # Assert output - assert output_file.exists() - assert ( 
- output_file.read_text() - == "Test" - ) - - -def test_transform_with_saxonche_failure(tmp_path): - """Test that transform_with_saxonche raises an error for invalid input.""" - input_xml = tmp_path / "input.xml" - xsl_file = tmp_path / "transform.xsl" - output_file = tmp_path / "output.xml" - - # Create invalid input files - input_xml.write_text("Test") - xsl_file.write_text("INVALID XSLT") - - with pytest.raises(Exception): - transform_with_saxonche(input_xml, xsl_file, output_file) - - -@patch("imas.dd_helpers.repo") -@patch("imas.dd_helpers.transform_with_saxonche") -def test_build_data_dictionary(mock_transform, mock_repo, tmp_path): - """Test build_data_dictionary function.""" - mock_repo.git.checkout = MagicMock() - - tag = "v1.0.0" - result_xml = tmp_path / f"{tag}.xml" - - with patch("imas.dd_helpers._build_dir", tmp_path): - build_data_dictionary(mock_repo, tag) - - # Verify the repo was checked out to the correct tag - mock_repo.git.checkout.assert_called_once_with(tag, force=True) - - # Verify the transform_with_saxonche function was called - mock_transform.assert_called_once() - - -def test_prepare_data_dictionaries(monkeypatch, tmp_path): - """Integration test for prepare_data_dictionaries.""" - from imas.dd_helpers import prepare_data_dictionaries - - class MockRepo: - tags = ["v3.21.2", "v3.22.0"] - - def git(self): - return MagicMock() - - mock_repo = MockRepo() - - def mock_get_data_dictionary_repo(): - return mock_repo - - monkeypatch.setattr( - "imas.dd_helpers.get_data_dictionary_repo", mock_get_data_dictionary_repo - ) - - with patch("imas.dd_helpers._build_dir", tmp_path): - prepare_data_dictionaries() - - # Check that the expected output files are created - assert len(list(tmp_path.glob("*.xml"))) == len(mock_repo.tags) From 5b3d0fe61e368e8beb8c1bb1bff996e028f4fc0b Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Mon, 20 Jan 2025 23:34:52 +0000 Subject: [PATCH 35/97] Modifications for compatibility with TORAX. 
--- imaspy/backends/netcdf/db_entry_nc.py | 7 +++++++ imaspy/backends/netcdf/ids2nc.py | 6 +++++- imaspy/backends/netcdf/nc_validate.py | 9 +++++---- imaspy/ids_primitive.py | 2 +- imaspy/test/test_cli.py | 6 ++++-- imaspy/test/test_dbentry.py | 1 + imaspy/test/test_ids_mixin.py | 3 +++ imaspy/test/test_ids_toplevel.py | 2 +- imaspy/test/test_minimal_types.py | 5 +++-- imaspy/test/test_nbc_change.py | 2 +- imaspy/test/test_static_ids.py | 2 +- imaspy/test/test_util.py | 6 +++--- 12 files changed, 35 insertions(+), 16 deletions(-) diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imaspy/backends/netcdf/db_entry_nc.py index 732eb97d..c008262c 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imaspy/backends/netcdf/db_entry_nc.py @@ -33,12 +33,19 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: "The `netCDF4` python module is not available. Please install this " "module to read/write IMAS netCDF files with IMASPy." ) + # To support netcdf v1.4 (which has no mode "x") we map it to "w" with `clobber=True`. 
+ if mode == "x": + mode = "w" + clobber = False + else: + clobber = True self._dataset = netCDF4.Dataset( fname, mode, format="NETCDF4", auto_complex=True, + clobber=clobber, ) """NetCDF4 dataset.""" self._factory = factory diff --git a/imaspy/backends/netcdf/ids2nc.py b/imaspy/backends/netcdf/ids2nc.py index 34e63101..61e42cf2 100644 --- a/imaspy/backends/netcdf/ids2nc.py +++ b/imaspy/backends/netcdf/ids2nc.py @@ -7,6 +7,7 @@ import netCDF4 import numpy +from packaging import version from imaspy.backends.netcdf.nc_metadata import NCMetadata from imaspy.ids_base import IDSBase @@ -187,7 +188,10 @@ def create_variables(self) -> None: dtype = dtypes[metadata.data_type] kwargs = {} if dtype is not str: # Enable compression: - kwargs.update(compression="zlib", complevel=1) + if version.parse(netCDF4.__version__) > version.parse("1.4.1"): + kwargs.update(compression="zlib", complevel=1) + else: + kwargs.update(zlib=True, complevel=1) if dtype is not dtypes[IDSDataType.CPX]: # Set fillvalue kwargs.update(fill_value=default_fillvals[metadata.data_type]) # Create variable diff --git a/imaspy/backends/netcdf/nc_validate.py b/imaspy/backends/netcdf/nc_validate.py index 49a14283..07a7ad78 100644 --- a/imaspy/backends/netcdf/nc_validate.py +++ b/imaspy/backends/netcdf/nc_validate.py @@ -23,23 +23,24 @@ def validate_netcdf_file(filename: str) -> None: # additional variables are smuggled inside: groups = [dataset] + [dataset[group] for group in dataset.groups] for group in groups: + group_name = group.path.split('/')[-1] if group.variables or group.dimensions: raise InvalidNetCDFEntry( "NetCDF file should not have variables or dimensions in the " - f"{group.name} group." + f"{group_name} group." ) if group is dataset: continue - if group.name not in ids_names: + if group_name not in ids_names: raise InvalidNetCDFEntry( - f"Invalid group name {group.name}: there is no IDS with this name." + f"Invalid group name {group_name}: there is no IDS with this name." 
) for subgroup in group.groups: try: int(subgroup) except ValueError: raise InvalidNetCDFEntry( - f"Invalid group name {group.name}/{subgroup}: " + f"Invalid group name {group_name}/{subgroup}: " f"{subgroup} is not a valid occurrence number." ) diff --git a/imaspy/ids_primitive.py b/imaspy/ids_primitive.py index 94f865b6..e27eb93f 100644 --- a/imaspy/ids_primitive.py +++ b/imaspy/ids_primitive.py @@ -481,7 +481,7 @@ def _cast_value(self, value): value = np.asanyarray(value) if value.dtype != dtype: logger.info(_CONVERT_MSG, value.dtype, self) - value = np.array(value, dtype=dtype, copy=False) + value = np.asarray(value, dtype=dtype,) if value.ndim != self.metadata.ndim: raise ValueError(f"Trying to assign a {value.ndim}D value to {self!r}.") return value diff --git a/imaspy/test/test_cli.py b/imaspy/test/test_cli.py index 604a7f7e..f9ee5383 100644 --- a/imaspy/test/test_cli.py +++ b/imaspy/test/test_cli.py @@ -4,6 +4,7 @@ from click.testing import CliRunner from packaging.version import Version +from imaspy.backends.imas_core.imas_interface import has_imas from imaspy.backends.imas_core.imas_interface import ll_interface from imaspy.command.cli import print_version from imaspy.command.db_analysis import analyze_db, process_db_analysis @@ -12,6 +13,7 @@ @pytest.mark.cli +@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") def test_imaspy_version(): runner = CliRunner() result = runner.invoke(print_version) @@ -19,8 +21,8 @@ def test_imaspy_version(): @pytest.mark.cli -@pytest.mark.skipif(ll_interface._al_version < Version("5.0"), reason="Needs AL >= 5") -def test_db_analysis(tmp_path): +@pytest.mark.skipif(not has_imas or ll_interface._al_version < Version("5.0"), reason="Needs AL >= 5 AND Requires IMAS Core.") +def test_db_analysis(tmp_path,): # This only tests the happy flow, error handling is not tested db_path = tmp_path / "test_db_analysis" with DBEntry(f"imas:hdf5?path={db_path}", "w") as entry: diff --git a/imaspy/test/test_dbentry.py 
b/imaspy/test/test_dbentry.py index 2d82af36..d67fae0d 100644 --- a/imaspy/test/test_dbentry.py +++ b/imaspy/test/test_dbentry.py @@ -82,6 +82,7 @@ def test_dbentry_constructor(): assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) +@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): entry = open_dbentry(imaspy.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() diff --git a/imaspy/test/test_ids_mixin.py b/imaspy/test/test_ids_mixin.py index 164adcdd..2b3f7b03 100644 --- a/imaspy/test/test_ids_mixin.py +++ b/imaspy/test/test_ids_mixin.py @@ -1,7 +1,10 @@ # This file is part of IMASPy. # You should have received the IMASPy LICENSE file with this project. +import pytest +from imaspy.backends.imas_core.imas_interface import has_imas +@pytest.mark.skipif(has_imas, reason="Requires IMAS Core.") def test_toplevel(fake_filled_toplevel): top = fake_filled_toplevel assert top.wavevector._toplevel == top diff --git a/imaspy/test/test_ids_toplevel.py b/imaspy/test/test_ids_toplevel.py index 4721f3c3..0e8d8c32 100644 --- a/imaspy/test/test_ids_toplevel.py +++ b/imaspy/test/test_ids_toplevel.py @@ -46,7 +46,7 @@ def test_pretty_print(ids): assert pprint.pformat(ids) == "" -def test_serialize_nondefault_dd_version(): +def test_serialize_nondefault_dd_version(requires_imas): ids = IDSFactory("3.31.0").core_profiles() fill_with_random_data(ids) data = ids.serialize() diff --git a/imaspy/test/test_minimal_types.py b/imaspy/test/test_minimal_types.py index ee38761c..0bb9ac30 100644 --- a/imaspy/test/test_minimal_types.py +++ b/imaspy/test/test_minimal_types.py @@ -1,5 +1,6 @@ # A minimal testcase loading an IDS file and checking that the structure built is ok from numbers import Complex, Integral, Number, Real +from packaging import version import numpy as np import pytest @@ -61,7 +62,7 @@ def test_assign_str_1d(minimal, caplog): # Prevent the expected numpy 
ComplexWarnings from cluttering pytest output -@pytest.mark.filterwarnings("ignore::numpy.ComplexWarning") +@pytest.mark.filterwarnings("ignore::numpy.ComplexWarning" if version.parse(np.__version__) < version.parse("2.0.0") else "ignore::numpy.exceptions.ComplexWarning") @pytest.mark.parametrize("typ, max_dim", [("flt", 6), ("cpx", 6), ("int", 3)]) def test_assign_numeric_types(minimal, caplog, typ, max_dim): caplog.set_level("INFO", "imaspy") @@ -87,7 +88,7 @@ def test_assign_numeric_types(minimal, caplog, typ, max_dim): len(caplog.records) == 1 elif dim == other_ndim >= 1 and other_typ == "cpx": # np allows casting of complex to float or int, but warns: - with pytest.warns(np.ComplexWarning): + with pytest.warns(np.ComplexWarning if version.parse(np.__version__) < version.parse("2.0.0") else np.exceptions.ComplexWarning): caplog.clear() minimal[name].value = value assert len(caplog.records) == 1 diff --git a/imaspy/test/test_nbc_change.py b/imaspy/test/test_nbc_change.py index cbcf3f58..2e328982 100644 --- a/imaspy/test/test_nbc_change.py +++ b/imaspy/test/test_nbc_change.py @@ -54,7 +54,7 @@ def test_nbc_structure_to_aos(caplog): assert caplog.record_tuples[0][:2] == ("imaspy.ids_convert", logging.WARNING) -def test_nbc_0d_to_1d(caplog): +def test_nbc_0d_to_1d(caplog, requires_imas): # channel/filter_spectrometer/radiance_calibration in spectrometer visible changed # from FLT_0D to FLT_1D in DD 3.39.0 ids = IDSFactory("3.32.0").spectrometer_visible() diff --git a/imaspy/test/test_static_ids.py b/imaspy/test/test_static_ids.py index 1f430c10..680ecd2b 100644 --- a/imaspy/test/test_static_ids.py +++ b/imaspy/test/test_static_ids.py @@ -21,7 +21,7 @@ def test_ids_valid_type(): assert ids_types in ({IDSType.NONE}, {IDSType.CONSTANT, IDSType.DYNAMIC}) -def test_constant_ids(caplog): +def test_constant_ids(caplog, requires_imas): ids = imaspy.IDSFactory().new("amns_data") if ids.metadata.type is IDSType.NONE: pytest.skip("IDS definition has no constant IDSs") diff 
--git a/imaspy/test/test_util.py b/imaspy/test/test_util.py index 37c419a0..2c4dad97 100644 --- a/imaspy/test/test_util.py +++ b/imaspy/test/test_util.py @@ -54,7 +54,7 @@ def test_inspect(): inspect(cp.profiles_1d[1].grid.rho_tor_norm) # IDSPrimitive -def test_inspect_lazy(): +def test_inspect_lazy(requires_imas): with get_training_db_entry() as entry: cp = entry.get("core_profiles", lazy=True) inspect(cp) @@ -141,7 +141,7 @@ def test_idsdiffgen(): assert diff[0] == ("profiles_1d/time", -1, 0) -def test_idsdiff(): +def test_idsdiff(requires_imas): # Test the diff rendering for two sample IDSs with get_training_db_entry() as entry: imaspy.util.idsdiff(entry.get("core_profiles"), entry.get("equilibrium")) @@ -179,7 +179,7 @@ def test_get_toplevel(): assert get_toplevel(cp) is cp -def test_is_lazy_loaded(): +def test_is_lazy_loaded(requires_imas): with get_training_db_entry() as entry: assert is_lazy_loaded(entry.get("core_profiles")) is False assert is_lazy_loaded(entry.get("core_profiles", lazy=True)) is True From c1f7a968f1729c9aee5c7318062084d516b0dfec Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Mon, 20 Jan 2025 23:54:44 +0000 Subject: [PATCH 36/97] Modify tests to use fixture. 
--- imaspy/test/test_cli.py | 3 +-- imaspy/test/test_dbentry.py | 3 +-- imaspy/test/test_ids_mixin.py | 5 +---- 3 files changed, 3 insertions(+), 8 deletions(-) diff --git a/imaspy/test/test_cli.py b/imaspy/test/test_cli.py index f9ee5383..db7c462f 100644 --- a/imaspy/test/test_cli.py +++ b/imaspy/test/test_cli.py @@ -13,8 +13,7 @@ @pytest.mark.cli -@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") -def test_imaspy_version(): +def test_imaspy_version(requires_imas): runner = CliRunner() result = runner.invoke(print_version) assert result.exit_code == 0 diff --git a/imaspy/test/test_dbentry.py b/imaspy/test/test_dbentry.py index d67fae0d..cb7ebe12 100644 --- a/imaspy/test/test_dbentry.py +++ b/imaspy/test/test_dbentry.py @@ -82,8 +82,7 @@ def test_dbentry_constructor(): assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) -@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") -def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): +def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path, requires_imas): entry = open_dbentry(imaspy.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 diff --git a/imaspy/test/test_ids_mixin.py b/imaspy/test/test_ids_mixin.py index 2b3f7b03..675e2575 100644 --- a/imaspy/test/test_ids_mixin.py +++ b/imaspy/test/test_ids_mixin.py @@ -1,11 +1,8 @@ # This file is part of IMASPy. # You should have received the IMASPy LICENSE file with this project. 
-import pytest -from imaspy.backends.imas_core.imas_interface import has_imas -@pytest.mark.skipif(has_imas, reason="Requires IMAS Core.") -def test_toplevel(fake_filled_toplevel): +def test_toplevel(fake_filled_toplevel, requires_imas): top = fake_filled_toplevel assert top.wavevector._toplevel == top assert top.wavevector[0].radial_component_norm._toplevel == top From abcaf3f8df9c999e175f0125fcd88c2f8e94da47 Mon Sep 17 00:00:00 2001 From: Anushan Fernando <35841118+Nush395@users.noreply.github.com> Date: Tue, 21 Jan 2025 10:27:25 +0000 Subject: [PATCH 37/97] Update imaspy/test/test_ids_mixin.py Co-authored-by: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> --- imaspy/test/test_ids_mixin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imaspy/test/test_ids_mixin.py b/imaspy/test/test_ids_mixin.py index 675e2575..164adcdd 100644 --- a/imaspy/test/test_ids_mixin.py +++ b/imaspy/test/test_ids_mixin.py @@ -2,7 +2,7 @@ # You should have received the IMASPy LICENSE file with this project. 
-def test_toplevel(fake_filled_toplevel, requires_imas): +def test_toplevel(fake_filled_toplevel): top = fake_filled_toplevel assert top.wavevector._toplevel == top assert top.wavevector[0].radial_component_norm._toplevel == top From aab7f664d3754e48192c73bb41cbd7e44b59ba62 Mon Sep 17 00:00:00 2001 From: Anushan Fernando <35841118+Nush395@users.noreply.github.com> Date: Tue, 21 Jan 2025 10:27:45 +0000 Subject: [PATCH 38/97] Update imaspy/test/test_cli.py Co-authored-by: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> --- imaspy/test/test_cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imaspy/test/test_cli.py b/imaspy/test/test_cli.py index db7c462f..e6a420c7 100644 --- a/imaspy/test/test_cli.py +++ b/imaspy/test/test_cli.py @@ -21,7 +21,7 @@ def test_imaspy_version(requires_imas): @pytest.mark.cli @pytest.mark.skipif(not has_imas or ll_interface._al_version < Version("5.0"), reason="Needs AL >= 5 AND Requires IMAS Core.") -def test_db_analysis(tmp_path,): +def test_db_analysis(tmp_path): # This only tests the happy flow, error handling is not tested db_path = tmp_path / "test_db_analysis" with DBEntry(f"imas:hdf5?path={db_path}", "w") as entry: From 006580a4cca1a8ffdb601c8ed62dfccf011dc92a Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Tue, 21 Jan 2025 10:52:21 +0000 Subject: [PATCH 39/97] Add error message when attemtping to store complex number with netcdf<1.7.0. 
--- imaspy/backends/netcdf/ids2nc.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/imaspy/backends/netcdf/ids2nc.py b/imaspy/backends/netcdf/ids2nc.py index 61e42cf2..45a04d6b 100644 --- a/imaspy/backends/netcdf/ids2nc.py +++ b/imaspy/backends/netcdf/ids2nc.py @@ -10,6 +10,7 @@ from packaging import version from imaspy.backends.netcdf.nc_metadata import NCMetadata +from imaspy.exception import InvalidNetCDFEntry from imaspy.ids_base import IDSBase from imaspy.ids_data_type import IDSDataType from imaspy.ids_defs import IDS_TIME_MODE_HOMOGENEOUS @@ -186,6 +187,8 @@ def create_variables(self) -> None: else: dtype = dtypes[metadata.data_type] + if version.parse(netCDF4.__version__) < version.parse("1.7.0") and dtype is dtypes[IDSDataType.CPX]: + raise InvalidNetCDFEntry(f"Found complex data in {var_name}, NetCDF 1.7.0 or later is required for complex data types") kwargs = {} if dtype is not str: # Enable compression: if version.parse(netCDF4.__version__) > version.parse("1.4.1"): From b2afe0770030d6a6d2d1084288fabf24bf68260c Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Tue, 21 Jan 2025 10:57:10 +0000 Subject: [PATCH 40/97] Formatting. --- imaspy/backends/netcdf/db_entry_nc.py | 3 ++- imaspy/backends/netcdf/ids2nc.py | 10 ++++++++-- imaspy/backends/netcdf/nc_validate.py | 2 +- imaspy/ids_primitive.py | 5 ++++- imaspy/test/test_cli.py | 5 ++++- imaspy/test/test_minimal_types.py | 12 ++++++++++-- 6 files changed, 29 insertions(+), 8 deletions(-) diff --git a/imaspy/backends/netcdf/db_entry_nc.py b/imaspy/backends/netcdf/db_entry_nc.py index c008262c..97d5dffe 100644 --- a/imaspy/backends/netcdf/db_entry_nc.py +++ b/imaspy/backends/netcdf/db_entry_nc.py @@ -33,7 +33,8 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: "The `netCDF4` python module is not available. Please install this " "module to read/write IMAS netCDF files with IMASPy." ) - # To support netcdf v1.4 (which has no mode "x") we map it to "w" with `clobber=True`. 
+ # To support netcdf v1.4 (which has no mode "x") we map it to "w" with + # `clobber=True`. if mode == "x": mode = "w" clobber = False diff --git a/imaspy/backends/netcdf/ids2nc.py b/imaspy/backends/netcdf/ids2nc.py index 45a04d6b..0328b635 100644 --- a/imaspy/backends/netcdf/ids2nc.py +++ b/imaspy/backends/netcdf/ids2nc.py @@ -187,8 +187,14 @@ def create_variables(self) -> None: else: dtype = dtypes[metadata.data_type] - if version.parse(netCDF4.__version__) < version.parse("1.7.0") and dtype is dtypes[IDSDataType.CPX]: - raise InvalidNetCDFEntry(f"Found complex data in {var_name}, NetCDF 1.7.0 or later is required for complex data types") + if ( + version.parse(netCDF4.__version__) < version.parse("1.7.0") + and dtype is dtypes[IDSDataType.CPX] + ): + raise InvalidNetCDFEntry( + f"Found complex data in {var_name}, NetCDF 1.7.0 or" + f" later is required for complex data types" + ) kwargs = {} if dtype is not str: # Enable compression: if version.parse(netCDF4.__version__) > version.parse("1.4.1"): diff --git a/imaspy/backends/netcdf/nc_validate.py b/imaspy/backends/netcdf/nc_validate.py index 07a7ad78..f7528a8a 100644 --- a/imaspy/backends/netcdf/nc_validate.py +++ b/imaspy/backends/netcdf/nc_validate.py @@ -23,7 +23,7 @@ def validate_netcdf_file(filename: str) -> None: # additional variables are smuggled inside: groups = [dataset] + [dataset[group] for group in dataset.groups] for group in groups: - group_name = group.path.split('/')[-1] + group_name = group.path.split("/")[-1] if group.variables or group.dimensions: raise InvalidNetCDFEntry( "NetCDF file should not have variables or dimensions in the " diff --git a/imaspy/ids_primitive.py b/imaspy/ids_primitive.py index e27eb93f..71b1744a 100644 --- a/imaspy/ids_primitive.py +++ b/imaspy/ids_primitive.py @@ -481,7 +481,10 @@ def _cast_value(self, value): value = np.asanyarray(value) if value.dtype != dtype: logger.info(_CONVERT_MSG, value.dtype, self) - value = np.asarray(value, dtype=dtype,) + value = 
np.asarray( + value, + dtype=dtype, + ) if value.ndim != self.metadata.ndim: raise ValueError(f"Trying to assign a {value.ndim}D value to {self!r}.") return value diff --git a/imaspy/test/test_cli.py b/imaspy/test/test_cli.py index e6a420c7..fdea00f4 100644 --- a/imaspy/test/test_cli.py +++ b/imaspy/test/test_cli.py @@ -20,7 +20,10 @@ def test_imaspy_version(requires_imas): @pytest.mark.cli -@pytest.mark.skipif(not has_imas or ll_interface._al_version < Version("5.0"), reason="Needs AL >= 5 AND Requires IMAS Core.") +@pytest.mark.skipif( + not has_imas or ll_interface._al_version < Version("5.0"), + reason="Needs AL >= 5 AND Requires IMAS Core.", +) def test_db_analysis(tmp_path): # This only tests the happy flow, error handling is not tested db_path = tmp_path / "test_db_analysis" diff --git a/imaspy/test/test_minimal_types.py b/imaspy/test/test_minimal_types.py index 0bb9ac30..d4614de5 100644 --- a/imaspy/test/test_minimal_types.py +++ b/imaspy/test/test_minimal_types.py @@ -62,7 +62,11 @@ def test_assign_str_1d(minimal, caplog): # Prevent the expected numpy ComplexWarnings from cluttering pytest output -@pytest.mark.filterwarnings("ignore::numpy.ComplexWarning" if version.parse(np.__version__) < version.parse("2.0.0") else "ignore::numpy.exceptions.ComplexWarning") +@pytest.mark.filterwarnings( + "ignore::numpy.ComplexWarning" + if version.parse(np.__version__) < version.parse("2.0.0") + else "ignore::numpy.exceptions.ComplexWarning" +) @pytest.mark.parametrize("typ, max_dim", [("flt", 6), ("cpx", 6), ("int", 3)]) def test_assign_numeric_types(minimal, caplog, typ, max_dim): caplog.set_level("INFO", "imaspy") @@ -88,7 +92,11 @@ def test_assign_numeric_types(minimal, caplog, typ, max_dim): len(caplog.records) == 1 elif dim == other_ndim >= 1 and other_typ == "cpx": # np allows casting of complex to float or int, but warns: - with pytest.warns(np.ComplexWarning if version.parse(np.__version__) < version.parse("2.0.0") else np.exceptions.ComplexWarning): + with 
pytest.warns( + np.ComplexWarning + if version.parse(np.__version__) < version.parse("2.0.0") + else np.exceptions.ComplexWarning + ): caplog.clear() minimal[name].value = value assert len(caplog.records) == 1 From 816bbd43784c3cb13bd8481326bb7eab6693342b Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Tue, 21 Jan 2025 13:56:46 +0000 Subject: [PATCH 41/97] Add tests for different versions of netcdf. --- conftest.py | 14 ++++++++++ imaspy/test/test_cli.py | 7 +++-- imaspy/test/test_dbentry.py | 3 ++- imaspy/test/test_helpers.py | 34 ++++++++++++++++++------- imaspy/test/test_nc_autofill.py | 45 +++++++++++++++++++++++++++++++-- pyproject.toml | 2 +- 6 files changed, 90 insertions(+), 15 deletions(-) diff --git a/conftest.py b/conftest.py index 20b26679..d1893f76 100644 --- a/conftest.py +++ b/conftest.py @@ -7,6 +7,7 @@ # - Fixtures that are useful across test modules import functools +import importlib import logging import os import sys @@ -72,6 +73,19 @@ def pytest_addoption(parser): } +# This is a dummy fixture, usually provided by pytest-xdist that isn't available +# in google3. +# The `worker_id` is only used by tests that require IMAS Core which we never +# run +try: + import pytest_xdist +except ImportError: + # If pytest-xdist is not available we provide a dummy worker_id fixture. 
+ @pytest.fixture() + def worker_id(): + return "master" + + @pytest.fixture(params=_BACKENDS) def backend(pytestconfig: pytest.Config, request: pytest.FixtureRequest): backends_provided = any(map(pytestconfig.getoption, _BACKENDS)) diff --git a/imaspy/test/test_cli.py b/imaspy/test/test_cli.py index fdea00f4..810acda6 100644 --- a/imaspy/test/test_cli.py +++ b/imaspy/test/test_cli.py @@ -13,7 +13,8 @@ @pytest.mark.cli -def test_imaspy_version(requires_imas): +@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") +def test_imaspy_version(): runner = CliRunner() result = runner.invoke(print_version) assert result.exit_code == 0 @@ -24,7 +25,9 @@ def test_imaspy_version(requires_imas): not has_imas or ll_interface._al_version < Version("5.0"), reason="Needs AL >= 5 AND Requires IMAS Core.", ) -def test_db_analysis(tmp_path): +def test_db_analysis( + tmp_path, +): # This only tests the happy flow, error handling is not tested db_path = tmp_path / "test_db_analysis" with DBEntry(f"imas:hdf5?path={db_path}", "w") as entry: diff --git a/imaspy/test/test_dbentry.py b/imaspy/test/test_dbentry.py index cb7ebe12..d67fae0d 100644 --- a/imaspy/test/test_dbentry.py +++ b/imaspy/test/test_dbentry.py @@ -82,7 +82,8 @@ def test_dbentry_constructor(): assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) -def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path, requires_imas): +@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") +def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): entry = open_dbentry(imaspy.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 diff --git a/imaspy/test/test_helpers.py b/imaspy/test/test_helpers.py index 63a1cf79..8a651d93 100644 --- a/imaspy/test/test_helpers.py +++ b/imaspy/test/test_helpers.py @@ -93,7 +93,9 @@ def fill_with_random_data(structure, max_children=3): child.value = random_data(child.metadata.data_type, 
child.metadata.ndim) -def maybe_set_random_value(primitive: IDSPrimitive, leave_empty: float) -> None: +def maybe_set_random_value( + primitive: IDSPrimitive, leave_empty: float, skip_complex: bool +) -> None: """Set the value of an IDS primitive with a certain chance. If the IDSPrimitive has coordinates, then the size of the coordinates is taken into @@ -153,7 +155,7 @@ def maybe_set_random_value(primitive: IDSPrimitive, leave_empty: float) -> None: # Scale chance of not setting a coordinate by our number of dimensions, # such that overall there is roughly a 50% chance that any coordinate # remains empty - maybe_set_random_value(coordinate_element, 0.5**ndim) + maybe_set_random_value(coordinate_element, 0.5**ndim, skip_complex) size = coordinate_element.shape[0 if coordinate.references else dim] if coordinate.size: # coordinateX = OR 1...1 @@ -176,13 +178,18 @@ def maybe_set_random_value(primitive: IDSPrimitive, leave_empty: float) -> None: elif primitive.metadata.data_type is IDSDataType.FLT: primitive.value = np.random.random_sample(size=shape) elif primitive.metadata.data_type is IDSDataType.CPX: + if skip_complex: + # If we are skipping complex numbers then leave the value empty. + return val = np.random.random_sample(shape) + 1j * np.random.random_sample(shape) primitive.value = val else: raise ValueError(f"Invalid IDS data type: {primitive.metadata.data_type}") -def fill_consistent(structure: IDSStructure, leave_empty: float = 0.2): +def fill_consistent( + structure: IDSStructure, leave_empty: float = 0.2, skip_complex: bool = False +): """Fill a structure with random data, such that coordinate sizes are consistent. Sets homogeneous_time to heterogeneous (always). @@ -196,6 +203,9 @@ def fill_consistent(structure: IDSStructure, leave_empty: float = 0.2): exclusive_coordinates: list of IDSPrimitives that have exclusive alternative coordinates. 
These are initially not filled, and only at the very end of filling an IDSToplevel, a choice is made between the exclusive coordinates. + skip_complex: Whether to skip over populating complex numbers. This is + useful for maintaining compatibility with older versions of netCDF4 + (<1.7.0) where complex numbers are not supported. """ if isinstance(structure, IDSToplevel): unsupported_ids_name = ( @@ -218,7 +228,9 @@ def fill_consistent(structure: IDSStructure, leave_empty: float = 0.2): for child in structure: if isinstance(child, IDSStructure): - exclusive_coordinates.extend(fill_consistent(child, leave_empty)) + exclusive_coordinates.extend( + fill_consistent(child, leave_empty, skip_complex) + ) elif isinstance(child, IDSStructArray): if child.metadata.coordinates[0].references: @@ -230,7 +242,7 @@ def fill_consistent(structure: IDSStructure, leave_empty: float = 0.2): if isinstance(coor, IDSPrimitive): # maybe fill with random data: try: - maybe_set_random_value(coor, leave_empty) + maybe_set_random_value(coor, leave_empty, skip_complex) except (RuntimeError, ValueError): pass child.resize(len(coor)) @@ -244,7 +256,9 @@ def fill_consistent(structure: IDSStructure, leave_empty: float = 0.2): else: child.resize(child.metadata.coordinates[0].size or 1) for ele in child: - exclusive_coordinates.extend(fill_consistent(ele, leave_empty)) + exclusive_coordinates.extend( + fill_consistent(ele, leave_empty, skip_complex) + ) else: # IDSPrimitive coordinates = child.metadata.coordinates @@ -256,7 +270,7 @@ def fill_consistent(structure: IDSStructure, leave_empty: float = 0.2): exclusive_coordinates.append(child) else: try: - maybe_set_random_value(child, leave_empty) + maybe_set_random_value(child, leave_empty, skip_complex) except (RuntimeError, ValueError): pass @@ -278,7 +292,7 @@ def fill_consistent(structure: IDSStructure, leave_empty: float = 0.2): coor = filled_refs.pop() unset_coordinate(coor) - maybe_set_random_value(element, leave_empty) + 
maybe_set_random_value(element, leave_empty, skip_complex) else: return exclusive_coordinates @@ -301,7 +315,9 @@ def callback(element): visit_children(callback, parent) -def compare_children(st1, st2, deleted_paths=set(), accept_lazy=False): +def compare_children( + st1, st2, deleted_paths=set(), accept_lazy=False, skip_complex=False +): """Perform a deep compare of two structures using asserts. All paths in ``deleted_paths`` are asserted that they are deleted in st2. diff --git a/imaspy/test/test_nc_autofill.py b/imaspy/test/test_nc_autofill.py index e0d3fe91..01280672 100644 --- a/imaspy/test/test_nc_autofill.py +++ b/imaspy/test/test_nc_autofill.py @@ -1,11 +1,52 @@ from imaspy.db_entry import DBEntry +from imaspy.exception import InvalidNetCDFEntry from imaspy.test.test_helpers import compare_children, fill_consistent +import re +import pytest +import netCDF4 +from packaging import version -def test_nc_latest_dd_autofill_put_get(ids_name, tmp_path): +def test_nc_latest_dd_autofill_put_get_skip_complex(ids_name, tmp_path): with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: ids = entry.factory.new(ids_name) - fill_consistent(ids, 0.5) + fill_consistent(ids, leave_empty=0.5, skip_complex=True) + + entry.put(ids) + ids2 = entry.get(ids_name) + + compare_children(ids, ids2) + + +@pytest.mark.skipif( + version.parse(netCDF4.__version__) < version.parse("1.7.0"), + reason="NetCDF4 versions < 1.7.0 do not support complex numbers", +) +def test_nc_latest_dd_autofill_put_get_with_complex(ids_name, tmp_path): + with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: + ids = entry.factory.new(ids_name) + fill_consistent(ids, leave_empty=0.5, skip_complex=False) + try: + entry.put(ids) + ids2 = entry.get(ids_name) + compare_children(ids, ids2) + except InvalidNetCDFEntry as e: + # This is expected, as these versions of NetCDF4 do not support + # complex numbers. 
+ if not re.search( + r".*NetCDF 1.7.0 or later is required for complex data types", str(e) + ): + raise InvalidNetCDFEntry(e) from e + + +@pytest.mark.skipif( + version.parse(netCDF4.__version__) >= version.parse("1.7.0"), + reason="NetCDF4 versions >= 1.7.0 support complex numbers", +) +def test_nc_latest_dd_autofill_put_get_with_complex(ids_name, tmp_path): + with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: + ids = entry.factory.new(ids_name) + fill_consistent(ids, leave_empty=0.5, skip_complex=False) entry.put(ids) ids2 = entry.get(ids_name) diff --git a/pyproject.toml b/pyproject.toml index dccd6912..36e5fffb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -79,7 +79,7 @@ docs = [ ] imas-core = [ "imas-core@git+ssh://git@git.iter.org/imas/al-core.git@main" ] netcdf = [ - "netCDF4>=1.7.0", + "netCDF4>=1.4.1", ] h5py = [ "h5py", From 2f7b591cd9a4b825d92d3a47677171c4fc5e8ff9 Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Tue, 21 Jan 2025 14:00:44 +0000 Subject: [PATCH 42/97] Minor changes to tests. --- conftest.py | 5 ----- imaspy/test/test_cli.py | 3 +-- imaspy/test/test_dbentry.py | 3 +-- imaspy/test/test_helpers.py | 4 +--- 4 files changed, 3 insertions(+), 12 deletions(-) diff --git a/conftest.py b/conftest.py index d1893f76..91a9a046 100644 --- a/conftest.py +++ b/conftest.py @@ -7,7 +7,6 @@ # - Fixtures that are useful across test modules import functools -import importlib import logging import os import sys @@ -73,10 +72,6 @@ def pytest_addoption(parser): } -# This is a dummy fixture, usually provided by pytest-xdist that isn't available -# in google3. 
-# The `worker_id` is only used by tests that require IMAS Core which we never -# run try: import pytest_xdist except ImportError: diff --git a/imaspy/test/test_cli.py b/imaspy/test/test_cli.py index 810acda6..d3642410 100644 --- a/imaspy/test/test_cli.py +++ b/imaspy/test/test_cli.py @@ -13,8 +13,7 @@ @pytest.mark.cli -@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") -def test_imaspy_version(): +def test_imaspy_version(requires_imas): runner = CliRunner() result = runner.invoke(print_version) assert result.exit_code == 0 diff --git a/imaspy/test/test_dbentry.py b/imaspy/test/test_dbentry.py index d67fae0d..cb7ebe12 100644 --- a/imaspy/test/test_dbentry.py +++ b/imaspy/test/test_dbentry.py @@ -82,8 +82,7 @@ def test_dbentry_constructor(): assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) -@pytest.mark.skipif(not has_imas, reason="Requires IMAS Core.") -def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): +def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path, requires_imas): entry = open_dbentry(imaspy.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 diff --git a/imaspy/test/test_helpers.py b/imaspy/test/test_helpers.py index 8a651d93..0b7e2b43 100644 --- a/imaspy/test/test_helpers.py +++ b/imaspy/test/test_helpers.py @@ -315,9 +315,7 @@ def callback(element): visit_children(callback, parent) -def compare_children( - st1, st2, deleted_paths=set(), accept_lazy=False, skip_complex=False -): +def compare_children(st1, st2, deleted_paths=set(), accept_lazy=False): """Perform a deep compare of two structures using asserts. All paths in ``deleted_paths`` are asserted that they are deleted in st2. From 3591f176cc5a9642f0c5be9c85bf54e725c29823 Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Tue, 21 Jan 2025 14:03:50 +0000 Subject: [PATCH 43/97] Rename test. 
--- imaspy/test/test_nc_autofill.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/imaspy/test/test_nc_autofill.py b/imaspy/test/test_nc_autofill.py index 01280672..806caa7a 100644 --- a/imaspy/test/test_nc_autofill.py +++ b/imaspy/test/test_nc_autofill.py @@ -22,7 +22,9 @@ def test_nc_latest_dd_autofill_put_get_skip_complex(ids_name, tmp_path): version.parse(netCDF4.__version__) < version.parse("1.7.0"), reason="NetCDF4 versions < 1.7.0 do not support complex numbers", ) -def test_nc_latest_dd_autofill_put_get_with_complex(ids_name, tmp_path): +def test_nc_latest_dd_autofill_put_get_with_complex_older_netCDF4( + ids_name, tmp_path +): with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: ids = entry.factory.new(ids_name) fill_consistent(ids, leave_empty=0.5, skip_complex=False) @@ -43,7 +45,9 @@ def test_nc_latest_dd_autofill_put_get_with_complex(ids_name, tmp_path): version.parse(netCDF4.__version__) >= version.parse("1.7.0"), reason="NetCDF4 versions >= 1.7.0 support complex numbers", ) -def test_nc_latest_dd_autofill_put_get_with_complex(ids_name, tmp_path): +def test_nc_latest_dd_autofill_put_get_with_complex_newer_netCDF4( + ids_name, tmp_path +): with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: ids = entry.factory.new(ids_name) fill_consistent(ids, leave_empty=0.5, skip_complex=False) From 0d48b0b5bf4bcdadb0580219685522f50b57c2cd Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Tue, 21 Jan 2025 14:28:48 +0000 Subject: [PATCH 44/97] Update numpy exception version change to 1.25. 
--- imaspy/test/test_minimal_types.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/imaspy/test/test_minimal_types.py b/imaspy/test/test_minimal_types.py index d4614de5..07a51346 100644 --- a/imaspy/test/test_minimal_types.py +++ b/imaspy/test/test_minimal_types.py @@ -64,7 +64,7 @@ def test_assign_str_1d(minimal, caplog): # Prevent the expected numpy ComplexWarnings from cluttering pytest output @pytest.mark.filterwarnings( "ignore::numpy.ComplexWarning" - if version.parse(np.__version__) < version.parse("2.0.0") + if version.parse(np.__version__) < version.parse("1.25") else "ignore::numpy.exceptions.ComplexWarning" ) @pytest.mark.parametrize("typ, max_dim", [("flt", 6), ("cpx", 6), ("int", 3)]) @@ -94,7 +94,7 @@ def test_assign_numeric_types(minimal, caplog, typ, max_dim): # np allows casting of complex to float or int, but warns: with pytest.warns( np.ComplexWarning - if version.parse(np.__version__) < version.parse("2.0.0") + if version.parse(np.__version__) < version.parse("1.25") else np.exceptions.ComplexWarning ): caplog.clear() From 68c225dde4768c12a99492ab14de191125869944 Mon Sep 17 00:00:00 2001 From: Anushan Fernando Date: Tue, 21 Jan 2025 15:53:59 +0000 Subject: [PATCH 45/97] Fix bug in skip logic of tests. 
--- imaspy/test/test_nc_autofill.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/imaspy/test/test_nc_autofill.py b/imaspy/test/test_nc_autofill.py index 806caa7a..9bbc0f1e 100644 --- a/imaspy/test/test_nc_autofill.py +++ b/imaspy/test/test_nc_autofill.py @@ -19,7 +19,7 @@ def test_nc_latest_dd_autofill_put_get_skip_complex(ids_name, tmp_path): @pytest.mark.skipif( - version.parse(netCDF4.__version__) < version.parse("1.7.0"), + version.parse(netCDF4.__version__) >= version.parse("1.7.0"), reason="NetCDF4 versions < 1.7.0 do not support complex numbers", ) def test_nc_latest_dd_autofill_put_get_with_complex_older_netCDF4( @@ -42,7 +42,7 @@ def test_nc_latest_dd_autofill_put_get_with_complex_older_netCDF4( @pytest.mark.skipif( - version.parse(netCDF4.__version__) >= version.parse("1.7.0"), + version.parse(netCDF4.__version__) < version.parse("1.7.0"), reason="NetCDF4 versions >= 1.7.0 support complex numbers", ) def test_nc_latest_dd_autofill_put_get_with_complex_newer_netCDF4( From 00e1b38bbc5ecf039a203601ceba6a19615a17eb Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Wed, 22 Jan 2025 16:22:45 +0100 Subject: [PATCH 46/97] fixed ci scripts and formatting --- ci/build_dd_zip.sh | 11 +- ci/build_docs_and_dist.sh | 8 +- ci/linting.sh | 13 +- ci/run_benchmark.sh | 10 +- ci/run_pytest.sh | 17 ++- .../core_instant_changes_identifier.xml | 15 --- .../core_sources/core_source_identifier.xml | 76 ----------- .../core_transport_identifier.xml | 25 ---- .../edge_sources/edge_source_identifier.xml | 31 ----- .../edge_transport_identifier.xml | 25 ---- .../em_coupling_quantity_identifier.xml | 27 ---- .../equilibrium_profiles_2d_identifier.xml | 21 ---- .../magnetics_flux_loop_type_identifier.xml | 14 --- .../magnetics_probe_type_identifier.xml | 16 --- ...magnetics_rogowski_measured_identifier.xml | 13 -- .../mhd_linear_ballooning_identifier.xml | 13 -- .../mhd_linear_equations_identifier.xml | 15 --- .../mhd_linear_model_identifier.xml | 14 --- 
.../mhd_linear_perturbation_identifier.xml | 24 ---- .../neutron_event_identifier.xml | 18 --- .../neutron_mode_identifier.xml | 15 --- .../operational_sensor_type_identifier.xml | 15 --- .../pf_active_coil_function_identifier.xml | 10 -- .../plasma_source_identifier.xml | 70 ----------- .../plasma_transport_identifier.xml | 38 ------ .../radiation/radiation_identifier.xml | 23 ---- .../refractometer_formula_identifier.xml | 12 -- .../e_field_critical_identifier.xml | 12 -- ...momentum_critical_avalanche_identifier.xml | 11 -- .../momentum_critical_hot_tail_identifier.xml | 11 -- ...spectrometer_visible_method_identifier.xml | 13 -- .../crystal_mesh_identifier.xml | 12 -- ...ectro_x_instrument_function_identifier.xml | 14 --- .../spi/shatter_cone_identifier.xml | 11 -- .../utilities/coordinate_identifier.xml | 51 -------- .../curved_object_curvature_identifier.xml | 16 --- .../curved_object_geometry_identifier.xml | 14 --- .../utilities/data_type_identifier.xml | 12 -- .../distribution_source_identifier.xml | 36 ------ .../utilities/emission_grid_identifier.xml | 9 -- .../ggd_geometry_content_identifier.xml | 21 ---- .../identifiers/utilities/ggd_identifier.xml | 29 ----- .../utilities/ggd_space_identifier.xml | 18 --- .../utilities/ggd_subset_identifier.xml | 70 ----------- .../utilities/materials_identifier.xml | 43 ------- .../utilities/midplane_identifier.xml | 14 --- .../utilities/neutrals_identifier.xml | 20 --- .../utilities/occurrence_type_identifier.xml | 11 -- .../utilities/optical_element_identifier.xml | 13 -- .../optical_element_material_identifier.xml | 12 -- .../utilities/orbit_type_identifier.xml | 20 --- .../poloidal_plane_coordinates_identifier.xml | 119 ------------------ .../species_reference_identifier.xml | 19 --- .../utilities/statistics_type_identifier.xml | 17 --- .../utilities/surface_geometry_identifier.xml | 16 --- .../identifiers/utilities/wave_identifier.xml | 15 --- .../wall/wall_component_identifier.xml | 18 --- 
.../wall_description_2d_type_identifier.xml | 13 -- .../core_instant_changes_identifier.xml | 15 --- .../core_sources/core_source_identifier.xml | 76 ----------- .../core_transport_identifier.xml | 25 ---- .../edge_sources/edge_source_identifier.xml | 31 ----- .../edge_transport_identifier.xml | 25 ---- .../em_coupling_quantity_identifier.xml | 27 ---- .../equilibrium_profiles_2d_identifier.xml | 21 ---- .../magnetics_flux_loop_type_identifier.xml | 14 --- .../magnetics_probe_type_identifier.xml | 16 --- ...magnetics_rogowski_measured_identifier.xml | 13 -- .../mhd_linear_ballooning_identifier.xml | 13 -- .../mhd_linear_equations_identifier.xml | 15 --- .../mhd_linear_model_identifier.xml | 14 --- .../mhd_linear_perturbation_identifier.xml | 24 ---- .../neutron_event_identifier.xml | 18 --- .../neutron_mode_identifier.xml | 15 --- .../operational_sensor_type_identifier.xml | 15 --- .../pf_active_coil_function_identifier.xml | 10 -- .../plasma_source_identifier.xml | 70 ----------- .../plasma_transport_identifier.xml | 38 ------ .../radiation/radiation_identifier.xml | 23 ---- .../refractometer_formula_identifier.xml | 12 -- .../e_field_critical_identifier.xml | 12 -- ...momentum_critical_avalanche_identifier.xml | 11 -- .../momentum_critical_hot_tail_identifier.xml | 11 -- ...spectrometer_visible_method_identifier.xml | 13 -- .../crystal_mesh_identifier.xml | 12 -- ...ectro_x_instrument_function_identifier.xml | 14 --- .../spi/shatter_cone_identifier.xml | 11 -- .../utilities/coordinate_identifier.xml | 51 -------- .../curved_object_curvature_identifier.xml | 16 --- .../curved_object_geometry_identifier.xml | 14 --- .../utilities/data_type_identifier.xml | 12 -- .../distribution_source_identifier.xml | 36 ------ .../utilities/emission_grid_identifier.xml | 9 -- .../ggd_geometry_content_identifier.xml | 21 ---- .../identifiers/utilities/ggd_identifier.xml | 29 ----- .../utilities/ggd_space_identifier.xml | 18 --- .../utilities/ggd_subset_identifier.xml | 70 
----------- .../utilities/materials_identifier.xml | 43 ------- .../utilities/midplane_identifier.xml | 14 --- .../utilities/neutrals_identifier.xml | 20 --- .../utilities/occurrence_type_identifier.xml | 11 -- .../utilities/optical_element_identifier.xml | 13 -- .../optical_element_material_identifier.xml | 12 -- .../utilities/orbit_type_identifier.xml | 20 --- .../poloidal_plane_coordinates_identifier.xml | 119 ------------------ .../species_reference_identifier.xml | 19 --- .../utilities/statistics_type_identifier.xml | 17 --- .../utilities/surface_geometry_identifier.xml | 16 --- .../identifiers/utilities/wave_identifier.xml | 15 --- .../wall/wall_component_identifier.xml | 18 --- .../wall_description_2d_type_identifier.xml | 13 -- imas/backends/netcdf/ids2nc.py | 1 + imas/test/test_nc_autofill.py | 8 +- 113 files changed, 46 insertions(+), 2502 deletions(-) delete mode 100644 imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml delete mode 100644 
imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml delete 
mode 100644 imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml delete mode 100644 imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml delete mode 100644 
imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml delete mode 100644 
imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml delete mode 
100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml delete mode 100644 imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml diff --git a/ci/build_dd_zip.sh b/ci/build_dd_zip.sh index e225e2a2..1b95bc4b 100755 --- a/ci/build_dd_zip.sh +++ b/ci/build_dd_zip.sh @@ -3,18 +3,23 @@ # Note: this script should be run from the root of the git repository # Debuggging: -set -e -o pipefail +if [[ "$(uname -n)" == *"bamboo"* ]]; then + set -e -o pipefail +fi echo "Loading modules..." 
# Set up environment such that module files can be loaded source /etc/profile.d/modules.sh module purge # Modules are supplied as arguments in the CI job: -module load $@ +if [ -z "$@" ]; then + module load Python +else + module load $@ +fi # Debuggging: echo "Done loading modules" -set -x # Build the DD zip rm -rf venv # Environment should be clean, but remove directory to be sure diff --git a/ci/build_docs_and_dist.sh b/ci/build_docs_and_dist.sh index a83ed031..1f077015 100755 --- a/ci/build_docs_and_dist.sh +++ b/ci/build_docs_and_dist.sh @@ -3,7 +3,9 @@ # Note: this script should be run from the root of the git repository # Debuggging: -set -e -o pipefail +if [[ "$(uname -n)" == *"bamboo"* ]]; then + set -e -o pipefail +fi echo "Loading modules:" $@ # Set up environment such that module files can be loaded @@ -14,8 +16,6 @@ module load $@ # Debuggging: echo "Done loading modules" -set -x - # Set up the testing venv rm -rf venv # Environment should be clean, but remove directory to be sure @@ -41,3 +41,5 @@ export SPHINXOPTS='-W -n --keep-going' # Run sphinx to create the documentation make -C docs clean html + +deactivate diff --git a/ci/linting.sh b/ci/linting.sh index 415ad1bf..d9164777 100755 --- a/ci/linting.sh +++ b/ci/linting.sh @@ -3,18 +3,23 @@ # Note: this script should be run from the root of the git repository # Debuggging: -set -e -o pipefail +if [[ "$(uname -n)" == *"bamboo"* ]]; then + set -e -o pipefail +fi echo "Loading modules..." 
# Set up environment such that module files can be loaded source /etc/profile.d/modules.sh module purge # Modules are supplied as arguments in the CI job: -module load $@ +if [ -z "$@" ]; then + module load Python +else + module load $@ +fi # Debuggging: echo "Done loading modules" -set -x # Create a venv rm -rf venv @@ -26,3 +31,5 @@ pip install --upgrade 'black >=24,<25' flake8 black --check imas flake8 imas + +deactivate \ No newline at end of file diff --git a/ci/run_benchmark.sh b/ci/run_benchmark.sh index 022804fd..daa9a012 100755 --- a/ci/run_benchmark.sh +++ b/ci/run_benchmark.sh @@ -17,12 +17,16 @@ source /etc/profile.d/modules.sh module purge # Modules are supplied as arguments in the CI job: # IMAS-AL-Python/5.2.1-intel-2023b-DD-3.41.0 Saxon-HE/12.4-Java-21 -module load IMAS-AL-Core/5.4.3-intel-2023b Saxon-HE/12.4-Java-21 +if [ -z "$@" ]; then + module load IMAS-AL-Core +else + module load $@ +fi + # Debuggging: echo "Done loading modules" -set -x # Export current PYTHONPATH so ASV benchmarks can import imas export ASV_PYTHONPATH="$PYTHONPATH" @@ -71,3 +75,5 @@ asv publish # And persistently store them cp -rf .asv/{results,html} "$BENCHMARKS_DIR" +deactivate + diff --git a/ci/run_pytest.sh b/ci/run_pytest.sh index 7b204bf8..b7490f96 100755 --- a/ci/run_pytest.sh +++ b/ci/run_pytest.sh @@ -3,19 +3,23 @@ # Note: this script should be run from the root of the git repository # Debuggging: -set -e -o pipefail +if [[ "$(uname -n)" == *"bamboo"* ]]; then + set -e -o pipefail +fi echo "Loading modules:" $@ # Set up environment such that module files can be loaded source /etc/profile.d/modules.sh module purge # Modules are supplied as arguments in the CI job: -module load $@ +if [ -z "$@" ]; then + module load IMAS-AL-Core +else + module load $@ +fi # Debuggging: echo "Done loading modules" -set -x - # Set up the testing venv rm -rf venv # Environment should be clean, but remove directory to be sure @@ -33,5 +37,8 @@ pip freeze # Clean artifacts created by pytest 
rm -f junit.xml rm -rf htmlcov +mkdir -p ~/tmp +export PYTEST_DEBUG_TEMPROOT=~/tmp +python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=html --junit-xml=junit.xml -x -python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=html --junit-xml=junit.xml +deactivate diff --git a/imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml b/imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml deleted file mode 100644 index 2cb8a799..00000000 --- a/imas/assets/IDSDef/identifiers/core_instant_changes/core_instant_changes_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Translation table for types of instant changes to the plasma state. -
- - - -0 -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml b/imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml deleted file mode 100644 index e737a204..00000000 --- a/imas/assets/IDSDef/identifiers/core_sources/core_source_identifier.xml +++ /dev/null @@ -1,76 +0,0 @@ - - -
-Translation table for sources of particles, momentum and heat. -
- - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 - -100 -101 -102 -103 -104 -105 -106 -107 -108 -109 - -200 -201 -202 -203 - -303 -304 -305 - -400 -401 -402 -403 - -501 - -601 -602 -603 - -801 -802 - -901 -902 -903 -904 -905 -906 -907 -908 -909 - - - -
diff --git a/imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml b/imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml deleted file mode 100644 index 1723b17d..00000000 --- a/imas/assets/IDSDef/identifiers/core_transport/core_transport_identifier.xml +++ /dev/null @@ -1,25 +0,0 @@ - - -
- Translation table for different types of transport coefficients. -
- - - - -0 -1 -2 -3 -4 -5 -6 -19 -20 -21 -22 -23 -24 -25 - -
diff --git a/imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml b/imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml deleted file mode 100644 index aee46091..00000000 --- a/imas/assets/IDSDef/identifiers/edge_sources/edge_source_identifier.xml +++ /dev/null @@ -1,31 +0,0 @@ - - -
-Translation table for sources of particles, momentum and heat. -
- - - -0 - -1 -701 -702 -703 -801 -705 -706 -707 -708 -709 -710 - -305 -11 -7 -200 - -715 -716 - -
diff --git a/imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml b/imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml deleted file mode 100644 index 7c208d2d..00000000 --- a/imas/assets/IDSDef/identifiers/edge_transport/edge_transport_identifier.xml +++ /dev/null @@ -1,25 +0,0 @@ - - -
- Translation table for different types of transport coefficients. -
- - - - -0 -1 - -100 -101 -102 -103 - -200 -201 -202 -203 -204 -205 - -
diff --git a/imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml b/imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml deleted file mode 100644 index 8a53209c..00000000 --- a/imas/assets/IDSDef/identifiers/em_coupling/em_coupling_quantity_identifier.xml +++ /dev/null @@ -1,27 +0,0 @@ - - -
- Physical quantity described in the user-defined em_coupling matrix -
- - - -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 -15 -16 -17 -18 -
diff --git a/imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml b/imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml deleted file mode 100644 index 9c52b5b4..00000000 --- a/imas/assets/IDSDef/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml +++ /dev/null @@ -1,21 +0,0 @@ - - -
Various contributions to the B, j, and psi 2D maps
- - -0 -1 -2 -3 -4 -
diff --git a/imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml b/imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml deleted file mode 100644 index 0b5bd928..00000000 --- a/imas/assets/IDSDef/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Type of flux loop -
- - -1 -2 -3 -4 -5 -6 -
diff --git a/imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml b/imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml deleted file mode 100644 index ab59dcaa..00000000 --- a/imas/assets/IDSDef/identifiers/magnetics/magnetics_probe_type_identifier.xml +++ /dev/null @@ -1,16 +0,0 @@ - - -
-Type of magnetic field probe -
- - - - -1 -2 -3 -4 -5 -6 -
diff --git a/imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml b/imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml deleted file mode 100644 index dcadbf7a..00000000 --- a/imas/assets/IDSDef/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Quantity measured by the Rogowski coil -
- - -1 -2 -3 -4 -5 -
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml deleted file mode 100644 index d5a9793c..00000000 --- a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Balooning type of the MHD mode -
- - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml deleted file mode 100644 index 5c41868f..00000000 --- a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_equations_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Type of the MHD model used -
- - - - -1 -11 -2 -21 - -
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml deleted file mode 100644 index c184635a..00000000 --- a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_model_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Type of the MHD model used -
- - - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml b/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml deleted file mode 100644 index e23ca4fb..00000000 --- a/imas/assets/IDSDef/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml +++ /dev/null @@ -1,24 +0,0 @@ - - -
-Type of the perturbation -
- - - -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 - -
diff --git a/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml b/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml deleted file mode 100644 index 304864b3..00000000 --- a/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_event_identifier.xml +++ /dev/null @@ -1,18 +0,0 @@ - - -
-Translation table for type of events measured in the neutron detector -
- - - - -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml b/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml deleted file mode 100644 index b9d4c3a7..00000000 --- a/imas/assets/IDSDef/identifiers/neutron_diagnostic/neutron_mode_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Translation table for counting mode in the neutron detector -
- - - -1 -2 -3 -4 -5 - -
diff --git a/imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml b/imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml deleted file mode 100644 index a80425d6..00000000 --- a/imas/assets/IDSDef/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Type of mechanics sensor -
- - - -0 -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml b/imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml deleted file mode 100644 index 37d974ef..00000000 --- a/imas/assets/IDSDef/identifiers/pf_active/pf_active_coil_function_identifier.xml +++ /dev/null @@ -1,10 +0,0 @@ - - -
Functions of PF coils
- - -0 -1 -2 - -
\ No newline at end of file diff --git a/imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml b/imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml deleted file mode 100644 index 910a7907..00000000 --- a/imas/assets/IDSDef/identifiers/plasma_sources/plasma_source_identifier.xml +++ /dev/null @@ -1,70 +0,0 @@ - - -
-Translation table for sources of particles, momentum and heat. -
- - - -0 - -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 - -100 -101 -102 -103 -104 -105 -106 -107 -108 -109 - -200 -201 -202 -203 - -303 -304 -305 - -400 -401 -402 -403 - -501 - -603 - -701 -702 -703 -705 -706 -707 -708 -709 -710 -715 -716 - -801 -802 - -
diff --git a/imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml b/imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml deleted file mode 100644 index 4e229087..00000000 --- a/imas/assets/IDSDef/identifiers/plasma_transport/plasma_transport_identifier.xml +++ /dev/null @@ -1,38 +0,0 @@ - - -
- Translation table for different types of transport coefficients. -
- - - - -0 -1 - -2 -3 -4 -5 -6 -19 -20 -21 -22 -23 -24 -25 - -100 -101 -102 -103 - -200 -201 -202 -203 -204 -205 - -
diff --git a/imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml b/imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml deleted file mode 100644 index 1c5a713d..00000000 --- a/imas/assets/IDSDef/identifiers/radiation/radiation_identifier.xml +++ /dev/null @@ -1,23 +0,0 @@ - - -
-Translation table for radiation processes -
- - 0 - 6 - 8 - 9 - 10 - 11 - 501 - 901 - 902 - 903 - 904 - 905 - 906 - 907 - 908 - 909 -
diff --git a/imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml b/imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml deleted file mode 100644 index 25b8e077..00000000 --- a/imas/assets/IDSDef/identifiers/refractometer/refractometer_formula_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
-Translation table for analytical formulas used by refractometer post-processing -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml b/imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml deleted file mode 100644 index 7fdabea5..00000000 --- a/imas/assets/IDSDef/identifiers/runaway_electrons/e_field_critical_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
-Definition of e_field_critical -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml b/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml deleted file mode 100644 index 2d76f750..00000000 --- a/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
-Definition of momentum_critical_avalanche -
- - - -1 - -
diff --git a/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml b/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml deleted file mode 100644 index 9804a1b5..00000000 --- a/imas/assets/IDSDef/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
-Definition of momentum_critical_hot_tail -
- - - -1 - -
diff --git a/imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml b/imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml deleted file mode 100644 index 88c97626..00000000 --- a/imas/assets/IDSDef/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Fitting method used to calculate isotope ratios -
- - - - -1 -2 - -
diff --git a/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml b/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml deleted file mode 100644 index 3d3d97c0..00000000 --- a/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
-Crystal mesh type -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml b/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml deleted file mode 100644 index 66acb45c..00000000 --- a/imas/assets/IDSDef/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Translation table for instrument function for X ray crystal spectrometer -
- - - -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml b/imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml deleted file mode 100644 index 5273b103..00000000 --- a/imas/assets/IDSDef/identifiers/spi/shatter_cone_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
-Definition of the shatter cone -
- - - -1 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml deleted file mode 100644 index dbaa3853..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/coordinate_identifier.xml +++ /dev/null @@ -1,51 +0,0 @@ - - -
-Translation table for coordinate_identifier_definitions. -
- - - -0 -1 -2 -3 -4 -5 - -10 -11 -12 -13 -14 - -20 -21 -22 - -100 -101 -102 -103 -104 -105 -106 -107 -108 -200 -201 -202 -203 - -300 -301 -302 - -400 -402 -403 -404 - -500 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml deleted file mode 100644 index 261243c9..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/curved_object_curvature_identifier.xml +++ /dev/null @@ -1,16 +0,0 @@ - - -
-Curvature of a curved object -
- - - - -1 -2 -3 -4 -5 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml deleted file mode 100644 index 5117019b..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/curved_object_geometry_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Geometry of the contour of a planar or curved object -
- - - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml deleted file mode 100644 index ab94762b..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/data_type_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
- Dataset type table -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml deleted file mode 100644 index 0814b580..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/distribution_source_identifier.xml +++ /dev/null @@ -1,36 +0,0 @@ - - -
-Translation table for Heating and Current Drive (HCD) distsource types, i.e. types particles source in Fokker-Planck equation (from NBI and nuclear reactions). -
- - - - -0 -1 - -100 - -101 -102 -103 -104 - -105 -106 -107 -108 - -109 -110 - -111 -112 - -113 -114 - -1000 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml deleted file mode 100644 index d1002573..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/emission_grid_identifier.xml +++ /dev/null @@ -1,9 +0,0 @@ - - -
List of coordinate systems for describing the poloidal plane
- - - -1 -
diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml deleted file mode 100644 index 32a039e2..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/ggd_geometry_content_identifier.xml +++ /dev/null @@ -1,21 +0,0 @@ - - -
Translation table for ggd_space_identifier_definitions.
- - - - - - - - - - - -0 -1 -11 -21 -31 -32 -
\ No newline at end of file diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml deleted file mode 100644 index b3389e68..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/ggd_identifier.xml +++ /dev/null @@ -1,29 +0,0 @@ - - -
Translation table for ggd_identifier_definitions.
- - - - - - - - - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -100 -
diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml deleted file mode 100644 index dee0346c..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/ggd_space_identifier.xml +++ /dev/null @@ -1,18 +0,0 @@ - - -
Translation table for ggd_space_identifier_definitions.
- - - - - - - - - - -0 -1 -2 -3 -
\ No newline at end of file diff --git a/imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml deleted file mode 100644 index 3e3a59c6..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/ggd_subset_identifier.xml +++ /dev/null @@ -1,70 +0,0 @@ - - -
Translation table for ggd_subset_identifier_definitions.
- - - - - - - - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 -15 -16 -17 -18 -19 -20 -21 -22 -23 -24 -25 -26 -27 -28 -29 -30 -31 -32 -33 -34 -35 -36 -37 -38 -39 -40 -41 -42 -43 -44 -45 -46 -47 -48 -100 -101 -102 -103 -104 -105 -106 -
diff --git a/imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml deleted file mode 100644 index 4494f03b..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/materials_identifier.xml +++ /dev/null @@ -1,43 +0,0 @@ - - -
-Materials used in the device mechanical structures -
- - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -17 -9 -10 -11 -12 -13 -14 -15 -16 -18 -19 -20 -21 -22 -23 -24 -25 -26 -27 -28 -29 -30 - - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml deleted file mode 100644 index 3039e263..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/midplane_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
- Translation table for identifying different midplane definitions -
- - - -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml deleted file mode 100644 index e29024ee..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/neutrals_identifier.xml +++ /dev/null @@ -1,20 +0,0 @@ - - -
- Translation table for identifying different types of neutral. - The neutrals are characterised by their energy and source of the neutrals. -
- - - - - - - - -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml deleted file mode 100644 index e6554554..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/occurrence_type_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
List of possible occurrence types
- - - -1 -2 -3 -4 -
diff --git a/imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml deleted file mode 100644 index f560fdec..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/optical_element_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
- Translation table for identifying optical element types -
- - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml deleted file mode 100644 index 9cd99c1f..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/optical_element_material_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
- Translation table for identifying optical element types -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml deleted file mode 100644 index 2c55a7cb..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/orbit_type_identifier.xml +++ /dev/null @@ -1,20 +0,0 @@ - - -
-Translation table for orbit_type_identifier definitions. -
- - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml deleted file mode 100644 index 9e3c42f6..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/poloidal_plane_coordinates_identifier.xml +++ /dev/null @@ -1,119 +0,0 @@ - - -
List of coordinate systems for describing the poloidal plane
- - - - -1 -2 -11 -12 -13 - -14 -15 -16 - -21 -22 -23 - -24 -25 -26 - - -31 -32 -33 - -34 -35 -36 - -41 -42 -43 - -44 -45 -46 - -51 -52 -53 - -54 -55 -56 - -91 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml deleted file mode 100644 index 449b89cf..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/species_reference_identifier.xml +++ /dev/null @@ -1,19 +0,0 @@ - - -
-Translation table for species_reference_identifier_definition. -
- - - - -0 -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml deleted file mode 100644 index e1f891df..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/statistics_type_identifier.xml +++ /dev/null @@ -1,17 +0,0 @@ - - -
- Translation table for statistics types -
- - - -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml deleted file mode 100644 index 3cbdaf62..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/surface_geometry_identifier.xml +++ /dev/null @@ -1,16 +0,0 @@ - - -
-Geometry of the contour of surface in a local coordinate system -
- - - -1 -2 -3 -4 -5 - - -
diff --git a/imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml b/imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml deleted file mode 100644 index 8d122a7e..00000000 --- a/imas/assets/IDSDef/identifiers/utilities/wave_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Translation table for wave field types. -
- - - - -0 -1 -2 -3 - -
diff --git a/imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml b/imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml deleted file mode 100644 index 23e6450e..00000000 --- a/imas/assets/IDSDef/identifiers/wall/wall_component_identifier.xml +++ /dev/null @@ -1,18 +0,0 @@ - - -
-Type of wall component -
- - - -0 -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml b/imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml deleted file mode 100644 index 4dcde3ce..00000000 --- a/imas/assets/IDSDef/identifiers/wall/wall_description_2d_type_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Type of wall component -
- - - -0 -1 -2 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml b/imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml deleted file mode 100644 index 2cb8a799..00000000 --- a/imas/assets/IDSDef_correct/identifiers/core_instant_changes/core_instant_changes_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Translation table for types of instant changes to the plasma state. -
- - - -0 -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml deleted file mode 100644 index e737a204..00000000 --- a/imas/assets/IDSDef_correct/identifiers/core_sources/core_source_identifier.xml +++ /dev/null @@ -1,76 +0,0 @@ - - -
-Translation table for sources of particles, momentum and heat. -
- - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 - -100 -101 -102 -103 -104 -105 -106 -107 -108 -109 - -200 -201 -202 -203 - -303 -304 -305 - -400 -401 -402 -403 - -501 - -601 -602 -603 - -801 -802 - -901 -902 -903 -904 -905 -906 -907 -908 -909 - - - -
diff --git a/imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml b/imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml deleted file mode 100644 index 1723b17d..00000000 --- a/imas/assets/IDSDef_correct/identifiers/core_transport/core_transport_identifier.xml +++ /dev/null @@ -1,25 +0,0 @@ - - -
- Translation table for different types of transport coefficients. -
- - - - -0 -1 -2 -3 -4 -5 -6 -19 -20 -21 -22 -23 -24 -25 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml deleted file mode 100644 index aee46091..00000000 --- a/imas/assets/IDSDef_correct/identifiers/edge_sources/edge_source_identifier.xml +++ /dev/null @@ -1,31 +0,0 @@ - - -
-Translation table for sources of particles, momentum and heat. -
- - - -0 - -1 -701 -702 -703 -801 -705 -706 -707 -708 -709 -710 - -305 -11 -7 -200 - -715 -716 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml b/imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml deleted file mode 100644 index 7c208d2d..00000000 --- a/imas/assets/IDSDef_correct/identifiers/edge_transport/edge_transport_identifier.xml +++ /dev/null @@ -1,25 +0,0 @@ - - -
- Translation table for different types of transport coefficients. -
- - - - -0 -1 - -100 -101 -102 -103 - -200 -201 -202 -203 -204 -205 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml b/imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml deleted file mode 100644 index 8a53209c..00000000 --- a/imas/assets/IDSDef_correct/identifiers/em_coupling/em_coupling_quantity_identifier.xml +++ /dev/null @@ -1,27 +0,0 @@ - - -
- Physical quantity described in the user-defined em_coupling matrix -
- - - -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 -15 -16 -17 -18 -
diff --git a/imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml b/imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml deleted file mode 100644 index 9c52b5b4..00000000 --- a/imas/assets/IDSDef_correct/identifiers/equilibrium/equilibrium_profiles_2d_identifier.xml +++ /dev/null @@ -1,21 +0,0 @@ - - -
Various contributions to the B, j, and psi 2D maps
- - -0 -1 -2 -3 -4 -
diff --git a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml deleted file mode 100644 index 0b5bd928..00000000 --- a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_flux_loop_type_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Type of flux loop -
- - -1 -2 -3 -4 -5 -6 -
diff --git a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml deleted file mode 100644 index ab59dcaa..00000000 --- a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_probe_type_identifier.xml +++ /dev/null @@ -1,16 +0,0 @@ - - -
-Type of magnetic field probe -
- - - - -1 -2 -3 -4 -5 -6 -
diff --git a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml b/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml deleted file mode 100644 index dcadbf7a..00000000 --- a/imas/assets/IDSDef_correct/identifiers/magnetics/magnetics_rogowski_measured_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Quantity measured by the Rogowski coil -
- - -1 -2 -3 -4 -5 -
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml deleted file mode 100644 index d5a9793c..00000000 --- a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_ballooning_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Balooning type of the MHD mode -
- - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml deleted file mode 100644 index 5c41868f..00000000 --- a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_equations_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Type of the MHD model used -
- - - - -1 -11 -2 -21 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml deleted file mode 100644 index c184635a..00000000 --- a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_model_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Type of the MHD model used -
- - - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml b/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml deleted file mode 100644 index e23ca4fb..00000000 --- a/imas/assets/IDSDef_correct/identifiers/mhd_linear/mhd_linear_perturbation_identifier.xml +++ /dev/null @@ -1,24 +0,0 @@ - - -
-Type of the perturbation -
- - - -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml b/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml deleted file mode 100644 index 304864b3..00000000 --- a/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_event_identifier.xml +++ /dev/null @@ -1,18 +0,0 @@ - - -
-Translation table for type of events measured in the neutron detector -
- - - - -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml b/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml deleted file mode 100644 index b9d4c3a7..00000000 --- a/imas/assets/IDSDef_correct/identifiers/neutron_diagnostic/neutron_mode_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Translation table for counting mode in the neutron detector -
- - - -1 -2 -3 -4 -5 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml deleted file mode 100644 index a80425d6..00000000 --- a/imas/assets/IDSDef_correct/identifiers/operational_instrumentation/operational_sensor_type_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Type of mechanics sensor -
- - - -0 -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml b/imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml deleted file mode 100644 index 37d974ef..00000000 --- a/imas/assets/IDSDef_correct/identifiers/pf_active/pf_active_coil_function_identifier.xml +++ /dev/null @@ -1,10 +0,0 @@ - - -
Functions of PF coils
- - -0 -1 -2 - -
\ No newline at end of file diff --git a/imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml deleted file mode 100644 index 910a7907..00000000 --- a/imas/assets/IDSDef_correct/identifiers/plasma_sources/plasma_source_identifier.xml +++ /dev/null @@ -1,70 +0,0 @@ - - -
-Translation table for sources of particles, momentum and heat. -
- - - -0 - -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 - -100 -101 -102 -103 -104 -105 -106 -107 -108 -109 - -200 -201 -202 -203 - -303 -304 -305 - -400 -401 -402 -403 - -501 - -603 - -701 -702 -703 -705 -706 -707 -708 -709 -710 -715 -716 - -801 -802 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml b/imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml deleted file mode 100644 index 4e229087..00000000 --- a/imas/assets/IDSDef_correct/identifiers/plasma_transport/plasma_transport_identifier.xml +++ /dev/null @@ -1,38 +0,0 @@ - - -
- Translation table for different types of transport coefficients. -
- - - - -0 -1 - -2 -3 -4 -5 -6 -19 -20 -21 -22 -23 -24 -25 - -100 -101 -102 -103 - -200 -201 -202 -203 -204 -205 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml b/imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml deleted file mode 100644 index 1c5a713d..00000000 --- a/imas/assets/IDSDef_correct/identifiers/radiation/radiation_identifier.xml +++ /dev/null @@ -1,23 +0,0 @@ - - -
-Translation table for radiation processes -
- - 0 - 6 - 8 - 9 - 10 - 11 - 501 - 901 - 902 - 903 - 904 - 905 - 906 - 907 - 908 - 909 -
diff --git a/imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml b/imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml deleted file mode 100644 index 25b8e077..00000000 --- a/imas/assets/IDSDef_correct/identifiers/refractometer/refractometer_formula_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
-Translation table for analytical formulas used by refractometer post-processing -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml deleted file mode 100644 index 7fdabea5..00000000 --- a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/e_field_critical_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
-Definition of e_field_critical -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml deleted file mode 100644 index 2d76f750..00000000 --- a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_avalanche_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
-Definition of momentum_critical_avalanche -
- - - -1 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml b/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml deleted file mode 100644 index 9804a1b5..00000000 --- a/imas/assets/IDSDef_correct/identifiers/runaway_electrons/momentum_critical_hot_tail_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
-Definition of momentum_critical_hot_tail -
- - - -1 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml deleted file mode 100644 index 88c97626..00000000 --- a/imas/assets/IDSDef_correct/identifiers/spectrometer_visible/spectrometer_visible_method_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Fitting method used to calculate isotope ratios -
- - - - -1 -2 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml deleted file mode 100644 index 3d3d97c0..00000000 --- a/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/crystal_mesh_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
-Crystal mesh type -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml deleted file mode 100644 index 66acb45c..00000000 --- a/imas/assets/IDSDef_correct/identifiers/spectrometer_x_ray_crystal/spectro_x_instrument_function_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Translation table for instrument function for X ray crystal spectrometer -
- - - -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml b/imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml deleted file mode 100644 index 5273b103..00000000 --- a/imas/assets/IDSDef_correct/identifiers/spi/shatter_cone_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
-Definition of the shatter cone -
- - - -1 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml deleted file mode 100644 index dbaa3853..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/coordinate_identifier.xml +++ /dev/null @@ -1,51 +0,0 @@ - - -
-Translation table for coordinate_identifier_definitions. -
- - - -0 -1 -2 -3 -4 -5 - -10 -11 -12 -13 -14 - -20 -21 -22 - -100 -101 -102 -103 -104 -105 -106 -107 -108 -200 -201 -202 -203 - -300 -301 -302 - -400 -402 -403 -404 - -500 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml deleted file mode 100644 index 261243c9..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_curvature_identifier.xml +++ /dev/null @@ -1,16 +0,0 @@ - - -
-Curvature of a curved object -
- - - - -1 -2 -3 -4 -5 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml deleted file mode 100644 index 5117019b..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/curved_object_geometry_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
-Geometry of the contour of a planar or curved object -
- - - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml deleted file mode 100644 index ab94762b..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/data_type_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
- Dataset type table -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml deleted file mode 100644 index 0814b580..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/distribution_source_identifier.xml +++ /dev/null @@ -1,36 +0,0 @@ - - -
-Translation table for Heating and Current Drive (HCD) distsource types, i.e. types particles source in Fokker-Planck equation (from NBI and nuclear reactions). -
- - - - -0 -1 - -100 - -101 -102 -103 -104 - -105 -106 -107 -108 - -109 -110 - -111 -112 - -113 -114 - -1000 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml deleted file mode 100644 index d1002573..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/emission_grid_identifier.xml +++ /dev/null @@ -1,9 +0,0 @@ - - -
List of coordinate systems for describing the poloidal plane
- - - -1 -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml deleted file mode 100644 index 32a039e2..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_geometry_content_identifier.xml +++ /dev/null @@ -1,21 +0,0 @@ - - -
Translation table for ggd_space_identifier_definitions.
- - - - - - - - - - - -0 -1 -11 -21 -31 -32 -
\ No newline at end of file diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml deleted file mode 100644 index b3389e68..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_identifier.xml +++ /dev/null @@ -1,29 +0,0 @@ - - -
Translation table for ggd_identifier_definitions.
- - - - - - - - - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -100 -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml deleted file mode 100644 index dee0346c..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_space_identifier.xml +++ /dev/null @@ -1,18 +0,0 @@ - - -
Translation table for ggd_space_identifier_definitions.
- - - - - - - - - - -0 -1 -2 -3 -
\ No newline at end of file diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml deleted file mode 100644 index 3e3a59c6..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/ggd_subset_identifier.xml +++ /dev/null @@ -1,70 +0,0 @@ - - -
Translation table for ggd_subset_identifier_definitions.
- - - - - - - - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 -10 -11 -12 -13 -14 -15 -16 -17 -18 -19 -20 -21 -22 -23 -24 -25 -26 -27 -28 -29 -30 -31 -32 -33 -34 -35 -36 -37 -38 -39 -40 -41 -42 -43 -44 -45 -46 -47 -48 -100 -101 -102 -103 -104 -105 -106 -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml deleted file mode 100644 index 4494f03b..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/materials_identifier.xml +++ /dev/null @@ -1,43 +0,0 @@ - - -
-Materials used in the device mechanical structures -
- - - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -17 -9 -10 -11 -12 -13 -14 -15 -16 -18 -19 -20 -21 -22 -23 -24 -25 -26 -27 -28 -29 -30 - - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml deleted file mode 100644 index 3039e263..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/midplane_identifier.xml +++ /dev/null @@ -1,14 +0,0 @@ - - -
- Translation table for identifying different midplane definitions -
- - - -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml deleted file mode 100644 index e29024ee..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/neutrals_identifier.xml +++ /dev/null @@ -1,20 +0,0 @@ - - -
- Translation table for identifying different types of neutral. - The neutrals are characterised by their energy and source of the neutrals. -
- - - - - - - - -1 -2 -3 -4 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml deleted file mode 100644 index e6554554..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/occurrence_type_identifier.xml +++ /dev/null @@ -1,11 +0,0 @@ - - -
List of possible occurrence types
- - - -1 -2 -3 -4 -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml deleted file mode 100644 index f560fdec..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
- Translation table for identifying optical element types -
- - - -1 -2 -3 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml deleted file mode 100644 index 9cd99c1f..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/optical_element_material_identifier.xml +++ /dev/null @@ -1,12 +0,0 @@ - - -
- Translation table for identifying optical element types -
- - - -1 -2 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml deleted file mode 100644 index 2c55a7cb..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/orbit_type_identifier.xml +++ /dev/null @@ -1,20 +0,0 @@ - - -
-Translation table for orbit_type_identifier definitions. -
- - - -0 -1 -2 -3 -4 -5 -6 -7 -8 -9 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml deleted file mode 100644 index 9e3c42f6..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/poloidal_plane_coordinates_identifier.xml +++ /dev/null @@ -1,119 +0,0 @@ - - -
List of coordinate systems for describing the poloidal plane
- - - - -1 -2 -11 -12 -13 - -14 -15 -16 - -21 -22 -23 - -24 -25 -26 - - -31 -32 -33 - -34 -35 -36 - -41 -42 -43 - -44 -45 -46 - -51 -52 -53 - -54 -55 -56 - -91 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml deleted file mode 100644 index 449b89cf..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/species_reference_identifier.xml +++ /dev/null @@ -1,19 +0,0 @@ - - -
-Translation table for species_reference_identifier_definition. -
- - - - -0 -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml deleted file mode 100644 index e1f891df..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/statistics_type_identifier.xml +++ /dev/null @@ -1,17 +0,0 @@ - - -
- Translation table for statistics types -
- - - -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml deleted file mode 100644 index 3cbdaf62..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/surface_geometry_identifier.xml +++ /dev/null @@ -1,16 +0,0 @@ - - -
-Geometry of the contour of surface in a local coordinate system -
- - - -1 -2 -3 -4 -5 - - -
diff --git a/imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml b/imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml deleted file mode 100644 index 8d122a7e..00000000 --- a/imas/assets/IDSDef_correct/identifiers/utilities/wave_identifier.xml +++ /dev/null @@ -1,15 +0,0 @@ - - -
-Translation table for wave field types. -
- - - - -0 -1 -2 -3 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml b/imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml deleted file mode 100644 index 23e6450e..00000000 --- a/imas/assets/IDSDef_correct/identifiers/wall/wall_component_identifier.xml +++ /dev/null @@ -1,18 +0,0 @@ - - -
-Type of wall component -
- - - -0 -1 -2 -3 -4 -5 -6 -7 - -
diff --git a/imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml b/imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml deleted file mode 100644 index 4dcde3ce..00000000 --- a/imas/assets/IDSDef_correct/identifiers/wall/wall_description_2d_type_identifier.xml +++ /dev/null @@ -1,13 +0,0 @@ - - -
-Type of wall component -
- - - -0 -1 -2 - -
diff --git a/imas/backends/netcdf/ids2nc.py b/imas/backends/netcdf/ids2nc.py index 557f8724..bbdeb2e8 100644 --- a/imas/backends/netcdf/ids2nc.py +++ b/imas/backends/netcdf/ids2nc.py @@ -10,6 +10,7 @@ from packaging import version from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.exception import InvalidNetCDFEntry from imas.ids_base import IDSBase from imas.ids_data_type import IDSDataType from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS diff --git a/imas/test/test_nc_autofill.py b/imas/test/test_nc_autofill.py index 4cefa3f3..b941967f 100644 --- a/imas/test/test_nc_autofill.py +++ b/imas/test/test_nc_autofill.py @@ -22,9 +22,7 @@ def test_nc_latest_dd_autofill_put_get_skip_complex(ids_name, tmp_path): version.parse(netCDF4.__version__) >= version.parse("1.7.0"), reason="NetCDF4 versions < 1.7.0 do not support complex numbers", ) -def test_nc_latest_dd_autofill_put_get_with_complex_older_netCDF4( - ids_name, tmp_path -): +def test_nc_latest_dd_autofill_put_get_with_complex_older_netCDF4(ids_name, tmp_path): with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: ids = entry.factory.new(ids_name) fill_consistent(ids, leave_empty=0.5, skip_complex=False) @@ -45,9 +43,7 @@ def test_nc_latest_dd_autofill_put_get_with_complex_older_netCDF4( version.parse(netCDF4.__version__) < version.parse("1.7.0"), reason="NetCDF4 versions >= 1.7.0 support complex numbers", ) -def test_nc_latest_dd_autofill_put_get_with_complex_newer_netCDF4( - ids_name, tmp_path -): +def test_nc_latest_dd_autofill_put_get_with_complex_newer_netCDF4(ids_name, tmp_path): with DBEntry(f"{tmp_path}/test-{ids_name}.nc", "x") as entry: ids = entry.factory.new(ids_name) fill_consistent(ids, leave_empty=0.5, skip_complex=False) From 95d587b0d10ae4554a941c9ead7e193df1755627 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Wed, 22 Jan 2025 16:57:21 +0100 Subject: [PATCH 47/97] Adding release notes for tag 1.2.0 --- docs/source/changelog.rst | 20 ++++++++++++++++++++ 1 file 
changed, 20 insertions(+) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 2601639a..ac0a1571 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -3,6 +3,26 @@ Changelog ========= +What's new in IMASPy 1.2.0 +-------------------------- + +New features and improvements +''''''''''''''''''''''''''''' + +- Add :py:func:`imaspy.DBEntry.get_sample` (requires imas_core >= 5.4.0) +- Improved validation of netCDF files +- Improve compatibility with the UDA backend in imas_core +- Extend the support of netCDF to >= 1.4.1 (without complex numbers) +- Allow running test without imas_core + +Bug fixes +''''''''' + +- Fix a bug when lazy loading multiple IDSs from the same HDF5 DBEntry +- Fix a bug when lazy loading a child quantity that was added in a newer DD version than stored on disk + + + What's new in IMASPy 1.1.1 -------------------------- From 835dccf5225cb091de702b751e0295de1a8e431a Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Thu, 23 Jan 2025 11:54:29 +0100 Subject: [PATCH 48/97] renaming of imas-python to IMAS-Python --- CONTRIBUTING.md | 8 ++-- README.md | 10 ++--- asv.conf.json | 4 +- benchmarks/core_profiles.py | 8 ++-- benchmarks/edge_profiles.py | 8 ++-- benchmarks/utils.py | 2 +- ci/build_docs_and_dist.sh | 4 +- ci/run_benchmark.sh | 2 +- ci/run_pytest.sh | 2 +- conftest.py | 4 +- docs/source/api.rst | 4 +- docs/source/benchmarking.rst | 24 +++++------ docs/source/changelog.rst | 40 +++++++++---------- docs/source/ci_config.rst | 22 +++++----- docs/source/cli.rst | 16 ++++---- docs/source/code_style.rst | 4 +- docs/source/conf.py | 10 ++--- docs/source/configuring.rst | 8 ++-- docs/source/courses/advanced/dd_versions.rst | 32 +++++++-------- docs/source/courses/advanced/explore.rst | 10 ++--- docs/source/courses/advanced/hashing.rst | 6 +-- .../imas_snippets/alternative_coordinates.py | 2 +- .../advanced/imas_snippets/autoconvert_get.py | 2 +- .../advanced/imas_snippets/coordinates.py | 4 +- 
.../advanced/imas_snippets/ids_convert.py | 4 +- docs/source/courses/advanced/metadata.rst | 8 ++-- docs/source/courses/advanced/xarray.rst | 2 +- .../source/courses/advanced_user_training.rst | 8 ++-- docs/source/courses/basic/analyze.rst | 28 ++++++------- docs/source/courses/basic/create.rst | 18 ++++----- docs/source/courses/basic/explore.rst | 20 +++++----- .../imas_snippets/create_core_profiles.py | 2 +- .../courses/basic/imas_snippets/print_idss.py | 2 +- .../basic/imas_snippets/transform_grid.py | 2 +- docs/source/courses/basic/setup.rst | 10 ++--- docs/source/courses/basic/transform.rst | 14 +++---- docs/source/courses/basic_user_training.rst | 8 ++-- docs/source/identifiers.rst | 8 ++-- docs/source/imas_architecture.rst | 18 ++++----- docs/source/index.rst | 12 +++--- docs/source/installing.rst | 22 +++++----- docs/source/intro.rst | 24 +++++------ docs/source/lazy_loading.rst | 6 +-- docs/source/mdsplus.rst | 6 +-- docs/source/metadata.rst | 16 ++++---- docs/source/multi-dd.rst | 22 +++++----- docs/source/netcdf.rst | 12 +++--- docs/source/netcdf/conventions.rst | 2 +- docs/source/release_imas.rst | 22 +++++----- docs/source/validation.rst | 6 +-- imas/__init__.py | 10 ++--- imas/__main__.py | 4 +- imas/_util.py | 4 +- imas/backends/__init__.py | 4 +- imas/backends/db_entry_impl.py | 4 +- imas/backends/imas_core/__init__.py | 4 +- imas/backends/imas_core/al_context.py | 6 +-- imas/backends/imas_core/db_entry_al.py | 4 +- imas/backends/imas_core/db_entry_helpers.py | 4 +- imas/backends/imas_core/imas_interface.py | 8 ++-- imas/backends/imas_core/mdsplus_model.py | 4 +- imas/backends/netcdf/__init__.py | 6 +-- imas/backends/netcdf/db_entry_nc.py | 2 +- imas/backends/netcdf/ids2nc.py | 6 +-- imas/backends/netcdf/nc_metadata.py | 4 +- imas/command/cli.py | 12 +++--- imas/command/db_analysis.py | 2 +- imas/command/helpers.py | 2 +- imas/command/timer.py | 4 +- imas/db_entry.py | 6 +-- imas/dd_helpers.py | 4 +- imas/dd_zip.py | 8 ++-- imas/exception.py | 6 +-- 
imas/ids_base.py | 4 +- imas/ids_convert.py | 8 ++-- imas/ids_coordinates.py | 6 +-- imas/ids_data_type.py | 4 +- imas/ids_defs.py | 6 +-- imas/ids_factory.py | 4 +- imas/ids_identifiers.py | 6 +-- imas/ids_metadata.py | 10 ++--- imas/ids_path.py | 4 +- imas/ids_primitive.py | 4 +- imas/ids_struct_array.py | 4 +- imas/ids_structure.py | 4 +- imas/ids_toplevel.py | 4 +- imas/setup_logging.py | 6 +-- imas/test/test_dd_helpers.py | 2 +- imas/test/test_hash.py | 2 +- imas/test/test_ids_ascii_data.py | 4 +- imas/test/test_ids_convert.py | 4 +- imas/test/test_ids_mixin.py | 4 +- imas/test/test_ids_primitive.py | 6 +-- imas/test/test_ids_structure.py | 4 +- imas/training.py | 6 +-- imas/util.py | 18 ++++----- setup.py | 6 +-- tools/compare_lowlevel_access_patterns.py | 6 +-- tools/extract_test_data.py | 4 +- 99 files changed, 398 insertions(+), 398 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 661eedb0..0563dde3 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,23 +1,23 @@ # Contributing guidelines -We welcome any kind of contribution to `imas-python`, +We welcome any kind of contribution to `IMAS-Python`, from a simple comment, a question or even a full fledged pull request. Please first make sure you read and follow the [Code of Conduct](CODE_OF_CONDUCT.md). ## You think you found a bug in the code, or have a question in its use -1. use the [issue search](https://github.com/iterorganization/imas-python/issues) +1. use the [issue search](https://github.com/iterorganization/IMAS-Python/issues) to check if someone already created a similar issue; 3. if not, make a **new issue** to describe your problem or question. In the case of a bug suspiscion, please try to give all the relevant information to allow reproducing the error or identifying -its root cause (version of the imas-python, OS and relevant +its root cause (version of the IMAS-Python, OS and relevant dependencies, snippet of code); 4. apply relevant labels to the issue. 
## You want to make or ask some change to the code -1. use the [issue search](https://github.com/iterorganization/imas-python/issues) +1. use the [issue search](https://github.com/iterorganization/IMAS-Python/issues) to check if someone already proposed a similar idea/change; 3. if not, create a **new issue** to describe what change you would like to see implemented and specify it if you intend to work on it yourself or if some help diff --git a/README.md b/README.md index c2afa4ad..d28328cb 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ -# imas-python +# IMAS-Python -imas-python is a pure-python library to handle arbitrarily nested data structures. -imas-python is designed for, but not necessarily bound to, interacting with Interface +IMAS-Python is a pure-python library to handle arbitrarily nested data structures. +It is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -11,7 +11,7 @@ Data Model. Install steps are described in the documentation generated from `/docs/source/installing.rst`. Documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) -and can be found at the [readthedocs](https://imas-python.readthedocs.io/en/latest/) +and can be found at the [readthedocs](https://IMAS-Python.readthedocs.io/en/latest/) The documentation can be manually generated by installing sphinx and running: @@ -40,7 +40,7 @@ A quick 5 minutes introduction is available in the documentation generated from ## Legal -imas-python is Copyright 2020-2024 ITER Organization, Copyright 2020-2023 Karel Lucas van de +IMAS-Python is Copyright 2020-2025 ITER Organization, Copyright 2020-2023 Karel Lucas van de Plassche , Copyright 2020-2022 Daan van Vugt , and Copyright 2020 Dutch Institute for Fundamental Energy Research . It is licensed under [LGPL 3.0](LICENSE.txt). 
diff --git a/asv.conf.json b/asv.conf.json index 0b11cf72..dc455d85 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -7,7 +7,7 @@ "project": "imas", // The project's homepage - "project_url": "https://github.com/iterorganization/imas-python", + "project_url": "https://github.com/iterorganization/IMAS-Python", // The URL or local path of the source code repository for the // project being benchmarked @@ -53,7 +53,7 @@ //"install_timeout": 600, // the base URL to show a commit for the project. - "show_commit_url": "https://github.com/iterorganization/imas-python/commits/main/", + "show_commit_url": "https://github.com/iterorganization/IMAS-Python/commits/main/", // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. diff --git a/benchmarks/core_profiles.py b/benchmarks/core_profiles.py index 047b5afc..d7ab54cd 100644 --- a/benchmarks/core_profiles.py +++ b/benchmarks/core_profiles.py @@ -22,16 +22,16 @@ def fill_slices(core_profiles, times): """Fill a time slice of a core_profiles IDS with generated data. 
Args: - core_profiles: core_profiles IDS (either from imas-python or AL HLI) + core_profiles: core_profiles IDS (either from IMAS-Python or AL Python) times: time values to fill a slice for """ core_profiles.ids_properties.homogeneous_time = 1 # HOMOGENEOUS - core_profiles.ids_properties.comment = "Generated for the imas-python benchmark suite" + core_profiles.ids_properties.comment = "Generated for the IMAS-Python benchmark suite" core_profiles.ids_properties.creation_date = datetime.date.today().isoformat() - core_profiles.code.name = "imas-python ASV benchmark" + core_profiles.code.name = "IMAS-Python ASV benchmark" core_profiles.code.version = imas.__version__ core_profiles.code.repository = ( - "https://github.com/iterorganization/imas-python" + "https://github.com/iterorganization/IMAS-Python" ) core_profiles.time = np.array(times) diff --git a/benchmarks/edge_profiles.py b/benchmarks/edge_profiles.py index c2f69c28..cb78629f 100644 --- a/benchmarks/edge_profiles.py +++ b/benchmarks/edge_profiles.py @@ -17,18 +17,18 @@ def fill_ggd(edge_profiles, times): """Fill nested arrays of structures in grids_ggd and ggd substructures. 
Args: - edge_profiles: edge_profiles IDS object (either from imas-python or AL HLI) + edge_profiles: edge_profiles IDS object (either from IMAS-Python or AL Python) times: time values to fill """ edge_profiles.ids_properties.homogeneous_time = ( imas.ids_defs.IDS_TIME_MODE_HETEROGENEOUS ) - edge_profiles.ids_properties.comment = "Generated for imas-python benchmark suite" + edge_profiles.ids_properties.comment = "Generated for IMAS-Python benchmark suite" edge_profiles.ids_properties.creation_date = datetime.date.today().isoformat() - edge_profiles.code.name = "imas-python ASV benchmark" + edge_profiles.code.name = "IMAS-Python ASV benchmark" edge_profiles.code.version = imas.__version__ edge_profiles.code.repository = ( - "https://github.com/iterorganization/imas-python" + "https://github.com/iterorganization/IMAS-Python" ) # This GGD grid is not a valid description, but it's a good stress test for the diff --git a/benchmarks/utils.py b/benchmarks/utils.py index 0d2a9958..47ae2576 100644 --- a/benchmarks/utils.py +++ b/benchmarks/utils.py @@ -76,7 +76,7 @@ def create_dbentry(hli, backend): ) except (AttributeError, AssertionError): raise NotImplementedError( - "This version of imas-python doesn't implement netCDF." + "This version of IMAS-Python doesn't implement netCDF." ) from None path = Path.cwd() / f"DB-{hli}-{backend}" diff --git a/ci/build_docs_and_dist.sh b/ci/build_docs_and_dist.sh index 1f077015..f0084b8a 100755 --- a/ci/build_docs_and_dist.sh +++ b/ci/build_docs_and_dist.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Bamboo CI script to install imas and run all tests +# Bamboo CI script to install imas Python module and run all tests # Note: this script should be run from the root of the git repository # Debuggging: @@ -27,7 +27,7 @@ pip install --upgrade pip setuptools wheel build rm -rf dist python -m build . 
-# Install imas and documentation dependencies from the just-built wheel +# Install imas Python module and documentation dependencies from the just-built wheel pip install "`readlink -f dist/*.whl`[docs,netcdf]" # Debugging: diff --git a/ci/run_benchmark.sh b/ci/run_benchmark.sh index daa9a012..1fe77bcc 100755 --- a/ci/run_benchmark.sh +++ b/ci/run_benchmark.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Bamboo CI script to install imas and run all tests +# Bamboo CI script to install imas Python module and run all tests # Note: this script should be run from the root of the git repository # Debuggging: diff --git a/ci/run_pytest.sh b/ci/run_pytest.sh index b7490f96..511264b2 100755 --- a/ci/run_pytest.sh +++ b/ci/run_pytest.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Bamboo CI script to install imas and run all tests +# Bamboo CI script to install imas Python module and run all tests # Note: this script should be run from the root of the git repository # Debuggging: diff --git a/conftest.py b/conftest.py index 80bb8614..b7ab1fe4 100644 --- a/conftest.py +++ b/conftest.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. # # Set up pytest: # - Backend parametrization (and corresponding command line options) diff --git a/docs/source/api.rst b/docs/source/api.rst index 87be0471..5df6e579 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -3,11 +3,11 @@ API reference ============= -This page provides an auto-generated summary of imas-python's API. For more details +This page provides an auto-generated summary of IMAS-Python's API. For more details and examples, refer to the relevant chapters in the main part of the documentation. -imas-python IDS manipulation +IMAS-Python IDS manipulation ---------------------------- .. 
currentmodule:: imas diff --git a/docs/source/benchmarking.rst b/docs/source/benchmarking.rst index ce7b0090..a0cf0ca5 100644 --- a/docs/source/benchmarking.rst +++ b/docs/source/benchmarking.rst @@ -1,16 +1,16 @@ .. _`benchmarking IMAS`: -Benchmarking imas-python +Benchmarking IMAS-Python ======================== -imas-python integrates with the `airspeed velocity +IMAS-Python integrates with the `airspeed velocity `_ ``asv`` package for benchmarking. -imas-python benchmarks +IMAS-Python benchmarks ---------------------- -imas-python benchmarks are stored in the ``benchmarks`` folder in the git repository. We can +IMAS-Python benchmarks are stored in the ``benchmarks`` folder in the git repository. We can currently distinguish three types of benchmarks: Technical benchmarks @@ -20,14 +20,14 @@ Technical benchmarks Basic functional benchmarks These are for benchmarking functionality with an equivalent feature in the IMAS - Access Layer HLI. In addition to tracking the performance of the imas-python features + Access Layer HLI. In addition to tracking the performance of the IMAS-Python features over time, we can also benchmark the performance against the traditional HLI. For example: putting and getting IDSs. -imas-python-specific functional benchmarks +IMAS-Python-specific functional benchmarks These are for benchmarking functionality without an equivalent feature in the IMAS - Access Layer HLI. We use these for tracking the imas-python performance over time. + Access Layer HLI. We use these for tracking the IMAS-Python performance over time. For example: data conversion between DD versions. @@ -35,7 +35,7 @@ imas-python-specific functional benchmarks Running benchmarks (quick) -------------------------- -When you have an existing imas-python installation, you can run the benchmarks like this: +When you have an existing IMAS-Python installation, you can run the benchmarks like this: .. 
code-block:: console @@ -103,8 +103,8 @@ Running benchmarks (advanced) ----------------------------- Running benchmarks quickly, as explained in the previous section, is great during -development and for comparing the performance of imas-python against the imas HLI. However, -``asv`` can also track the performance of benchmarks over various commits of imas-python. +development and for comparing the performance of IMAS-Python against the imas HLI. However, +``asv`` can also track the performance of benchmarks over various commits of IMAS-Python. Unfortunately this is a bit more tricky to set up. @@ -112,7 +112,7 @@ Setup advanced benchmarking ''''''''''''''''''''''''''' First, some background on how ``asv`` tracks performance: it creates an isolated virtual -environment (using the ``virtualenv`` package) and installs imas-python for each commit that +environment (using the ``virtualenv`` package) and installs IMAS-Python for each commit that will be benchmarked. However, because the virtual environment is isolated, the ``imas`` package won't be available. We need to work around it by setting the environment variable ``ASV_PYTHONPATH``: @@ -171,7 +171,7 @@ Instead, you can submit a benchmark job to the compute nodes. #!/bin/bash # Set SLURM options: - #SBATCH --job-name=imas-python-benchmark + #SBATCH --job-name=IMAS-Python-benchmark #SBATCH --time=1:00:00 #SBATCH --partition=gen10_ib # Note: for proper benchmarking we need to exclusively reserve a node, even though diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index d3a4ef93..d724ac4f 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -3,7 +3,7 @@ Changelog ========= -What's new in imas-python 1.1.1 +What's new in IMAS-Python 1.1.1 ------------------------------- This is a small release that mainly fixes issues related to the recent Data @@ -19,7 +19,7 @@ Bug fixes Dictionary 4.0.0 and 3.42.0. In other cases, the Data Dictionary version is now explicitly indicated. 
-- :issue:`IMAS-5560`: Fix a bug where imas-python would not correctly recognize that +- :issue:`IMAS-5560`: Fix a bug where IMAS-Python would not correctly recognize that the UDA backend is used. - :issue:`IMAS-5541`: Fix a bug when converting a closed contour to Data Dictionary version 4.0.0. @@ -29,7 +29,7 @@ Bug fixes recent Data Dictionary version than the on-disk data was stored with. -What's new in imas-python 1.1 +What's new in IMAS-Python 1.1 ----------------------------- New features @@ -37,7 +37,7 @@ New features - :ref:`1.1/improved performance`. - :ref:`1.1/improved conversion`. -- imas-python 1.1 adds support for Identifiers defined by the Data Dictionary. This +- IMAS-Python 1.1 adds support for Identifiers defined by the Data Dictionary. This functionality is described in detail in :ref:`Identifiers`. - Support for the new :py:const:`~imaspy.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` that is @@ -52,7 +52,7 @@ New features netCDF file, which can be used for sharing and/or archiving data. This feature is in `preview` status, meaning that it may change in upcoming - minor releases of imas-python. + minor releases of IMAS-Python. - Additional utility functions in :py:mod:`imaspy.util`: @@ -71,7 +71,7 @@ New features - :py:func:`imaspy.util.get_data_dictionary_version` returns the Data Dictionary version for which an IDS was created. -- Add support for IMAS Access Layer Core 5.2 and later. imas-python can now be used +- Add support for IMAS Access Layer Core 5.2 and later. IMAS-Python can now be used with just the Access Layer Core package available, the full AL-Python HLI is no longer required. @@ -91,16 +91,16 @@ New features backend. During a :py:meth:`~imaspy.db_entry.DBEntry.get` or - :py:meth:`~imaspy.db_entry.DBEntry.get_slice`, imas-python first reads the version + :py:meth:`~imaspy.db_entry.DBEntry.get_slice`, IMAS-Python first reads the version of the Data Dictionary that was used to store the IDS. 
When this version is - not known to imas-python, an error is raised. This error can now be ignored by + not known to IMAS-Python, an error is raised. This error can now be ignored by setting the parameter :py:param:`~imaspy.db_entry.DBEntry.get.ignore_unknown_dd_version` to - ``True``, and imas-python will do its best to load the data anyway. + ``True``, and IMAS-Python will do its best to load the data anyway. - A new command line tool exists for analyzing which Data Dictionary fields are used in provided Data Entries. This tool is explained in detail in - :ref:`imas-python Data Entry analysis`. + :ref:`IMAS-Python Data Entry analysis`. - Various improvements to the documentation were made. @@ -110,7 +110,7 @@ Breaking changes .. note:: - We attempt to keep the public API of imas-python stable with minor releases. The + We attempt to keep the public API of IMAS-Python stable with minor releases. The following breaking change is the result of an upgrade of the IMAS Access Layer. - Starting with Access Layer 5.2 or newer, the Access Layer will raise @@ -121,8 +121,8 @@ Breaking changes You may need to update the :py:class:`Exception` classes in ``try/except`` blocks to the new Exception classes raised by ``imas_core``. - When using an older version of the Access Layer, the behaviour of imas-python is no - different than in imas-python 1.0. + When using an older version of the Access Layer, the behaviour of IMAS-Python is no + different than in IMAS-Python 1.0. Bug fixes @@ -136,10 +136,10 @@ Bug fixes - Fixed a bug with :py:func:`~imaspy.ids_toplevel.IDSToplevel.serialize` when the IDS is in a non-default Data Dictionary version. - Fixed a bug when assigning ``nan`` to a FLT_0D, which would lead to a - confusing and incorrect log message in imas-python 1.0. -- Fixed incorrect oldest supported DD version. Previously imas-python indicated that + confusing and incorrect log message in IMAS-Python 1.0. +- Fixed incorrect oldest supported DD version. 
Previously IMAS-Python indicated that DD ``3.21.1`` was supported, however ``3.22.0`` is the oldest Data Dictionary - tested (and provided) with imas-python. :py:attr:`imaspy.OLDEST_SUPPORTED_VERSION` + tested (and provided) with IMAS-Python. :py:attr:`imaspy.OLDEST_SUPPORTED_VERSION` has been updated to reflect this. - Fixed a bug when using numpy functions, such as :external:py:func:`numpy.isclose` on scalar numbers. Previously an error was @@ -158,11 +158,11 @@ Improved performance '''''''''''''''''''' - Improved performance of :py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate`. -- Improved creation of imas-python IDS objects. This made filling IDSs and loading +- Improved creation of IMAS-Python IDS objects. This made filling IDSs and loading them with :py:meth:`~imaspy.db_entry.DBEntry.get` / :py:meth:`~imaspy.db_entry.DBEntry.get_slice` 10-20% faster. - Improved the performance of lazy loading. This is most noticeable with the - ``HDF5`` backend, which is now up to 40x faster than with imas-python 1.0. + ``HDF5`` backend, which is now up to 40x faster than with IMAS-Python 1.0. - Improved the performance of :py:meth:`~imaspy.db_entry.DBEntry.get` / :py:meth:`~imaspy.db_entry.DBEntry.get_slice` / :py:meth:`~imaspy.db_entry.DBEntry.put` / @@ -180,7 +180,7 @@ Converting IDSs between Data Dictionary versions has several improvements for recent DD versions. Further details on IDS conversion can be found in :ref:`Conversion of IDSs between DD versions`. -- The imas-python Command Line Interface for converting Data Entries between different +- The IMAS-Python Command Line Interface for converting Data Entries between different versions of the Data Dictionary has been improved. See :ref:`Command line tool reference` or execute ``imas convert --help`` in a shell for further details. @@ -190,7 +190,7 @@ recent DD versions. 
Further details on IDS conversion can be found in For example, in the ``pulse_schedule`` IDS, the node ``ec/beam/power_launched/reference`` in Data Dictionary ``3.40.0`` was renamed from ``ec/launcher/power/reference/data`` in Data Dictionary ``3.39.0``. This - use case is now supported by imas-python. + use case is now supported by IMAS-Python. - Automatically convert data between 0D and 1D when possible (`IMAS-5170 `__). diff --git a/docs/source/ci_config.rst b/docs/source/ci_config.rst index 2fd284f4..2fcf1d9b 100644 --- a/docs/source/ci_config.rst +++ b/docs/source/ci_config.rst @@ -3,25 +3,25 @@ CI configuration ================ -imas-python uses `ITER Bamboo `_ for CI. This page provides an overview +IMAS-Python uses `ITER Bamboo `_ for CI. This page provides an overview of the CI Plan and deployment projects. CI Plan ------- -The `imas-python CI plan `_ consists of 4 types of jobs: +The `IMAS-Python CI plan `_ consists of 4 types of jobs: Linting and DD ZIP This job is responsible for three things: 1. Verify that the ``IDSDef2MDSplusPreTree.xsl`` file matches the one in the Access Layer repository. This file is required for building MDSplus models and the - models built by imas-python should match those built by the Access Layer. - 2. Linting: run ``black`` and ``flake8`` on the imas-python code base. See :ref:`code + models built by IMAS-Python should match those built by the Access Layer. + 2. Linting: run ``black`` and ``flake8`` on the IMAS-Python code base. See :ref:`code style and linting`. 3. Build the Data Dictionary zip file. This Task builds the Data Dictionary for all tagged releases since DD version ``3.22.0``. These are combined into the - ``IDSDef.zip`` file, which is distributed with imas-python. + ``IDSDef.zip`` file, which is distributed with IMAS-Python. The ZIP file is built in a separate job, such that the subsequent test jobs can reuse this. @@ -62,7 +62,7 @@ Benchmark The CI script executed in this job is: ``ci/run_benchmark.sh``. 
Build docs and dists - This job builds the Sphinx documentation and python packages for imas-python (``sdist`` + This job builds the Sphinx documentation and python packages for IMAS-Python (``sdist`` and ``wheel``). The CI script executed in this job is: ``ci/build_docs_and_dist.sh``. @@ -71,14 +71,14 @@ Build docs and dists Deployment projects ------------------- -There is github workflow for imas-python: +There is github workflow for IMAS-Python: -`imas-python-PyPi `_ +`IMAS-Python-PyPi `_ Deploy the python packages job to the https://pypi.org/ server and https://test.pypi.org/ server. - You can find link here : `imas-python `_ + You can find link here : `IMAS-Python `_ -`Deploy imas-python-doc `_ +`Deploy IMAS-Python-doc `_ Deploy the documentation using `readthedocs - `_. + `_. diff --git a/docs/source/cli.rst b/docs/source/cli.rst index 0fa3819a..df6db851 100644 --- a/docs/source/cli.rst +++ b/docs/source/cli.rst @@ -1,31 +1,31 @@ -.. _`imas-python Command Line tool`: +.. _`IMAS-Python Command Line tool`: -imas-python Command Line tool +IMAS-Python Command Line tool ============================= -imas-python comes with a command line tool: ``imas``. This allows you to execute +IMAS-Python comes with a command line tool: ``imas``. This allows you to execute some tasks without writing Python code: - ``imas convert`` can convert Data Entries (or, optionally, single IDSs from a Data Entry) to a different DD version. This command can also be used to convert IDSs between different backends. - ``imas print`` can print the contents of an IDS to the terminal. -- ``imas version`` shows version information of imas-python. +- ``imas version`` shows version information of IMAS-Python. - ``imas analyze-db`` and ``imas process-db-analysis`` analyze the contents of one or more Data Entries (stored in the HDF5 backend format). This tool is - explained in more detail :ref:`below `. + explained in more detail :ref:`below `. 
You can get further details, including the expected command line arguments and options, by running any tool with the ``--help`` flag. This help is also available in the :ref:`Command line tool reference` below. -.. _`imas-python Data Entry analysis`: +.. _`IMAS-Python Data Entry analysis`: -imas-python Data Entry analysis +IMAS-Python Data Entry analysis ------------------------------- -The imas-python Data Entry analysis tool is a set of two command line programs: +The IMAS-Python Data Entry analysis tool is a set of two command line programs: ``imas analyze-db`` and ``imas process-db-analysis``. The tool analyzes the files from the HDF5 backend to figure out which IDSs are stored in the Data Entry, and which fields from the Data Dictionary have any data stored. This diff --git a/docs/source/code_style.rst b/docs/source/code_style.rst index e7f3913c..0539fc61 100644 --- a/docs/source/code_style.rst +++ b/docs/source/code_style.rst @@ -7,7 +7,7 @@ Code style and linting Code style ---------- -imas-python follows `The Black Code Style +IMAS-Python follows `The Black Code Style `_. All Python files should be formatted with the ``black`` command line tool (this is checked in :ref:`CI `). @@ -48,7 +48,7 @@ with pre-commit hooks): Linting ------- -imas-python uses `flake8 `_ for linting (static code +IMAS-Python uses `flake8 `_ for linting (static code analysis). Flake8 should not report any violations when running it on the ``imas`` code base. Again, this is checked in CI. 
diff --git a/docs/source/conf.py b/docs/source/conf.py index d6e32651..65f5e5f4 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -24,7 +24,7 @@ # -- Project information ----------------------------------------------------- # The documented project’s name -project = src_project = PROJECT = "imas-python" +project = src_project = PROJECT = "IMAS-Python" PACKAGE = "imas" GROUP = "IMAS" @@ -39,9 +39,9 @@ iter_projects = "https://github.com/iterorganization/" dd_url = urljoin(iter_projects, "imas-data-dictionary/") al_url = urljoin(iter_projects, "imas-core/") -issue_url = jira_url = "https://github.com/iterorganization/imas-python/issues" +issue_url = jira_url = "https://github.com/iterorganization/IMAS-Python/issues" -# imas-python +# IMAS-Python repository_url = f"{iter_projects}/{src_project}/" blob_url = repository_url mr_url = urljoin(repository_url, "/pulls") @@ -134,8 +134,8 @@ # and # https://sphinx-immaterial.readthedocs.io/en/latest/customization.html#confval-html_theme_options html_theme_options = { - "repo_url": "https://github.com/iterorganization/imas-python", - "repo_name": "imas-python", + "repo_url": "https://github.com/iterorganization/IMAS-Python", + "repo_name": "IMAS-Python", "icon": { "repo": "fontawesome/brands/github", }, diff --git a/docs/source/configuring.rst b/docs/source/configuring.rst index 388ac813..bb5c4293 100644 --- a/docs/source/configuring.rst +++ b/docs/source/configuring.rst @@ -1,7 +1,7 @@ -Configuring imas-python +Configuring IMAS-Python ======================= -imas-python has a couple of environment variables that can be used to control its behaviour. +IMAS-Python has a couple of environment variables that can be used to control its behaviour. This page provides an overview of available variables. .. note:: @@ -13,7 +13,7 @@ This page provides an overview of available variables. ``IMAS_LOGLEVEL`` - Sets the log level used by the imas-python logger. + Sets the log level used by the IMAS-Python logger. 
By default (when this environment variable is not set), all log messages of ``INFO`` or more severe are logged. You may set this to, for example, @@ -44,7 +44,7 @@ Environment variables shared with the IMAS Python HLI ----------------------------------------------------- ``IMAS_AL_DISABLE_VALIDATE`` - By default, imas-python :ref:`validates ` IDSs to check that all data is + By default, IMAS-Python :ref:`validates ` IDSs to check that all data is consistent with their coordinates during a :py:meth:`~imas.db_entry.DBEntry.put` or :py:meth:`~imas.db_entry.DBEntry.put_slice`. diff --git a/docs/source/courses/advanced/dd_versions.rst b/docs/source/courses/advanced/dd_versions.rst index 5ccb2474..3f7f19fa 100644 --- a/docs/source/courses/advanced/dd_versions.rst +++ b/docs/source/courses/advanced/dd_versions.rst @@ -3,13 +3,13 @@ Working with multiple data dictionary versions ============================================== -Contrary to most high level interface for IMAS, imas-python code is not tied to a specific -version of the Data Dictionary. In this lesson we will explore how imas-python handles +Contrary to most high level interface for IMAS, IMAS-Python code is not tied to a specific +version of the Data Dictionary. In this lesson we will explore how IMAS-Python handles different DD versions (including development builds of the DD), and how we can convert IDSs between different versions of the Data Dictionary. .. note:: - Most of the time you won't need to worry about DD versions and the default imas-python + Most of the time you won't need to worry about DD versions and the default IMAS-Python behaviour should be fine. @@ -19,7 +19,7 @@ The default Data Dictionary version ----------------------------------- In the other training lessons, we didn't explicitly work with Data Dictionary versions. -Therefore imas-python was always using the `default` DD version. Let's find out what that +Therefore IMAS-Python was always using the `default` DD version. 
Let's find out what that version is: @@ -45,20 +45,20 @@ Exercise 1: The default DD version .. literalinclude:: imas_snippets/dd_versions.py -Okay, so now you know what your default DD version is. But how is it determined? imas-python +Okay, so now you know what your default DD version is. But how is it determined? IMAS-Python first checks if you have an IMAS environment loaded by checking the environment variable ``IMAS_VERSION``. If you are on a cluster and have used ``module load IMAS`` or similar, this environment variable will indicate what data dictionary version this module is -using. imas-python will use that version as its default. +using. IMAS-Python will use that version as its default. -If the ``IMAS_VERSION`` environment is not set, imas-python will take the newest version of +If the ``IMAS_VERSION`` environment is not set, IMAS-Python will take the newest version of the Data Dictionary that came bundled with it. Which brings us to the following topic: Bundled Data Dictionary definitions ----------------------------------- -imas-python comes bundled [#DDdefs]_ with many versions of the Data Dictionary definitions. +IMAS-Python comes bundled [#DDdefs]_ with many versions of the Data Dictionary definitions. You can find out which versions are available by calling :py:meth:`imas.dd_zip.dd_xml_versions`. @@ -74,7 +74,7 @@ things that could change: - Change the data type of an IDS node - Rename an IDS node -imas-python can convert between different versions of the DD and will migrate the data as +IMAS-Python can convert between different versions of the DD and will migrate the data as much as possible. Let's see how this works in the following exercise. @@ -103,9 +103,9 @@ Exercise 2: Convert an IDS between DD versions convert the IDS to DD version 3.39.0. The ``antenna`` structure that we filled in the old version of the DD has since been renamed to ``launcher``, and the ``launching_angle_*`` structures to ``steering_angle``. 
Check that - imas-python has converted the data successfully (for example with + IMAS-Python has converted the data successfully (for example with :py:func:`imas.util.print_tree`). - 5. By default, imas-python creates a shallow copy of the data, which means that the + 5. By default, IMAS-Python creates a shallow copy of the data, which means that the underlying data arrays are shared between the IDSs of both versions. Update the ``time`` data of the original IDS (for example: :code:`pulse_schedule.time[1] = 3`) and print the ``time`` data of the @@ -137,7 +137,7 @@ Automatic conversion between DD versions When loading data (with :py:meth:`~imas.db_entry.DBEntry.get` or :py:meth:`~imas.db_entry.DBEntry.get_slice`) or storing data (with :py:meth:`~imas.db_entry.DBEntry.put` or -:py:meth:`~imas.db_entry.DBEntry.put_slice`), imas-python automatically converts the DD +:py:meth:`~imas.db_entry.DBEntry.put_slice`), IMAS-Python automatically converts the DD version for you. In this section we will see how that works. @@ -255,7 +255,7 @@ contain large changes between DD versions, such as: Using custom builds of the Data Dictionary ------------------------------------------ -In the previous sections we showed how you can direct imas-python to use a specific released +In the previous sections we showed how you can direct IMAS-Python to use a specific released version of the Data Dictionary definitions. Sometimes it is useful to work with unreleased (development or custom) versions of the data dictionaries as well. @@ -267,11 +267,11 @@ unreleased (development or custom) versions of the data dictionaries as well. might not be read properly in the future. If you build the Data Dictionary, a file called ``IDSDef.xml`` is created. This file -contains all IDS definitions. To work with a custom DD build, you need to point imas-python +contains all IDS definitions. To work with a custom DD build, you need to point IMAS-Python to this ``IDSDef.xml`` file: .. 
code-block:: python - :caption: Use a custom Data Dictionary build with imas-python + :caption: Use a custom Data Dictionary build with IMAS-Python my_idsdef_file = "path/to/IDSDef.xml" # Replace with the actual path @@ -291,5 +291,5 @@ build, you can use them like you normally would. .. rubric:: Footnotes .. [#DDdefs] To be more precise, the Data Dictionary definitions are generated when the - imas-python package is created. See :ref:`this reference
` for more + IMAS-Python package is created. See :ref:`this reference
` for more details. diff --git a/docs/source/courses/advanced/explore.rst b/docs/source/courses/advanced/explore.rst index 5fa6fdca..7b383bc5 100644 --- a/docs/source/courses/advanced/explore.rst +++ b/docs/source/courses/advanced/explore.rst @@ -1,10 +1,10 @@ Advanced data exploration ========================= -In the :ref:`basic/explore` training we have seen how to explore imas-python data structures +In the :ref:`basic/explore` training we have seen how to explore IMAS-Python data structures in an interactive way. -In this lesson, we will go a step further and look at methods to explore imas-python data +In this lesson, we will go a step further and look at methods to explore IMAS-Python data structures programmatically. This can be useful for, for example, writing plotting tools, analysis scripts, etc. @@ -13,7 +13,7 @@ Exploring IDS (sub)structures ----------------------------- An IDS structure is a collection of IDS nodes (which could be structures, or arrays of -structures themselves). In imas-python this is represented by the +structures themselves). In IMAS-Python this is represented by the :py:class:`~imas.ids_structure.IDSStructure` class. You will find these classes in a lot of places: @@ -106,7 +106,7 @@ Some methods and properties are defined for all data nodes and arrays of structu details. .. seealso:: - You can find more details on IDS data node related classes and methods in the imas-python Architecture documentation: + You can find more details on IDS data node related classes and methods in the IMAS-Python Architecture documentation: :ref:`imas_architecture/IDS_nodes` Apply a function to all nodes in an IDS @@ -142,7 +142,7 @@ Exercise 2: Explore data nodes .. hint:: :collapsible: - Review imas-python Architecture documentation for data node methods: + Review IMAS-Python Architecture documentation for data node methods: :ref:`imas_architecture/IDS_nodes` .. 
md-tab-item:: Solution diff --git a/docs/source/courses/advanced/hashing.rst b/docs/source/courses/advanced/hashing.rst index bc9d77fe..c37ed8b8 100644 --- a/docs/source/courses/advanced/hashing.rst +++ b/docs/source/courses/advanced/hashing.rst @@ -1,14 +1,14 @@ Calculating hashes of IMAS data =============================== -imas-python can calculate *hashes* of IMAS data. As `Wikipedia explains better than I could +IMAS-Python can calculate *hashes* of IMAS data. As `Wikipedia explains better than I could do `__: A hash function is any function that can be used to map data of arbitrary size to fixed-size values, [...]. The values returned by a hash function are called *hash values*, *hash codes*, *hash digests*, *digests*, or simply *hashes*. -imas-python is using the XXH3 hash function from the `xxHash project +IMAS-Python is using the XXH3 hash function from the `xxHash project `__. This is a *non-cryptographic* hash and returns 64-bit hashes. @@ -54,7 +54,7 @@ Exercise 1: Calculate some hashes .. literalinclude:: imas_snippets/hashing.py -Properties of imas-python's hashes +Properties of IMAS-Python's hashes ---------------------------------- The implementation of the hash function has the following properties: diff --git a/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py b/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py index e4adfceb..dce460c7 100644 --- a/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py +++ b/docs/source/courses/advanced/imas_snippets/alternative_coordinates.py @@ -21,7 +21,7 @@ # What do you notice: in both dimensions there are multiple options for the coordinate. # 3. Retrieve the coordinate values through the ``coordinates`` attribute. 
-# This will raise a coordinate lookup error because imas-python cannot choose which of the +# This will raise a coordinate lookup error because IMAS-Python cannot choose which of the # coordinates to use: try: print(p2d.density.coordinates[0]) diff --git a/docs/source/courses/advanced/imas_snippets/autoconvert_get.py b/docs/source/courses/advanced/imas_snippets/autoconvert_get.py index 76ee8e90..d2fcc221 100644 --- a/docs/source/courses/advanced/imas_snippets/autoconvert_get.py +++ b/docs/source/courses/advanced/imas_snippets/autoconvert_get.py @@ -11,7 +11,7 @@ # Fill the IDS with some test data pulse_schedule.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS -pulse_schedule.ids_properties.comment = "Testing renamed IDS nodes with imas-python" +pulse_schedule.ids_properties.comment = "Testing renamed IDS nodes with IMAS-Python" pulse_schedule.time = [1.0, 1.1, 1.2] pulse_schedule.ec.antenna.resize(1) diff --git a/docs/source/courses/advanced/imas_snippets/coordinates.py b/docs/source/courses/advanced/imas_snippets/coordinates.py index 8b9b67fe..3c818989 100644 --- a/docs/source/courses/advanced/imas_snippets/coordinates.py +++ b/docs/source/courses/advanced/imas_snippets/coordinates.py @@ -23,7 +23,7 @@ # [-9.e+40 -9.e+40 -9.e+40] # # In heterogeneous time, the coordinate of profiles_1d is profiles_1d/time, which is a -# scalar. imas-python will construct a numpy array for you where +# scalar. IMAS-Python will construct a numpy array for you where # array[i] := profiles_1d[i]/time # Since we didn't set these values, they are set to the default EMPTY_FLOAT, which is # -9e+40. @@ -37,7 +37,7 @@ # This will output: # (IDSCoordinate('1...N'),) # The coordinate of profiles_2d is an index. 
When requesting the coordinate values, -# imas-python will generate an index array for you: +# IMAS-Python will generate an index array for you: print(slice0.profiles_2d.coordinates[0]) # -> array([0]) diff --git a/docs/source/courses/advanced/imas_snippets/ids_convert.py b/docs/source/courses/advanced/imas_snippets/ids_convert.py index 77ea422c..ceaab8bc 100644 --- a/docs/source/courses/advanced/imas_snippets/ids_convert.py +++ b/docs/source/courses/advanced/imas_snippets/ids_convert.py @@ -12,7 +12,7 @@ pulse_schedule.ids_properties.homogeneous_time = \ imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS pulse_schedule.ids_properties.comment = \ - "Testing renamed IDS nodes with imas-python" + "Testing renamed IDS nodes with IMAS-Python" pulse_schedule.time = [1., 1.1, 1.2] pulse_schedule.ec.antenna.resize(1) @@ -41,7 +41,7 @@ print(pulse_schedule_3_39.ids_properties.comment) # What do you notice? # This prints the original value of the comment ("Testing renamed IDS -# nodes with imas-python"). +# nodes with IMAS-Python"). # This is actually the same that you get when creating a shallow copy # with ``copy.copy`` of a regular Python dictionary: import copy diff --git a/docs/source/courses/advanced/metadata.rst b/docs/source/courses/advanced/metadata.rst index 42cb6abc..70060c8e 100644 --- a/docs/source/courses/advanced/metadata.rst +++ b/docs/source/courses/advanced/metadata.rst @@ -3,7 +3,7 @@ Using Data Dictionary metadata ============================== -imas-python provides convenient access to Data Dictionary metadata of any IDS node through +IMAS-Python provides convenient access to Data Dictionary metadata of any IDS node through the ``metadata`` attribute: .. code-block:: python @@ -23,10 +23,10 @@ cases. 
Overview of available metadata ------------------------------ -The data dictionary metadata that is parsed by imas-python is listed in the API +The data dictionary metadata that is parsed by IMAS-Python is listed in the API documentation for :py:class:`~imas.ids_metadata.IDSMetadata`. -Note that not all metadata from the IMAS Data Dictionary is parsed by imas-python. +Note that not all metadata from the IMAS Data Dictionary is parsed by IMAS-Python. This metadata is still accessible on the :code:`metadata` attribute. You can use :py:func:`imas.util.inspect` to get an overview of all metadata associated to an element in an IDS. @@ -208,7 +208,7 @@ Units and dimensional analysis with Pint `_. The Data Dictionary specifies the units of stored quantities. This metadata is -accessible in imas-python via :py:attr:`metadata.units +accessible in IMAS-Python via :py:attr:`metadata.units `. In most cases, these units are in a format that ``pint`` can understand (for example ``T``, ``Wb``, ``m^-3``, ``m.s^-1``). diff --git a/docs/source/courses/advanced/xarray.rst b/docs/source/courses/advanced/xarray.rst index e1fb5498..f28b452b 100644 --- a/docs/source/courses/advanced/xarray.rst +++ b/docs/source/courses/advanced/xarray.rst @@ -4,7 +4,7 @@ Create ``xarray.DataArray`` from an IDS .. info:: In this lesson you will create a ``DataArray`` manually. In a future version of - imas-python we plan to include functionality that will automatically do this for you. + IMAS-Python we plan to include functionality that will automatically do this for you. That should further simplify working with data inside IDSs. Let's start with an introduction of Xarray. 
According to `their website diff --git a/docs/source/courses/advanced_user_training.rst b/docs/source/courses/advanced_user_training.rst index c91be432..36fe8c90 100644 --- a/docs/source/courses/advanced_user_training.rst +++ b/docs/source/courses/advanced_user_training.rst @@ -1,9 +1,9 @@ -Advanced imas-python +Advanced IMAS-Python ==================== -In this imas-python training, we dive into more advanced features of imas-python. It is assumed -you are familiar with the basic features of imas-python, which are introduced in the -:ref:`imas-python 101` training. +In this IMAS-Python training, we dive into more advanced features of IMAS-Python. It is assumed +you are familiar with the basic features of IMAS-Python, which are introduced in the +:ref:`IMAS-Python 101` training. .. note:: diff --git a/docs/source/courses/basic/analyze.rst b/docs/source/courses/basic/analyze.rst index a17fa20b..2dabad0d 100644 --- a/docs/source/courses/basic/analyze.rst +++ b/docs/source/courses/basic/analyze.rst @@ -1,4 +1,4 @@ -Analyze with imas-python +Analyze with IMAS-Python ======================== For this part of the training we will learn to open an IMAS database entry, and @@ -23,13 +23,13 @@ We load data in memory with the :meth:`~imas.db_entry.DBEntry.get()` and can use the data. .. hint:: - Use the ASCII data supplied with imas-python for all exercises. It contains two + Use the ASCII data supplied with IMAS-Python for all exercises. It contains two IDSs (``equilibrium`` and ``core_profiles``) filled with data from three time slices of ITER reference data. Two convenience methods are available in the :mod:`imas.training` module to open the DBEntry for this training data. 1. :meth:`imas.training.get_training_db_entry()` returns an opened - ``imas.DBEntry`` object. Use this method if you want to use the imas-python + ``imas.DBEntry`` object. Use this method if you want to use the IMAS-Python interface. 2. 
:meth:`imas.training.get_training_imas_db_entry()` returns an opened ``imas.DBEntry`` object. Use this method if you want to use the Python Access @@ -69,7 +69,7 @@ Exercise 1 ``core_profiles.profiles_1d[i].electrons.temperature``) from the ``core_profiles`` IDS at time slice :math:`t\approx 433\,\mathrm{s}` - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/read_whole_equilibrium.py @@ -80,7 +80,7 @@ Exercise 1 The recommendations for larger data files are: - Only load the time slice(s) that you are interested in. - - Alternatively, imas-python allows to load data on-demand, see + - Alternatively, IMAS-Python allows to load data on-demand, see :ref:`Lazy loading` for more details. @@ -105,21 +105,21 @@ Exercise 2 :external:func:`numpy.argmin`. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/read_equilibrium_time_array.py .. attention:: - imas-python objects mostly behave the same way as numpy arrays. However, in some cases - functions explicitly expect a pure numpy array and supplying an imas-python object raises + IMAS-Python objects mostly behave the same way as numpy arrays. However, in some cases + functions explicitly expect a pure numpy array and supplying an IMAS-Python object raises an exception. When this is the case, the ``.value`` attribute can be used to obtain the underlying data. .. note:: - imas-python has two main ways of accessing IDSs. In the exercises above, we used + IMAS-Python has two main ways of accessing IDSs. In the exercises above, we used the "attribute-like" access. This is the main way of navigating the IDS tree. - However, imas-python also provides a "dict-like" interface to access data, which + However, IMAS-Python also provides a "dict-like" interface to access data, which might be more convenient in some cases. For example: .. 
literalinclude:: imas_snippets/iterate_core_profiles.py @@ -162,7 +162,7 @@ Exercise 3 of its arguments, here you can use ``imas.ids_defs.CLOSEST_INTERP``. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/read_core_profiles_ne_timeslice.py @@ -185,7 +185,7 @@ Exercise 4 Using ``matplotlib``, create a plot of :math:`n_e` on the y-axis and :math:`\rho_{tor, norm}` on the x-axis at :math:`t=433\mathrm{s}` - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/plot_core_profiles_ne_timeslice.py @@ -205,7 +205,7 @@ When you are interested in the time evolution of a quantity, using ``get_slice`` impractical. It gets around the limitation of the data not fitting in memory, but will still need to read all of the data from disk (just not at once). -imas-python has a `lazy loading` mode, where it will only read the requested data from disk +IMAS-Python has a `lazy loading` mode, where it will only read the requested data from disk when you try to access it. You can enable it by supplying ``lazy=True`` to a call to :meth:`~imas.db_entry.DBEntry.get()` or :meth:`~imas.db_entry.DBEntry.get_slice()`. @@ -233,7 +233,7 @@ Exercise 5 data_entry = imas.DBEntry(MDSPLUS_BACKEND, database, pulse, run, user) data_entry.open() - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/plot_core_profiles_te.py diff --git a/docs/source/courses/basic/create.rst b/docs/source/courses/basic/create.rst index 2db00104..fbfaa74a 100644 --- a/docs/source/courses/basic/create.rst +++ b/docs/source/courses/basic/create.rst @@ -1,4 +1,4 @@ -Create with imas-python +Create with IMAS-Python ======================= In this section of the training, we will have a look at creating (and filling) IDSs from @@ -7,7 +7,7 @@ scratch. 
Create an empty IDS ------------------- -Empty IDSs in imas-python are created by the :py:meth:`~imas.ids_factory.IDSFactory.new` +Empty IDSs in IMAS-Python are created by the :py:meth:`~imas.ids_factory.IDSFactory.new` method of an :py:class:`~imas.ids_factory.IDSFactory`. .. note:: @@ -24,7 +24,7 @@ Exercise 1 Create an empty ``core_profiles`` IDS. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/create_core_profiles.py :end-before: # Set properties @@ -59,7 +59,7 @@ Exercise 2 Fill the ``core_profiles`` IDS with the fields as described above. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Set properties @@ -67,7 +67,7 @@ Exercise 2 .. note:: - Observe that we can assign a Python list to ``cp.time``. imas-python will + Observe that we can assign a Python list to ``cp.time``. IMAS-Python will automatically convert it to a numpy array. @@ -89,7 +89,7 @@ Exercise 3 Validate the just-filled IDS. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Validate the IDS for consistency @@ -115,7 +115,7 @@ Exercise 4 Fix the coordinate consistency error. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Fill in the missing rho_tor_norm coordinate @@ -162,7 +162,7 @@ Exercise 5 The signature of :meth:`~imas.db_entry.DBEntry()` is: ``DBEntry(backend, database, pulse, run)`` - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/create_core_profiles.py :start-at: # Create a new data entry for storing the IDS @@ -183,6 +183,6 @@ Congratulations for completing this section of the course. You have: Click on the tabs to see the complete source, combining all exercises. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. 
literalinclude:: imas_snippets/create_core_profiles.py diff --git a/docs/source/courses/basic/explore.rst b/docs/source/courses/basic/explore.rst index f01e23d8..776918b5 100644 --- a/docs/source/courses/basic/explore.rst +++ b/docs/source/courses/basic/explore.rst @@ -1,6 +1,6 @@ .. _`basic/explore`: -Explore with imas-python +Explore with IMAS-Python ======================== In this part of the training, we will learn how to use Python to explore data @@ -32,10 +32,10 @@ Exercise 1 .. hint:: The module ``imas.ids_names`` contains information on the available IDSs. - In imas-python, you can use :py:class:`~imas.ids_factory.IDSFactory` to figure + In IMAS-Python, you can use :py:class:`~imas.ids_factory.IDSFactory` to figure out which IDSs are avaible. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/print_idss.py @@ -43,7 +43,7 @@ Exercise 1 Explore the structure and contents of an IDS -------------------------------------------- -imas-python has several features and utilities for exploring an IDS. These are best used in +IMAS-Python has several features and utilities for exploring an IDS. These are best used in an interactive Python console, such as the default python console or the `IPython `_ console. @@ -51,7 +51,7 @@ an interactive Python console, such as the default python console or the `IPytho Tab completion '''''''''''''' -As with most Python objects, you can use :kbd:`Tab` completion on imas-python objects. +As with most Python objects, you can use :kbd:`Tab` completion on IMAS-Python objects. .. note:: In the python console, you need to press :kbd:`Tab` twice to show suggestions. 
@@ -74,7 +74,7 @@ As with most Python objects, you can use :kbd:`Tab` completion on imas-python ob Interactive help '''''''''''''''' -Use the built-in :external:py:func:`help()` function to get more information on imas-python +Use the built-in :external:py:func:`help()` function to get more information on IMAS-Python functions, objects, etc. .. code-block:: pycon @@ -87,11 +87,11 @@ functions, objects, etc. [...] -Inspecting imas-python objects +Inspecting IMAS-Python objects '''''''''''''''''''''''''''''' :kbd:`Tab` completion is nice when you already know more or less what attribute you are -looking for. For a more comprehensive overview of any imas-python node, you can use +looking for. For a more comprehensive overview of any IMAS-Python node, you can use :py:meth:`imas.util.inspect` to show: 1. The path to the node (relative to the IDS it is contained in) @@ -119,7 +119,7 @@ looking for. For a more comprehensive overview of any imas-python node, you can Printing an IDS tree '''''''''''''''''''' -Another useful utility function in imas-python is :py:meth:`imas.util.print_tree`. This +Another useful utility function in IMAS-Python is :py:meth:`imas.util.print_tree`. This will print a complete tree structure of all non-empty quantities in the provided node. As an argument you can give a complete IDS, or any structure in the IDS such as ``ids_properties``: @@ -138,7 +138,7 @@ As an argument you can give a complete IDS, or any structure in the IDS such as Find paths in an IDS '''''''''''''''''''' -In imas-python you can also search for paths inside an IDS: +In IMAS-Python you can also search for paths inside an IDS: :py:meth:`imas.util.find_paths`. This can be useful when you know what quantity you are looking for, but aren't sure exactly in which (sub)structure of the IDS it is located. 
diff --git a/docs/source/courses/basic/imas_snippets/create_core_profiles.py b/docs/source/courses/basic/imas_snippets/create_core_profiles.py index b263299c..1ea149e5 100644 --- a/docs/source/courses/basic/imas_snippets/create_core_profiles.py +++ b/docs/source/courses/basic/imas_snippets/create_core_profiles.py @@ -11,7 +11,7 @@ # Set properties cp.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS -cp.ids_properties.comment = "Synthetic IDS created for the imas-python course" +cp.ids_properties.comment = "Synthetic IDS created for the IMAS-Python course" cp.ids_properties.creation_date = datetime.date.today().isoformat() # Set a time array diff --git a/docs/source/courses/basic/imas_snippets/print_idss.py b/docs/source/courses/basic/imas_snippets/print_idss.py index 5cb3f1cc..01cdba21 100644 --- a/docs/source/courses/basic/imas_snippets/print_idss.py +++ b/docs/source/courses/basic/imas_snippets/print_idss.py @@ -1,6 +1,6 @@ import imas -# imas-python has multiple DD versions inside, which makes this exercise harder. +# IMAS-Python has multiple DD versions inside, which makes this exercise harder. 
# We provide possible solutions here # Option 1: Print the IDSs in the default-selected DD version diff --git a/docs/source/courses/basic/imas_snippets/transform_grid.py b/docs/source/courses/basic/imas_snippets/transform_grid.py index f4cef866..98a2c537 100644 --- a/docs/source/courses/basic/imas_snippets/transform_grid.py +++ b/docs/source/courses/basic/imas_snippets/transform_grid.py @@ -29,7 +29,7 @@ eq = entry.get_slice("equilibrium", time, imas.ids_defs.CLOSEST_INTERP) # Update comment - eq.ids_properties.comment = "imas-python training: transform coordinate system" + eq.ids_properties.comment = "IMAS-Python training: transform coordinate system" p2d = eq.time_slice[0].profiles_2d[0] # Get `.value` so we can plot the original values after the IDS node is overwritten diff --git a/docs/source/courses/basic/setup.rst b/docs/source/courses/basic/setup.rst index 3034cf76..87fb4511 100644 --- a/docs/source/courses/basic/setup.rst +++ b/docs/source/courses/basic/setup.rst @@ -1,12 +1,12 @@ -imas-python 101: setup imas-python +IMAS-Python 101: setup IMAS-Python ================================== -This course was written for imas-python version 0.8.0 and requires an IMAS installation to -load IMAS data. imas-python may be installed on your cluster, in which case you can do +This course was written for IMAS-Python version 0.8.0 and requires an IMAS installation to +load IMAS data. IMAS-Python may be installed on your cluster, in which case you can do .. code-block:: console - $ module load imas-python IMAS + $ module load IMAS-Python IMAS $ python -c 'import imas; print(imas.__version__)' -Have a look at the :ref:`Installing imas-python` page for more details on installing imas-python. +Have a look at the :ref:`Installing IMAS-Python` page for more details on installing IMAS-Python. 
diff --git a/docs/source/courses/basic/transform.rst b/docs/source/courses/basic/transform.rst index 6f208443..e94d5360 100644 --- a/docs/source/courses/basic/transform.rst +++ b/docs/source/courses/basic/transform.rst @@ -1,4 +1,4 @@ -Transform with imas-python +Transform with IMAS-Python ========================== In this part of the course we'll perform a coordinate transformation. Our input data is @@ -27,7 +27,7 @@ Exercise 1: Check which time slices exist .. hint:: You can use :ref:`lazy loading` to avoid loading all data in memory. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Open input data entry @@ -44,7 +44,7 @@ Exercise 2: Load a time slice Loop over each available time in the IDS and load the time slice inside the loop. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Loop over each time slice @@ -74,7 +74,7 @@ We will apply the transformation of the data as follows: .. md-tab-set:: - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Loop over each time slice @@ -90,7 +90,7 @@ Exercise 4: Store a time slice Store the time slice after the transformation. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Create output data entry @@ -114,7 +114,7 @@ Exercise 5: Plotting data before and after the transformation :math:`\rho,\theta` plane (transformed data) to verify that the transformation is correct. - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. literalinclude:: imas_snippets/transform_grid.py :start-at: # Create a plot @@ -125,7 +125,7 @@ Bringing it all together .. md-tab-set:: - .. md-tab-item:: imas-python + .. md-tab-item:: IMAS-Python .. 
literalinclude:: imas_snippets/transform_grid.py :caption: Source code for the complete exercise diff --git a/docs/source/courses/basic_user_training.rst b/docs/source/courses/basic_user_training.rst index a3703462..b2e62d4a 100644 --- a/docs/source/courses/basic_user_training.rst +++ b/docs/source/courses/basic_user_training.rst @@ -1,10 +1,10 @@ -.. _`imas-python 101`: +.. _`IMAS-Python 101`: -imas-python 101 +IMAS-Python 101 =============== -In this imas-python training, we introduce you to the basic concepts and features of -imas-python. You will need some basic familiarity with Python. For a refresher, see +In this IMAS-Python training, we introduce you to the basic concepts and features of +IMAS-Python. You will need some basic familiarity with Python. For a refresher, see the `Python tutorial `_. We also assume some basic knowledge of the ITER IMAS infrastructure. diff --git a/docs/source/identifiers.rst b/docs/source/identifiers.rst index de885a3d..312749e1 100644 --- a/docs/source/identifiers.rst +++ b/docs/source/identifiers.rst @@ -20,10 +20,10 @@ representations: 3. A description (long string) -Identifiers in imas-python +Identifiers in IMAS-Python -------------------------- -imas-python implements identifiers as an :py:class:`enum.Enum`. Identifiers are +IMAS-Python implements identifiers as an :py:class:`enum.Enum`. Identifiers are constructed on-demand from the loaded Data Dictionary definitions. All identifier enums can be accessed through ``imas.identifiers``. A list of @@ -59,10 +59,10 @@ the available identifiers is stored as ``imas.identifiers.identifiers``. print(core_sources.source[0].identifier.metadata.identifier_enum) -Assigning identifiers in imas-python +Assigning identifiers in IMAS-Python ------------------------------------ -imas-python implements smart assignment of identifiers. You may assign an identifier +IMAS-Python implements smart assignment of identifiers. 
You may assign an identifier enum value (for example ``imas.identifiers.core_source_identifier.total``), a string (for example ``"total"``) or an integer (for example ``"1"``) to an identifier structure (for example ``core_profiles.source[0].identifier``) to set diff --git a/docs/source/imas_architecture.rst b/docs/source/imas_architecture.rst index bfcab45d..b1764bed 100644 --- a/docs/source/imas_architecture.rst +++ b/docs/source/imas_architecture.rst @@ -1,7 +1,7 @@ -imas-python architecture +IMAS-Python architecture ======================== -This document provides a brief overview of the components of imas-python, grouped into +This document provides a brief overview of the components of IMAS-Python, grouped into different functional areas. We don't aim to give detailed explanations of the code or the algorithms in it. These @@ -54,14 +54,14 @@ immutable. .. caution:: - Although an :py:class:`~imas.ids_path.IDSPath` in imas-python implements roughly + Although an :py:class:`~imas.ids_path.IDSPath` in IMAS-Python implements roughly the same concept as `the "IDS Path syntax" in the Data Dictionary `__, they are not necessarily the same thing! At the moment of writing this (January 2024), the IDS path definition in the Data Dictionary is not yet finalized. - Be aware that the syntax of imas-python's :py:class:`~imas.ids_path.IDSPath` may + Be aware that the syntax of IMAS-Python's :py:class:`~imas.ids_path.IDSPath` may differ slightly and might be incompatible with the definition from the Data Dictionary. @@ -288,14 +288,14 @@ MDSplus support models are specific to a DD version and are required when using the MDSplus backend for creating new Data Entries. - .. seealso:: :ref:`MDSplus in imas-python` + .. seealso:: :ref:`MDSplus in IMAS-Python` Versioning ---------- -imas-python uses `setuptools-scm `_ for -versioning. An imas-python release has a corresponding tag (which sets the version). +IMAS-Python uses `setuptools-scm `_ for +versioning. 
An IMAS-Python release has a corresponding tag (which sets the version). The ``imas._version`` module is generated by ``setuptools-scm`` and implements this logic for editable installs. This module is generated by ``setuptools-scm`` when building python packages. @@ -333,8 +333,8 @@ Miscelleneous The following is a list of miscelleneous modules, which don't belong to any of the other categories on this page. -- :py:mod:`imas.exception` contains all Exception classes that imas-python may raise. -- :py:mod:`imas.setup_logging` initializes a logging handler for imas-python. +- :py:mod:`imas.exception` contains all Exception classes that IMAS-Python may raise. +- :py:mod:`imas.setup_logging` initializes a logging handler for IMAS-Python. - :py:mod:`imas.training` contains helper methods for making training data available. - :py:mod:`imas.util` contains useful utility methods. It is imported automatically. diff --git a/docs/source/index.rst b/docs/source/index.rst index 20a5d80a..5db1a1fa 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -3,11 +3,11 @@ define TOC here, but it'll be put in the sidebar by the theme ================== -imas-python manual +IMAS-Python manual ================== -imas-python is a pure-python library to handle arbitrarily nested -data structures. imas-python is designed for, but not necessarily bound to, +IMAS-Python is a pure-python library to handle arbitrarily nested +data structures. IMAS-Python is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. @@ -22,7 +22,7 @@ It provides: The README is best read on :src:`#imas`. -Read what's new in the current version of imas-python in our :ref:`changelog`! +Read what's new in the current version of IMAS-Python in our :ref:`changelog`! Manual @@ -48,7 +48,7 @@ Manual changelog .. 
toctree:: - :caption: imas-python training courses + :caption: IMAS-Python training courses :maxdepth: 1 courses/basic_user_training @@ -64,7 +64,7 @@ Manual .. toctree:: - :caption: imas-python development + :caption: IMAS-Python development :maxdepth: 1 imas_architecture diff --git a/docs/source/installing.rst b/docs/source/installing.rst index a454d0aa..2e67a210 100644 --- a/docs/source/installing.rst +++ b/docs/source/installing.rst @@ -1,24 +1,24 @@ -.. _`Installing imas-python`: +.. _`Installing IMAS-Python`: -Installing imas-python +Installing IMAS-Python ====================== -imas-python is a pure Python package. For full functionality of the package you need +IMAS-Python is a pure Python package. For full functionality of the package you need an installation of `the IMAS Core library `_. See -:ref:`imas-python 5 minute introduction` for an overview of functionality which does +:ref:`IMAS-Python 5 minute introduction` for an overview of functionality which does (not) require the IMAS Core library available. -To get started, you can install it from `pypi.org `_: +To get started, you can install it from `pypi.org `_: .. code-block:: bash - pip install imas-python + pip install IMAS-Python Local installation from sources ------------------------------- -We recommend using a :external:py:mod:`venv`. Then, clone the imas-python repository +We recommend using a :external:py:mod:`venv`. Then, clone the IMAS-Python repository and run `pip install`: .. code-block:: bash @@ -26,7 +26,7 @@ and run `pip install`: python3 -m venv ./venv . venv/bin/activate - git clone ssh://git@github.com:iterorganization/imas-python.git + git clone ssh://git@github.com:iterorganization/IMAS-Python.git cd imas pip install --upgrade pip pip install --upgrade wheel setuptools @@ -50,17 +50,17 @@ Test your installation by trying cd ~ python -c "import imas; print(imas.__version__)" -This is how to run the imas-python test suite: +This is how to run the IMAS-Python test suite: .. 
code-block:: bash - # inside the imas-python git repository + # inside the IMAS-Python git repository pytest imas --mini # run with a specific backend pytest imas --ascii --mini -And to build the imas-python documentation, execute: +And to build the IMAS-Python documentation, execute: .. code-block:: bash diff --git a/docs/source/intro.rst b/docs/source/intro.rst index 30c75e1e..0118f217 100644 --- a/docs/source/intro.rst +++ b/docs/source/intro.rst @@ -1,6 +1,6 @@ -.. _`imas-python 5 minute introduction`: +.. _`IMAS-Python 5 minute introduction`: -imas-python 5 minute introduction +IMAS-Python 5 minute introduction --------------------------------- .. contents:: Contents @@ -12,7 +12,7 @@ Verify your IMAS installation ''''''''''''''''''''''''''''' Before continuing, verify that your imas install is working. Check the -:ref:`Installing imas-python` page for installation instructions if below fails for +:ref:`Installing IMAS-Python` page for installation instructions if below fails for you. Start python and import imas. Note that the version in below output may be outdated. @@ -24,8 +24,8 @@ be outdated. .. note:: - If you have an imas-python install without the IMAS Access Layer, importing - imas-python will display an error message. You can still use imas-python, but not all + If you have an IMAS-Python install without the IMAS Access Layer, importing + IMAS-Python will display an error message. You can still use IMAS-Python, but not all functionalities are available. @@ -34,7 +34,7 @@ Create and use an IDS To create an IDS, you must first make an :py:class:`~imas.ids_factory.IDSFactory` object. The IDS factory is necessary for specifying which version of the IMAS Data -Dictionary you want to use. If you don't specify anything, imas-python uses the same Data +Dictionary you want to use. If you don't specify anything, IMAS-Python uses the same Data Dictionary version as the loaded IMAS environment, or the latest available version. 
See :ref:`Using multiple DD versions in the same environment` for more information on different Data Dictionary versions. @@ -52,7 +52,7 @@ We can now use this ``core_profiles`` IDS and assign some data to it: .. code-block:: python - >>> core_profiles.ids_properties.comment = "Testing imas-python" + >>> core_profiles.ids_properties.comment = "Testing IMAS-Python" >>> core_profiles.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> # array quantities are automatically converted to the appropriate numpy arrays >>> core_profiles.time = [1, 2, 3] @@ -68,7 +68,7 @@ We can now use this ``core_profiles`` IDS and assign some data to it: >>> core_profiles.profiles_1d[0].grid.rho_tor_norm = [0, 0.5, 1.0] >>> core_profiles.profiles_1d[0].j_tor = [0, 0, 0] -As you can see in the example above, imas-python automatically checks the data you try to +As you can see in the example above, IMAS-Python automatically checks the data you try to assign to an IDS with the data type specified in the Data Dictionary. When possible, your data is automatically converted to the expected type. You will get an error message if this is not possible: @@ -90,7 +90,7 @@ Store an IDS to disk .. note:: - This functionality requires the IMAS Access Layer. - - This API will change when imas-python is moving to Access Layer 5 (expected Q2 + - This API will change when IMAS-Python is moving to Access Layer 5 (expected Q2 2023). To store an IDS to disk, we need to indicate the following information to the @@ -102,7 +102,7 @@ IMAS Access Layer. Please check the `IMAS Access Layer documentation - ``pulse`` - ``run`` -In imas-python you do this as follows: +In IMAS-Python you do this as follows: .. code-block:: python @@ -122,7 +122,7 @@ Load an IDS from disk .. note:: - This functionality requires the IMAS Access Layer. 
- - This API will change when imas-python is moving to Access Layer 5 (expected Q2 + - This API will change when IMAS-Python is moving to Access Layer 5 (expected Q2 2023). To load an IDS from disk, you need to specify the same information as @@ -136,4 +136,4 @@ can use ``.get()`` to load IDS data from disk: >>> dbentry2.open() >>> core_profiles2 = dbentry2.get("core_profiles") >>> print(core_profiles2.ids_properties.comment.value) - Testing imas-python + Testing IMAS-Python diff --git a/docs/source/lazy_loading.rst b/docs/source/lazy_loading.rst index a4317d5d..9dda19e0 100644 --- a/docs/source/lazy_loading.rst +++ b/docs/source/lazy_loading.rst @@ -9,7 +9,7 @@ When reading data from a data entry (using :meth:`DBEntry.get lowlevel Access Layer backend. This may take a long time to complete if the data entry has a lot of data stored for the requested IDS. -Instead of reading data immediately, imas-python can also `lazy load` the data when you need +Instead of reading data immediately, IMAS-Python can also `lazy load` the data when you need it. This will speed up your program in cases where you are interested in a subset of all the data stored in an IDS. @@ -71,7 +71,7 @@ Lazy loading of data may speed up your programs, but also comes with some limita - Copying lazy-loaded IDSs (through :external:py:func:`copy.deepcopy`) is not implemented. -2. imas-python **assumes** that the underlying data entry is not modified. +2. IMAS-Python **assumes** that the underlying data entry is not modified. When you (or another user) overwrite or add data to the same data entry, you may end up with a mix of old and new data in the lazy loaded IDS. @@ -89,5 +89,5 @@ Lazy loading of data may speed up your programs, but also comes with some limita 4. Lazy loading has more overhead for reading data from the lowlevel: it is therefore more efficient to do a full :code:`get()` or :code:`get_slice()` when you intend to use most of the data stored in an IDS. -5. 
When using imas-python with remote data access (i.e. the UDA backend), a full +5. When using IMAS-Python with remote data access (i.e. the UDA backend), a full :code:`get()` or :code:`get_slice()` is more efficient than lazy loading. diff --git a/docs/source/mdsplus.rst b/docs/source/mdsplus.rst index 1ff6e74e..e2a1d1d5 100644 --- a/docs/source/mdsplus.rst +++ b/docs/source/mdsplus.rst @@ -1,11 +1,11 @@ -.. _`MDSplus in imas-python`: +.. _`MDSplus in IMAS-Python`: -MDSplus in imas-python +MDSplus in IMAS-Python ====================== `MDSplus `_ is a set of software tools for data acquisition and storage and a methodology for management of complex -scientific data. imas-python uses the IMAS LowLevel interface to interact +scientific data. IMAS-Python uses the IMAS LowLevel interface to interact with MDSplus data. The model files required to read IMAS IDS-structured data are generated on demand, whenever a specific DD version is used by the user. As this generation might take a while, MDSplus models are diff --git a/docs/source/metadata.rst b/docs/source/metadata.rst index 32545c35..784e49e6 100644 --- a/docs/source/metadata.rst +++ b/docs/source/metadata.rst @@ -5,14 +5,14 @@ IDS metadata Besides the data structure, the IMAS Data Dictionary also defines metadata associated with elements in the IDS, such as coordinate information, units, etc. -imas-python provides the :py:class:`~imas.ids_metadata.IDSMetadata` API for +IMAS-Python provides the :py:class:`~imas.ids_metadata.IDSMetadata` API for interacting with this metadata. On this page you find several examples for querying and using the metadata of IDS elements. .. seealso:: - imas-python advanced training: :ref:`Using metadata` + IMAS-Python advanced training: :ref:`Using metadata` Overview of available metadata @@ -68,7 +68,7 @@ looked up. See below example. >>> profile.electrons.temperature.coordinates[0] IDSNumericArray("/core_profiles/profiles_1d/1/grid/rho_tor_norm", array([0. 
, 0.15, 0.3 , 0.45, 0.6 ])) -When a coordinate is just an index, imas-python generates a +When a coordinate is just an index, IMAS-Python generates a :external:py:func:`numpy.arange` with the same length as the data. See below example. @@ -84,7 +84,7 @@ example. .. rubric:: Time coordinates Time coordinates are a special case: the coordinates depend on whether the IDS -is in homogeneous time mode or not. imas-python handles this transparently. +is in homogeneous time mode or not. IMAS-Python handles this transparently. .. code-block:: python :caption: Example getting time coordinate values @@ -116,15 +116,15 @@ used as a coordinate. For example, the ``distribution(i1)/profiles_2d(itime)/grid/r OR distribution(i1)/profiles_2d(itime)/grid/rho_tor_norm``. This means that either ``r`` or ``rho_tor_norm`` can be used as coordinate. When requesting such a -coordinate from imas-python, four things may happen: +coordinate from IMAS-Python, four things may happen: 1. When ``r`` is empty and ``rho_tor_norm`` not, ``coordinates[0]`` will return ``rho_tor_norm``. 2. When ``rho_tor_norm`` is empty and ``r`` not, ``coordinates[0]`` will return ``r``. -3. When both ``r`` and ``rho_tor_norm`` are not empty, imas-python raises an error +3. When both ``r`` and ``rho_tor_norm`` are not empty, IMAS-Python raises an error because it cannot determine which of the two coordinates should be used. -4. Similarly, an error is raised by imas-python when neither ``r`` nor +4. Similarly, an error is raised by IMAS-Python when neither ``r`` nor ``rho_tor_norm`` are set. @@ -135,7 +135,7 @@ coordinate from imas-python, four things may happen: Query coordinate information '''''''''''''''''''''''''''' -In imas-python you can query coordinate information in two ways: +In IMAS-Python you can query coordinate information in two ways: 1. 
Directly query the coordinate attribute on the metadata: :code:`.metadata.coordinate2` gives you the coordinate information diff --git a/docs/source/multi-dd.rst b/docs/source/multi-dd.rst index 372aaae0..6ddd7cd1 100644 --- a/docs/source/multi-dd.rst +++ b/docs/source/multi-dd.rst @@ -4,9 +4,9 @@ Using multiple DD versions in the same environment ================================================== Whereas the default IMAS High Level Interface is built for a single Data Dictionary -version, imas-python can transparently handle multiple DD versions. +version, IMAS-Python can transparently handle multiple DD versions. -By default, imas-python uses the same Data Dictionary version as the loaded IMAS environment +By default, IMAS-Python uses the same Data Dictionary version as the loaded IMAS environment is using, as specified by the environment variable ``IMAS_VERSION``. If no IMAS environment is loaded, the last available DD version is used. @@ -34,7 +34,7 @@ example: Conversion of IDSs between DD versions -------------------------------------- -imas-python can convert IDSs between different versions of the data dictionary. This uses the +IMAS-Python can convert IDSs between different versions of the data dictionary. This uses the "non-backwards compatible changes" metadata from the DD definitions. There are two conversion modes: @@ -75,8 +75,8 @@ the backend and the stored data), but it doesn't support all conversion logic be more efficient to convert the data to your DD version, store it and then use it. This avoids conversion every time you read the data. - Converting an entire Data Entry can also be done with the imas-python command - line interface. See :ref:`imas-python Command Line tool`. + Converting an entire Data Entry can also be done with the IMAS-Python command + line interface. See :ref:`IMAS-Python Command Line tool`. Explicit conversion @@ -114,7 +114,7 @@ Explicit conversion .. note:: Not all data may be converted. 
For example, when an IDS node is removed between DD - versions, the corresponding data is not copied. imas-python provides logging to indicate + versions, the corresponding data is not copied. IMAS-Python provides logging to indicate when this happens. @@ -181,16 +181,16 @@ explicit conversion mechanisms. Background information ---------------------- -Since imas-python needs to have access to multiple DD versions it was chosen to +Since IMAS-Python needs to have access to multiple DD versions it was chosen to bundle these with the code at build-time, in setup.py. If a git clone of the Data Dictionary succeeds, the setup tools automatically download saxon and generate ``IDSDef.xml`` for each of the tagged versions in the DD git repository. These are then gathered into ``IDSDef.zip``, which is -distributed inside the imas-python package. +distributed inside the IMAS-Python package. To update the set of data dictionaries new versions can be added to the zipfile. A reinstall of the package will ensure that all available versions are included -in imas-python. Additionally an explicit path to an XML file can be specified, which +in IMAS-Python. Additionally an explicit path to an XML file can be specified, which is useful for development. Automated tests have been provided that check the loading of all of the DD @@ -203,14 +203,14 @@ Extending the DD set Use the command ``python setup.py build_DD`` to build a new ``IDSDef.zip``. This fetches all tags from the data dictionary git repository and builds the ``IDSDef.zip``. -imas-python searches for an ``IDSDef.zip`` in the following locations: +IMAS-Python searches for an ``IDSDef.zip`` in the following locations: 1. The environment variable ``$IMAS_DDZIP`` (path to a zip file) 2. The file ``./IDSDef.zip`` in the current working directory 3. In the local configuration folder: ``~/.config/imas/IDSDef.zip``, or ``$XDG_CONFIG_DIR/imas/IDSDef.zip`` (if the environment variable ``$XDG_CONFIG_DIR`` is set) -4. 
The zipfile bundled with the imas-python installation: ``assets/IDSDef.zip`` +4. The zipfile bundled with the IMAS-Python installation: ``assets/IDSDef.zip`` All paths are searched in order when loading the definitions of a specific data dictionary version: the first zip file that contains the definitions of the requested diff --git a/docs/source/netcdf.rst b/docs/source/netcdf.rst index 4ef62a2a..2ff50c41 100644 --- a/docs/source/netcdf.rst +++ b/docs/source/netcdf.rst @@ -9,9 +9,9 @@ IMAS netCDF files netcdf/conventions -imas-python supports reading IDSs from and writing IDSs to IMAS netCDF files. This +IMAS-Python supports reading IDSs from and writing IDSs to IMAS netCDF files. This feature is currently in alpha status, and its functionality may change in -upcoming minor releases of imas-python. +upcoming minor releases of IMAS-Python. A detailed description of the IMAS netCDF format and conventions can be found on the :ref:`IMAS conventions for the netCDF data format` page. @@ -45,12 +45,12 @@ will be used for :py:meth:`~imas.db_entry.DBEntry.get` and Using IMAS netCDF files with 3rd-party tools -------------------------------------------- -The netCDF files produces by imas-python can be read with external tools. In this +The netCDF files produced by IMAS-Python can be read with external tools. In this section we will show how to load data with the `xarray `__ package. Let's first create a small netCDF file in the current working directory based on -the imas-python training data: +the IMAS-Python training data: .. code-block:: python :caption: Store ``core_profiles`` training data in a netCDF file @@ -107,6 +107,6 @@ your directory. Let's open this file with ``xarray.load_dataset``: Validating an IMAS netCDF file ------------------------------ -IMAS netCDF files can be validated with imas-python through the command line ``imas -validate_nc ``. 
See also :ref:`imas-python Command Line tool` or type +IMAS netCDF files can be validated with IMAS-Python through the command line ``imas +validate_nc ``. See also :ref:`IMAS-Python Command Line tool` or type ``imas validate_nc --help`` in a command line. diff --git a/docs/source/netcdf/conventions.rst b/docs/source/netcdf/conventions.rst index 2dbbacb4..2b31b9b7 100644 --- a/docs/source/netcdf/conventions.rst +++ b/docs/source/netcdf/conventions.rst @@ -263,7 +263,7 @@ IMAS netCDF writers are recommended to overwrite the following metadata: - ``ids_properties.version_put.access_layer``: fill with ``"N/A"``, since this IDS is not written by the IMAS Access Layer. - ``ids_properties.version_put.access_layer_language``: fill with the name and - version of the netCDF writer, for example ``imas-python 1.1.0``. + version of the netCDF writer, for example ``IMAS-Python 1.1.0``. All other IDS metadata and provenance should be filled by the user or software that provides the IDS data. diff --git a/docs/source/release_imas.rst b/docs/source/release_imas.rst index 4606118d..994bd88a 100644 --- a/docs/source/release_imas.rst +++ b/docs/source/release_imas.rst @@ -1,12 +1,12 @@ -imas-python development and release process +IMAS-Python development and release process =========================================== -imas-python development follows the a fork-based model described in +IMAS-Python development follows the a fork-based model described in `the contributing guidelines -`_. +`_. -Creating an imas-python release +Creating an IMAS-Python release ------------------------------- 1. Create a Pull Request using fork based workflow from ``develop`` to ``main``. @@ -15,11 +15,11 @@ Creating an imas-python release 3. The PR is reviewed and merged by the maintainers who also create the release tags. 4. After the release PR is merged, update the Easybuild configurations for SDCC modules in the `easybuild-easyconfigs repository - `_. + `_. 
See the next section for more details on how to do this. -Updating and testing the imas-python Easybuild configuration +Updating and testing the IMAS-Python Easybuild configuration ------------------------------------------------------------ The following steps can be taken on an SDCC login node. @@ -70,7 +70,7 @@ The following steps must be performed for each of the tool chains (currently a. Copy the ``.eb`` file from the previous release. b. Update the ``version`` to reflect the just-released version tag. - c. If any of the imas-python dependencies in ``pyproject.toml`` where updated or changed + c. If any of the IMAS-Python dependencies in ``pyproject.toml`` were updated or changed since the previous release, update the easybuild dependencies: - ``builddependencies`` contains build-time dependencies which are available @@ -79,7 +79,7 @@ The following steps must be performed for each of the tool chains (currently .. note:: The IMAS module is a build-time dependency only and not a runtime - dependency. This allows imas-python users to load the imas-python module and + dependency. This allows IMAS-Python users to load the IMAS-Python module and **any** supported IMAS module. - ``dependencies`` contains run-time dependencies which are available as a @@ -88,13 +88,13 @@ The following steps must be performed for each of the tool chains (currently dependencies of dependencies) which are not available in any of the Python modules on SDCC. - d. Update the checksum of imas: download an archive of the imas-python repository from + d. Update the checksum of imas: download an archive of the IMAS-Python repository from bitbucket. This is easiest to do by copying the following URL, replace ```` with the version tag, and paste it in a web browser: .. 
code-block:: text - https://github.com/iterorganization/imas-python/archive/refs/heads/.tar.gz + https://github.com/iterorganization/IMAS-Python/archive/refs/heads/.tar.gz Then, calculate the hash of the downloaded archive with ``sha256sum`` and update it in the ``.eb`` file. @@ -120,7 +120,7 @@ The following steps must be performed for each of the tool chains (currently module purge module use ~/.local/easybuild/modules/all/ - module load imas-python/- + module load IMAS-Python/- module load IMAS c. Sanity check the module, for example by running the ``pytest`` unit tests. diff --git a/docs/source/validation.rst b/docs/source/validation.rst index 472c70ca..95b937d2 100644 --- a/docs/source/validation.rst +++ b/docs/source/validation.rst @@ -3,7 +3,7 @@ IDS validation ============== -The IDSs you fill should be consistent. To help you in validating that, imas-python has a +The IDSs you fill should be consistent. To help you in validating that, IMAS-Python has a :py:meth:`~imas.ids_toplevel.IDSToplevel.validate` method that executes the following checks. @@ -11,7 +11,7 @@ checks. :local: :depth: 1 -If you call this method and your IDS fails validation, imas-python raises an error explaining +If you call this method and your IDS fails validation, IMAS-Python raises an error explaining the problem. See the following example: >>> import imas >>> core_profiles = imas.IDSFactory().core_profiles() >>> core_profiles.validate() imas.exception.ValidationError: Invalid value for ids_properties.homogeneous_time: -999999999 -imas-python also automatically validates an IDS every time you do a +IMAS-Python also automatically validates an IDS every time you do a :py:meth:`~imas.db_entry.DBEntry.put` or :py:meth:`~imas.db_entry.DBEntry.put_slice`. To disable this feature, you must set the environment variable ``IMAS_AL_DISABLE_VALIDATE`` to ``1``. 
diff --git a/imas/__init__.py b/imas/__init__.py index b0b8f567..e7c773a4 100644 --- a/imas/__init__.py +++ b/imas/__init__.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. # isort: skip_file @@ -17,7 +17,7 @@ from .ids_convert import convert_ids from .ids_identifiers import identifiers -# Load the imas-python IMAS AL/DD core +# Load the IMAS-Python IMAS AL/DD core from . import ( db_entry, dd_helpers, @@ -25,7 +25,7 @@ util, ) -PUBLISHED_DOCUMENTATION_ROOT = "https://imas-python.readthedocs.io/en/latest/" +PUBLISHED_DOCUMENTATION_ROOT = "https://IMAS-Python.readthedocs.io/en/latest/" """URL to the published documentation.""" OLDEST_SUPPORTED_VERSION = _V("3.22.0") -"""Oldest Data Dictionary version that is supported by imas-python.""" +"""Oldest Data Dictionary version that is supported by IMAS-Python.""" diff --git a/imas/__main__.py b/imas/__main__.py index 0b7834e5..989ada7f 100644 --- a/imas/__main__.py +++ b/imas/__main__.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Support module to run imas as a module: .. code-block:: bash diff --git a/imas/_util.py b/imas/_util.py index 82199275..da231bc6 100644 --- a/imas/_util.py +++ b/imas/_util.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """This file contains the implementation of all utility functions that need external modules. Implementation has been removed from util.py to improve the performance of ``import imas``. 
diff --git a/imas/backends/__init__.py b/imas/backends/__init__.py index 78cdd3f5..1c3729f5 100644 --- a/imas/backends/__init__.py +++ b/imas/backends/__init__.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Logic for interacting with all data backends. Currently supported backends are: diff --git a/imas/backends/db_entry_impl.py b/imas/backends/db_entry_impl.py index 7e5dddef..df1e4638 100644 --- a/imas/backends/db_entry_impl.py +++ b/imas/backends/db_entry_impl.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. from abc import ABC, abstractmethod from dataclasses import dataclass diff --git a/imas/backends/imas_core/__init__.py b/imas/backends/imas_core/__init__.py index 5e7812f4..14b3a768 100644 --- a/imas/backends/imas_core/__init__.py +++ b/imas/backends/imas_core/__init__.py @@ -1,4 +1,4 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Subpackage implementing data access through the IMAS Access Layer Core. """ diff --git a/imas/backends/imas_core/al_context.py b/imas/backends/imas_core/al_context.py index 323cdd5d..b33c99b3 100644 --- a/imas/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. 
"""Object-oriented interface to the IMAS lowlevel. """ @@ -298,7 +298,7 @@ def get_context(self) -> ALContext: if not cache or cache[-1] is not ctx: logger.warning( "Found an empty AL context cache: This should not happen, please " - "report this bug to the imas-python developers." + "report this bug to the IMAS-Python developers." ) else: return ctx diff --git a/imas/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py index e1d711a5..52d82fe6 100644 --- a/imas/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -207,7 +207,7 @@ def _setup_backend( os.environ["IDSDEF_PATH"] = idsdef_path logger.warning( "The UDA backend is not tested with " - "imas-python and may not work properly. " + "IMAS-Python and may not work properly. " "Please raise any issues you find." ) @@ -215,7 +215,7 @@ def _setup_backend( pass # nothing to set up else: - logger.warning("Backend %s is unknown to imas-python", backend) + logger.warning("Backend %s is unknown to IMAS-Python", backend) def close(self, *, erase: bool = False) -> None: if self._db_ctx is None: diff --git a/imas/backends/imas_core/db_entry_helpers.py b/imas/backends/imas_core/db_entry_helpers.py index d6e3a596..4216db5d 100644 --- a/imas/backends/imas_core/db_entry_helpers.py +++ b/imas/backends/imas_core/db_entry_helpers.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Helper methods for loading data from and storing data to Data Entries. """ diff --git a/imas/backends/imas_core/imas_interface.py b/imas/backends/imas_core/imas_interface.py index 05634dfb..6f4b3ba6 100644 --- a/imas/backends/imas_core/imas_interface.py +++ b/imas/backends/imas_core/imas_interface.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. 
-# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """ Helper module for providing a version-independent interface to the Access Layer. @@ -44,7 +44,7 @@ class LLInterfaceError(RuntimeError): class LowlevelInterface: """Compatibility object. - Provides a stable API for the rest of imas-python even when the + Provides a stable API for the rest of IMAS-Python even when the `imas.lowlevel` interface changes. .. rubric:: Developer notes @@ -213,4 +213,4 @@ def begin_timerange_action( func.__doc__ = f"Wrapper function for AL lowlevel method ``{funcname}``" ll_interface = LowlevelInterface(lowlevel) -"""imas-python <-> IMAS lowlevel interface""" +"""IMAS-Python <-> IMAS lowlevel interface""" diff --git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py index 9b00f34c..00514e01 100644 --- a/imas/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -118,7 +118,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: # on-disk formats should be versioned and documented properly, so this should never # happen again. - # There are multiple possible cases for the imas-python cache + # There are multiple possible cases for the IMAS-Python cache # 1. The cache exist and can be used # 2. The cache folder exists, and another process is creating it # 3. The cache folder exists, but the process creating it has stopped @@ -158,7 +158,7 @@ def mdsplus_model_dir(factory: IDSFactory) -> str: os.listdir(cache_dir_path), ) raise MDSPlusModelError( - "The imas-python cache directory is corrupted. Please clean the" + "The IMAS-Python cache directory is corrupted. Please clean the" f" cache directory ({cache_dir_path}) and try again." 
) elif not cache_dir_path.is_dir() and not model_exists(cache_dir_path): diff --git a/imas/backends/netcdf/__init__.py b/imas/backends/netcdf/__init__.py index 86cc929e..b4fd429d 100644 --- a/imas/backends/netcdf/__init__.py +++ b/imas/backends/netcdf/__init__.py @@ -1,4 +1,4 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""NetCDF IO support for imas-python. Requires [netcdf] extra dependencies. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""NetCDF IO support for IMAS-Python. Requires [netcdf] extra dependencies. """ diff --git a/imas/backends/netcdf/db_entry_nc.py b/imas/backends/netcdf/db_entry_nc.py index aabaf3e0..a23005a6 100644 --- a/imas/backends/netcdf/db_entry_nc.py +++ b/imas/backends/netcdf/db_entry_nc.py @@ -31,7 +31,7 @@ def __init__(self, fname: str, mode: str, factory: IDSFactory) -> None: if netCDF4 is None: raise RuntimeError( "The `netCDF4` python module is not available. Please install this " - "module to read/write IMAS netCDF files with imas-python." + "module to read/write IMAS netCDF files with IMAS-Python." ) # To support netcdf v1.4 (which has no mode "x") we map it to "w" with # `clobber=True`. diff --git a/imas/backends/netcdf/ids2nc.py b/imas/backends/netcdf/ids2nc.py index bbdeb2e8..2b892838 100644 --- a/imas/backends/netcdf/ids2nc.py +++ b/imas/backends/netcdf/ids2nc.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""NetCDF IO support for imas-python. Requires [netcdf] extra dependencies. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""NetCDF IO support for IMAS-Python. Requires [netcdf] extra dependencies. 
""" from typing import Iterator, Tuple diff --git a/imas/backends/netcdf/nc_metadata.py b/imas/backends/netcdf/nc_metadata.py index 50545f8c..94929957 100644 --- a/imas/backends/netcdf/nc_metadata.py +++ b/imas/backends/netcdf/nc_metadata.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """NetCDF metadata for dimensions and tensorization of IDSs. """ diff --git a/imas/command/cli.py b/imas/command/cli.py index 565262ee..5e18d008 100644 --- a/imas/command/cli.py +++ b/imas/command/cli.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """ Main CLI entry point """ import logging @@ -44,7 +44,7 @@ def _excepthook(type_, value, tb): @click.group("imas", invoke_without_command=True, no_args_is_help=True) def cli(): - """imas-python command line interface. + """IMAS-Python command line interface. Please use one of the available commands listed below. 
You can get help for each command by executing: @@ -62,15 +62,15 @@ def cli(): @cli.command("version") def print_version(): - """Print version information of imas-python.""" + """Print version information of IMAS-Python.""" cons = console.Console() grid = Table( - title="imas-python version info", show_header=False, title_style="bold" + title="IMAS-Python version info", show_header=False, title_style="bold" ) grid.box = box.HORIZONTALS if cons.size.width > 120: grid.width = 120 - grid.add_row("imas-python version:", imas.__version__) + grid.add_row("IMAS-Python version:", imas.__version__) grid.add_section() grid.add_row("Default data dictionary version:", imas.IDSFactory().dd_version) dd_versions = ", ".join(imas.dd_zip.dd_xml_versions()) diff --git a/imas/command/db_analysis.py b/imas/command/db_analysis.py index 5cc946dc..8f262e27 100644 --- a/imas/command/db_analysis.py +++ b/imas/command/db_analysis.py @@ -1,4 +1,4 @@ -"""imas-python-based command line tool for analysing fields in a database.""" +"""IMAS-Python-based command line tool for analysing fields in a database.""" import gzip import json diff --git a/imas/command/helpers.py b/imas/command/helpers.py index f43a47e1..8c664306 100644 --- a/imas/command/helpers.py +++ b/imas/command/helpers.py @@ -24,7 +24,7 @@ def setup_rich_log_handler(quiet: bool): root_logger.removeHandler(handler) # Install rich handler on the root logger: root_logger.addHandler(RichHandler()) - if quiet: # Silence imas-python INFO messages + if quiet: # Silence IMAS-Python INFO messages # If loglevel is less than WARNING, set it to WARNING: imas_logger.setLevel(max(logging.WARNING, imas_logger.getEffectiveLevel())) diff --git a/imas/command/timer.py b/imas/command/timer.py index 9f43ee55..46edff9c 100644 --- a/imas/command/timer.py +++ b/imas/command/timer.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. 
+# You should have received the IMAS-Python LICENSE file with this project. """Utility class to time different sections of a CLI app.""" import time diff --git a/imas/db_entry.py b/imas/db_entry.py index 899dfd83..d7d74574 100644 --- a/imas/db_entry.py +++ b/imas/db_entry.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Logic for interacting with IMAS Data Entries. """ @@ -605,7 +605,7 @@ def _get( if dd_version.split(".")[0] != destination._dd_version.split(".")[0]: logger.warning( "On-disk data is stored in DD %s which has a different major " - "version than the requested DD version (%s). imas-python will " + "version than the requested DD version (%s). IMAS-Python will " "convert the data automatically, but this does not cover all" "changes. " "See %s/multi-dd.html#conversion-of-idss-between-dd-versions", diff --git a/imas/dd_helpers.py b/imas/dd_helpers.py index f5fd5070..0cfabc10 100644 --- a/imas/dd_helpers.py +++ b/imas/dd_helpers.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Helper functions to build IDSDef.xml""" import logging diff --git a/imas/dd_zip.py b/imas/dd_zip.py index cd6fa1cd..2d62224a 100644 --- a/imas/dd_zip.py +++ b/imas/dd_zip.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """ Extract DD versions from a zip file. The zip file contains files as @@ -11,7 +11,7 @@ If that exists and points to a file we will attempt to open it. 
Then, IDSDef.zip is searched in site-packages, the current folder, in .config/imas/ (`$$XDG_CONFIG_HOME`) and in -the assets/ folder within the imas-python package. +the assets/ folder within the IMAS-Python package. 1. `$$IMAS_DDZIP` 2. The virtual environment @@ -22,7 +22,7 @@ 7. `__file__/../../imas/assets/IDSDef.zip` All files are checked, i.e. if your .config/imas/IDSDef.zip is outdated -the imas-python-packaged version will be used. +the IMAS-Python-packaged version will be used. The `assets/IDSDef.zip` provided with the package can be updated with the `python setup.py build_DD` command, which is also performed on install diff --git a/imas/exception.py b/imas/exception.py index fa89c326..513c2caa 100644 --- a/imas/exception.py +++ b/imas/exception.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""Exception classes used in imas-python. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""Exception classes used in IMAS-Python. """ import difflib diff --git a/imas/ids_base.py b/imas/ids_base.py index 3e182772..5c74bf5c 100644 --- a/imas/ids_base.py +++ b/imas/ids_base.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Base class for all IDS nodes. """ diff --git a/imas/ids_convert.py b/imas/ids_convert.py index 95ccd92c..f66f519d 100644 --- a/imas/ids_convert.py +++ b/imas/ids_convert.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Functionality for converting IDSToplevels between DD versions. 
""" @@ -183,7 +183,7 @@ def _check_data_type(self, old_item: Element, new_item: Element): else: logger.debug( "Data type of %s changed from %s to %s. This change is not " - "supported by imas-python: no conversion will be done.", + "supported by IMAS-Python: no conversion will be done.", new_item.get("path"), old_item.get("data_type"), new_item.get("data_type"), @@ -527,7 +527,7 @@ def _add_provenance_entry( source_txt = ( f"{provenance_origin_uri}; " f"This IDS has been converted from DD {source_version} to " - f"DD {target_ids._dd_version} by imas-python {imas.__version__}." + f"DD {target_ids._dd_version} by IMAS-Python {imas.__version__}." ) if hasattr(node, "reference"): # DD version after IMAS-5304 diff --git a/imas/ids_coordinates.py b/imas/ids_coordinates.py index 8e3a2b70..29e62a87 100644 --- a/imas/ids_coordinates.py +++ b/imas/ids_coordinates.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Logic for interpreting coordinates in an IDS. """ @@ -386,7 +386,7 @@ def _capture_goto_errors(self, dim, coordinate): ) else: version_error = ( - "Please report this issue to the imas-python developers." + "Please report this issue to the IMAS-Python developers." ) logger.warning( "An error occurred while finding coordinate `%s` of dimension %s, " diff --git a/imas/ids_data_type.py b/imas/ids_data_type.py index 69a3a201..50f6f80b 100644 --- a/imas/ids_data_type.py +++ b/imas/ids_data_type.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Data Dictionary type handling functionality. 
""" diff --git a/imas/ids_defs.py b/imas/ids_defs.py index 26ac10c3..af4ed45c 100644 --- a/imas/ids_defs.py +++ b/imas/ids_defs.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -""" Load imas-python libs to provide constants +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +""" Load IMAS-Python libs to provide constants .. _`Backend identifiers`: diff --git a/imas/ids_factory.py b/imas/ids_factory.py index 2173985b..cd88952d 100644 --- a/imas/ids_factory.py +++ b/imas/ids_factory.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Tools for generating IDSs from a Data Dictionary version. """ diff --git a/imas/ids_identifiers.py b/imas/ids_identifiers.py index df72be32..a64dd87f 100644 --- a/imas/ids_identifiers.py +++ b/imas/ids_identifiers.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""imas-python module to support Data Dictionary identifiers. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""IMAS-Python module to support Data Dictionary identifiers. """ import logging diff --git a/imas/ids_metadata.py b/imas/ids_metadata.py index 2cd0e224..4d2d5dbb 100644 --- a/imas/ids_metadata.py +++ b/imas/ids_metadata.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""Core of the imas-python interpreted IDS metadata +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. 
+"""Core of the IMAS-Python interpreted IDS metadata """ import re import types @@ -26,7 +26,7 @@ class IDSType(Enum): operation); ``dynamic`` data are those which vary in time within the context of the data. - As in the Python HLI, imas-python only distinguishes between dynamic and non-dynamic + As in the Python HLI, IMAS-Python only distinguishes between dynamic and non-dynamic nodes. """ @@ -154,7 +154,7 @@ def __init__( else: self._ctx_path = self.name - # These are special and used in imas-python logic, + # These are special and used in IMAS-Python logic, # so we need to ensure proper values maxoccur = attrib.get("maxoccur", "unbounded") self.maxoccur: Optional[int] = ( diff --git a/imas/ids_path.py b/imas/ids_path.py index 75fb6694..97f9b695 100644 --- a/imas/ids_path.py +++ b/imas/ids_path.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Logic for interpreting paths to elements in an IDS """ diff --git a/imas/ids_primitive.py b/imas/ids_primitive.py index d1b5fb83..a86faa95 100644 --- a/imas/ids_primitive.py +++ b/imas/ids_primitive.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Provides the classes for IDS data nodes """ import logging diff --git a/imas/ids_struct_array.py b/imas/ids_struct_array.py index 9f79a130..b1768649 100644 --- a/imas/ids_struct_array.py +++ b/imas/ids_struct_array.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. 
"""IDS StructArray represents an Array of Structures in the IDS tree. """ diff --git a/imas/ids_structure.py b/imas/ids_structure.py index f55755fc..3482d6ef 100644 --- a/imas/ids_structure.py +++ b/imas/ids_structure.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """A structure in an IDS """ diff --git a/imas/ids_toplevel.py b/imas/ids_toplevel.py index d0fa8e0b..15ae0970 100644 --- a/imas/ids_toplevel.py +++ b/imas/ids_toplevel.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. """Represents a Top-level IDS (like ``core_profiles``, ``equilibrium``, etc) """ diff --git a/imas/setup_logging.py b/imas/setup_logging.py index e7cfcd5f..4ec3a34a 100644 --- a/imas/setup_logging.py +++ b/imas/setup_logging.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""Create a default log handler when imas-python is imported. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""Create a default log handler when IMAS-Python is imported. 
""" import logging diff --git a/imas/test/test_dd_helpers.py b/imas/test/test_dd_helpers.py index bb9d0d11..20a8cad1 100644 --- a/imas/test/test_dd_helpers.py +++ b/imas/test/test_dd_helpers.py @@ -8,7 +8,7 @@ _idsdef_unzipped_relpath = Path("idsdef_unzipped") - +@pytest.mark.skip(reason="skipping IDSDef.zip generation") def test_prepare_data_dictionaries(): prepare_data_dictionaries() assert os.path.exists( diff --git a/imas/test/test_hash.py b/imas/test/test_hash.py index 903135ff..f583788d 100644 --- a/imas/test/test_hash.py +++ b/imas/test/test_hash.py @@ -162,7 +162,7 @@ def test_hash_ids(): cp = imas.IDSFactory().core_profiles() cp.ids_properties.homogeneous_time = 1 cp.ids_properties.comment = "Testing hash function" - cp.code.name = "imas-python" + cp.code.name = "IMAS-Python" cp.time = [1.0, 2.0, 3.0, 4.0] cp.profiles_1d.resize(4) for p1d in cp.profiles_1d: diff --git a/imas/test/test_ids_ascii_data.py b/imas/test/test_ids_ascii_data.py index 127c0948..d15fecf1 100644 --- a/imas/test/test_ids_ascii_data.py +++ b/imas/test/test_ids_ascii_data.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. 
from pathlib import PosixPath import pytest diff --git a/imas/test/test_ids_convert.py b/imas/test/test_ids_convert.py index 6cd85ca8..20dcd8c3 100644 --- a/imas/test/test_ids_convert.py +++ b/imas/test/test_ids_convert.py @@ -188,8 +188,8 @@ def test_provenance_entry(factory): # Check that origin and destination DD versions are included assert "3.31.0" in provenance_txt assert "3.38.0" in provenance_txt - # Check that imas-python is mentioned - assert "imas-python" in provenance_txt + # Check that IMAS-Python is mentioned + assert "IMAS-Python" in provenance_txt # Test logic branch for node.reference implemented with IMAS-5304 cp4 = convert_ids(cp2, "3.42.0", provenance_origin_uri="") diff --git a/imas/test/test_ids_mixin.py b/imas/test/test_ids_mixin.py index 39b55dd9..23286790 100644 --- a/imas/test/test_ids_mixin.py +++ b/imas/test/test_ids_mixin.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. def test_toplevel(fake_filled_toplevel): diff --git a/imas/test/test_ids_primitive.py b/imas/test/test_ids_primitive.py index 49cae27a..0083511c 100644 --- a/imas/test/test_ids_primitive.py +++ b/imas/test/test_ids_primitive.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. 
import pprint import numpy as np @@ -38,7 +38,7 @@ def test_pretty_print(fake_filled_toplevel): def test_value_attribute(fake_filled_toplevel): - """Test if the value attribute acts as imas-python expects""" + """Test if the value attribute acts as IMAS-Python expects""" eig = fake_filled_toplevel.wavevector[0].eigenmode[0] assert isinstance(eig.frequency_norm, IDSPrimitive) assert hasattr(eig.frequency_norm, "value") diff --git a/imas/test/test_ids_structure.py b/imas/test/test_ids_structure.py index 3b74c613..ef6e59f6 100644 --- a/imas/test/test_ids_structure.py +++ b/imas/test/test_ids_structure.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. import copy import pprint diff --git a/imas/training.py b/imas/training.py index 02d5de18..9c4df602 100644 --- a/imas/training.py +++ b/imas/training.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""Functions that are useful for the imas-python training courses. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""Functions that are useful for the IMAS-Python training courses. """ import importlib diff --git a/imas/util.py b/imas/util.py index 5ab69349..aafad2c7 100644 --- a/imas/util.py +++ b/imas/util.py @@ -1,6 +1,6 @@ -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. -"""Collection of useful helper methods when working with imas-python. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""Collection of useful helper methods when working with IMAS-Python. 
""" @@ -32,7 +32,7 @@ def visit_children( ) -> None: """Apply a function to node and its children - imas-python objects generally live in a tree structure. Similar to Pythons + IMAS-Python objects generally live in a tree structure. Similar to Pythons :py:func:`map`, this method can be used to apply a function to objects within this tree structure. @@ -56,7 +56,7 @@ def visit_children( Example: .. code-block:: python - # Print all filled leaf nodes in a given imas-python IDSToplevel + # Print all filled leaf nodes in a given IMAS-Python IDSToplevel visit_children(print, toplevel) See Also: @@ -80,9 +80,9 @@ def tree_iter( accept_lazy: bool = False, include_node: bool = False, ) -> Iterator[IDSBase]: - """Tree iterator for imas-python structures. + """Tree iterator for IMAS-Python structures. - Iterate (depth-first) through the whole subtree of an imas-python structure. + Iterate (depth-first) through the whole subtree of an IMAS-Python structure. Args: node: Node to start iterating from. @@ -104,7 +104,7 @@ def tree_iter( Example: .. code-block:: python - # Iterate over all filled leaf nodes in a given imas-python IDSToplevel + # Iterate over all filled leaf nodes in a given IMAS-Python IDSToplevel for node in tree_iter(toplevel): print(node) @@ -514,7 +514,7 @@ def get_data_dictionary_version(obj: Union[IDSBase, DBEntry, IDSFactory]) -> str """Find out the version of the data dictionary definitions that this object uses. Args: - obj: Any imas-python object that is data-dictionary dependent. + obj: Any IMAS-Python object that is data-dictionary dependent. Returns: The data dictionary version, e.g. ``"3.38.1"``. diff --git a/setup.py b/setup.py index 692fb5cf..486b56d6 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,11 @@ # pylint: disable=wrong-import-position -# This file is part of imas-python. -# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. 
+# You should have received the IMAS-Python LICENSE file with this project. """ Packaging settings. Inspired by a minimal setup.py file, the Pandas cython build and the access-layer setup template. -The installable imas-python package tries to follow in the following order: +The installable IMAS-Python package tries to follow in the following order: - The style guide for Python code [PEP8](https://www.python.org/dev/peps/pep-0008/) - The [PyPA guide on packaging projects]( https://packaging.python.org/guides/distributing-packages-using-setuptools/#distributing-packages) diff --git a/tools/compare_lowlevel_access_patterns.py b/tools/compare_lowlevel_access_patterns.py index 88a5f2aa..c2ace03c 100644 --- a/tools/compare_lowlevel_access_patterns.py +++ b/tools/compare_lowlevel_access_patterns.py @@ -1,4 +1,4 @@ -"""Compare the access patterns of the lowlevel AL API between imas-python and the HLI. +"""Compare the access patterns of the lowlevel AL API between IMAS-Python and the HLI. """ from functools import wraps @@ -113,10 +113,10 @@ def compare_ids_get(imas_ids): help="Use heterogeneous time mode instead of homogeneous time.", ) def main(ids_name, method, heterogeneous): - """Compare lowlevel calls done by imas-python vs. the Python HLI + """Compare lowlevel calls done by IMAS-Python vs. the Python HLI This program fills the provided IDS with random data, then does I/O with it using - both the Python HLI and the imas-python APIs. The resulting calls to the lowlevel Access + both the Python HLI and the IMAS-Python APIs. The resulting calls to the lowlevel Access Layer are logged to respectively /tmp/hli.log and /tmp/imas.log. You may use your favorite diff tool to compare the two files. diff --git a/tools/extract_test_data.py b/tools/extract_test_data.py index d64c4f51..c17f8ec8 100644 --- a/tools/extract_test_data.py +++ b/tools/extract_test_data.py @@ -1,5 +1,5 @@ -# This file is part of imas-python. 
-# You should have received the imas-python LICENSE file with this project. +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. import os import imas From f8ed3554340eacb543a67646e532bd05c6804b3c Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Thu, 23 Jan 2025 14:36:44 +0100 Subject: [PATCH 49/97] fixed pytest and updated documentation references --- benchmarks/technical.py | 7 +-- ci/run_benchmark.sh | 4 +- ci/run_pytest.sh | 10 +++-- docs/source/benchmarking.rst | 85 +++++++++++++----------------------- docs/source/changelog.rst | 52 +++++++++++----------- docs/source/ci_config.rst | 2 +- imas/dd_helpers.py | 1 - imas/test/test_dd_helpers.py | 5 +-- 8 files changed, 71 insertions(+), 95 deletions(-) diff --git a/benchmarks/technical.py b/benchmarks/technical.py index 1bbf3a48..d85244ca 100644 --- a/benchmarks/technical.py +++ b/benchmarks/technical.py @@ -19,9 +19,10 @@ def timeraw_import_imas(): # `asv compare` :( """ def track_imas_versions(): - equilibrium = imas.equilibrium() - equilibrium.ids_properties.homogeneous_time = imas.imasdef.IDS_TIME_MODE_INDEPENDENT - dbentry = imas.DBEntry(imas.imasdef.MEMORY_BACKEND, "test", 1, 1) + ids_factory = imas.IDSFactory() + equilibrium = ids_factory.equilibrium() + equilibrium.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_INDEPENDENT + dbentry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) dbentry.create() dbentry.put(equilibrium) equilibrium = dbentry.get("equilibrium") diff --git a/ci/run_benchmark.sh b/ci/run_benchmark.sh index 1fe77bcc..ae24ce2d 100755 --- a/ci/run_benchmark.sh +++ b/ci/run_benchmark.sh @@ -59,7 +59,7 @@ asv machine --yes # Run ASV for the current commit, develop and main asv run --skip-existing-successful HEAD^! asv run --skip-existing-successful develop^! -# asv run --skip-existing-successful main^! +asv run --skip-existing-successful main^! 
# Compare results if [ `git rev-parse --abbrev-ref HEAD` == develop ] @@ -75,5 +75,5 @@ asv publish # And persistently store them cp -rf .asv/{results,html} "$BENCHMARKS_DIR" -deactivate + diff --git a/ci/run_pytest.sh b/ci/run_pytest.sh index 511264b2..979b00de 100755 --- a/ci/run_pytest.sh +++ b/ci/run_pytest.sh @@ -37,8 +37,10 @@ pip freeze # Clean artifacts created by pytest rm -f junit.xml rm -rf htmlcov -mkdir -p ~/tmp -export PYTEST_DEBUG_TEMPROOT=~/tmp -python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=html --junit-xml=junit.xml -x -deactivate +# setups local directory to not to full /tmp directory with pytest temporary files +# mkdir -p ~/tmp +# export PYTEST_DEBUG_TEMPROOT=~/tmp +python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=html --junit-xml=junit.xml + + diff --git a/docs/source/benchmarking.rst b/docs/source/benchmarking.rst index a0cf0ca5..a8c654bd 100644 --- a/docs/source/benchmarking.rst +++ b/docs/source/benchmarking.rst @@ -1,4 +1,4 @@ -.. _`benchmarking IMAS`: +.. _`benchmarking IMAS-Python`: Benchmarking IMAS-Python ======================== @@ -19,15 +19,14 @@ Technical benchmarks package. Basic functional benchmarks - These are for benchmarking functionality with an equivalent feature in the IMAS - Access Layer HLI. In addition to tracking the performance of the IMAS-Python features - over time, we can also benchmark the performance against the traditional HLI. + These are for benchmarking functionality with an addition to track the performance + of the IMAS-Python features over time. For example: putting and getting IDSs. IMAS-Python-specific functional benchmarks - These are for benchmarking functionality without an equivalent feature in the IMAS - Access Layer HLI. We use these for tracking the IMAS-Python performance over time. + These are for benchmarking core functionalities for checking performance. We use these + for tracking the IMAS-Python core features performance over time. 
For example: data conversion between DD versions. @@ -62,33 +61,31 @@ contains tabular results. Some examples: .. code-block:: text :caption: Example output for a test parametrized in ``hli`` - [ 58.33%] ··· core_profiles.Generate.time_create_core_profiles ok - [ 58.33%] ··· ======== ============ - hli - -------- ------------ - imas 22.9±0.4μs - imas 408±8μs - ======== ============ + [56.25%] ··· core_profiles.Generate.time_create_core_profiles ok + [56.25%] ··· ====== ============= + hli + ------ ------------- + imas 2.04±0.01μs + ====== ============= -Here we see the benchmark ``core_profiles.Generate.time_create_core_profiles`` was -repeated for multiple values of ``hli``: once for the ``imas`` HLI, and once for the -``imas`` HLI. + +Here we see the benchmark ``core_profiles.Generate.time_create_core_profiles`` for +imas-python ``imas-python``. Some benchmarks are parametrized in multiple dimensions, as in below example. This results in a 2D table of results. .. code-block:: text - :caption: Example output for a test parametrized in ``hli`` and ``backend`` - - [ 70.83%] ··· core_profiles.Get.time_get ok - [ 70.83%] ··· ======== ========== ============ ========= - -- backend - -------- --------------------------------- - hli 13 14 11 - ======== ========== ============ ========= - imas 75.1±1ms 70.2±0.5ms 207±2ms - imas 241±4ms 229±2ms 364±6ms - ======== ========== ============ ========= + :caption: Example output for a test parametrized in ``imas-python`` and ``backend`` + + [65.62%] ··· core_profiles.Get.time_get ok + [65.62%] ··· ====== ========= ========== ============ ========= ============ + -- backend + ------ -------------------------------------------------------- + hli HDF5 MDSplus memory ASCII netCDF + ====== ========= ========== ============ ========= ============ + imas 172±3ms 86.7±2ms 68.5±0.8ms 291±3ms 14.2±0.7ms + ====== ========= ========== ============ ========= ============ .. 
note:: The backends are listed by their numerical IDS: @@ -103,7 +100,7 @@ Running benchmarks (advanced) ----------------------------- Running benchmarks quickly, as explained in the previous section, is great during -development and for comparing the performance of IMAS-Python against the imas HLI. However, +development and for comparing the performance of IMAS-Python. However, ``asv`` can also track the performance of benchmarks over various commits of IMAS-Python. Unfortunately this is a bit more tricky to set up. @@ -113,22 +110,7 @@ Setup advanced benchmarking First, some background on how ``asv`` tracks performance: it creates an isolated virtual environment (using the ``virtualenv`` package) and installs IMAS-Python for each commit that -will be benchmarked. However, because the virtual environment is isolated, the ``imas`` -package won't be available. We need to work around it by setting the environment -variable ``ASV_PYTHONPATH``: - -.. code-block:: console - :caption: Setting up the ``ASV_PYTHONPATH`` on SDCC - - $ module load IMAS - $ export ASV_PYTHONPATH="$PYTHONPATH" - -.. caution:: - - ``imas`` must not be available on the ``ASV_PYTHONPATH`` to avoid the interfering - of two imas modules (one on the ``PYTHONPATH``, and the other installed by ``asv`` - in the virtual environment). - +will be benchmarked. Deciding which commits to benchmark ''''''''''''''''''''''''''''''''''' @@ -184,19 +166,14 @@ Instead, you can submit a benchmark job to the compute nodes. .. 
code-block:: bash :caption: Benchmark run script (``run_benchmarks.sh``) - # Load IMAS module + # Load IMAS-AL-Core module module purge - module load IMAS - # Verify we can run python and import imas + module load IMAS-AL-Core + module load Python + + # Verify we can run python echo "Python version:" python --version - echo "Import imas:" - python -c 'import imas; print(imas)' - - # Set the ASV_PYTHONPATH so we can `import imas` in the benchmarks - export ASV_PYTHONPATH="$PYTHONPATH" - echo "ASV_PYTHONPATH=$ASV_PYTHONPATH" - echo # Activate the virtual environment which has asv installed . venv_imas/bin/activate diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index d724ac4f..f66ef4d3 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -40,13 +40,13 @@ New features - IMAS-Python 1.1 adds support for Identifiers defined by the Data Dictionary. This functionality is described in detail in :ref:`Identifiers`. - Support for the new - :py:const:`~imaspy.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` that is + :py:const:`~imas.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` that is implemented in Access Layer Core 5.3. This is a much faster and more efficient serialization format than the - :py:const:`~imaspy.ids_defs.ASCII_SERIALIZER_PROTOCOL`. The Flexbuffers + :py:const:`~imas.ids_defs.ASCII_SERIALIZER_PROTOCOL`. The Flexbuffers serializer protocol requires ``imas_core`` version 5.3 or newer. It is the default serializer format when it is available. This features is not available - when the variable :py:const:`~imaspy.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` + when the variable :py:const:`~imas.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` is set to ``None``. - Preview feature: :ref:`IMAS netCDF files`. Store IDSs in a self-describing netCDF file, which can be used for sharing and/or archiving data. @@ -54,21 +54,21 @@ New features This feature is in `preview` status, meaning that it may change in upcoming minor releases of IMAS-Python. 
-- Additional utility functions in :py:mod:`imaspy.util`: +- Additional utility functions in :py:mod:`imas.util`: - - :py:func:`imaspy.util.tree_iter` can be used to iterate over all nodes inside + - :py:func:`imas.util.tree_iter` can be used to iterate over all nodes inside an IDS. - - :py:func:`imaspy.util.get_parent` can be used to get the parent element of + - :py:func:`imas.util.get_parent` can be used to get the parent element of an IDS node. - - :py:func:`imaspy.util.get_time_mode` is a convenience function to get the + - :py:func:`imas.util.get_time_mode` is a convenience function to get the ``ids_properties/homogeneous_time`` value for any node in the IDS. - - :py:func:`imaspy.util.get_toplevel` returns the IDS Toplevel element for any + - :py:func:`imas.util.get_toplevel` returns the IDS Toplevel element for any node in the IDS. - - :py:func:`imaspy.util.is_lazy_loaded` will indicate whether an IDS is lazy + - :py:func:`imas.util.is_lazy_loaded` will indicate whether an IDS is lazy loaded. - - :py:func:`imaspy.util.get_full_path` returns the full path (including Array + - :py:func:`imas.util.get_full_path` returns the full path (including Array of Structure indices) of a node. - - :py:func:`imaspy.util.get_data_dictionary_version` returns the Data + - :py:func:`imas.util.get_data_dictionary_version` returns the Data Dictionary version for which an IDS was created. - Add support for IMAS Access Layer Core 5.2 and later. IMAS-Python can now be used @@ -84,18 +84,18 @@ New features pip install 'imas[imas-core] @ git+ssh://git@github.com/iterorganization/imas-core.git' -- A diff tool for IDSs: :py:func:`imaspy.util.idsdiff`. +- A diff tool for IDSs: :py:func:`imas.util.idsdiff`. - Implement ``==`` equality checking for IDS Structures and Arrays of Structures (`IMAS-5120 `__). - Add option to ignore unknown Data Dictionary versions of data stored in the backend. 
- During a :py:meth:`~imaspy.db_entry.DBEntry.get` or - :py:meth:`~imaspy.db_entry.DBEntry.get_slice`, IMAS-Python first reads the version + During a :py:meth:`~imas.db_entry.DBEntry.get` or + :py:meth:`~imas.db_entry.DBEntry.get_slice`, IMAS-Python first reads the version of the Data Dictionary that was used to store the IDS. When this version is not known to IMAS-Python, an error is raised. This error can now be ignored by setting the parameter - :py:param:`~imaspy.db_entry.DBEntry.get.ignore_unknown_dd_version` to + :py:param:`~imas.db_entry.DBEntry.get.ignore_unknown_dd_version` to ``True``, and IMAS-Python will do its best to load the data anyway. - A new command line tool exists for analyzing which Data Dictionary fields are @@ -128,18 +128,18 @@ Breaking changes Bug fixes ''''''''' -- Fixed a bug in :py:func:`imaspy.util.inspect` when inspecting lazy loaded IDSs. +- Fixed a bug in :py:func:`imas.util.inspect` when inspecting lazy loaded IDSs. - Fixed a bug when converting the ``neutron_diagnostics`` IDS to/from Data Dictionary version ``3.41.0``. - Fixed a bug that allowed setting arbitrary attributes on IDS structures. It is only allowed to use attributes defined by the Data Dictionary. -- Fixed a bug with :py:func:`~imaspy.ids_toplevel.IDSToplevel.serialize` when +- Fixed a bug with :py:func:`~imas.ids_toplevel.IDSToplevel.serialize` when the IDS is in a non-default Data Dictionary version. - Fixed a bug when assigning ``nan`` to a FLT_0D, which would lead to a confusing and incorrect log message in IMAS-Python 1.0. - Fixed incorrect oldest supported DD version. Previously IMAS-Python indicated that DD ``3.21.1`` was supported, however ``3.22.0`` is the oldest Data Dictionary - tested (and provided) with IMAS-Python. :py:attr:`imaspy.OLDEST_SUPPORTED_VERSION` + tested (and provided) with IMAS-Python. :py:attr:`imas.OLDEST_SUPPORTED_VERSION` has been updated to reflect this. 
- Fixed a bug when using numpy functions, such as :external:py:func:`numpy.isclose` on scalar numbers. Previously an error was @@ -157,16 +157,16 @@ Bug fixes Improved performance '''''''''''''''''''' -- Improved performance of :py:meth:`~imaspy.ids_toplevel.IDSToplevel.validate`. +- Improved performance of :py:meth:`~imas.ids_toplevel.IDSToplevel.validate`. - Improved creation of IMAS-Python IDS objects. This made filling IDSs and loading - them with :py:meth:`~imaspy.db_entry.DBEntry.get` / - :py:meth:`~imaspy.db_entry.DBEntry.get_slice` 10-20% faster. + them with :py:meth:`~imas.db_entry.DBEntry.get` / + :py:meth:`~imas.db_entry.DBEntry.get_slice` 10-20% faster. - Improved the performance of lazy loading. This is most noticeable with the ``HDF5`` backend, which is now up to 40x faster than with IMAS-Python 1.0. -- Improved the performance of :py:meth:`~imaspy.db_entry.DBEntry.get` / - :py:meth:`~imaspy.db_entry.DBEntry.get_slice` / - :py:meth:`~imaspy.db_entry.DBEntry.put` / - :py:meth:`~imaspy.db_entry.DBEntry.put_slice` for IDSs with many nested arrays +- Improved the performance of :py:meth:`~imas.db_entry.DBEntry.get` / + :py:meth:`~imas.db_entry.DBEntry.get_slice` / + :py:meth:`~imas.db_entry.DBEntry.put` / + :py:meth:`~imas.db_entry.DBEntry.put_slice` for IDSs with many nested arrays of structures. This performance improvement is most noticeable for IDSs with filled GGD grids and data structures (up to 25% faster). @@ -195,7 +195,7 @@ recent DD versions. Further details on IDS conversion can be found in - Automatically convert data between 0D and 1D when possible (`IMAS-5170 `__). The following type changes are now automatically supported by - :py:func:`imaspy.convert_ids `: + :py:func:`imas.convert_ids `: - INT_0D to INT_1D - FLT_0D to FLT_1D diff --git a/docs/source/ci_config.rst b/docs/source/ci_config.rst index 2fcf1d9b..18155163 100644 --- a/docs/source/ci_config.rst +++ b/docs/source/ci_config.rst @@ -49,7 +49,7 @@ Test with AL 5. 
In the "Script" Task, update the module(s) in the Argument field Benchmark - This job runs the :ref:`ASV benchmarks ` on the CI server. It + This job runs the :ref:`ASV benchmarks ` on the CI server. It is configured such that it can only run on a single CI agent (`io-ls-bamboowk6.iter.org`). There are two reasons for this: diff --git a/imas/dd_helpers.py b/imas/dd_helpers.py index 0cfabc10..446a9991 100644 --- a/imas/dd_helpers.py +++ b/imas/dd_helpers.py @@ -134,7 +134,6 @@ def _run_xsl_transformation( stylesheet_file=str(xsl_file), output_file=str(output_file), ) - logger.info("Transformation complete: %s -> %s", xsd_file, output_file) def build_data_dictionary(repo, tag: str, rebuild=False) -> None: diff --git a/imas/test/test_dd_helpers.py b/imas/test/test_dd_helpers.py index 20a8cad1..07d1d2b0 100644 --- a/imas/test/test_dd_helpers.py +++ b/imas/test/test_dd_helpers.py @@ -8,6 +8,7 @@ _idsdef_unzipped_relpath = Path("idsdef_unzipped") + @pytest.mark.skip(reason="skipping IDSDef.zip generation") def test_prepare_data_dictionaries(): prepare_data_dictionaries() @@ -50,7 +51,3 @@ def test_prepare_data_dictionaries(): if _build_dir.exists(): shutil.rmtree(_idsdef_unzipped_relpath) - - -if __name__ == "__main__": - pytest.main() From 9d925a5949f4fa1417c8ab8f86e82d3674a30e43 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Thu, 23 Jan 2025 15:07:31 +0100 Subject: [PATCH 50/97] fixed readthedocs link --- README.md | 2 +- docs/source/ci_config.rst | 2 +- imas/__init__.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index d28328cb..b22b9a91 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ Data Model. Install steps are described in the documentation generated from `/docs/source/installing.rst`. 
Documentation is autogenerated from the source using [Sphinx](http://sphinx-doc.org/) -and can be found at the [readthedocs](https://IMAS-Python.readthedocs.io/en/latest/) +and can be found at the [readthedocs](https://imas-python.readthedocs.io/en/latest/) The documentation can be manually generated by installing sphinx and running: diff --git a/docs/source/ci_config.rst b/docs/source/ci_config.rst index 18155163..a7c18ef6 100644 --- a/docs/source/ci_config.rst +++ b/docs/source/ci_config.rst @@ -80,5 +80,5 @@ There is github workflow for IMAS-Python: `Deploy IMAS-Python-doc `_ Deploy the documentation using `readthedocs - `_. + `_. diff --git a/imas/__init__.py b/imas/__init__.py index e7c773a4..0ed10404 100644 --- a/imas/__init__.py +++ b/imas/__init__.py @@ -25,7 +25,7 @@ util, ) -PUBLISHED_DOCUMENTATION_ROOT = "https://IMAS-Python.readthedocs.io/en/latest/" +PUBLISHED_DOCUMENTATION_ROOT = "https://imas-python.readthedocs.io/en/latest/" """URL to the published documentation.""" OLDEST_SUPPORTED_VERSION = _V("3.22.0") """Oldest Data Dictionary version that is supported by IMAS-Python.""" From f53fb888e123488a511441547f279d0d0787e631 Mon Sep 17 00:00:00 2001 From: Prasad Date: Thu, 23 Jan 2025 17:06:01 +0100 Subject: [PATCH 51/97] Apply suggestions from code review Co-authored-by: Simon Pinches --- .github/workflows/linting.yml | 4 ++-- .github/workflows/publish.yml | 4 ++-- docs/source/benchmarking.rst | 4 ++-- imas/assets/IDS_minimal.xml | 2 +- imas/assets/IDS_minimal_2.xml | 2 +- imas/assets/IDS_minimal_struct_array.xml | 2 +- imas/assets/IDS_minimal_types.xml | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 5c54081b..87c12e0a 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -1,4 +1,4 @@ -name: imas-python +name: IMAS-Python on: push @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - - name: Checkout imas-python sources + - name: 
Checkout IMAS-Python sources uses: actions/checkout@v4 - name: Set up Python diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 3623ebab..0107c523 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -26,7 +26,7 @@ jobs: path: dist/ publish-to-pypi: - name: Publish imas-python distribution to PyPI + name: Publish IMAS-Python distribution to PyPI needs: - build runs-on: ubuntu-latest @@ -45,7 +45,7 @@ jobs: uses: pypa/gh-action-pypi-publish@release/v1 publish-to-testpypi: - name: Publish imas-python distribution to TestPyPI + name: Publish IMAS-Python distribution to TestPyPI needs: - build runs-on: ubuntu-latest diff --git a/docs/source/benchmarking.rst b/docs/source/benchmarking.rst index a8c654bd..a9c94dc7 100644 --- a/docs/source/benchmarking.rst +++ b/docs/source/benchmarking.rst @@ -70,13 +70,13 @@ contains tabular results. Some examples: Here we see the benchmark ``core_profiles.Generate.time_create_core_profiles`` for -imas-python ``imas-python``. +``IMAS-Python``. Some benchmarks are parametrized in multiple dimensions, as in below example. This results in a 2D table of results. .. 
code-block:: text - :caption: Example output for a test parametrized in ``imas-python`` and ``backend`` + :caption: Example output for a test parametrized in ``IMAS-Python`` and ``backend`` [65.62%] ··· core_profiles.Get.time_get ok [65.62%] ··· ====== ========= ========== ============ ========= ============ diff --git a/imas/assets/IDS_minimal.xml b/imas/assets/IDS_minimal.xml index 01764e95..680eb483 100644 --- a/imas/assets/IDS_minimal.xml +++ b/imas/assets/IDS_minimal.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imas/assets/IDS_minimal_2.xml b/imas/assets/IDS_minimal_2.xml index 57a90d23..4effbdc3 100644 --- a/imas/assets/IDS_minimal_2.xml +++ b/imas/assets/IDS_minimal_2.xml @@ -3,7 +3,7 @@ 0.0.2 diff --git a/imas/assets/IDS_minimal_struct_array.xml b/imas/assets/IDS_minimal_struct_array.xml index 72845315..d181b314 100644 --- a/imas/assets/IDS_minimal_struct_array.xml +++ b/imas/assets/IDS_minimal_struct_array.xml @@ -3,7 +3,7 @@ 0.0.1 diff --git a/imas/assets/IDS_minimal_types.xml b/imas/assets/IDS_minimal_types.xml index d939aa32..574978fe 100644 --- a/imas/assets/IDS_minimal_types.xml +++ b/imas/assets/IDS_minimal_types.xml @@ -3,7 +3,7 @@ 0.0.1 From 438dc7359ecfcf7cf59354b6453c4f712f7b352e Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Fri, 24 Jan 2025 14:46:09 +0100 Subject: [PATCH 52/97] Few fixes in the documentation --- .github/workflows/linting.yml | 2 +- docs/source/benchmarking.rst | 26 +++++------- docs/source/configuring.rst | 6 +-- docs/source/courses/basic/analyze.rst | 4 +- docs/source/courses/basic/explore.rst | 7 ++-- docs/source/installing.rst | 15 ++++--- docs/source/netcdf/conventions.rst | 2 +- docs/source/release_imas.rst | 60 +++------------------------ 8 files changed, 34 insertions(+), 88 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 87c12e0a..b5e13617 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -1,4 +1,4 @@ -name: IMAS-Python +name: 
linting-and-code-formatting on: push diff --git a/docs/source/benchmarking.rst b/docs/source/benchmarking.rst index a9c94dc7..f5e42f2e 100644 --- a/docs/source/benchmarking.rst +++ b/docs/source/benchmarking.rst @@ -34,7 +34,7 @@ IMAS-Python-specific functional benchmarks Running benchmarks (quick) -------------------------- -When you have an existing IMAS-Python installation, you can run the benchmarks like this: +When you have an existing IMAS-Python development installation, you can run the benchmarks like this: .. code-block:: console @@ -59,7 +59,7 @@ parametrized (they are repeated with varying parameters), in which case the outp contains tabular results. Some examples: .. code-block:: text - :caption: Example output for a test parametrized in ``hli`` + :caption: Example output for a test [56.25%] ··· core_profiles.Generate.time_create_core_profiles ok [56.25%] ··· ====== ============= @@ -69,14 +69,13 @@ contains tabular results. Some examples: ====== ============= -Here we see the benchmark ``core_profiles.Generate.time_create_core_profiles`` for -``IMAS-Python``. +Here we see the benchmark ``core_profiles.Generate.time_create_core_profiles``. Some benchmarks are parametrized in multiple dimensions, as in below example. This results in a 2D table of results. .. code-block:: text - :caption: Example output for a test parametrized in ``IMAS-Python`` and ``backend`` + :caption: Example output for a test parametrized in ``backend`` [65.62%] ··· core_profiles.Get.time_get ok [65.62%] ··· ====== ========= ========== ============ ========= ============ @@ -138,14 +137,11 @@ benchmarked. See the `asv documentation for some examples .. 
seealso:: https://asv.readthedocs.io/en/stable/commands.html#asv-run -Running benchmarks on SDCC -'''''''''''''''''''''''''' +Running benchmarks on a cluster +''''''''''''''''''''''''''''''' -Running benchmarks on the SDCC login nodes is useful for debugging, but not for -comparing performance: many people are using the login nodes at the same time, and the -machine load is variable. - -Instead, you can submit a benchmark job to the compute nodes. +For running the benchmarks on a cluster by submitting a job with SLURM, you can +adapt the following scripts to your own needs. .. code-block:: bash :caption: SLURM control script (``slurm.sh``) @@ -155,7 +151,7 @@ Instead, you can submit a benchmark job to the compute nodes. # Set SLURM options: #SBATCH --job-name=IMAS-Python-benchmark #SBATCH --time=1:00:00 - #SBATCH --partition=gen10_ib + #SBATCH --partition=<...> # Note: for proper benchmarking we need to exclusively reserve a node, even though # we're only using 1 CPU (most of the time) #SBATCH --exclusive @@ -166,9 +162,9 @@ Instead, you can submit a benchmark job to the compute nodes. .. code-block:: bash :caption: Benchmark run script (``run_benchmarks.sh``) - # Load IMAS-AL-Core module + # If using environment modules (must be adapted to names of the modules in the targeted cluster) module purge - module load IMAS-AL-Core + module load IMAS-AL-Core module load Python # Verify we can run python diff --git a/docs/source/configuring.rst b/docs/source/configuring.rst index bb5c4293..4ac9ed8c 100644 --- a/docs/source/configuring.rst +++ b/docs/source/configuring.rst @@ -6,10 +6,10 @@ This page provides an overview of available variables. .. note:: - In addition to the listed environment variables, the Access Layer lowlevel also has - environment variables available to control its behaviour. See the `Access Layer + In addition to the listed environment variables, the IMAS Core library also has + environment variables available to control its behaviour. 
See the `IMAS Core documentation - `_ + `_ ``IMAS_LOGLEVEL`` diff --git a/docs/source/courses/basic/analyze.rst b/docs/source/courses/basic/analyze.rst index 2dabad0d..d1ae1434 100644 --- a/docs/source/courses/basic/analyze.rst +++ b/docs/source/courses/basic/analyze.rst @@ -62,8 +62,8 @@ Exercise 1 depending on the time mode of the IDS (``core_profiles.ids_properties.homogeneous_time``). In this case the IDS uses homogeneous time, so all time coordinates use - ``core_profiles.time``. See also the `AL documentation (iter.org) - `_. + ``core_profiles.time``. See also the `Data Dictionary documentation + `_. 5. Read and print the 1D electron temperature profile (:math:`T_e`, ``core_profiles.profiles_1d[i].electrons.temperature``) from the diff --git a/docs/source/courses/basic/explore.rst b/docs/source/courses/basic/explore.rst index 776918b5..e3395eda 100644 --- a/docs/source/courses/basic/explore.rst +++ b/docs/source/courses/basic/explore.rst @@ -14,9 +14,8 @@ Most codes will touch multiple IDSs inside a single IMAS data entry. For example a heating code using a magnetic equilibrium from the ``equilibrium`` IDS with a heating profile from the ``core_sources`` IDS. To find out how to write your code, there are two main strategies: read the -`Data Model `_ documents of -the `Data Dictionary -`_ +`IMAS Data Dictionary documentation +`_ or explore the data interactively. We will focus on the latter method here. @@ -191,4 +190,4 @@ Exercise 2 .. md-tab-item:: `ec_launchers` data - .. literalinclude:: imas_snippets/explore_public_ec_launchers.py \ No newline at end of file + .. literalinclude:: imas_snippets/explore_public_ec_launchers.py diff --git a/docs/source/installing.rst b/docs/source/installing.rst index 2e67a210..2d05ff2f 100644 --- a/docs/source/installing.rst +++ b/docs/source/installing.rst @@ -3,16 +3,15 @@ Installing IMAS-Python ====================== -IMAS-Python is a pure Python package. 
For full functionality of the package you need -an installation of `the IMAS Core library `_. See -:ref:`IMAS-Python 5 minute introduction` for an overview of functionality which does -(not) require the IMAS Core library available. +IMAS-Python is a pure Python package. While it can be used without it, for full functionality +of the package you need an installation of `the IMAS Core library `_. +See :ref:`IMAS-Python 5 minute introduction` for a quick overview of its most basic functionalities. -To get started, you can install it from `pypi.org `_: +To get started, you can install it from `pypi.org `_: .. code-block:: bash - pip install IMAS-Python + pip install imas-python Local installation from sources @@ -26,8 +25,8 @@ and run `pip install`: python3 -m venv ./venv . venv/bin/activate - git clone ssh://git@github.com:iterorganization/IMAS-Python.git - cd imas + git clone git@github.com:iterorganization/IMAS-Python.git + cd IMAS-Python pip install --upgrade pip pip install --upgrade wheel setuptools pip install . diff --git a/docs/source/netcdf/conventions.rst b/docs/source/netcdf/conventions.rst index 2b31b9b7..0249399e 100644 --- a/docs/source/netcdf/conventions.rst +++ b/docs/source/netcdf/conventions.rst @@ -21,7 +21,7 @@ Goals The netCDF library is a cross-platform library that enables to read and write *self-describing* datasets consisting of multi-dimensional arrays. The purpose of these IMAS conventions is to define how to store IMAS data, conforming to the -`IMAS Data Dictionary `__, +`IMAS Data Dictionary `__, in a netCDF file. diff --git a/docs/source/release_imas.rst b/docs/source/release_imas.rst index 994bd88a..59883429 100644 --- a/docs/source/release_imas.rst +++ b/docs/source/release_imas.rst @@ -9,62 +9,21 @@ IMAS-Python development follows the a fork-based model described in Creating an IMAS-Python release ------------------------------- -1. Create a Pull Request using fork based workflow from ``develop`` to ``main``. +1. 
Create a Pull Request from ``develop`` to ``main``. 2. Add a change log to the Pull Request, briefly describing new features, bug fixes, and update accordingly the :ref:`changelog`. 3. The PR is reviewed and merged by the maintainers who also create the release tags. 4. After the release PR is merged, update the Easybuild configurations for SDCC modules in the `easybuild-easyconfigs repository - `_. + `_. See the next section for more details on how to do this. Updating and testing the IMAS-Python Easybuild configuration ------------------------------------------------------------ -The following steps can be taken on an SDCC login node. - -Configure easybuild -''''''''''''''''''' - -First we need to configure easybuild. This only needs to be done once. - -- Create an HTTP access token in Bitbucket with ``PROJECT READ`` and ``REPOSITORY - READ`` permissions. See this `Bitbucket support page - `_ - for more details. -- Create a new text file in your home folder - ``$HOME/.config/easybuild/secret.txt``. Fill it as follows (replace ```` - with the token generated in the previous bullet). - - .. code-block:: text - :caption: ``$HOME/.config/easybuild/secret.txt`` - - ^https://git.iter.org::Authorization: Bearer - - Ensure that only you have access to the file, e.g. ``chmod 600 - ~/.config/easybuild/secret.txt``. -- Create a new configuration file ``$HOME/.config/easybuild/config.cfg`` and fill - it as follows (replace ```` with your username): - - .. 
code-block:: cfg - :caption: ``$HOME/.config/easybuild/config.cfg`` - - [override] - # Set extra HTTP header Fields when downloading files from URL patterns: - http-header-fields-urlpat=/home/ITER//.config/easybuild/secret.txt - - # Set modules flags - module-syntax=Tcl - modules-tool=EnvironmentModules - allow-modules-tool-mismatch=true - - -Update and test Easybuild configurations -'''''''''''''''''''''''''''''''''''''''' - -The following steps must be performed for each of the tool chains (currently -``intel-2020b``, ``foss-2020b`` and ``gfbf-2022b``): +The following steps must be performed for each of the supported tool chains +(currently ``intel-2023b``, ``foss-2023b``): 1. Create the ``.eb`` file for the new release. @@ -75,13 +34,6 @@ The following steps must be performed for each of the tool chains (currently - ``builddependencies`` contains build-time dependencies which are available as a module on SDCC. - - .. note:: - - The IMAS module is a build-time dependency only and not a runtime - dependency. This allows IMAS-Python users to load the IMAS-Python module and - **any** supported IMAS module. - - ``dependencies`` contains run-time dependencies which are available as a module on SDCC. - ``exts_list`` contains python package dependencies (and potentially @@ -94,7 +46,7 @@ The following steps must be performed for each of the tool chains (currently .. code-block:: text - https://github.com/iterorganization/IMAS-Python/archive/refs/heads/.tar.gz + https://github.com/iterorganization/IMAS-Python/archive/refs/tags/.tar.gz Then, calculate the hash of the downloaded archive with ``sha256sum`` and update it in the ``.eb`` file. @@ -121,6 +73,6 @@ The following steps must be performed for each of the tool chains (currently module purge module use ~/.local/easybuild/modules/all/ module load IMAS-Python/- - module laod IMAS + module laod IMAS-AL-Core c. Sanity check the module, for example by running the ``pytest`` unit tests. 
From 954bdb8d361272f844fd91bc33c44c0ed9f0c2c4 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Fri, 24 Jan 2025 15:05:40 +0100 Subject: [PATCH 53/97] Publish package to pypi for tags only, and to testpypi for develop only --- .github/workflows/publish.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 0107c523..196111cc 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -27,6 +27,7 @@ jobs: publish-to-pypi: name: Publish IMAS-Python distribution to PyPI + if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes needs: - build runs-on: ubuntu-latest @@ -46,6 +47,7 @@ jobs: publish-to-testpypi: name: Publish IMAS-Python distribution to TestPyPI + if: github.ref=='refs/heads/develop' # only publish to TestPyPI on develop pushes needs: - build runs-on: ubuntu-latest @@ -64,4 +66,4 @@ jobs: uses: pypa/gh-action-pypi-publish@unstable/v1 with: repository-url: https://test.pypi.org/legacy/ - verbose: true \ No newline at end of file + verbose: true From 6186d46c58c91540399a121a7675710f62cfb0fd Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Mon, 3 Feb 2025 13:44:59 +0100 Subject: [PATCH 54/97] fixed issue with imas-core dependency while installing pip install -e .[all] --- pyproject.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6451d06d..1b1b86c3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,7 +71,8 @@ dependencies = [ [project.optional-dependencies] # these self-dependencies are available since pip 21.2 all = [ - "imas[test,docs,imas-core,netcdf,h5py]" + "imas-python[test,docs,netcdf,h5py]" + # "imas-python[test,docs,imas-core,netcdf,h5py]" TODO enable when imas-core is available on pypi ] docs = [ "sphinx>=6.0.0,<7.0.0", From 1d5b96732ade5ee599818f7d21d97f4c5a12096e Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Mon, 3 Feb 2025 17:15:05 
+0100 Subject: [PATCH 55/97] fixed create_model_ids_xml function --- ci/run_pytest.sh | 2 +- imas/backends/imas_core/mdsplus_model.py | 64 ++++++++++++------------ 2 files changed, 33 insertions(+), 33 deletions(-) diff --git a/ci/run_pytest.sh b/ci/run_pytest.sh index 979b00de..4af184dc 100755 --- a/ci/run_pytest.sh +++ b/ci/run_pytest.sh @@ -13,7 +13,7 @@ source /etc/profile.d/modules.sh module purge # Modules are supplied as arguments in the CI job: if [ -z "$@" ]; then - module load IMAS-AL-Core + module load IMAS-AL-Core Java MDSplus else module load $@ fi diff --git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py index 00514e01..bf960a6b 100644 --- a/imas/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -243,40 +243,40 @@ def create_model_ids_xml(cache_dir_path, fname, version): with PySaxonProcessor(license=False) as proc: xslt_processor = proc.new_xslt30_processor() - - xslt_processor.compile_stylesheet(stylesheet_file=str(xslfile)) - - input_xml = get_dd_xml(version) if version else None - if fname: - source_file = str(fname) - elif input_xml: - source_file = input_xml # Use standard input for the XML string - else: - raise ValueError( - "Either 'fname' or 'version' must be provided to generate XML." 
- ) - - # xdm_ddgit = proc.make_string_value(str(version or fname)) - # xsltproc.set_parameter("DD_GIT_DESCRIBE", xdm_ddgit) - # xdm_algit = proc.make_string_value(os.environ.get - # ("AL_VERSION", "0.0.0")) - # xsltproc.set_parameter("AL_GIT_DESCRIBE", xdm_algit) - # Transform XML - result = xslt_processor.transform_to_file( - source_file=source_file, - output_file=str(output_file), - initial_template_params={ - "DD_GIT_DESCRIBE": str(version or fname), - "AL_GIT_DESCRIBE": os.environ.get("AL_VERSION", "0.0.0"), - }, + xdm_ddgit = proc.make_string_value(str(version) or fname) + xslt_processor.set_parameter("DD_GIT_DESCRIBE", xdm_ddgit) + xdm_algit = proc.make_string_value( + os.environ.get("AL_VERSION", "0.0.0") ) - - if result is False: - logger.error( - "Transformation failed: Check Saxon/C logs for details." + xslt_processor.set_parameter("AL_GIT_DESCRIBE", xdm_algit) + if ( + fname != None + and fname != "-" + and fname != "" + and os.path.exists(fname) + ): + result = xslt_processor.transform_to_file( + source_file=fname, + stylesheet_file=str(xslfile), + output_file=str(output_file), ) - raise RuntimeError("Saxon/C XSLT transformation failed.") - + elif version != None and version != "": + xml_string = get_dd_xml(version) + import tempfile + + with tempfile.NamedTemporaryFile( + delete=False, mode="w+b" + ) as temp_file: + temp_file.write(xml_string) + temp_file.seek(0) + + result = xslt_processor.transform_to_file( + source_file=temp_file.name, + stylesheet_file=str(xslfile), + output_file=str(output_file), + ) + else: + raise MDSPlusModelError("Either fname or version must be provided") except Exception as e: if fname: logger.error("Error making MDSplus model IDS.xml for %s", fname) From 39c8db72e307318a10a2ff801828c2cba7af9132 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Mon, 3 Feb 2025 22:55:46 +0100 Subject: [PATCH 56/97] pass filename as string --- imas/backends/imas_core/mdsplus_model.py | 5 ++--- 1 file changed, 2 insertions(+), 3 
deletions(-) diff --git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py index bf960a6b..10dcfb56 100644 --- a/imas/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -1,7 +1,6 @@ # Helper functions to create MDSPlus reference models # and store them in a cache directory (.cache/imas/MDSPlus/name-HASH/) -"""Module for generating and working with MDSplus models. -""" +"""Module for generating and working with MDSplus models.""" import errno import getpass @@ -256,7 +255,7 @@ def create_model_ids_xml(cache_dir_path, fname, version): and os.path.exists(fname) ): result = xslt_processor.transform_to_file( - source_file=fname, + source_file=str(fname), stylesheet_file=str(xslfile), output_file=str(output_file), ) From cb3830a583064621ce611ea5454b1cb2709c4138 Mon Sep 17 00:00:00 2001 From: Prasad Date: Tue, 4 Feb 2025 11:32:30 +0100 Subject: [PATCH 57/97] Update imas/backends/imas_core/mdsplus_model.py Co-authored-by: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> --- imas/backends/imas_core/mdsplus_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py index 10dcfb56..fb164e04 100644 --- a/imas/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -249,7 +249,7 @@ def create_model_ids_xml(cache_dir_path, fname, version): ) xslt_processor.set_parameter("AL_GIT_DESCRIBE", xdm_algit) if ( - fname != None + fname is not None and fname != "-" and fname != "" and os.path.exists(fname) From 9a57d5cc2f536642ff3eb39062d3146a269cac91 Mon Sep 17 00:00:00 2001 From: Prasad Date: Tue, 4 Feb 2025 11:32:42 +0100 Subject: [PATCH 58/97] Update imas/backends/imas_core/mdsplus_model.py Co-authored-by: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> --- imas/backends/imas_core/mdsplus_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py index fb164e04..bd052efd 100644 --- a/imas/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -259,7 +259,7 @@ def create_model_ids_xml(cache_dir_path, fname, version): stylesheet_file=str(xslfile), output_file=str(output_file), ) - elif version != None and version != "": + elif version is not None and version != "": xml_string = get_dd_xml(version) import tempfile From bc010ab5cbebd5f7e3dcb83d47d0fe6602e10173 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 4 Feb 2025 14:05:33 +0100 Subject: [PATCH 59/97] fixed review comments --- imas/backends/imas_core/mdsplus_model.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py index bd052efd..48864346 100644 --- a/imas/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -234,6 +234,14 @@ def model_exists(path: Path) -> bool: ) +def transform_with_xslt(xslt_processor, source, xslfile, output_file): + return xslt_processor.transform_to_file( + source_file=str(source), + stylesheet_file=str(xslfile), + output_file=str(output_file), + ) + + def create_model_ids_xml(cache_dir_path, fname, version): """Use Saxon/C to compile an ids.xml suitable for creating an MDSplus model.""" try: @@ -254,25 +262,17 @@ def create_model_ids_xml(cache_dir_path, fname, version): and fname != "" and os.path.exists(fname) ): - result = xslt_processor.transform_to_file( - source_file=str(fname), - stylesheet_file=str(xslfile), - output_file=str(output_file), - ) + transform_with_xslt(xslt_processor, fname, xslfile, output_file) elif version is not None and version != "": xml_string = get_dd_xml(version) - import tempfile with tempfile.NamedTemporaryFile( - delete=False, mode="w+b" + delete=True, mode="w+b" ) as temp_file: temp_file.write(xml_string) 
temp_file.seek(0) - - result = xslt_processor.transform_to_file( - source_file=temp_file.name, - stylesheet_file=str(xslfile), - output_file=str(output_file), + transform_with_xslt( + xslt_processor, temp_file.name, xslfile, output_file ) else: raise MDSPlusModelError("Either fname or version must be provided") From 1b4189d6a0ffd166e1f1dab86e85901a64b3e5ad Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 4 Feb 2025 16:36:09 +0100 Subject: [PATCH 60/97] Implement implicit DD version conversion in NetCDF backend --- imas/backends/netcdf/db_entry_nc.py | 21 +++++++---- imas/backends/netcdf/nc2ids.py | 57 ++++++++++++++++++++++++----- imas/backends/netcdf/nc_validate.py | 3 +- imas/test/test_nbc_change.py | 37 +++++++++++++++---- imas/test/test_nc_validation.py | 43 +++++++++++++--------- 5 files changed, 117 insertions(+), 44 deletions(-) diff --git a/imas/backends/netcdf/db_entry_nc.py b/imas/backends/netcdf/db_entry_nc.py index a23005a6..b702256d 100644 --- a/imas/backends/netcdf/db_entry_nc.py +++ b/imas/backends/netcdf/db_entry_nc.py @@ -11,7 +11,7 @@ from imas.backends.netcdf.ids2nc import IDS2NC from imas.backends.netcdf.nc2ids import NC2IDS from imas.exception import DataEntryException, InvalidNetCDFEntry -from imas.ids_convert import NBCPathMap, convert_ids +from imas.ids_convert import NBCPathMap, dd_version_map_from_factories from imas.ids_factory import IDSFactory from imas.ids_toplevel import IDSToplevel @@ -123,14 +123,19 @@ def get( # Load data into the destination IDS if self._ds_factory.dd_version == destination._dd_version: - NC2IDS(group, destination).run() + NC2IDS(group, destination, destination.metadata, None).run() else: - # FIXME: implement automatic conversion using nbc_map - # As a work-around: do an explicit conversion, but automatic conversion - # will also be needed to implement lazy loading. 
- ids = self._ds_factory.new(ids_name) - NC2IDS(group, ids).run() - convert_ids(ids, None, target=destination) + # Construct relevant NBCPathMap, the one we get from DBEntry has the reverse + # mapping from what we need. The imas_core logic does the mapping from + # in-memory to on-disk, while we take what is on-disk and map it to + # in-memory. + ddmap, source_is_older = dd_version_map_from_factories( + ids_name, self._ds_factory, self._factory + ) + nbc_map = ddmap.old_to_new if source_is_older else ddmap.new_to_old + NC2IDS( + group, destination, self._ds_factory.new(ids_name).metadata, nbc_map + ).run() return destination diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index 50668dfb..a6d5d14a 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -8,6 +8,7 @@ from imas.backends.netcdf.nc_metadata import NCMetadata from imas.exception import InvalidNetCDFEntry from imas.ids_base import IDSBase +from imas.ids_convert import NBCPathMap from imas.ids_data_type import IDSDataType from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS from imas.ids_metadata import IDSMetadata @@ -70,19 +71,32 @@ def _tree_iter( class NC2IDS: """Class responsible for reading an IDS from a NetCDF group.""" - def __init__(self, group: netCDF4.Group, ids: IDSToplevel) -> None: + def __init__( + self, + group: netCDF4.Group, + ids: IDSToplevel, + ids_metadata: IDSMetadata, + nbc_map: Optional[NBCPathMap], + ) -> None: """Initialize NC2IDS converter. Args: group: NetCDF group that stores the IDS data. ids: Corresponding IDS toplevel to store the data in. + ids_metadata: Metadata corresponding to the DD version that the data is + stored in. + nbc_map: Path map for implicit DD conversions. 
""" self.group = group """NetCDF Group that the IDS is stored in.""" self.ids = ids """IDS to store the data in.""" + self.ids_metadata = ids_metadata + """Metadata of the IDS in the DD version that the data is stored in""" + self.nbc_map = nbc_map + """Path map for implicit DD conversions.""" - self.ncmeta = NCMetadata(ids.metadata) + self.ncmeta = NCMetadata(ids_metadata) """NetCDF related metadata.""" self.variables = list(group.variables) """List of variable names stored in the netCDF group.""" @@ -114,16 +128,39 @@ def run(self) -> None: for var_name in self.variables: if var_name.endswith(":shape"): continue - metadata = self.ids.metadata[var_name] + metadata = self.ids_metadata[var_name] if metadata.data_type is IDSDataType.STRUCTURE: continue # This only contains DD metadata we already know + # Handle implicit DD version conversion + if self.nbc_map is None: + target_metadata = metadata # no conversion + elif metadata.path_string in self.nbc_map: + new_path = self.nbc_map.path[metadata.path_string] + if new_path is None: + logging.info( + "Not loading data for %s: no equivalent data structure exists " + "in the target Data Dictionary version.", + metadata.path_string, + ) + continue + target_metadata = self.ids.metadata[new_path] + elif metadata.path_string in self.nbc_map.type_change: + logging.info( + "Not loading data for %s: cannot hanlde type changes when " + "implicitly converting data to the target Data Dictionary version.", + metadata.path_string, + ) + continue + else: + target_metadata = metadata # no conversion required + var = self.group[var_name] if metadata.data_type is IDSDataType.STRUCT_ARRAY: if "sparse" in var.ncattrs(): shapes = self.group[var_name + ":shape"][()] - for index, node in tree_iter(self.ids, metadata): + for index, node in tree_iter(self.ids, target_metadata): node.resize(shapes[index][0]) else: @@ -132,7 +169,7 @@ def run(self) -> None: metadata.path_string, self.homogeneous_time )[-1] size = self.group.dimensions[dim].size 
- for _, node in tree_iter(self.ids, metadata): + for _, node in tree_iter(self.ids, target_metadata): node.resize(size) continue @@ -144,22 +181,22 @@ def run(self) -> None: if "sparse" in var.ncattrs(): if metadata.ndim: shapes = self.group[var_name + ":shape"][()] - for index, node in tree_iter(self.ids, metadata): + for index, node in tree_iter(self.ids, target_metadata): shape = shapes[index] if shape.all(): node.value = data[index + tuple(map(slice, shapes[index]))] else: - for index, node in tree_iter(self.ids, metadata): + for index, node in tree_iter(self.ids, target_metadata): value = data[index] if value != getattr(var, "_FillValue", None): node.value = data[index] elif metadata.path_string not in self.ncmeta.aos: # Shortcut for assigning untensorized data - self.ids[metadata.path] = data + self.ids[target_metadata.path] = data else: - for index, node in tree_iter(self.ids, metadata): + for index, node in tree_iter(self.ids, target_metadata): node.value = data[index] def validate_variables(self) -> None: @@ -194,7 +231,7 @@ def validate_variables(self) -> None: # Check that the DD defines this variable, and validate its metadata var = self.group[var_name] try: - metadata = self.ids.metadata[var_name] + metadata = self.ids_metadata[var_name] except KeyError: raise InvalidNetCDFEntry( f"Invalid variable {var_name}: no such variable exists in the " diff --git a/imas/backends/netcdf/nc_validate.py b/imas/backends/netcdf/nc_validate.py index 2a4877d2..03aded1b 100644 --- a/imas/backends/netcdf/nc_validate.py +++ b/imas/backends/netcdf/nc_validate.py @@ -47,8 +47,9 @@ def validate_netcdf_file(filename: str) -> None: for ids_name in ids_names: for occurrence in entry.list_all_occurrences(ids_name): group = dataset[f"{ids_name}/{occurrence}"] + ids = factory.new(ids_name) try: - NC2IDS(group, factory.new(ids_name)).validate_variables() + NC2IDS(group, ids, ids.metadata, None).validate_variables() except InvalidNetCDFEntry as exc: occ = f":{occurrence}" if 
occurrence else "" raise InvalidNetCDFEntry(f"Invalid IDS {ids_name}{occ}: {exc}") diff --git a/imas/test/test_nbc_change.py b/imas/test/test_nbc_change.py index b5c7905c..91ede0e3 100644 --- a/imas/test/test_nbc_change.py +++ b/imas/test/test_nbc_change.py @@ -9,16 +9,11 @@ import numpy as np import pytest - from imas.db_entry import DBEntry from imas.ids_convert import convert_ids from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS, MEMORY_BACKEND from imas.ids_factory import IDSFactory -from imas.test.test_helpers import ( - compare_children, - fill_with_random_data, - open_dbentry, -) +from imas.test.test_helpers import compare_children, fill_with_random_data, open_dbentry @pytest.fixture(autouse=True) @@ -97,6 +92,23 @@ def test_nbc_0d_to_1d(caplog, requires_imas): entry_339.close() +def test_nbc_0d_to_1d_netcdf(caplog, tmp_path): + # channel/filter_spectrometer/radiance_calibration in spectrometer visible changed + # from FLT_0D to FLT_1D in DD 3.39.0 + ids = IDSFactory("3.32.0").spectrometer_visible() + ids.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS + ids.channel.resize(1) + ids.channel[0].filter_spectrometer.radiance_calibration = 1.0 + + # Test implicit conversion during get + with DBEntry(str(tmp_path / "test.nc"), "x", dd_version="3.32.0") as entry_332: + entry_332.put(ids) + with DBEntry(str(tmp_path / "test.nc"), "r", dd_version="3.39.0") as entry_339: + ids_339 = entry_339.get("spectrometer_visible") # implicit conversion + assert not ids_339.channel[0].filter_spectrometer.radiance_calibration.has_value + entry_339.close() + + def test_nbc_change_aos_renamed(): """Test renamed AoS in pulse_schedule: ec/antenna -> ec/launcher. 
@@ -272,7 +284,7 @@ def test_pulse_schedule_aos_renamed_autofill_up(backend, worker_id, tmp_path): dbentry.close() -def test_pulse_schedule_multi_rename(): +def test_pulse_schedule_multi_rename(tmp_path): # Multiple renames of the same element: # DD >= 3.40+: ec/beam # DD 3.26-3.40: ec/launcher (but NBC metadata added in 3.28 only) @@ -294,9 +306,18 @@ def test_pulse_schedule_multi_rename(): ps["3.40.0"].ec.beam[0].name = name for version1 in ps: + ncfilename = str(tmp_path / f"{version1}.nc") + with DBEntry(ncfilename, "x", dd_version=version1) as entry: + entry.put(ps[version1]) + for version2 in ps: converted = convert_ids(ps[version1], version2) - compare_children(ps[version2], converted) + compare_children(ps[version2].ec, converted.ec) + + # Test with netCDF backend + with DBEntry(ncfilename, "r", dd_version=version2) as entry: + converted = entry.get("pulse_schedule") + compare_children(ps[version2].ec, converted.ec) def test_autofill_save_newer(ids_name, backend, worker_id, tmp_path): diff --git a/imas/test/test_nc_validation.py b/imas/test/test_nc_validation.py index 2f63b017..d111d3c3 100644 --- a/imas/test/test_nc_validation.py +++ b/imas/test/test_nc_validation.py @@ -1,7 +1,6 @@ import netCDF4 import numpy as np import pytest - from imas.backends.netcdf.ids2nc import IDS2NC from imas.backends.netcdf.nc2ids import NC2IDS from imas.backends.netcdf.nc_validate import validate_netcdf_file @@ -32,7 +31,8 @@ def memfile_with_ids(memfile, factory): ids.profiles_1d[0].zeff = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0] IDS2NC(ids, memfile).run() # This one is valid: - NC2IDS(memfile, factory.core_profiles()).run() + ids = factory.core_profiles() + NC2IDS(memfile, ids, ids.metadata, None).run() return memfile @@ -51,66 +51,75 @@ def test_invalid_homogeneous_time(memfile, factory): ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(empty_group, ids) # ids_properties.homogeneous_time does not exist + # ids_properties.homogeneous_time does not exist 
+ NC2IDS(empty_group, ids, ids.metadata, None) with pytest.raises(InvalidNetCDFEntry): - NC2IDS(invalid_dtype, ids) + NC2IDS(invalid_dtype, ids, ids.metadata, None) with pytest.raises(InvalidNetCDFEntry): - NC2IDS(invalid_shape, ids) + NC2IDS(invalid_shape, ids, ids.metadata, None) with pytest.raises(InvalidNetCDFEntry): - NC2IDS(invalid_value, ids) + NC2IDS(invalid_value, ids, ids.metadata, None) def test_invalid_units(memfile_with_ids, factory): memfile_with_ids["time"].units = "hours" + ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() def test_invalid_documentation(memfile_with_ids, factory, caplog): + ids = factory.core_profiles() with caplog.at_level("WARNING"): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() assert not caplog.records # Invalid docstring logs a warning memfile_with_ids["time"].documentation = "https://en.wikipedia.org/wiki/Time" with caplog.at_level("WARNING"): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() assert len(caplog.records) == 1 def test_invalid_dimension_name(memfile_with_ids, factory): memfile_with_ids.renameDimension("time", "T") + ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() def test_invalid_coordinates(memfile_with_ids, factory): memfile_with_ids["profiles_1d.grid.rho_tor_norm"].coordinates = "xyz" + ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() def test_invalid_ancillary_variables(memfile_with_ids, factory): memfile_with_ids["time"].ancillary_variables = "xyz" + ids = 
factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() def test_extra_attributes(memfile_with_ids, factory): memfile_with_ids["time"].new_attribute = [1, 2, 3] + ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() def test_shape_array_without_data(memfile_with_ids, factory): memfile_with_ids.createVariable("profiles_1d.t_i_average:shape", int, ()) + ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() def test_shape_array_without_sparse_data(memfile_with_ids, factory): memfile_with_ids.createVariable("profiles_1d.grid.rho_tor_norm:shape", int, ()) + ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, factory.core_profiles()).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() def test_shape_array_with_invalid_dimensions(memfile_with_ids, factory): @@ -128,7 +137,7 @@ def test_shape_array_with_invalid_dimensions(memfile_with_ids, factory): ("time", "profiles_1d.grid.rho_tor_norm:i"), ) with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, cp).run() + NC2IDS(memfile_with_ids, cp, cp.metadata, None).run() def test_shape_array_with_invalid_dtype(memfile_with_ids, factory): @@ -144,7 +153,7 @@ def test_shape_array_with_invalid_dtype(memfile_with_ids, factory): "profiles_1d.t_i_average:shape", float, ("time", "1D") ) with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, cp).run() + NC2IDS(memfile_with_ids, cp, cp.metadata, None).run() def test_validate_nc(tmpdir): From 0c835a18523009a9be69834252ae0b16e47f247c Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 4 Feb 2025 17:47:53 +0100 Subject: 
[PATCH 61/97] Improve performance of nc2ids by bypassing IDSPrimitive.value.setter logic --- imas/backends/netcdf/nc2ids.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index a6d5d14a..de42e4fa 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -184,20 +184,25 @@ def run(self) -> None: for index, node in tree_iter(self.ids, target_metadata): shape = shapes[index] if shape.all(): - node.value = data[index + tuple(map(slice, shapes[index]))] + # NOTE: bypassing IDSPrimitive.value.setter logic + node._IDSPrimitive__value = data[ + index + tuple(map(slice, shape)) + ] else: for index, node in tree_iter(self.ids, target_metadata): value = data[index] if value != getattr(var, "_FillValue", None): - node.value = data[index] + # NOTE: bypassing IDSPrimitive.value.setter logic + node._IDSPrimitive__value = value elif metadata.path_string not in self.ncmeta.aos: # Shortcut for assigning untensorized data - self.ids[target_metadata.path] = data + self.ids[target_metadata.path]._IDSPrimitive__value = data else: for index, node in tree_iter(self.ids, target_metadata): - node.value = data[index] + # NOTE: bypassing IDSPrimitive.value.setter logic + node._IDSPrimitive__value = data[index] def validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" From 7ab02c78c7ddcae4a87b655f41fe0156d8765812 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 4 Feb 2025 17:51:23 +0100 Subject: [PATCH 62/97] WIP: lazy loading for netCDF backend --- imas/backends/imas_core/al_context.py | 4 + imas/backends/imas_core/db_entry_helpers.py | 10 +-- imas/backends/netcdf/db_entry_nc.py | 8 +- imas/backends/netcdf/nc2ids.py | 84 ++++++++++++++++++++- imas/ids_structure.py | 16 ++-- 5 files changed, 99 insertions(+), 23 deletions(-) diff --git a/imas/backends/imas_core/al_context.py 
b/imas/backends/imas_core/al_context.py index b33c99b3..19b34d8e 100644 --- a/imas/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -10,6 +10,7 @@ import numpy +import imas from imas.backends.imas_core.imas_interface import ll_interface from imas.exception import LowlevelError from imas.ids_defs import ( @@ -280,6 +281,9 @@ def __init__( self.context = None """Potential weak reference to opened context.""" + def get_child(self, child): + imas.backends.imas_core.db_entry_helpers._get_child(child, self) + def get_context(self) -> ALContext: """Create and yield the actual ALContext.""" if self.dbentry._db_ctx is not self.dbentry_ctx: diff --git a/imas/backends/imas_core/db_entry_helpers.py b/imas/backends/imas_core/db_entry_helpers.py index 4216db5d..f83a0d47 100644 --- a/imas/backends/imas_core/db_entry_helpers.py +++ b/imas/backends/imas_core/db_entry_helpers.py @@ -22,7 +22,7 @@ def get_children( structure: IDSStructure, ctx: ALContext, time_mode: int, - nbc_map: Optional[NBCPathMap], + nbc_map: Optional["NBCPathMap"], ) -> None: """Recursively get all children of an IDSStructure.""" # NOTE: changes in this method must be propagated to _get_child and vice versa @@ -77,15 +77,11 @@ def get_children( getattr(structure, name)._IDSPrimitive__value = data -def _get_child(child: IDSBase, ctx: Optional[LazyALContext]): +def _get_child(child: IDSBase, ctx: LazyALContext): """Get a single child when required (lazy loading).""" # NOTE: changes in this method must be propagated to _get_children and vice versa # Performance: this method is specialized for the lazy get - # ctx can be None when the parent structure does not exist in the on-disk DD version - if ctx is None: - return # There is no data to be loaded - time_mode = ctx.time_mode if time_mode == IDS_TIME_MODE_INDEPENDENT and child.metadata.type.is_dynamic: return # skip dynamic (time-dependent) nodes @@ -148,7 +144,7 @@ def put_children( ctx: ALContext, time_mode: int, is_slice: bool, - 
nbc_map: Optional[NBCPathMap], + nbc_map: Optional["NBCPathMap"], verify_maxoccur: bool, ) -> None: """Recursively put all children of an IDSStructure""" diff --git a/imas/backends/netcdf/db_entry_nc.py b/imas/backends/netcdf/db_entry_nc.py index b702256d..e6ee32cb 100644 --- a/imas/backends/netcdf/db_entry_nc.py +++ b/imas/backends/netcdf/db_entry_nc.py @@ -108,10 +108,6 @@ def get( else: func = "get_sample" raise NotImplementedError(f"`{func}` is not available for netCDF files.") - if lazy: - raise NotImplementedError( - "Lazy loading is not implemented for netCDF files." - ) # Check if the IDS/occurrence exists, and obtain the group it is stored in try: @@ -123,7 +119,7 @@ def get( # Load data into the destination IDS if self._ds_factory.dd_version == destination._dd_version: - NC2IDS(group, destination, destination.metadata, None).run() + NC2IDS(group, destination, destination.metadata, None).run(lazy) else: # Construct relevant NBCPathMap, the one we get from DBEntry has the reverse # mapping from what we need. 
The imas_core logic does the mapping from @@ -135,7 +131,7 @@ def get( nbc_map = ddmap.old_to_new if source_is_older else ddmap.new_to_old NC2IDS( group, destination, self._ds_factory.new(ids_name).metadata, nbc_map - ).run() + ).run(lazy) return destination diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index de42e4fa..f012b0ba 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -3,6 +3,7 @@ from typing import Iterator, List, Optional, Tuple import netCDF4 +import numpy as np from imas.backends.netcdf import ids2nc from imas.backends.netcdf.nc_metadata import NCMetadata @@ -100,6 +101,8 @@ def __init__( """NetCDF related metadata.""" self.variables = list(group.variables) """List of variable names stored in the netCDF group.""" + + self._lazy_map = {} # Don't use masked arrays: they're slow and we'll handle most of the unset # values through the `:shape` arrays self.group.set_auto_mask(False) @@ -113,7 +116,7 @@ def __init__( "Mandatory variable `ids_properties.homogeneous_time` does not exist." ) var = group["ids_properties.homogeneous_time"] - self._validate_variable(var, ids.ids_properties.homogeneous_time.metadata) + self._validate_variable(var, ids.metadata["ids_properties/homogeneous_time"]) if var[()] not in [0, 1, 2]: raise InvalidNetCDFEntry( f"Invalid value for ids_properties.homogeneous_time: {var[()]}. 
" @@ -121,10 +124,12 @@ def __init__( ) self.homogeneous_time = var[()] == IDS_TIME_MODE_HOMOGENEOUS - def run(self) -> None: + def run(self, lazy: bool) -> None: """Load the data from the netCDF group into the IDS.""" self.variables.sort() self.validate_variables() + if lazy: + self.ids._set_lazy_context(LazyContext(self)) for var_name in self.variables: if var_name.endswith(":shape"): continue @@ -157,6 +162,10 @@ def run(self) -> None: target_metadata = metadata # no conversion required var = self.group[var_name] + if lazy: + self._lazy_map[target_metadata.path_string] = var + continue + if metadata.data_type is IDSDataType.STRUCT_ARRAY: if "sparse" in var.ncattrs(): shapes = self.group[var_name + ":shape"][()] @@ -342,3 +351,74 @@ def _validate_sparsity( raise variable_error( shape_var, "dtype", shape_var.dtype, "any integer type" ) + + +class LazyContext: + def __init__(self, nc2ids, index=()): + self.nc2ids = nc2ids + self.index = index + + def get_child(self, child): + metadata = child.metadata + path = metadata.path_string + data_type = metadata.data_type + + var = self.nc2ids._lazy_map.get(path) + if data_type is IDSDataType.STRUCT_ARRAY: + # Determine size of the aos + if var is None: + size = 0 + elif "sparse" in var.ncattrs(): + size = self.group[var.name + ":shape"][self.index][0] + else: + # FIXME: extract dimension name from nc file? 
+ dim = self.ncmeta.get_dimensions( + metadata.path_string, self.homogeneous_time + )[-1] + size = self.group.dimensions[dim].size + + child._set_lazy_context( + LazyArrayStructContext(self.nc2ids, self.index, size) + ) + + elif data_type is IDSDataType.STRUCTURE: + child._set_lazy_context(self) + + else: # Data elements + var = self.nc2ids._lazy_map.get(path) + if var is None: + return # nothing to load + + value = None + if "sparse" in var.ncattrs(): + if metadata.ndim: + shape_var = self.nc2ids.group[var.name + ":shape"] + shape = shape_var[self.index] + if shape.all(): + value = var[self.index + tuple(map(slice, shape))] + else: + value = var[self.index] + if value == getattr(var, "_FillValue", None): + value = None # Skip setting + else: + value = var[self.index] + + if value is not None: + if isinstance(value, np.ndarray): + # Convert the numpy array to a read-only view + value = value.view() + value.flags.writeable = False + # NOTE: bypassing IDSPrimitive.value.setter logic + child._IDSPrimitive__value = value + + +class LazyArrayStructContext(LazyContext): + def __init__(self, nc2ids, index, size): + super().__init__(nc2ids, index) + self.size = size + + def get_context(self): + return self # IDSStructArray expects to get something with a size attribute + + def iterate_to_index(self, index: int) -> LazyContext: + return LazyContext(self.nc2ids, self.index + (index,)) diff --git a/imas/ids_structure.py b/imas/ids_structure.py index 3482d6ef..27270034 100644 --- a/imas/ids_structure.py +++ b/imas/ids_structure.py @@ -6,11 +6,10 @@ import logging from copy import deepcopy from types import MappingProxyType -from typing import Generator, List, Optional, Union +from typing import TYPE_CHECKING, Generator, List, Optional, Union from xxhash import xxh3_64 -from imas.backends.imas_core.al_context import LazyALContext from imas.ids_base import IDSBase, IDSDoc from imas.ids_identifiers import IDSIdentifier from imas.ids_metadata import IDSDataType, IDSMetadata @@ 
-18,6 +17,9 @@ from imas.ids_primitive import IDSPrimitive from imas.ids_struct_array import IDSStructArray +if TYPE_CHECKING: + from imas.backends.imas_core.al_context import LazyALContext + logger = logging.getLogger(__name__) @@ -32,7 +34,7 @@ class IDSStructure(IDSBase): __doc__ = IDSDoc(__doc__) _children: "MappingProxyType[str, IDSMetadata]" - _lazy_context: Optional[LazyALContext] + _lazy_context: Optional["LazyALContext"] def __init__(self, parent: IDSBase, metadata: IDSMetadata): """Initialize IDSStructure from metadata specification @@ -62,10 +64,8 @@ def __getattr__(self, name): child_meta = self._children[name] child = child_meta._node_type(self, child_meta) self.__dict__[name] = child # bypass setattr logic below: avoid recursion - if self._lazy: # lazy load the child - from imas.backends.imas_core.db_entry_helpers import _get_child - - _get_child(child, self._lazy_context) + if self._lazy and self._lazy_context is not None: # lazy load the child + self._lazy_context.get_child(child) return child def _assign_identifier(self, value: Union[IDSIdentifier, str, int]) -> None: @@ -168,7 +168,7 @@ def __eq__(self, other) -> bool: return False # Not equal if there is any difference return True # Equal when there are no differences - def _set_lazy_context(self, ctx: LazyALContext) -> None: + def _set_lazy_context(self, ctx: "LazyALContext") -> None: """Called by DBEntry during a lazy get/get_slice. Set the context that we can use for retrieving our children. 
From e388694a93ea0e5e1c008168fab4afcda38d3055 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Wed, 5 Feb 2025 10:12:31 +0100 Subject: [PATCH 63/97] keep ubuntu-22.04 because latest has Python 3.13 and saxonche is not yet present --- .github/workflows/linting.yml | 2 +- .github/workflows/publish.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index b5e13617..a3928841 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -5,7 +5,7 @@ on: push jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout IMAS-Python sources diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 196111cc..06a7679d 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -5,7 +5,7 @@ on: push jobs: build: name: Build distribution - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 with: @@ -30,7 +30,7 @@ jobs: if: startsWith(github.ref, 'refs/tags/') # only publish to PyPI on tag pushes needs: - build - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 environment: name: pypi url: https://pypi.org/p/imas-python @@ -50,7 +50,7 @@ jobs: if: github.ref=='refs/heads/develop' # only publish to TestPyPI on develop pushes needs: - build - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 environment: name: testpypi url: https://test.pypi.org/p/imas-python From eabbecf1788d0d71132c7001e0b6b4103b21138a Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 5 Feb 2025 10:15:48 +0100 Subject: [PATCH 64/97] Add tests for netcdf lazy loading and fix implementation bugs --- imas/backends/netcdf/nc2ids.py | 23 ++++++-------- imas/test/test_lazy_loading.py | 57 +++++++++++++++++++++++++++++++--- 2 files changed, 62 insertions(+), 18 deletions(-) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index f012b0ba..b39e7c71 100644 --- 
a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -362,37 +362,32 @@ def get_child(self, child): metadata = child.metadata path = metadata.path_string data_type = metadata.data_type + nc2ids = self.nc2ids + var = nc2ids._lazy_map.get(path) - var = self.nc2ids._lazy_map.get(path) if data_type is IDSDataType.STRUCT_ARRAY: # Determine size of the aos if var is None: size = 0 elif "sparse" in var.ncattrs(): - size = self.group[var.name + ":shape"][self.index][0] + size = nc2ids.group[var.name + ":shape"][self.index][0] else: # FIXME: extract dimension name from nc file? - dim = self.ncmeta.get_dimensions( - metadata.path_string, self.homogeneous_time + dim = nc2ids.ncmeta.get_dimensions( + metadata.path_string, nc2ids.homogeneous_time )[-1] - size = self.group.dimensions[dim].size + size = nc2ids.group.dimensions[dim].size - child._set_lazy_context( - LazyArrayStructContext(self.nc2ids, self.index, size) - ) + child._set_lazy_context(LazyArrayStructContext(nc2ids, self.index, size)) elif data_type is IDSDataType.STRUCTURE: child._set_lazy_context(self) - else: # Data elements - var = self.nc2ids._lazy_map.get(path) - if var is None: - return # nothing to load - + elif var is not None: # Data elements value = None if "sparse" in var.ncattrs(): if metadata.ndim: - shape_var = self.nc2ids.group[var.name + ":shape"] + shape_var = nc2ids.group[var.name + ":shape"] shape = shape_var[self.index] if shape.all(): value = var[self.index + tuple(map(slice, shape))] diff --git a/imas/test/test_lazy_loading.py b/imas/test/test_lazy_loading.py index fabc8a3e..9023a795 100644 --- a/imas/test/test_lazy_loading.py +++ b/imas/test/test_lazy_loading.py @@ -3,7 +3,6 @@ import numpy import pytest - from imas.backends.imas_core.imas_interface import ll_interface from imas.db_entry import DBEntry from imas.ids_defs import ( @@ -22,6 +21,15 @@ def test_lazy_load_aos(backend, worker_id, tmp_path, log_lowlevel_calls): if backend == ASCII_BACKEND: pytest.skip("Lazy loading 
is not supported by the ASCII backend.") dbentry = open_dbentry(backend, "w", worker_id, tmp_path, dd_version="3.39.0") + run_lazy_load_aos(dbentry) + + +def test_lazy_load_aos_netcdf(tmp_path): + dbentry = DBEntry(str(tmp_path / "lazy_load_aos.nc"), "x", dd_version="3.39.0") + run_lazy_load_aos(dbentry) + + +def run_lazy_load_aos(dbentry): ids = dbentry.factory.new("core_profiles") ids.ids_properties.homogeneous_time = IDS_TIME_MODE_HETEROGENEOUS ids.profiles_1d.resize(10) @@ -46,9 +54,12 @@ def test_lazy_load_aos(backend, worker_id, tmp_path, log_lowlevel_calls): assert values[method].call_count == 0 # Test get_slice - lazy_ids_slice = dbentry.get_slice("core_profiles", 3.5, PREVIOUS_INTERP, lazy=True) - assert lazy_ids_slice.profiles_1d.shape == (1,) - assert lazy_ids_slice.profiles_1d[0].time == 3 + try: + lazy_slice = dbentry.get_slice("core_profiles", 3.5, PREVIOUS_INTERP, lazy=True) + assert lazy_slice.profiles_1d.shape == (1,) + assert lazy_slice.profiles_1d[0].time == 3 + except NotImplementedError: + pass # netCDF backend doesn't implement get_slice dbentry.close() @@ -57,6 +68,15 @@ def test_lazy_loading_distributions_random(backend, worker_id, tmp_path): if backend == ASCII_BACKEND: pytest.skip("Lazy loading is not supported by the ASCII backend.") dbentry = open_dbentry(backend, "w", worker_id, tmp_path) + run_lazy_loading_distributions_random(dbentry) + + +def test_lazy_loading_distributions_random_netcdf(tmp_path): + dbentry = DBEntry(str(tmp_path / "lazy_load_distributions.nc"), "x") + run_lazy_loading_distributions_random(dbentry) + + +def run_lazy_loading_distributions_random(dbentry): ids = IDSFactory().new("distributions") fill_consistent(ids) dbentry.put(ids) @@ -92,7 +112,15 @@ def test_lazy_load_close_dbentry(requires_imas): def test_lazy_load_readonly(requires_imas): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1) dbentry.create() + run_lazy_load_readonly(dbentry) + + +def test_lazy_load_readonly_netcdf(tmp_path): + dbentry = 
DBEntry(str(tmp_path / "lazy_load_readonly.nc"), "x") + run_lazy_load_readonly(dbentry) + +def run_lazy_load_readonly(dbentry): ids = dbentry.factory.core_profiles() ids.ids_properties.homogeneous_time = IDS_TIME_MODE_HETEROGENEOUS ids.time = [1, 2] @@ -165,6 +193,27 @@ def test_lazy_load_with_new_aos(requires_imas): dbentry.close() +def test_lazy_load_with_new_aos_netcdf(tmp_path): + fname = str(tmp_path / "new_aos.nc") + with DBEntry(fname, "x", dd_version="3.30.0") as dbentry: + et = dbentry.factory.edge_transport() + + et.ids_properties.homogeneous_time = IDS_TIME_MODE_HOMOGENEOUS + et.time = [1.0] + et.model.resize(1) + et.model[0].ggd.resize(1) + et.model[0].ggd[0].electrons.particles.d.resize(1) + et.model[0].ggd[0].electrons.particles.d[0].grid_index = -1 + dbentry.put(et) + + with DBEntry(fname, "r", dd_version="3.39.0") as entry2: + lazy_et = entry2.get("edge_transport", lazy=True) + assert numpy.array_equal(lazy_et.time, [1.0]) + assert lazy_et.model[0].ggd[0].electrons.particles.d[0].grid_index == -1 + # d_radial did not exist in 3.30.0 + assert len(lazy_et.model[0].ggd[0].electrons.particles.d_radial) == 0 + + def test_lazy_load_with_new_structure(requires_imas): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1, dd_version="3.30.0") dbentry.create() From 962bc715128a66ef87415a447dcd92d61987aedf Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Wed, 5 Feb 2025 10:28:25 +0100 Subject: [PATCH 65/97] fixed Python version --- .github/workflows/linting.yml | 4 +++- .github/workflows/publish.yml | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index a3928841..96bed185 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -14,7 +14,9 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: '3.x' + # until saxonche is available in 3.13 + # https://saxonica.plan.io/issues/6561 + python-version: "<3.13" - name: Display 
Python version run: python -c "import sys; print(sys.version)" diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 06a7679d..2ebc8562 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -13,7 +13,9 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.x" + # until saxonche is available in 3.13 + # https://saxonica.plan.io/issues/6561 + python-version: "<3.13" - name: Install pypa/build run: >- python3 -m pip install pip setuptools wheel build From cfd9a56b809602fa86c02fb17e81c095f7813747 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 5 Feb 2025 16:25:48 +0100 Subject: [PATCH 66/97] Fix bug where untensorized data could be extracted as a 0D numpy array instead of a python scalar (`int` or `float`) --- imas/backends/netcdf/nc2ids.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index b39e7c71..7829d257 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -206,7 +206,9 @@ def run(self, lazy: bool) -> None: elif metadata.path_string not in self.ncmeta.aos: # Shortcut for assigning untensorized data - self.ids[target_metadata.path]._IDSPrimitive__value = data + # Note: var[()] can return 0D numpy arrays. 
Instead of handling this + # here, we'll let IDSPrimitive.value.setter take care of it: + self.ids[target_metadata.path].value = data else: for index, node in tree_iter(self.ids, target_metadata): From 1f5362e037064ec44c8159d822974ec6ac9ccee0 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Thu, 6 Feb 2025 13:40:18 +0100 Subject: [PATCH 67/97] Fix the thrown exception message when trying to load imas_core --- imas/backends/imas_core/imas_interface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imas/backends/imas_core/imas_interface.py b/imas/backends/imas_core/imas_interface.py index 6f4b3ba6..6e463302 100644 --- a/imas/backends/imas_core/imas_interface.py +++ b/imas/backends/imas_core/imas_interface.py @@ -32,7 +32,7 @@ imasdef = None lowlevel = None logger.critical( - "Could not import 'al_core': %s. Some functionality is not available.", + "Could not import 'imas_core': %s. Some functionality is not available.", exc, ) From a110d69d4ee574be0a0db0bd0c46f1aab0a2f69a Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 7 Feb 2025 08:48:14 +0100 Subject: [PATCH 68/97] Add missing `lazy` argument to unit tests --- imas/test/test_nc_validation.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/imas/test/test_nc_validation.py b/imas/test/test_nc_validation.py index d111d3c3..69f9f01b 100644 --- a/imas/test/test_nc_validation.py +++ b/imas/test/test_nc_validation.py @@ -32,7 +32,7 @@ def memfile_with_ids(memfile, factory): IDS2NC(ids, memfile).run() # This one is valid: ids = factory.core_profiles() - NC2IDS(memfile, ids, ids.metadata, None).run() + NC2IDS(memfile, ids, ids.metadata, None).run(lazy=False) return memfile @@ -65,18 +65,18 @@ def test_invalid_units(memfile_with_ids, factory): memfile_with_ids["time"].units = "hours" ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, 
ids.metadata, None).run(lazy=False) def test_invalid_documentation(memfile_with_ids, factory, caplog): ids = factory.core_profiles() with caplog.at_level("WARNING"): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) assert not caplog.records # Invalid docstring logs a warning memfile_with_ids["time"].documentation = "https://en.wikipedia.org/wiki/Time" with caplog.at_level("WARNING"): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) assert len(caplog.records) == 1 @@ -84,42 +84,42 @@ def test_invalid_dimension_name(memfile_with_ids, factory): memfile_with_ids.renameDimension("time", "T") ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) def test_invalid_coordinates(memfile_with_ids, factory): memfile_with_ids["profiles_1d.grid.rho_tor_norm"].coordinates = "xyz" ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) def test_invalid_ancillary_variables(memfile_with_ids, factory): memfile_with_ids["time"].ancillary_variables = "xyz" ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) def test_extra_attributes(memfile_with_ids, factory): memfile_with_ids["time"].new_attribute = [1, 2, 3] ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) def test_shape_array_without_data(memfile_with_ids, factory): 
memfile_with_ids.createVariable("profiles_1d.t_i_average:shape", int, ()) ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) def test_shape_array_without_sparse_data(memfile_with_ids, factory): memfile_with_ids.createVariable("profiles_1d.grid.rho_tor_norm:shape", int, ()) ids = factory.core_profiles() with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, ids, ids.metadata, None).run() + NC2IDS(memfile_with_ids, ids, ids.metadata, None).run(lazy=False) def test_shape_array_with_invalid_dimensions(memfile_with_ids, factory): @@ -137,7 +137,7 @@ def test_shape_array_with_invalid_dimensions(memfile_with_ids, factory): ("time", "profiles_1d.grid.rho_tor_norm:i"), ) with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, cp, cp.metadata, None).run() + NC2IDS(memfile_with_ids, cp, cp.metadata, None).run(lazy=False) def test_shape_array_with_invalid_dtype(memfile_with_ids, factory): @@ -153,7 +153,7 @@ def test_shape_array_with_invalid_dtype(memfile_with_ids, factory): "profiles_1d.t_i_average:shape", float, ("time", "1D") ) with pytest.raises(InvalidNetCDFEntry): - NC2IDS(memfile_with_ids, cp, cp.metadata, None).run() + NC2IDS(memfile_with_ids, cp, cp.metadata, None).run(lazy=False) def test_validate_nc(tmpdir): From 62506e02784e83f6e28a1b04137cea8749259d5c Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 7 Feb 2025 09:11:13 +0100 Subject: [PATCH 69/97] Update netcdf documentation --- docs/source/netcdf.rst | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/docs/source/netcdf.rst b/docs/source/netcdf.rst index 2ff50c41..fb85ea23 100644 --- a/docs/source/netcdf.rst +++ b/docs/source/netcdf.rst @@ -11,7 +11,7 @@ IMAS netCDF files IMAS-Python supports reading IDSs from and writing IDSs to IMAS netCDF files. 
This feature is currently in alpha status, and its functionality may change in -upcoming minor releases of IMAS-Python. +upcoming (minor) releases of IMAS-Python. A detailed description of the IMAS netCDF format and conventions can be found on the :ref:`IMAS conventions for the netCDF data format` page. @@ -42,6 +42,34 @@ will be used for :py:meth:`~imas.db_entry.DBEntry.get` and imas.util.print_tree(cp2) +Implemented features of a netCDF ``DBEntry`` +-------------------------------------------- + +A netCDF ``DBEntry`` doesn't implement all features that are supported by +``imas_core``. The following table provides an overview of the implemented +features that are supported by DBEntries using ``imas_core`` respectively +``netCDF``: + +.. list-table:: + :header-rows: 1 + + * - Feature + - ``imas_core`` + - ``netCDF`` + * - :ref:`Lazy loading` + - Yes + - Yes + * - :ref:`Automatic conversion between DD versions ` + - When reading and writing + - When reading + * - ``get_slice`` / ``put_slice`` + - Yes + - Not implemented + * - ``get_sample`` + - Yes (requires ``imas_core >= 5.4.0``) + - Not implemented + + Using IMAS netCDF files with 3rd-party tools -------------------------------------------- From 02bcf9aa59e478781f25aea3ba34d075c01c043a Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Mon, 10 Feb 2025 16:40:25 +0100 Subject: [PATCH 70/97] initial version --- .github/workflows/test_with_pytest.yml | 53 ++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 .github/workflows/test_with_pytest.yml diff --git a/.github/workflows/test_with_pytest.yml b/.github/workflows/test_with_pytest.yml new file mode 100644 index 00000000..d300273e --- /dev/null +++ b/.github/workflows/test_with_pytest.yml @@ -0,0 +1,53 @@ +name: Test using pytest + +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-22.04 + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Set up Python + + uses: actions/setup-python@v2 + with: + 
# until saxonche is available in 3.13 + # https://saxonica.plan.io/issues/6561 + python-version: "<3.13" + - name: Display Python version + run: python -c "import sys; print(sys.version)" + + - name: Install dependencies + run: | + python -m venv venv + source venv/bin/activate + pip install --upgrade pip setuptools wheel + pip install .[h5py,netcdf,test] + + - name: Run tests + run: | + source venv/bin/activate + python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=xml:coverage.xml --cov-report=html:htmlcov --junit-xml=junit.xml + + - name: Upload coverage report + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: htmlcov + + - name: Upload test report + uses: actions/upload-artifact@v4 + with: + name: test-report + path: junit.xml + + - name: Pytest coverage comment + uses: MishaKav/pytest-coverage-comment@main + with: + pytest-xml-coverage-path: ./coverage.xml + junitxml-path: ./junit.xml + xml-skip-covered: true + hide-report: true \ No newline at end of file From 3587bc772d6e56d4cf779187dcdedf369a2d99a4 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Mon, 10 Feb 2025 17:28:06 +0100 Subject: [PATCH 71/97] added matrix to run within different Python environment --- .github/workflows/test_with_pytest.yml | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/.github/workflows/test_with_pytest.yml b/.github/workflows/test_with_pytest.yml index d300273e..02a7d62f 100644 --- a/.github/workflows/test_with_pytest.yml +++ b/.github/workflows/test_with_pytest.yml @@ -5,18 +5,21 @@ on: [push, pull_request] jobs: test: runs-on: ubuntu-22.04 - + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] # Test on multiple Python versions + steps: - name: Checkout repository uses: actions/checkout@v2 - - name: Set up Python + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: # until saxonche is available in 3.13 # 
https://saxonica.plan.io/issues/6561 - python-version: "<3.13" + python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" @@ -32,22 +35,14 @@ jobs: source venv/bin/activate python -m pytest -n=auto --cov=imas --cov-report=term-missing --cov-report=xml:coverage.xml --cov-report=html:htmlcov --junit-xml=junit.xml - - name: Upload coverage report + - name: Upload coverage report ${{ matrix.python-version }} uses: actions/upload-artifact@v4 with: - name: coverage-report + name: coverage-report-${{ matrix.python-version }} path: htmlcov - - name: Upload test report + - name: Upload test report ${{ matrix.python-version }} uses: actions/upload-artifact@v4 with: - name: test-report + name: test-report-${{ matrix.python-version }} path: junit.xml - - - name: Pytest coverage comment - uses: MishaKav/pytest-coverage-comment@main - with: - pytest-xml-coverage-path: ./coverage.xml - junitxml-path: ./junit.xml - xml-skip-covered: true - hide-report: true \ No newline at end of file From 430391dd376e80c4e9cd3d58c52fcedfd68ccd63 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 11 Feb 2025 09:19:28 +0100 Subject: [PATCH 72/97] fixed UTC issue in Python 3.8 --- .github/workflows/test_with_pytest.yml | 4 ++-- imas/ids_convert.py | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_with_pytest.yml b/.github/workflows/test_with_pytest.yml index 02a7d62f..4febc7a3 100644 --- a/.github/workflows/test_with_pytest.yml +++ b/.github/workflows/test_with_pytest.yml @@ -11,11 +11,11 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: # until saxonche is available in 3.13 # https://saxonica.plan.io/issues/6561 diff --git a/imas/ids_convert.py b/imas/ids_convert.py index f66f519d..a52db521 100644 --- 
a/imas/ids_convert.py +++ b/imas/ids_convert.py @@ -533,7 +533,8 @@ def _add_provenance_entry( # DD version after IMAS-5304 node.reference.resize(len(node.reference) + 1, keep=True) node.reference[-1].name = source_txt - timestamp = datetime.datetime.now(datetime.UTC).isoformat(timespec="seconds") + utc = getattr(datetime, "UTC", datetime.timezone.utc) + timestamp = datetime.datetime.now(utc).isoformat(timespec="seconds") node.reference[-1].timestamp = timestamp.replace("+00:00", "Z") else: # DD before IMAS-5304 (between 3.34.0 and 3.41.0) From 6d95d00067771d23f12c807cbf5b1736e9309b90 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 11 Feb 2025 10:27:15 +0100 Subject: [PATCH 73/97] timestamp converted to supported format for Python 3.8 --- imas/test/test_ids_convert.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/imas/test/test_ids_convert.py b/imas/test/test_ids_convert.py index 20dcd8c3..750c44e4 100644 --- a/imas/test/test_ids_convert.py +++ b/imas/test/test_ids_convert.py @@ -200,7 +200,8 @@ def test_provenance_entry(factory): timestamp = str(cp4.ids_properties.provenance.node[0].reference[0].timestamp) # Check that timestamp adheres to the format YYYY-MM-DDTHH:MM:SSZ assert re.match(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z", timestamp) - dtime = datetime.now(UTC) - datetime.fromisoformat(timestamp) + timestamp_for_parsing = timestamp.replace("Z", "+00:00") + dtime = datetime.now(UTC) - datetime.fromisoformat(timestamp_for_parsing) assert timedelta(seconds=0) <= dtime < timedelta(seconds=2) From 36cebdf0b1e50bb5978f5ba7ceb04401bc62c588 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Thu, 6 Mar 2025 16:38:07 +0100 Subject: [PATCH 74/97] Refactor iterators used in ids2nc and nc2ids --- imas/backends/netcdf/ids2nc.py | 31 +--------- imas/backends/netcdf/iterators.py | 98 +++++++++++++++++++++++++++++++ imas/backends/netcdf/nc2ids.py | 58 +++--------------- 3 files changed, 108 insertions(+), 79 deletions(-) create mode 100644 
imas/backends/netcdf/iterators.py diff --git a/imas/backends/netcdf/ids2nc.py b/imas/backends/netcdf/ids2nc.py index 2b892838..d5a8816c 100644 --- a/imas/backends/netcdf/ids2nc.py +++ b/imas/backends/netcdf/ids2nc.py @@ -1,21 +1,16 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. -"""NetCDF IO support for IMAS-Python. Requires [netcdf] extra dependencies. -""" - -from typing import Iterator, Tuple +"""NetCDF IO support for IMAS-Python. Requires [netcdf] extra dependencies.""" import netCDF4 import numpy from packaging import version from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.backends.netcdf.iterators import indexed_tree_iter from imas.exception import InvalidNetCDFEntry -from imas.ids_base import IDSBase from imas.ids_data_type import IDSDataType from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS -from imas.ids_struct_array import IDSStructArray -from imas.ids_structure import IDSStructure from imas.ids_toplevel import IDSToplevel default_fillvals = { @@ -33,26 +28,6 @@ SHAPE_DTYPE = numpy.int32 -def nc_tree_iter( - node: IDSStructure, aos_index: Tuple[int, ...] = () -) -> Iterator[Tuple[Tuple[int, ...], IDSBase]]: - """Tree iterator that tracks indices of all ancestor array of structures. - - Args: - node: IDS node to iterate over - - Yields: - (aos_index, node) for all filled nodes. 
- """ - for child in node.iter_nonempty_(): - yield (aos_index, child) - if isinstance(child, IDSStructArray): - for i in range(len(child)): - yield from nc_tree_iter(child[i], aos_index + (i,)) - elif isinstance(child, IDSStructure): - yield from nc_tree_iter(child, aos_index) - - class IDS2NC: """Class responsible for storing an IDS to a NetCDF file.""" @@ -105,7 +80,7 @@ def collect_filled_data(self) -> None: dimension_size = {} get_dimensions = self.ncmeta.get_dimensions - for aos_index, node in nc_tree_iter(self.ids): + for aos_index, node in indexed_tree_iter(self.ids): path = node.metadata.path_string filled_data[path][aos_index] = node ndim = node.metadata.ndim diff --git a/imas/backends/netcdf/iterators.py b/imas/backends/netcdf/iterators.py new file mode 100644 index 00000000..bff40e43 --- /dev/null +++ b/imas/backends/netcdf/iterators.py @@ -0,0 +1,98 @@ +from typing import Iterator, List, Optional, Tuple + +from imas.ids_base import IDSBase +from imas.ids_data_type import IDSDataType +from imas.ids_metadata import IDSMetadata +from imas.ids_struct_array import IDSStructArray +from imas.ids_structure import IDSStructure +from imas.ids_toplevel import IDSToplevel + + +def _split_on_aos(metadata: IDSMetadata): + """Split paths per IDS.""" + paths = [] + curpath = metadata.name + + item = metadata + while item._parent.data_type is not None: + item = item._parent + if item.data_type is IDSDataType.STRUCT_ARRAY: + paths.append(curpath) + curpath = item.name + else: + curpath = f"{item.name}/{curpath}" + paths.append(curpath) + return paths[::-1] + + +IndexedNode = Tuple[Tuple[int, ...], IDSBase] + + +def indexed_tree_iter( + ids: IDSToplevel, metadata: Optional[IDSMetadata] = None +) -> Iterator[IndexedNode]: + """Tree iterator that tracks indices of all ancestor array of structures. + + Args: + ids: IDS top level element to iterate over + metadata: Iterate over all nodes inside the IDS at the metadata object. 
+ If ``None``, all filled items in the IDS are iterated over. + + Yields: + (aos_indices, node) for all filled nodes. + + Example: + >>> ids = imas.IDSFactory().new("core_profiles") + >>> ids.profiles_1d.resize(2) + >>> ids.profiles_1d[0].time = 1.0 + >>> ids.profiles_1d[1].t_i_average = [1.0] + >>> list(indexed_tree_iter(ids)) + [ + ((), ), + ((0,), ), + ((1,), ) + ] + >>> list(indexed_tree_iter(ids, ids.metadata["profiles_1d/time"])) + [ + ((0,), ), + ((1,), ) + ] + """ # noqa: E501 + if metadata is None: + # Iterate over all filled nodes in the IDS + yield from _full_tree_iter(ids, ()) + + else: + paths = _split_on_aos(metadata) + if len(paths) == 1: + yield (), ids[paths[0]] + else: + yield from _tree_iter(ids, paths, ()) + + +def _tree_iter( + structure: IDSStructure, paths: List[str], curindex: Tuple[int, ...] +) -> Iterator[IndexedNode]: + aos_path, *paths = paths + aos = structure[aos_path] + + if len(paths) == 1: + path = paths[0] + for i, node in enumerate(aos): + yield curindex + (i,), node[path] + + else: + for i, node in enumerate(aos): + yield from _tree_iter(node, paths, curindex + (i,)) + + +def _full_tree_iter( + node: IDSStructure, cur_index: Tuple[int, ...] 
+) -> Iterator[IndexedNode]: + for child in node.iter_nonempty_(): + yield (cur_index, child) + if isinstance(child, IDSStructArray): + for i in range(len(child)): + yield from _full_tree_iter(child[i], cur_index + (i,)) + elif isinstance(child, IDSStructure): + yield from _full_tree_iter(child, cur_index) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index 7829d257..e9b524fb 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -1,19 +1,18 @@ import logging import os -from typing import Iterator, List, Optional, Tuple +from typing import Optional import netCDF4 import numpy as np from imas.backends.netcdf import ids2nc from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.backends.netcdf.iterators import indexed_tree_iter from imas.exception import InvalidNetCDFEntry -from imas.ids_base import IDSBase from imas.ids_convert import NBCPathMap from imas.ids_data_type import IDSDataType from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS from imas.ids_metadata import IDSMetadata -from imas.ids_structure import IDSStructure from imas.ids_toplevel import IDSToplevel logger = logging.getLogger(__name__) @@ -26,49 +25,6 @@ def variable_error(var, issue, value, expected=None) -> InvalidNetCDFEntry: ) -def split_on_aos(metadata: IDSMetadata): - paths = [] - curpath = metadata.name - - item = metadata - while item._parent.data_type is not None: - item = item._parent - if item.data_type is IDSDataType.STRUCT_ARRAY: - paths.append(curpath) - curpath = item.name - else: - curpath = f"{item.name}/{curpath}" - paths.append(curpath) - return paths[::-1] - - -IndexedNode = Tuple[Tuple[int, ...], IDSBase] - - -def tree_iter(structure: IDSStructure, metadata: IDSMetadata) -> Iterator[IndexedNode]: - paths = split_on_aos(metadata) - if len(paths) == 1: - yield (), structure[paths[0]] - else: - yield from _tree_iter(structure, paths, ()) - - -def _tree_iter( - structure: IDSStructure, paths: List[str], curindex: 
Tuple[int, ...] -) -> Iterator[IndexedNode]: - aos_path, *paths = paths - aos = structure[aos_path] - - if len(paths) == 1: - path = paths[0] - for i, node in enumerate(aos): - yield curindex + (i,), node[path] - - else: - for i, node in enumerate(aos): - yield from _tree_iter(node, paths, curindex + (i,)) - - class NC2IDS: """Class responsible for reading an IDS from a NetCDF group.""" @@ -169,7 +125,7 @@ def run(self, lazy: bool) -> None: if metadata.data_type is IDSDataType.STRUCT_ARRAY: if "sparse" in var.ncattrs(): shapes = self.group[var_name + ":shape"][()] - for index, node in tree_iter(self.ids, target_metadata): + for index, node in indexed_tree_iter(self.ids, target_metadata): node.resize(shapes[index][0]) else: @@ -178,7 +134,7 @@ def run(self, lazy: bool) -> None: metadata.path_string, self.homogeneous_time )[-1] size = self.group.dimensions[dim].size - for _, node in tree_iter(self.ids, target_metadata): + for _, node in indexed_tree_iter(self.ids, target_metadata): node.resize(size) continue @@ -190,7 +146,7 @@ def run(self, lazy: bool) -> None: if "sparse" in var.ncattrs(): if metadata.ndim: shapes = self.group[var_name + ":shape"][()] - for index, node in tree_iter(self.ids, target_metadata): + for index, node in indexed_tree_iter(self.ids, target_metadata): shape = shapes[index] if shape.all(): # NOTE: bypassing IDSPrimitive.value.setter logic @@ -198,7 +154,7 @@ def run(self, lazy: bool) -> None: index + tuple(map(slice, shape)) ] else: - for index, node in tree_iter(self.ids, target_metadata): + for index, node in indexed_tree_iter(self.ids, target_metadata): value = data[index] if value != getattr(var, "_FillValue", None): # NOTE: bypassing IDSPrimitive.value.setter logic @@ -211,7 +167,7 @@ def run(self, lazy: bool) -> None: self.ids[target_metadata.path].value = data else: - for index, node in tree_iter(self.ids, target_metadata): + for index, node in indexed_tree_iter(self.ids, target_metadata): # NOTE: bypassing IDSPrimitive.value.setter 
logic node._IDSPrimitive__value = data[index] From 242959596ad83b2b0ad7fa85467c0b23dd6014cc Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 7 Mar 2025 11:14:18 +0100 Subject: [PATCH 75/97] Refactor ids2nc, extract common tensorization logic in IDSTensorizer Allows reuse of functionality to (partially) convert IDSs to xarray Datasets. --- imas/backends/netcdf/ids2nc.py | 138 +------------------- imas/backends/netcdf/ids_tensorizer.py | 173 +++++++++++++++++++++++++ 2 files changed, 177 insertions(+), 134 deletions(-) create mode 100644 imas/backends/netcdf/ids_tensorizer.py diff --git a/imas/backends/netcdf/ids2nc.py b/imas/backends/netcdf/ids2nc.py index d5a8816c..531c7ac2 100644 --- a/imas/backends/netcdf/ids2nc.py +++ b/imas/backends/netcdf/ids2nc.py @@ -3,14 +3,11 @@ """NetCDF IO support for IMAS-Python. Requires [netcdf] extra dependencies.""" import netCDF4 -import numpy from packaging import version -from imas.backends.netcdf.nc_metadata import NCMetadata -from imas.backends.netcdf.iterators import indexed_tree_iter +from imas.backends.netcdf.ids_tensorizer import SHAPE_DTYPE, IDSTensorizer, dtypes from imas.exception import InvalidNetCDFEntry from imas.ids_data_type import IDSDataType -from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS from imas.ids_toplevel import IDSToplevel default_fillvals = { @@ -19,16 +16,9 @@ IDSDataType.FLT: netCDF4.default_fillvals["f8"], IDSDataType.CPX: netCDF4.default_fillvals["f8"] * (1 + 1j), } -dtypes = { - IDSDataType.INT: numpy.dtype(numpy.int32), - IDSDataType.STR: str, - IDSDataType.FLT: numpy.dtype(numpy.float64), - IDSDataType.CPX: numpy.dtype(numpy.complex128), -} -SHAPE_DTYPE = numpy.int32 -class IDS2NC: +class IDS2NC(IDSTensorizer): """Class responsible for storing an IDS to a NetCDF file.""" def __init__(self, ids: IDSToplevel, group: netCDF4.Group) -> None: @@ -38,112 +28,18 @@ def __init__(self, ids: IDSToplevel, group: netCDF4.Group) -> None: ids: IDSToplevel to store in the netCDF group group: Empty 
netCDF group to store the IDS in. """ - self.ids = ids - """IDS to store.""" + super().__init__(ids, []) # pass empty list: tensorize full IDS self.group = group """NetCDF Group to store the IDS in.""" - self.ncmeta = NCMetadata(ids.metadata) - """NetCDF related metadata.""" - self.dimension_size = {} - """Map dimension name to its size.""" - self.filled_data = {} - """Map of IDS paths to filled data nodes.""" - self.filled_variables = set() - """Set of filled IDS variables""" - self.homogeneous_time = ( - ids.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS - ) - """True iff the IDS time mode is homogeneous.""" - self.shapes = {} - """Map of IDS paths to data shape arrays.""" - def run(self) -> None: """Store the IDS in the NetCDF group.""" self.collect_filled_data() self.determine_data_shapes() self.create_dimensions() self.create_variables() - # Synchronize variables to disk - # This is not strictly required (automatically done by netCDF4 when needed), but - # by separating it we get more meaningful profiling statistics - self.group.sync() self.store_data() - def collect_filled_data(self) -> None: - """Collect all filled data in the IDS and determine dimension sizes. - - Results are stored in :attr:`filled_data` and :attr:`dimension_size`. 
- """ - # Initialize dictionary with all paths that could exist in this IDS - filled_data = {path: {} for path in self.ncmeta.paths} - dimension_size = {} - get_dimensions = self.ncmeta.get_dimensions - - for aos_index, node in indexed_tree_iter(self.ids): - path = node.metadata.path_string - filled_data[path][aos_index] = node - ndim = node.metadata.ndim - if not ndim: - continue - dimensions = get_dimensions(path, self.homogeneous_time) - # We're only interested in the non-tensorized dimensions: [-ndim:] - for dim_name, size in zip(dimensions[-ndim:], node.shape): - dimension_size[dim_name] = max(dimension_size.get(dim_name, 0), size) - - # Remove paths without data - self.filled_data = {path: data for path, data in filled_data.items() if data} - self.filled_variables = {path.replace("/", ".") for path in self.filled_data} - # Store dimension sizes - self.dimension_size = dimension_size - - def determine_data_shapes(self) -> None: - """Determine tensorized data shapes and sparsity, save in :attr:`shapes`.""" - get_dimensions = self.ncmeta.get_dimensions - - for path, nodes_dict in self.filled_data.items(): - metadata = self.ids.metadata[path] - # Structures don't have a size - if metadata.data_type is IDSDataType.STRUCTURE: - continue - ndim = metadata.ndim - dimensions = get_dimensions(path, self.homogeneous_time) - - # node shape if it is completely filled - full_shape = tuple(self.dimension_size[dim] for dim in dimensions[-ndim:]) - - if len(dimensions) == ndim: - # Data at this path is not tensorized - node = nodes_dict[()] - sparse = node.shape != full_shape - if sparse: - shapes = numpy.array(node.shape, dtype=SHAPE_DTYPE) - - else: - # Data is tensorized, determine if it is homogeneously shaped - aos_dims = get_dimensions(self.ncmeta.aos[path], self.homogeneous_time) - shapes_shape = [self.dimension_size[dim] for dim in aos_dims] - if ndim: - shapes_shape.append(ndim) - shapes = numpy.zeros(shapes_shape, dtype=SHAPE_DTYPE) - - if ndim: # ND types have a 
shape - for aos_coords, node in nodes_dict.items(): - shapes[aos_coords] = node.shape - sparse = not numpy.array_equiv(shapes, full_shape) - - else: # 0D types don't have a shape - for aos_coords in nodes_dict.keys(): - shapes[aos_coords] = 1 - sparse = not shapes.all() - shapes = None - - if sparse: - self.shapes[path] = shapes - if ndim: - # Ensure there is a pseudo-dimension f"{ndim}D" for shapes variable - self.dimension_size[f"{ndim}D"] = ndim - def create_dimensions(self) -> None: """Create netCDF dimensions.""" for dimension, size in self.dimension_size.items(): @@ -228,14 +124,6 @@ def create_variables(self) -> None: "shape is unset (i.e. filled with _Fillvalue)." ) - def filter_coordinates(self, path: str) -> str: - """Filter the coordinates list from NCMetadata to filled variables only.""" - return " ".join( - coordinate - for coordinate in self.ncmeta.get_coordinates(path, self.homogeneous_time) - if coordinate in self.filled_variables - ) - def store_data(self) -> None: """Store data in the netCDF variables""" for path, nodes_dict in self.filled_data.items(): @@ -273,22 +161,4 @@ def store_data(self) -> None: else: # Data is tensorized: tensorize in-memory - # TODO: depending on the data, tmp_var may be HUGE, we may need a more - # efficient assignment algorithm for large and/or irregular data - tmp_var = numpy.full(var.shape, default_fillvals[metadata.data_type]) - if metadata.data_type is IDSDataType.STR: - tmp_var = numpy.asarray(tmp_var, dtype=object) - - # Fill tmp_var - if shapes is None: - # Data is not sparse, so we can assign to the aos_coords - for aos_coords, node in nodes_dict.items(): - tmp_var[aos_coords] = node.value - else: - # Data is sparse, so we must select a slice - for aos_coords, node in nodes_dict.items(): - tmp_var[aos_coords + tuple(map(slice, node.shape))] = node.value - - # Assign data to variable - var[()] = tmp_var - del tmp_var + var[()] = self.tensorize(path, default_fillvals[metadata.data_type]) diff --git 
a/imas/backends/netcdf/ids_tensorizer.py b/imas/backends/netcdf/ids_tensorizer.py new file mode 100644 index 00000000..4619919b --- /dev/null +++ b/imas/backends/netcdf/ids_tensorizer.py @@ -0,0 +1,173 @@ +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. +"""Tensorization logic to convert IDSs to netCDF files and/or xarray Datasets.""" + +from typing import List + +import numpy + +from imas.backends.netcdf.iterators import indexed_tree_iter +from imas.backends.netcdf.nc_metadata import NCMetadata +from imas.ids_data_type import IDSDataType +from imas.ids_defs import IDS_TIME_MODE_HOMOGENEOUS +from imas.ids_toplevel import IDSToplevel + +dtypes = { + IDSDataType.INT: numpy.dtype(numpy.int32), + IDSDataType.STR: str, + IDSDataType.FLT: numpy.dtype(numpy.float64), + IDSDataType.CPX: numpy.dtype(numpy.complex128), +} +SHAPE_DTYPE = numpy.int32 + + +class IDSTensorizer: + """Common functionality for tensorizing IDSs. Used in IDS2NC and util.to_xarray.""" + + def __init__(self, ids: IDSToplevel, paths_to_tensorize: List[str]) -> None: + """Initialize IDSTensorizer. + + Args: + ids: IDSToplevel to store in the netCDF group + paths_to_tensorize: Restrict tensorization to the provided paths. If an + empty list is provided, all filled quantities in the IDS will be + tensorized. 
+ """ + self.ids = ids + """IDS to tensorize.""" + self.paths_to_tensorize = paths_to_tensorize + """List of paths to tensorize""" + + self.ncmeta = NCMetadata(ids.metadata) + """NetCDF related metadata.""" + self.dimension_size = {} + """Map dimension name to its size.""" + self.filled_data = {} + """Map of IDS paths to filled data nodes.""" + self.filled_variables = set() + """Set of filled IDS variables""" + self.homogeneous_time = ( + ids.ids_properties.homogeneous_time == IDS_TIME_MODE_HOMOGENEOUS + ) + """True iff the IDS time mode is homogeneous.""" + self.shapes = {} + """Map of IDS paths to data shape arrays.""" + + def collect_filled_data(self) -> None: + """Collect all filled data in the IDS and determine dimension sizes. + + Results are stored in :attr:`filled_data` and :attr:`dimension_size`. + """ + # Initialize dictionary with all paths that could exist in this IDS + filled_data = {path: {} for path in self.ncmeta.paths} + dimension_size = {} + get_dimensions = self.ncmeta.get_dimensions + + if self.paths_to_tensorize: + # Restrict tensorization to provided paths + iterator = ( + item + for path in self.paths_to_tensorize + for item in indexed_tree_iter(self.ids, self.ids.metadata[path]) + if item[1].has_value # Skip nodes without value set + ) + else: + # Tensorize all non-empty nodes + iterator = indexed_tree_iter(self.ids) + + for aos_index, node in iterator: + path = node.metadata.path_string + filled_data[path][aos_index] = node + ndim = node.metadata.ndim + if not ndim: + continue + dimensions = get_dimensions(path, self.homogeneous_time) + # We're only interested in the non-tensorized dimensions: [-ndim:] + for dim_name, size in zip(dimensions[-ndim:], node.shape): + dimension_size[dim_name] = max(dimension_size.get(dim_name, 0), size) + + # Remove paths without data + self.filled_data = {path: data for path, data in filled_data.items() if data} + self.filled_variables = {path.replace("/", ".") for path in self.filled_data} + # Store dimension 
sizes + self.dimension_size = dimension_size + + def determine_data_shapes(self) -> None: + """Determine tensorized data shapes and sparsity, save in :attr:`shapes`.""" + get_dimensions = self.ncmeta.get_dimensions + + for path, nodes_dict in self.filled_data.items(): + metadata = self.ids.metadata[path] + # Structures don't have a size + if metadata.data_type is IDSDataType.STRUCTURE: + continue + ndim = metadata.ndim + dimensions = get_dimensions(path, self.homogeneous_time) + + # node shape if it is completely filled + full_shape = tuple(self.dimension_size[dim] for dim in dimensions[-ndim:]) + + if len(dimensions) == ndim: + # Data at this path is not tensorized + node = nodes_dict[()] + sparse = node.shape != full_shape + if sparse: + shapes = numpy.array(node.shape, dtype=SHAPE_DTYPE) + + else: + # Data is tensorized, determine if it is homogeneously shaped + aos_dims = get_dimensions(self.ncmeta.aos[path], self.homogeneous_time) + shapes_shape = [self.dimension_size[dim] for dim in aos_dims] + if ndim: + shapes_shape.append(ndim) + shapes = numpy.zeros(shapes_shape, dtype=SHAPE_DTYPE) + + if ndim: # ND types have a shape + for aos_coords, node in nodes_dict.items(): + shapes[aos_coords] = node.shape + sparse = not numpy.array_equiv(shapes, full_shape) + + else: # 0D types don't have a shape + for aos_coords in nodes_dict.keys(): + shapes[aos_coords] = 1 + sparse = not shapes.all() + shapes = None + + if sparse: + self.shapes[path] = shapes + if ndim: + # Ensure there is a pseudo-dimension f"{ndim}D" for shapes variable + self.dimension_size[f"{ndim}D"] = ndim + + def filter_coordinates(self, path: str) -> str: + """Filter the coordinates list from NCMetadata to filled variables only.""" + return " ".join( + coordinate + for coordinate in self.ncmeta.get_coordinates(path, self.homogeneous_time) + if coordinate in self.filled_variables + ) + + def tensorize(self, path, fillvalue): + dimensions = self.ncmeta.get_dimensions(path, self.homogeneous_time) + shape = 
tuple(self.dimension_size[dim] for dim in dimensions) + + # TODO: depending on the data, tmp_var may be HUGE, we may need a more + # efficient assignment algorithm for large and/or irregular data + tmp_var = numpy.full(shape, fillvalue) + if isinstance(fillvalue, str): + tmp_var = numpy.asarray(tmp_var, dtype=object) + + shapes = self.shapes.get(path) + nodes_dict = self.filled_data[path] + + # Fill tmp_var + if shapes is None: + # Data is not sparse, so we can assign to the aos_coords + for aos_coords, node in nodes_dict.items(): + tmp_var[aos_coords] = node.value + else: + # Data is sparse, so we must select a slice + for aos_coords, node in nodes_dict.items(): + tmp_var[aos_coords + tuple(map(slice, node.shape))] = node.value + + return tmp_var From 09fb355f04c2a8bc69e3dff9ad918030927663a9 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 7 Mar 2025 13:42:34 +0100 Subject: [PATCH 76/97] Implement `imas.util.to_xarray` Reuses most of the tensorization and metadata logic from the netCDF export. 
--- imas/_to_xarray.py | 73 ++++++++++++++++++++ imas/backends/netcdf/ids_tensorizer.py | 21 ++++++ imas/test/test_to_xarray.py | 94 ++++++++++++++++++++++++++ imas/util.py | 56 ++++++++++++++- 4 files changed, 241 insertions(+), 3 deletions(-) create mode 100644 imas/_to_xarray.py create mode 100644 imas/test/test_to_xarray.py diff --git a/imas/_to_xarray.py b/imas/_to_xarray.py new file mode 100644 index 00000000..6caec501 --- /dev/null +++ b/imas/_to_xarray.py @@ -0,0 +1,73 @@ +# xarray is an optional dependency, but this module won't be imported when xarray is not +# available +import numpy +import xarray + +from imas.ids_toplevel import IDSToplevel +from imas.backends.netcdf.ids_tensorizer import IDSTensorizer +from imas.ids_data_type import IDSDataType + +fillvals = { + IDSDataType.INT: -(2**31) + 1, + IDSDataType.STR: "", + IDSDataType.FLT: numpy.nan, + IDSDataType.CPX: numpy.nan * (1 + 1j), +} + + +def to_xarray(ids: IDSToplevel, *paths: str) -> xarray.Dataset: + """See :func:`imas.util.to_xarray`""" + # We really need an IDS toplevel element + if not isinstance(ids, IDSToplevel): + raise TypeError( + f"to_xarray needs a toplevel IDS element as first argument, but got {ids!r}" + ) + + # Valid path can use / or . as separator, but IDSTensorizer expects /. The following + # block checks if the paths are valid, and by using "metadata.path_string" we ensure + # that / are used as separator. + try: + paths = [ids.metadata[path].path_string for path in paths] + except KeyError as exc: + raise ValueError(str(exc)) from None + + # Converting lazy-loaded IDSs requires users to specify at least one path + if ids._lazy and not paths: + raise RuntimeError( + "This IDS is lazy loaded. Please provide at least one path to convert to" + " xarray." 
+ ) + + # Use netcdf IDS Tensorizer to tensorize the data and determine metadata + tensorizer = IDSTensorizer(ids, paths) + tensorizer.include_coordinate_paths() + tensorizer.collect_filled_data() + tensorizer.determine_data_shapes() + + data_vars = {} + coordinate_names = set() + for path in tensorizer.filled_data: + var_name = path.replace("/", ".") + metadata = ids.metadata[path] + if metadata.data_type in (IDSDataType.STRUCTURE, IDSDataType.STRUCT_ARRAY): + continue # We don't store these in xarray + + dimensions = tensorizer.ncmeta.get_dimensions(path, tensorizer.homogeneous_time) + data = tensorizer.tensorize(path, fillvals[metadata.data_type]) + + attrs = dict(documentation=metadata.documentation) + if metadata.units: + attrs["units"] = metadata.units + coordinates = tensorizer.filter_coordinates(path) + if coordinates: + coordinate_names.update(coordinates.split(" ")) + attrs["coordinates"] = coordinates + + data_vars[var_name] = (dimensions, data, attrs) + + # Remove coordinates from data_vars and put in coordinates mapping: + coordinates = {} + for coordinate_name in coordinate_names: + coordinates[coordinate_name] = data_vars.pop(coordinate_name) + + return xarray.Dataset(data_vars, coordinates) diff --git a/imas/backends/netcdf/ids_tensorizer.py b/imas/backends/netcdf/ids_tensorizer.py index 4619919b..95bfba47 100644 --- a/imas/backends/netcdf/ids_tensorizer.py +++ b/imas/backends/netcdf/ids_tensorizer.py @@ -2,6 +2,7 @@ # You should have received the IMAS-Python LICENSE file with this project. 
"""Tensorization logic to convert IDSs to netCDF files and/or xarray Datasets.""" +from collections import deque from typing import List import numpy @@ -53,6 +54,26 @@ def __init__(self, ids: IDSToplevel, paths_to_tensorize: List[str]) -> None: self.shapes = {} """Map of IDS paths to data shape arrays.""" + def include_coordinate_paths(self) -> None: + """Append all paths that are coordinates of self.paths_to_tensorize""" + # Use a queue so we can also take coordinates of coordinates into account + queue = deque(self.paths_to_tensorize) + # Include all parent AoS as well: + for path in self.paths_to_tensorize: + while path: + path, _, _ = path.rpartition("/") + if self.ncmeta.get_dimensions(path, self.homogeneous_time): + queue.append(path) + + self.paths_to_tensorize = [] + while queue: + path = queue.popleft() + if path in self.paths_to_tensorize: + continue # already processed + self.paths_to_tensorize.append(path) + for coordinate in self.ncmeta.get_coordinates(path, self.homogeneous_time): + queue.append(coordinate.replace(".", "/")) + def collect_filled_data(self) -> None: """Collect all filled data in the IDS and determine dimension sizes. 
diff --git a/imas/test/test_to_xarray.py b/imas/test/test_to_xarray.py new file mode 100644 index 00000000..1767a6d9 --- /dev/null +++ b/imas/test/test_to_xarray.py @@ -0,0 +1,94 @@ +import numpy as np +import pytest + +import imas +import imas.training +from imas.util import to_xarray + +pytest.importorskip("xarray") + + +@pytest.fixture +def entry(requires_imas, monkeypatch): + monkeypatch.setenv("IMAS_VERSION", "3.39.0") # Use fixed DD version + return imas.training.get_training_db_entry() + + +def test_to_xarray_invalid_argtype(): + ids = imas.IDSFactory("3.39.0").core_profiles() + + with pytest.raises(TypeError): + to_xarray("test") + with pytest.raises(TypeError): + to_xarray(ids.time) + with pytest.raises(TypeError): + to_xarray(ids.ids_properties) + + +def test_to_xarray_invalid_paths(): + ids = imas.IDSFactory("3.39.0").core_profiles() + + with pytest.raises(ValueError, match="xyz"): + to_xarray(ids, "xyz") + with pytest.raises(ValueError, match="ids_properties/xyz"): + to_xarray(ids, "ids_properties/xyz") + with pytest.raises(ValueError, match="Xtime"): + to_xarray(ids, "time", "Xtime") + + +def validate_trainingdb_electron_temperature_dataset(ds): + assert ds.sizes == {"time": 3, "profiles_1d.grid.rho_tor_norm:i": 101} + assert ds.data_vars.keys() == {"profiles_1d.electrons.temperature"} + assert ds.coords.keys() == {"time", "profiles_1d.grid.rho_tor_norm"} + + # Check that values are loaded as expected + assert np.allclose(ds["time"], [3.987222, 432.937598, 792.0]) + assert np.allclose( + ds.isel(time=1)["profiles_1d.electrons.temperature"][10:13], + [17728.81703089, 17440.78020568, 17139.35431082], + ) + + +def test_to_xarray_lazy_loaded(entry): + ids = entry.get("core_profiles", lazy=True) + + with pytest.raises(RuntimeError): + to_xarray(ids) + + ds = to_xarray(ids, "profiles_1d.electrons.temperature") + validate_trainingdb_electron_temperature_dataset(ds) + + +def test_to_xarray_from_trainingdb(entry): + ids = entry.get("core_profiles") + + ds = 
to_xarray(ids) + validate_trainingdb_electron_temperature_dataset( + ds["profiles_1d.electrons.temperature"].to_dataset() + ) + ds = to_xarray(ids, "profiles_1d.electrons.temperature") + validate_trainingdb_electron_temperature_dataset(ds) + + ds = to_xarray( + ids, "profiles_1d.electrons.temperature", "profiles_1d/electrons/density" + ) + assert ds.data_vars.keys() == { + "profiles_1d.electrons.temperature", + "profiles_1d.electrons.density", + } + + +def test_to_xarray(): + ids = imas.IDSFactory("3.39.0").core_profiles() + + ids.profiles_1d.resize(2) + ids.profiles_1d[0].electrons.temperature = [1.0, 2.0] + ids.profiles_1d[0].grid.rho_tor_norm = [0.0, 1.0] + ids.profiles_1d[0].time = 0.0 + + # These should all be identical: + ds1 = to_xarray(ids) + ds2 = to_xarray(ids, "profiles_1d.electrons.temperature") + ds3 = to_xarray(ids, "profiles_1d/electrons/temperature") + assert ds1.equals(ds2) + assert ds2.equals(ds3) diff --git a/imas/util.py b/imas/util.py index aafad2c7..64e2b228 100644 --- a/imas/util.py +++ b/imas/util.py @@ -1,8 +1,6 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. -"""Collection of useful helper methods when working with IMAS-Python. -""" - +"""Collection of useful helper methods when working with IMAS-Python.""" import logging import re @@ -524,3 +522,55 @@ def get_data_dictionary_version(obj: Union[IDSBase, DBEntry, IDSFactory]) -> str if isinstance(obj, IDSBase): return obj._version raise TypeError(f"Cannot get data dictionary version of '{type(obj)}'") + + +def to_xarray(ids: IDSToplevel, *paths: str) -> Any: + """Convert an IDS to an xarray Dataset. + + Args: + ids: An IDS toplevel element + paths: Optional list of element paths to convert to xarray. The full IDS will be + converted to an xarray Dataset if no paths are provided. + + Paths must not contain indices, and may use a ``/`` or a ``.`` as separator. 
+ For example, ``"profiles_1d(itime)/electrons/density"`` is not allowed as + path, use ``"profiles_1d/electrons/density"`` or + ``profiles_1d.electrons.density"`` instead. + + Coordinates to the quantities in the requested paths will also be included + in the xarray Dataset. + + Returns: + An ``xarray.Dataset`` object. + + Examples: + .. code-block:: python + + # Convert the whole IDS to an xarray Dataset + ds = imas.util.to_xarray(ids) + + # Convert only some elements in the IDS (including their coordinates) + ds = imas.util.to_xarray( + ids, + "profiles_1d/electrons/density", + "profiles_1d/electrons/temperature", + ) + + # Paths can be provided with "/" or "." as separator + ds = imas.util.to_xarray( + ids, + "profiles_1d.electrons.density", + "profiles_1d.electrons.temperature", + ) + + See Also: + https://docs.xarray.dev/en/stable/generated/xarray.Dataset.html + """ + try: + import xarray # noqa: F401 + except ImportError: + raise RuntimeError("xarray is not available, cannot convert the IDS to xarray.") + + from imas._to_xarray import to_xarray + + return to_xarray(ids, *paths) From 78b89f6ef6be06a49aaa54d037c988324db461c8 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Fri, 7 Mar 2025 13:43:01 +0100 Subject: [PATCH 77/97] Add `xarray` as optional imas-python dependency --- .github/workflows/test_with_pytest.yml | 2 +- pyproject.toml | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test_with_pytest.yml b/.github/workflows/test_with_pytest.yml index 4febc7a3..7e56ac38 100644 --- a/.github/workflows/test_with_pytest.yml +++ b/.github/workflows/test_with_pytest.yml @@ -28,7 +28,7 @@ jobs: python -m venv venv source venv/bin/activate pip install --upgrade pip setuptools wheel - pip install .[h5py,netcdf,test] + pip install .[test] - name: Run tests run: | diff --git a/pyproject.toml b/pyproject.toml index 1b1b86c3..56e6dc1b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,8 +71,7 @@ dependencies = [ 
[project.optional-dependencies] # these self-dependencies are available since pip 21.2 all = [ - "imas-python[test,docs,netcdf,h5py]" - # "imas-python[test,docs,imas-core,netcdf,h5py]" TODO enable when imas-core is available on pypi + "imas-python[test,docs]" ] docs = [ "sphinx>=6.0.0,<7.0.0", @@ -90,6 +89,9 @@ netcdf = [ h5py = [ "h5py", ] +xarray = [ + "xarray", +] test = [ "pytest>=5.4.1", "pytest-cov>=0.6", @@ -101,7 +103,9 @@ test = [ "virtualenv", # Pint and xarray are used in training snippets "pint", - "xarray", + # Optional dependencies + # TODO add imas-core when it is available on pypi + "imas-python[netcdf,h5py,xarray]", ] [project.scripts] From a56a5e73fdf2594df157f0a04702c5957c6f6991 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Mon, 10 Mar 2025 13:58:57 +0100 Subject: [PATCH 78/97] Update xarray advanced course to mention imas.util.to_xarray --- .../imas_snippets/tensorized_ids_to_xarray.py | 34 +++++++++++++++++++ docs/source/courses/advanced/xarray.rst | 16 +++++++-- 2 files changed, 47 insertions(+), 3 deletions(-) diff --git a/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py b/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py index ff4f4e28..559b27a7 100644 --- a/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py +++ b/docs/source/courses/advanced/imas_snippets/tensorized_ids_to_xarray.py @@ -1,6 +1,7 @@ import os import matplotlib + # To avoid possible display issues when Matplotlib uses a non-GUI backend if "DISPLAY" not in os.environ: matplotlib.use("agg") @@ -17,6 +18,39 @@ entry = imas.training.get_training_db_entry() cp = entry.get("core_profiles") +####################################################################################### +# Steps 2, 3 and 4, using imas.util.to_xarray +# Create an xarray Dataset containing t_i_average and its coordinates +xrds = imas.util.to_xarray(cp, "profiles_1d/t_i_average") +# Note that profiles_1d.grid.rho_tor_norm is a 2D coordinate: 
its values may be +# different at different times. +# +# Since the values at different time slices differ only minutely in this example, we'll +# rename the `profiles_1d.grid.rho_tor_norm:i` dimension to `rho_tor_norm` and set the +# values to the values of rho_tor_norm of the first time slice: +xrds = xrds.rename({"profiles_1d.grid.rho_tor_norm:i": "rho_tor_norm"}).assign_coords( + {"rho_tor_norm": xrds["profiles_1d.grid.rho_tor_norm"].isel(time=0).data} +) + +# Extract temperatures as an xarray DataArray +temperature = xrds["profiles_1d.t_i_average"] + +# 5a. Select subset of temperature where 0.4 <= rho_tor_norm < 0.6: +print(temperature.sel(rho_tor_norm=slice(0.4, 0.6))) + +# 5b. Interpolate temperature on a new grid: [0, 0.1, 0.2, ..., 0.9, 1.0] +print(temperature.interp(rho_tor_norm=numpy.linspace(0, 1, 11))) + +# 5c. Interpolate temperature on a new time base: [10, 20] +print(temperature.interp(time=[10, 20])) + +# 5d. Plot +temperature.plot(x="time", norm=matplotlib.colors.LogNorm()) +plt.show() + +####################################################################################### +# We can also manually build an xarray DataArray, this is shown below: + # 2. Store the temperature of the first time slice temperature = cp.profiles_1d[0].t_i_average diff --git a/docs/source/courses/advanced/xarray.rst b/docs/source/courses/advanced/xarray.rst index f28b452b..d1375a45 100644 --- a/docs/source/courses/advanced/xarray.rst +++ b/docs/source/courses/advanced/xarray.rst @@ -3,9 +3,10 @@ Create ``xarray.DataArray`` from an IDS .. info:: - In this lesson you will create a ``DataArray`` manually. In a future version of - IMAS-Python we plan to include functionality that will automatically do this for you. - That should further simplify working with data inside IDSs. + This lesson was written before :py:func:`imas.util.to_xarray` was + implemented. 
This lesson is retained for educational purposes, however we + recommend to use :py:func:`imas.util.to_xarray` instead of manually creating + xarray ``DataArray``\ s. Let's start with an introduction of Xarray. According to `their website `_ (where you @@ -61,6 +62,10 @@ Exercise 1: create a ``DataArray`` for ``profiles_1d/temperature`` .. md-tab-item:: Solution + This exercise was created before the implementation of + :py:func:`imas.util.to_xarray`. The original approach is available below + for educational purposes. + .. literalinclude:: imas_snippets/ids_to_xarray.py @@ -96,4 +101,9 @@ the ``profiles_1d`` array of structures. When the grid is not changing in the ID .. md-tab-item:: Solution + This exercise was created before the implementation of + :py:func:`imas.util.to_xarray`. Below code sample is updated to provide + two alternatives: the first is based on :py:func:`imas.util.to_xarray`, + the second is the original, manual approach. + .. literalinclude:: imas_snippets/tensorized_ids_to_xarray.py From ce313ded108ba7f7bb8c3431aded794ba3dea468 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Mon, 10 Mar 2025 15:00:37 +0100 Subject: [PATCH 79/97] Additional documentation for `imas.util.to_xarray` --- docs/source/courses/advanced/xarray.rst | 2 + docs/source/netcdf.rst | 49 ++++++++++++++++++++++++- 2 files changed, 49 insertions(+), 2 deletions(-) diff --git a/docs/source/courses/advanced/xarray.rst b/docs/source/courses/advanced/xarray.rst index d1375a45..249520d0 100644 --- a/docs/source/courses/advanced/xarray.rst +++ b/docs/source/courses/advanced/xarray.rst @@ -8,6 +8,8 @@ Create ``xarray.DataArray`` from an IDS recommend to use :py:func:`imas.util.to_xarray` instead of manually creating xarray ``DataArray``\ s. + See also: :ref:`Convert IMAS-Python IDSs directly to Xarray Datasets`. + Let's start with an introduction of Xarray. 
According to `their website `_ (where you can also find an excellent summary of why that is useful): diff --git a/docs/source/netcdf.rst b/docs/source/netcdf.rst index fb85ea23..868ae429 100644 --- a/docs/source/netcdf.rst +++ b/docs/source/netcdf.rst @@ -1,7 +1,7 @@ .. _`IMAS netCDF files`: -IMAS netCDF files -================= +IMAS netCDF files \& Xarray +=========================== .. toctree:: :hidden: @@ -69,6 +69,7 @@ features that are supported by DBEntries using ``imas_core`` respectively - Yes (requires ``imas_core >= 5.4.0``) - Not implemented +.. _`Using IMAS netCDF files with 3rd-party tools`: Using IMAS netCDF files with 3rd-party tools -------------------------------------------- @@ -138,3 +139,47 @@ Validating an IMAS netCDF file IMAS netCDF files can be validated with IMAS-Python through the command line ``imas validate_nc ``. See also :ref:`IMAS-Python Command Line tool` or type ``imas validate_nc --help`` in a command line. + + +.. _`Convert IMAS-Python IDSs directly to Xarray Datasets`: + +Convert IMAS-Python IDSs directly to Xarray Datasets +---------------------------------------------------- + +In the section :ref:`Using IMAS netCDF files with 3rd-party tools`, we showed +how to open an IMAS netCDF file with Xarray. However, IMAS-Python IDSs can also +be converted directly to Xarray ``Dataset``\ s with +:py:func:`imas.util.to_xarray`. + +This method can be used to convert a full IDS to an Xarray ``Dataset``, or only +specific paths inside the IDS. The latter variant can also be combined with +:ref:`lazy loading`. We'll show a small example below: + +.. 
code-block:: python + :caption: Converting a lazy loaded IDS to Xarray + + import imas.training + + # Open the training entry + with imas.training.get_training_db_entry() as training_entry: + # Lazy load the core_profiles IDS + core_profiles = training_entry.get("core_profiles", lazy=True) + # Load the average ion temperature and all coordinate data + xrds = imas.util.to_xarray(core_profiles, "profiles_1d.t_i_average") + # All relevant data is now loaded from the data entry into the xarray + # Dataset. We close the data entry by exiting the with-statement. + + # Inspect what's inside the dataset + print(xrds.data_vars) + # Data variables: + # profiles_1d.t_i_average + + # Included coordinates depends on the used Data Dictionary version + print(xrds.coords) + # Coordinates: (with DD 4.0.0) + # * time + # profiles_1d.grid.area + # profiles_1d.grid.volume + # profiles_1d.grid.rho_tor + # profiles_1d.grid.rho_tor_norm + # profiles_1d.grid.psi From dffeac7035db55f2445fba0f28a2f49dde14e78e Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Thu, 13 Mar 2025 11:33:06 +0100 Subject: [PATCH 80/97] Additional documentation and example for to_xarray --- imas/util.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/imas/util.py b/imas/util.py index 64e2b228..e41d28a2 100644 --- a/imas/util.py +++ b/imas/util.py @@ -543,6 +543,22 @@ def to_xarray(ids: IDSToplevel, *paths: str) -> Any: Returns: An ``xarray.Dataset`` object. + Notes: + - Lazy loaded IDSs are not supported for full IDS conversion + (``imas.util.to_xarray(ids)`` will raise an exception for lazy loaded IDSs). + This function can work with lazy loaded IDSs when paths are explicitly + provided: this might take a while because it will load all data for the + provided paths and their coordinates. + - This function does not accept wildcards for the paths. However, it is possible + to combine this method with :py:func:`imas.util.find_paths`, see the Examples + below. 
+ - This function may return an empty dataset in the following cases: + + - The provided IDS does not contain any data. + - The IDS does not contain any data for the provided paths. + - The provided paths do not point to data nodes, but to (arrays of) + structures. + Examples: .. code-block:: python @@ -563,6 +579,12 @@ def to_xarray(ids: IDSToplevel, *paths: str) -> Any: "profiles_1d.electrons.temperature", ) + # Combine with imas.util.find_paths to include all paths containing + # "profiles_1d" in the xarray conversion: + profiles_1d_paths = imas.util.find_paths(ids, "profiles_1d") + assert len(profiles_1d_paths) > 0 + ds = imas.util.to_xarray(ids, *profiles_1d_paths) + See Also: https://docs.xarray.dev/en/stable/generated/xarray.Dataset.html """ From 49d87d4083eb957b442de1b8f6acab54379b34f3 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Mon, 17 Mar 2025 11:50:05 +0100 Subject: [PATCH 81/97] Fixup merge conflicts --- docs/Makefile | 2 +- imas/backends/db_entry_impl.py | 24 ------------------------ 2 files changed, 1 insertion(+), 25 deletions(-) diff --git a/docs/Makefile b/docs/Makefile index f0c27f01..bca56859 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -31,7 +31,7 @@ clean: Makefile # Seems to overwrite autosummary documentation though! 
So not using this rn MODULE_EXCLUDE="../imas/examples/**" "../imas/**" apidocs: Makefile - sphinx-apidoc --implicit-namespaces -o "$(GENERATEDDIR)" "$(PROJECT_ROOT)/imaspy/" $(MODULE_EXCLUDE) + sphinx-apidoc --implicit-namespaces -o "$(GENERATEDDIR)" "$(PROJECT_ROOT)/imas/" $(MODULE_EXCLUDE) # Sphinx will run this automatically autogen: Makefile diff --git a/imas/backends/db_entry_impl.py b/imas/backends/db_entry_impl.py index 4a33dde1..df1e4638 100644 --- a/imas/backends/db_entry_impl.py +++ b/imas/backends/db_entry_impl.py @@ -36,30 +36,6 @@ class GetSampleParameters: """See :param:`imas.db_entry.DBEntry.get_sample.interpolation_method`.""" -@dataclass -class GetSliceParameters: - """Helper class to store parameters to get_slice.""" - - time_requested: float - """See :param:`imaspy.db_entry.DBEntry.get_slice.time_requested`.""" - interpolation_method: int - """See :param:`imaspy.db_entry.DBEntry.get_slice.interpolation_method`.""" - - -@dataclass -class GetSampleParameters: - """Helper class to store parameters to get_sample.""" - - tmin: float - """See :param:`imaspy.db_entry.DBEntry.get_sample.tmin`.""" - tmax: float - """See :param:`imaspy.db_entry.DBEntry.get_sample.tmax`.""" - dtime: Optional[numpy.ndarray] - """See :param:`imaspy.db_entry.DBEntry.get_sample.dtime`.""" - interpolation_method: Optional[int] - """See :param:`imaspy.db_entry.DBEntry.get_sample.interpolation_method`.""" - - class DBEntryImpl(ABC): """Interface for DBEntry implementations.""" From 90e97b675fcc2586e46c450388207a30369b0784 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Mon, 17 Mar 2025 14:38:52 +0100 Subject: [PATCH 82/97] Fixup code after merge --- README.md | 4 ++-- imas/backends/imas_core/db_entry_helpers.py | 6 +----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index f166ded1..a277c9af 100644 --- a/README.md +++ b/README.md @@ -32,11 +32,11 @@ equilibrium.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGE 
equilibrium.ids_properties.comment = "testing" equilibrium.time = [0.01] -with imas.DBEntry("imas:hdf5?path=./testdb","w") as dbentry: +with imas.DBEntry("imas:hdf5?path=./testdb", "w") as dbentry: dbentry.put(equilibrium) # or without imas_core dependency -with imas.DBEntry("./test.nc","w") as dbentry: +with imas.DBEntry("./test.nc", "w") as dbentry: dbentry.put(equilibrium) ``` diff --git a/imas/backends/imas_core/db_entry_helpers.py b/imas/backends/imas_core/db_entry_helpers.py index bd118ea1..f83a0d47 100644 --- a/imas/backends/imas_core/db_entry_helpers.py +++ b/imas/backends/imas_core/db_entry_helpers.py @@ -77,15 +77,11 @@ def get_children( getattr(structure, name)._IDSPrimitive__value = data -def _get_child(child: IDSBase, ctx: Optional[LazyALContext]): +def _get_child(child: IDSBase, ctx: LazyALContext): """Get a single child when required (lazy loading).""" # NOTE: changes in this method must be propagated to _get_children and vice versa # Performance: this method is specialized for the lazy get - # ctx can be None when the parent structure does not exist in the on-disk DD version - if ctx is None: - return # There is no data to be loaded - time_mode = ctx.time_mode if time_mode == IDS_TIME_MODE_INDEPENDENT and child.metadata.type.is_dynamic: return # skip dynamic (time-dependent) nodes From 2d93ab01614451924baa2ae5fd170cc9a8ea1ff7 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Mon, 17 Mar 2025 14:39:58 +0100 Subject: [PATCH 83/97] Updating changelog for release 2.0.0 --- docs/source/changelog.rst | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index b764f73f..74db4e44 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -3,8 +3,28 @@ Changelog ========= +What's new in IMAS-Python 2.0.0 +------------------------------- + +Breaking change +''''''''''''''' + +The package name was changed from `imaspy` to `imas` while porting the code to 
GitHub. This shall only affect the import statements in your code. + +New features and improvements +''''''''''''''''''''''''''''' + +- Add :py:func:`imas.util.to_xarray` to convert a full IDS or only specific paths herein to a Xarray ``Dataset``. See :ref:`Convert IMAS-Python IDSs directly to Xarray Datasets` for more details. +- Implements automatic DD version conversion on :py:meth:`imas.db_entry.DBEntry.get` (conversion during :py:meth:`imas.db_entry.DBEntry.put` is not supported as this is rarely needed and easily worked around). +- Enable lazy loading when reading data from a netCDF file. +- Minor performance improvement loading data from a netCDF file. +- Replace ``versioneer`` by ``setuptools-scm`` to determine the version of the code. +- Use `saxonche `__ instead of the JAR for XSL transforms (when building versions of the DD). +- Updating the README, CONTRIBUTING guidelines and documentation after making the code open access. + + What's new in IMAS-Python 1.2.0 --------------------------- +------------------------------- New features and improvements ''''''''''''''''''''''''''''' From f37b699376298f7e7b768f719e730c1f54b1038f Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Mon, 17 Mar 2025 15:06:02 +0100 Subject: [PATCH 84/97] Fixup formatting in changelog --- docs/source/changelog.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 74db4e44..6f347581 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -9,7 +9,7 @@ What's new in IMAS-Python 2.0.0 Breaking change ''''''''''''''' -The package name was changed from `imaspy` to `imas` while porting the code to GitHub. This shall only affect the import statements in your code. +The package name was changed from ``imaspy`` to ``imas`` while porting the code to GitHub. This shall only affect the import statements in your code. 
New features and improvements ''''''''''''''''''''''''''''' @@ -18,7 +18,7 @@ New features and improvements - Implements automatic DD version conversion on :py:meth:`imas.db_entry.DBEntry.get` (conversion during :py:meth:`imas.db_entry.DBEntry.put` is not supported as this is rarely needed and easily worked around). - Enable lazy loading when reading data from a netCDF file. - Minor performance improvement loading data from a netCDF file. -- Replace ``versioneer`` by ``setuptools-scm`` to determine the version of the code. +- Replace versioneer by setuptools-scm to determine the version of the code. - Use `saxonche `__ instead of the JAR for XSL transforms (when building versions of the DD). - Updating the README, CONTRIBUTING guidelines and documentation after making the code open access. From 6151b371d7f533214691eb44f0d45a33a0f1cee3 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Mon, 17 Mar 2025 14:05:51 +0100 Subject: [PATCH 85/97] removed pull_request event --- .github/workflows/test_with_pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test_with_pytest.yml b/.github/workflows/test_with_pytest.yml index 7e56ac38..ecff9a7e 100644 --- a/.github/workflows/test_with_pytest.yml +++ b/.github/workflows/test_with_pytest.yml @@ -1,6 +1,6 @@ name: Test using pytest -on: [push, pull_request] +on: push jobs: test: From 062266a761dd5345cb41cc4d2eb60d7230845a60 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Mon, 17 Mar 2025 15:19:44 +0100 Subject: [PATCH 86/97] added pull_request event with options --- .github/workflows/linting.yml | 5 ++++- .github/workflows/publish.yml | 5 ++++- .github/workflows/test_with_pytest.yml | 5 ++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 96bed185..e18f1c38 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -1,6 +1,9 @@ name: linting-and-code-formatting -on: 
push +on: + push: + pull_request: + types: [opened, synchronize, reopened] jobs: build: diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 2ebc8562..a2f88020 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,6 +1,9 @@ name: build-wheel-and-publish-test-pypi -on: push +on: + push: + pull_request: + types: [opened, synchronize, reopened] jobs: build: diff --git a/.github/workflows/test_with_pytest.yml b/.github/workflows/test_with_pytest.yml index ecff9a7e..7a345c7f 100644 --- a/.github/workflows/test_with_pytest.yml +++ b/.github/workflows/test_with_pytest.yml @@ -1,6 +1,9 @@ name: Test using pytest -on: push +on: + push: + pull_request: + types: [opened, synchronize, reopened] jobs: test: From 2e3b1b16e30f6a39d9b74933fe8e888a803cb4fb Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 18 Mar 2025 11:35:02 +0100 Subject: [PATCH 87/97] added workflow for sphinx doc generation and fixed docstrings --- .github/workflows/verify_sphinx_doc.yml | 56 +++++++++++++++++++++++++ docs/source/changelog.rst | 4 +- imas/backends/imas_core/al_context.py | 12 ++++++ imas/backends/netcdf/ids_tensorizer.py | 11 +++++ imas/backends/netcdf/nc2ids.py | 38 +++++++++++++++++ 5 files changed, 119 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/verify_sphinx_doc.yml diff --git a/.github/workflows/verify_sphinx_doc.yml b/.github/workflows/verify_sphinx_doc.yml new file mode 100644 index 00000000..706fa1b1 --- /dev/null +++ b/.github/workflows/verify_sphinx_doc.yml @@ -0,0 +1,56 @@ +name: verify-sphinx-doc-generation + +on: + push: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + build-and-test: + runs-on: ubuntu-22.04 + + steps: + - name: Checkout IMAS-Python sources + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + with: + # until saxonche is available in 3.13 + # https://saxonica.plan.io/issues/6561 + python-version: "<3.13" + + - 
name: Display Python version + run: python -c "import sys; print(sys.version)" + + + - name: Set up Python virtual environment + run: | + python -m venv venv + source venv/bin/activate + + - name: Install build dependencies + run: | + pip install --upgrade pip setuptools wheel build + + - name: Build package + run: | + rm -rf dist + python -m build . + + - name: Install package and dependencies + run: | + pip install "$(readlink -f dist/*.whl)[docs,netcdf]" + + - name: Debug dependencies + run: | + pip freeze + + - name: Build Sphinx documentation + run: | + export SPHINXOPTS='-W -n --keep-going' + make -C docs clean html + + - name: Deactivate virtual environment + run: deactivate \ No newline at end of file diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index b764f73f..92d8b043 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -4,12 +4,12 @@ Changelog ========= What's new in IMAS-Python 1.2.0 --------------------------- +------------------------------- New features and improvements ''''''''''''''''''''''''''''' -- Add :py:func:`imaspy.DBEntry.get_sample` (requires imas_core >= 5.4.0) +- Add :py:func:`imas.DBEntry.get_sample` previously imaspy.DBEntry.get_sample (requires imas_core >= 5.4.0) - Improved validation of netCDF files - Improve compatibility with the UDA backend in imas_core - Extend the support of netCDF to >= 1.4.1 (without complex numbers) diff --git a/imas/backends/imas_core/al_context.py b/imas/backends/imas_core/al_context.py index 19b34d8e..7fafd77e 100644 --- a/imas/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -282,6 +282,18 @@ def __init__( """Potential weak reference to opened context.""" def get_child(self, child): + """ + Retrieve a child entry from the field. + + Args: + child (str): The name or identifier of the child entry to retrieve. + + Returns: + The child entry retrieved from the database. 
+ + Raises: + Exception: If the child entry cannot be found or an error occurs during retrieval. + """ imas.backends.imas_core.db_entry_helpers._get_child(child, self) def get_context(self) -> ALContext: diff --git a/imas/backends/netcdf/ids_tensorizer.py b/imas/backends/netcdf/ids_tensorizer.py index 95bfba47..3e9f77d0 100644 --- a/imas/backends/netcdf/ids_tensorizer.py +++ b/imas/backends/netcdf/ids_tensorizer.py @@ -169,6 +169,17 @@ def filter_coordinates(self, path: str) -> str: ) def tensorize(self, path, fillvalue): + """ + Tensorizes the data at the given path with the specified fill value. + + Args: + path (str): The path to the data in the NetCDF file. + fillvalue (any): The value to fill the tensor with. Can be of any type, + including strings. + + Returns: + numpy.ndarray: A tensor filled with the data from the specified path. + """ dimensions = self.ncmeta.get_dimensions(path, self.homogeneous_time) shape = tuple(self.dimension_size[dim] for dim in dimensions) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index e9b524fb..a4da2b67 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -317,6 +317,13 @@ def __init__(self, nc2ids, index=()): self.index = index def get_child(self, child): + """ + Retrieves and sets the appropriate context or value for a given child node based on its metadata. + + Args: + child: The child node for which the context or value is to be set. The child node should have metadata attributes. + + """ metadata = child.metadata path = metadata.path_string data_type = metadata.data_type @@ -366,12 +373,43 @@ def get_child(self, child): class LazyArrayStructContext(LazyContext): + """ + LazyArrayStructContext is a subclass of LazyContext that provides a context for + handling structured arrays in a lazy manner. It is initialized with a NetCDF to + IDS mapping object, an index, and a size. 
+ """ def __init__(self, nc2ids, index, size): + """ + Initialize the instance with nc2ids, index, and size. + + Args: + nc2ids: The NetCDF to IDS mapping object. + index: The index within the NetCDF file. + size: The size of the data to be processed. + """ super().__init__(nc2ids, index) self.size = size def get_context(self): + """ + Returns the current context. + + This method returns the current instance of the class, which is expected + to have a 'size' attribute as required by IDSStructArray. + + Returns: + self: The current instance of the class. + """ return self # IDSStructArray expects to get something with a size attribute def iterate_to_index(self, index: int) -> LazyContext: + """ + Iterates to a specified index and returns a LazyContext object. + + Args: + index (int): The index to iterate to. + + Returns: + LazyContext: A LazyContext object initialized with the updated index. + """ return LazyContext(self.nc2ids, self.index + (index,)) From 1d212a0bdd696238d16da32acf8b354d56fa09ef Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 18 Mar 2025 13:13:48 +0100 Subject: [PATCH 88/97] fix sphinx docstring issue --- docs/source/changelog.rst | 2 +- imas/backends/netcdf/ids_tensorizer.py | 6 +++--- imas/backends/netcdf/nc2ids.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 92d8b043..d04aea09 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -9,7 +9,7 @@ What's new in IMAS-Python 1.2.0 New features and improvements ''''''''''''''''''''''''''''' -- Add :py:func:`imas.DBEntry.get_sample` previously imaspy.DBEntry.get_sample (requires imas_core >= 5.4.0) +- Add :py:func:`imas.DBEntry.get_sample ` (requires imas_core >= 5.4.0) - Improved validation of netCDF files - Improve compatibility with the UDA backend in imas_core - Extend the support of netCDF to >= 1.4.1 (without complex numbers) diff --git a/imas/backends/netcdf/ids_tensorizer.py 
b/imas/backends/netcdf/ids_tensorizer.py index 3e9f77d0..38e934ea 100644 --- a/imas/backends/netcdf/ids_tensorizer.py +++ b/imas/backends/netcdf/ids_tensorizer.py @@ -173,12 +173,12 @@ def tensorize(self, path, fillvalue): Tensorizes the data at the given path with the specified fill value. Args: - path (str): The path to the data in the NetCDF file. - fillvalue (any): The value to fill the tensor with. Can be of any type, + path: The path to the data in the NetCDF file. + fillvalue: The value to fill the tensor with. Can be of any type, including strings. Returns: - numpy.ndarray: A tensor filled with the data from the specified path. + A tensor filled with the data from the specified path. """ dimensions = self.ncmeta.get_dimensions(path, self.homogeneous_time) shape = tuple(self.dimension_size[dim] for dim in dimensions) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index a4da2b67..0af27d9c 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -398,7 +398,7 @@ def get_context(self): to have a 'size' attribute as required by IDSStructArray. Returns: - self: The current instance of the class. + The current instance of the class. """ return self # IDSStructArray expects to get something with a size attribute From 8f49f94ee5de45e01f374b9d501c26eb1fd8870d Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 18 Mar 2025 13:31:41 +0100 Subject: [PATCH 89/97] fixed formatting --- imas/backends/imas_core/al_context.py | 3 ++- imas/backends/netcdf/nc2ids.py | 13 ++++++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/imas/backends/imas_core/al_context.py b/imas/backends/imas_core/al_context.py index 7fafd77e..2b6117e0 100644 --- a/imas/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -292,7 +292,8 @@ def get_child(self, child): The child entry retrieved from the database. 
Raises: - Exception: If the child entry cannot be found or an error occurs during retrieval. + Exception: If the child entry cannot be found or an error occurs + during retrieval. """ imas.backends.imas_core.db_entry_helpers._get_child(child, self) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index 0af27d9c..509b7548 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -318,10 +318,12 @@ def __init__(self, nc2ids, index=()): def get_child(self, child): """ - Retrieves and sets the appropriate context or value for a given child node based on its metadata. + Retrieves and sets the appropriate context or value for a given + child node based on its metadata. Args: - child: The child node for which the context or value is to be set. The child node should have metadata attributes. + child: The child node for which the context or value is to be + set. The child node should have metadata attributes. """ metadata = child.metadata @@ -374,10 +376,11 @@ def get_child(self, child): class LazyArrayStructContext(LazyContext): """ - LazyArrayStructContext is a subclass of LazyContext that provides a context for - handling structured arrays in a lazy manner. It is initialized with a NetCDF to + LazyArrayStructContext is a subclass of LazyContext that provides a context for + handling structured arrays in a lazy manner. It is initialized with a NetCDF to IDS mapping object, an index, and a size. """ + def __init__(self, nc2ids, index, size): """ Initialize the instance with nc2ids, index, and size. @@ -394,7 +397,7 @@ def get_context(self): """ Returns the current context. - This method returns the current instance of the class, which is expected + This method returns the current instance of the class, which is expected to have a 'size' attribute as required by IDSStructArray. 
Returns: From 1d0398ebac4d5164dd4b98833df1e76d770fbb83 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 18 Mar 2025 13:57:35 +0100 Subject: [PATCH 90/97] fixed sphinx doc issue --- imas/backends/imas_core/al_context.py | 4 ---- imas/backends/netcdf/nc2ids.py | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/imas/backends/imas_core/al_context.py b/imas/backends/imas_core/al_context.py index 2b6117e0..3341121b 100644 --- a/imas/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -290,10 +290,6 @@ def get_child(self, child): Returns: The child entry retrieved from the database. - - Raises: - Exception: If the child entry cannot be found or an error occurs - during retrieval. """ imas.backends.imas_core.db_entry_helpers._get_child(child, self) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index 509b7548..f62c438a 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -323,7 +323,7 @@ def get_child(self, child): Args: child: The child node for which the context or value is to be - set. The child node should have metadata attributes. + set. The child node should have metadata attributes. 
""" metadata = child.metadata From 125ba37feb70d9dfcc3c92bf24a62228df8f01f9 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 18 Mar 2025 14:06:01 +0100 Subject: [PATCH 91/97] removed deactivate command from workflow --- .github/workflows/verify_sphinx_doc.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/verify_sphinx_doc.yml b/.github/workflows/verify_sphinx_doc.yml index 706fa1b1..6a12690e 100644 --- a/.github/workflows/verify_sphinx_doc.yml +++ b/.github/workflows/verify_sphinx_doc.yml @@ -51,6 +51,3 @@ jobs: run: | export SPHINXOPTS='-W -n --keep-going' make -C docs clean html - - - name: Deactivate virtual environment - run: deactivate \ No newline at end of file From 50eba563834b5e3e63494d412c63be9d9c24d867 Mon Sep 17 00:00:00 2001 From: Prasad Date: Tue, 18 Mar 2025 16:59:34 +0100 Subject: [PATCH 92/97] Update imas/backends/netcdf/ids_tensorizer.py Co-authored-by: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> --- imas/backends/netcdf/ids_tensorizer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imas/backends/netcdf/ids_tensorizer.py b/imas/backends/netcdf/ids_tensorizer.py index 38e934ea..7e9e33ec 100644 --- a/imas/backends/netcdf/ids_tensorizer.py +++ b/imas/backends/netcdf/ids_tensorizer.py @@ -173,7 +173,7 @@ def tensorize(self, path, fillvalue): Tensorizes the data at the given path with the specified fill value. Args: - path: The path to the data in the NetCDF file. + path: The path to the data in the IDS. fillvalue: The value to fill the tensor with. Can be of any type, including strings. 
From aa18dcb144335e223862e4d2435be713846f4378 Mon Sep 17 00:00:00 2001 From: Prasad Date: Tue, 18 Mar 2025 16:59:45 +0100 Subject: [PATCH 93/97] Update imas/backends/netcdf/nc2ids.py Co-authored-by: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> --- imas/backends/netcdf/nc2ids.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index f62c438a..306c128e 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -322,8 +322,7 @@ def get_child(self, child): child node based on its metadata. Args: - child: The child node for which the context or value is to be - set. The child node should have metadata attributes. + child: The child IDS node which should be lazy loaded. """ metadata = child.metadata From 84cb8790f21410e5785bbb5a9a4b37356421d051 Mon Sep 17 00:00:00 2001 From: prasad-sawantdesai Date: Tue, 18 Mar 2025 17:06:12 +0100 Subject: [PATCH 94/97] imas to imaspy for old version --- docs/source/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index d04aea09..ee831285 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -9,7 +9,7 @@ What's new in IMAS-Python 1.2.0 New features and improvements ''''''''''''''''''''''''''''' -- Add :py:func:`imas.DBEntry.get_sample ` (requires imas_core >= 5.4.0) +- Add :py:func:`imaspy.DBEntry.get_sample ` (requires imas_core >= 5.4.0) - Improved validation of netCDF files - Improve compatibility with the UDA backend in imas_core - Extend the support of netCDF to >= 1.4.1 (without complex numbers) From d81408cefaece8a3ad90e44be2dbfcc3e83da109 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Tue, 18 Mar 2025 20:08:53 +0100 Subject: [PATCH 95/97] Update the 5 min intro --- docs/source/intro.rst | 83 +++++++++++++++++++++++++------------------ 1 file changed, 49 insertions(+), 34 deletions(-) diff --git 
a/docs/source/intro.rst b/docs/source/intro.rst index 0118f217..15e27ef4 100644 --- a/docs/source/intro.rst +++ b/docs/source/intro.rst @@ -20,11 +20,11 @@ be outdated. >>> import imas >>> print(imas.__version__) - 1.0.0 + 2.0.0 .. note:: - If you have an IMAS-Python install without the IMAS Access Layer, importing + If you have an IMAS-Python install without the IMAS-Core, importing IMAS-Python will display an error message. You can still use IMAS-Python, but not all functionalities are available. @@ -44,7 +44,7 @@ on different Data Dictionary versions. >>> import imas >>> import numpy as np >>> ids_factory = imas.IDSFactory() - 13:26:47 [INFO] Parsing data dictionary version 3.38.1 @dd_zip.py:127 + 18:23:12 INFO Parsing data dictionary version 4.0.0 @dd_zip.py:166 >>> # Create an empty core_profiles IDS >>> core_profiles = ids_factory.core_profiles() @@ -56,6 +56,7 @@ We can now use this ``core_profiles`` IDS and assign some data to it: >>> core_profiles.ids_properties.homogeneous_time = imas.ids_defs.IDS_TIME_MODE_HOMOGENEOUS >>> # array quantities are automatically converted to the appropriate numpy arrays >>> core_profiles.time = [1, 2, 3] + 18:24:58 INFO Assigning incorrect type 'int64' to , attempting automatic conversion. @ids_primitive.py:483 >>> # the python list of ints is converted to a 1D array of floats >>> core_profiles.time @@ -66,7 +67,7 @@ We can now use this ``core_profiles`` IDS and assign some data to it: 3 >>> # assign some data for the first time slice >>> core_profiles.profiles_1d[0].grid.rho_tor_norm = [0, 0.5, 1.0] - >>> core_profiles.profiles_1d[0].j_tor = [0, 0, 0] + >>> core_profiles.profiles_1d[0].j_phi = [0., 0., 0.] As you can see in the example above, IMAS-Python automatically checks the data you try to assign to an IDS with the data type specified in the Data Dictionary. 
When @@ -77,30 +78,32 @@ get an error message if this is not possible: >>> core_profiles.time = "Cannot be converted" ValueError: could not convert string to float: 'Cannot be converted' - >>> core_profiles.time = 1-1j - TypeError: can't convert complex to float - >>> core_profiles.ids_properties.source = 1-1j # automatically converted to str - >>> core_profiles.ids_properties.source.value - '(1-1j)' + >>> core_profiles.ids_properties.comment = 1-1j # automatically converted to str + >>> core_profiles.ids_properties.comment + str('(1-1j)') -Store an IDS to disk -'''''''''''''''''''' +Load and store an IDS to disk with IMAS-Core +'''''''''''''''''''''''''''''''''''''''''''' .. note:: - - This functionality requires the IMAS Access Layer. - - This API will change when IMAS-Python is moving to Access Layer 5 (expected Q2 - 2023). + - This functionality requires the IMAS-Core, until this library is openly available + on GitHub you may need to fetch it from `git.iter.org `_ + (requires to have an ITER account). Using IMAS-Core also enable slicing methods + :py:meth:`~imas.db_entry.DBEntry.get_slice`, + :py:meth:`~imas.db_entry.DBEntry.put_slice` and + :py:meth:`~imas.db_entry.DBEntry.get_sample` (with IMAS-Core>=5.4). + - If you can't have access to it, you can save IDS to disk with the built-in + netCDF backend :ref:`Store and load an IDS to disk with netCDF` -To store an IDS to disk, we need to indicate the following information to the -IMAS Access Layer. Please check the `IMAS Access Layer documentation -`_ for more information on this. +To store an IDS to disk, we need to indicate the following URI to the +IMAS-Core: ``imas:?path=`` or using the legacy query keys +``imas:?user=;database=;version=;pulse=;run=`` +which are then converted as a path ``~user/public/imasdb/database/version/pulse/run``. -- Which backend to use (e.g. 
MDSPLUS or HDF5) -- ``tokamak`` (also known as database) -- ``pulse`` -- ``run`` +Available ```` may depend on your IMAS-Core install: ``hdf5``, ``mdsplus``, +``ascii``, ``memory``, ``uda``. In IMAS-Python you do this as follows: @@ -108,32 +111,44 @@ In IMAS-Python you do this as follows: >>> # Create a new IMAS data entry for storing the core_profiles IDS we created earlier >>> # Here we specify the backend, database, pulse and run - >>> dbentry = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2) - >>> dbentry.create() + >>> dbentry = imas.DBEntry("imas:hdf5?path=./testdb","w") >>> # now store the core_profiles IDS we just populated >>> dbentry.put(core_profiles) .. image:: imas_structure.png +To load an IDS from disk, you need to specify the same information as +when storing the IDS (see above). Once the data entry is opened, you +can use ``.get()`` to load IDS data from disk: -Load an IDS from disk -''''''''''''''''''''' +.. code-block:: python -.. note:: + >>> # Now load the core_profiles IDS back from disk + >>> dbentry2 = imas.DBEntry("imas:hdf5?path=./testdb","r") + >>> core_profiles2 = dbentry2.get("core_profiles") + >>> print(core_profiles2.ids_properties.comment.value) - - This functionality requires the IMAS Access Layer. - - This API will change when IMAS-Python is moving to Access Layer 5 (expected Q2 - 2023). -To load an IDS from disk, you need to specify the same information as -when storing the IDS (see previous section). Once a data entry is opened, you +Load and store an IDS to disk with IMAS-Core +'''''''''''''''''''''''''''''''''''''''''''' + +In IMAS-Python you do this as follows: + +.. 
code-block:: python + + >>> # Create a new IMAS data entry for storing the core_profiles IDS we created earlier + >>> # here we directly point to a .nc filename in your system + >>> dbentry = imas.DBEntry("mypulsefile.nc","w") + >>> # now store the core_profiles IDS we just populated + >>> dbentry.put(core_profiles) + +To load an IDS from disk, you need to specify the same file information as +when storing the IDS. Once the data entry is opened, you can use ``.get()`` to load IDS data from disk: .. code-block:: python >>> # Now load the core_profiles IDS back from disk - >>> dbentry2 = imas.DBEntry(imas.ids_defs.HDF5_BACKEND, "TEST", 10, 2) - >>> dbentry2.open() + >>> dbentry2 = imas.DBEntry("mypulsefile.nc","r") >>> core_profiles2 = dbentry2.get("core_profiles") >>> print(core_profiles2.ids_properties.comment.value) - Testing IMAS-Python From 8f7986dec66889f91db4ab00605b369888e4cb8d Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Wed, 19 Mar 2025 09:00:19 +0100 Subject: [PATCH 96/97] Update documentation for release 2.0 --- README.md | 6 ++--- docs/source/changelog.rst | 48 +++++++++++++++++++------------------- docs/source/index.rst | 5 ++++ docs/source/installing.rst | 2 +- docs/source/intro.rst | 8 ++++--- 5 files changed, 38 insertions(+), 31 deletions(-) diff --git a/README.md b/README.md index a277c9af..35c1ae46 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # IMAS-Python -IMAS-Python is a pure-python library to handle arbitrarily nested data structures. -It is designed for, but not necessarily bound to, interacting with Interface -Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) +IMAS-Python (formerly known as IMASPy its for versions < 2) is a pure-python library to handle +arbitrarily nested data structures. It is designed for, but not necessarily bound to, interacting +with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. 
diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index a09bdcaa..368630bf 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -9,7 +9,7 @@ What's new in IMAS-Python 2.0.0 Breaking change ''''''''''''''' -The package name was changed from ``imaspy`` to ``imas`` while porting the code to GitHub. This shall only affect the import statements in your code. +The package name was changed from ``imaspy`` to ``imas`` while porting the code to `GitHub `__. This shall only affect the import statements in your code. New features and improvements ''''''''''''''''''''''''''''' @@ -23,7 +23,7 @@ New features and improvements - Updating the README, CONTRIBUTING guidelines and documentation after making the code open access. -What's new in IMAS-Python 1.2.0 +What's new in IMASPy 1.2.0 ------------------------------- New features and improvements @@ -43,7 +43,7 @@ Bug fixes -What's new in IMAS-Python 1.1.1 +What's new in IMASPy 1.1.1 ------------------------------- This is a small release that mainly fixes issues related to the recent Data @@ -59,7 +59,7 @@ Bug fixes Dictionary 4.0.0 and 3.42.0. In other cases, the Data Dictionary version is now explicitly indicated. -- :issue:`IMAS-5560`: Fix a bug where IMAS-Python would not correctly recognize that +- :issue:`IMAS-5560`: Fix a bug where IMASPy would not correctly recognize that the UDA backend is used. - :issue:`IMAS-5541`: Fix a bug when converting a closed contour to Data Dictionary version 4.0.0. @@ -69,7 +69,7 @@ Bug fixes recent Data Dictionary version than the on-disk data was stored with. -What's new in IMAS-Python 1.1 +What's new in IMASPy 1.1 ----------------------------- New features @@ -77,7 +77,7 @@ New features - :ref:`1.1/improved performance`. - :ref:`1.1/improved conversion`. -- IMAS-Python 1.1 adds support for Identifiers defined by the Data Dictionary. This +- IMASPy 1.1 adds support for Identifiers defined by the Data Dictionary. 
This functionality is described in detail in :ref:`Identifiers`. - Support for the new :py:const:`~imas.ids_defs.FLEXBUFFERS_SERIALIZER_PROTOCOL` that is @@ -92,7 +92,7 @@ New features netCDF file, which can be used for sharing and/or archiving data. This feature is in `preview` status, meaning that it may change in upcoming - minor releases of IMAS-Python. + minor releases of IMASPy. - Additional utility functions in :py:mod:`imas.util`: @@ -111,18 +111,18 @@ New features - :py:func:`imas.util.get_data_dictionary_version` returns the Data Dictionary version for which an IDS was created. -- Add support for IMAS Access Layer Core 5.2 and later. IMAS-Python can now be used +- Add support for IMAS Access Layer Core 5.2 and later. IMASPy can now be used with just the Access Layer Core package available, the full AL-Python HLI is no longer required. Since the Access Layer Core is now installable with ``pip`` as well (requires access to the git repository on - ``__), you can install - ``imas`` and ``imas_core`` in one go with: + ``__), you can install + ``imaspy`` and ``imas_core`` in one go with: .. code-block:: bash - pip install 'imas[imas-core] @ git+ssh://git@github.com/iterorganization/imas-core.git' + pip install 'imaspy[imas-core] @ git+ssh://git@github.com/iterorganization/imaspy.git' - A diff tool for IDSs: :py:func:`imas.util.idsdiff`. - Implement ``==`` equality checking for IDS Structures and Arrays of Structures @@ -131,12 +131,12 @@ New features backend. During a :py:meth:`~imas.db_entry.DBEntry.get` or - :py:meth:`~imas.db_entry.DBEntry.get_slice`, IMAS-Python first reads the version + :py:meth:`~imas.db_entry.DBEntry.get_slice`, IMASPy first reads the version of the Data Dictionary that was used to store the IDS. When this version is - not known to IMAS-Python, an error is raised. This error can now be ignored by + not known to IMASPy, an error is raised. 
This error can now be ignored by setting the parameter :py:param:`~imas.db_entry.DBEntry.get.ignore_unknown_dd_version` to - ``True``, and IMAS-Python will do its best to load the data anyway. + ``True``, and IMASPy will do its best to load the data anyway. - A new command line tool exists for analyzing which Data Dictionary fields are used in provided Data Entries. This tool is explained in detail in @@ -150,7 +150,7 @@ Breaking changes .. note:: - We attempt to keep the public API of IMAS-Python stable with minor releases. The + We attempt to keep the public API of IMASPy stable with minor releases. The following breaking change is the result of an upgrade of the IMAS Access Layer. - Starting with Access Layer 5.2 or newer, the Access Layer will raise @@ -161,8 +161,8 @@ Breaking changes You may need to update the :py:class:`Exception` classes in ``try/except`` blocks to the new Exception classes raised by ``imas_core``. - When using an older version of the Access Layer, the behaviour of IMAS-Python is no - different than in IMAS-Python 1.0. + When using an older version of the Access Layer, the behaviour of IMASPy is no + different than in IMASPy 1.0. Bug fixes @@ -176,10 +176,10 @@ Bug fixes - Fixed a bug with :py:func:`~imas.ids_toplevel.IDSToplevel.serialize` when the IDS is in a non-default Data Dictionary version. - Fixed a bug when assigning ``nan`` to a FLT_0D, which would lead to a - confusing and incorrect log message in IMAS-Python 1.0. -- Fixed incorrect oldest supported DD version. Previously IMAS-Python indicated that + confusing and incorrect log message in IMASPy 1.0. +- Fixed incorrect oldest supported DD version. Previously IMASPy indicated that DD ``3.21.1`` was supported, however ``3.22.0`` is the oldest Data Dictionary - tested (and provided) with IMAS-Python. :py:attr:`imas.OLDEST_SUPPORTED_VERSION` + tested (and provided) with IMASPy. :py:attr:`imas.OLDEST_SUPPORTED_VERSION` has been updated to reflect this. 
- Fixed a bug when using numpy functions, such as :external:py:func:`numpy.isclose` on scalar numbers. Previously an error was @@ -198,11 +198,11 @@ Improved performance '''''''''''''''''''' - Improved performance of :py:meth:`~imas.ids_toplevel.IDSToplevel.validate`. -- Improved creation of IMAS-Python IDS objects. This made filling IDSs and loading +- Improved creation of IMASPy IDS objects. This made filling IDSs and loading them with :py:meth:`~imas.db_entry.DBEntry.get` / :py:meth:`~imas.db_entry.DBEntry.get_slice` 10-20% faster. - Improved the performance of lazy loading. This is most noticeable with the - ``HDF5`` backend, which is now up to 40x faster than with IMAS-Python 1.0. + ``HDF5`` backend, which is now up to 40x faster than with IMASPy 1.0. - Improved the performance of :py:meth:`~imas.db_entry.DBEntry.get` / :py:meth:`~imas.db_entry.DBEntry.get_slice` / :py:meth:`~imas.db_entry.DBEntry.put` / @@ -220,7 +220,7 @@ Converting IDSs between Data Dictionary versions has several improvements for recent DD versions. Further details on IDS conversion can be found in :ref:`Conversion of IDSs between DD versions`. -- The IMAS-Python Command Line Interface for converting Data Entries between different +- The IMASPy Command Line Interface for converting Data Entries between different versions of the Data Dictionary has been improved. See :ref:`Command line tool reference` or execute ``imas convert --help`` in a shell for further details. @@ -230,7 +230,7 @@ recent DD versions. Further details on IDS conversion can be found in For example, in the ``pulse_schedule`` IDS, the node ``ec/beam/power_launched/reference`` in Data Dictionary ``3.40.0`` was renamed from ``ec/launcher/power/reference/data`` in Data Dictionary ``3.39.0``. This - use case is now supported by IMAS-Python. + use case is now supported by IMASPy. - Automatically convert data between 0D and 1D when possible (`IMAS-5170 `__). 
diff --git a/docs/source/index.rst b/docs/source/index.rst index 5db1a1fa..7aa06277 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -11,6 +11,11 @@ data structures. IMAS-Python is designed for, but not necessarily bound to, interacting with Interface Data Structures (IDSs) as defined by the Integrated Modelling & Analysis Suite (IMAS) Data Model. +.. note:: + + IMAS-Python was formerly known as IMASPy, and was renamed with the release + of the major version 2. + It provides: - An easy-to-install and easy-to-get started package by diff --git a/docs/source/installing.rst b/docs/source/installing.rst index 2d05ff2f..0f2129ca 100644 --- a/docs/source/installing.rst +++ b/docs/source/installing.rst @@ -56,7 +56,7 @@ This is how to run the IMAS-Python test suite: # inside the IMAS-Python git repository pytest imas --mini - # run with a specific backend + # run with a specific backend, requires IMAS-Core installed pytest imas --ascii --mini And to build the IMAS-Python documentation, execute: diff --git a/docs/source/intro.rst b/docs/source/intro.rst index 15e27ef4..3027a242 100644 --- a/docs/source/intro.rst +++ b/docs/source/intro.rst @@ -95,7 +95,7 @@ Load and store an IDS to disk with IMAS-Core :py:meth:`~imas.db_entry.DBEntry.put_slice` and :py:meth:`~imas.db_entry.DBEntry.get_sample` (with IMAS-Core>=5.4). - If you can't have access to it, you can save IDS to disk with the built-in - netCDF backend :ref:`Store and load an IDS to disk with netCDF` + netCDF backend :ref:`Load and store an IDS to disk with netCDF` To store an IDS to disk, we need to indicate the following URI to the IMAS-Core: ``imas:?path=`` or using the legacy query keys @@ -129,8 +129,10 @@ can use ``.get()`` to load IDS data from disk: >>> print(core_profiles2.ids_properties.comment.value) -Load and store an IDS to disk with IMAS-Core -'''''''''''''''''''''''''''''''''''''''''''' +.. 
_`Load and store an IDS to disk with netCDF`: + +Load and store an IDS to disk with netCDF +''''''''''''''''''''''''''''''''''''''''' In IMAS-Python you do this as follows: From 0ecccbfaf66c0d1d8dc4ae7d949eee3a6649c9e1 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Wed, 19 Mar 2025 13:31:23 +0100 Subject: [PATCH 97/97] Update docs/source/changelog.rst Co-authored-by: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> --- docs/source/changelog.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 368630bf..ae995b0e 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -122,7 +122,7 @@ New features .. code-block:: bash - pip install 'imaspy[imas-core] @ git+ssh://git@github.com/iterorganization/imaspy.git' + pip install 'imaspy[imas-core] @ git+ssh://git@git.iter.org/imas/imaspy.git' - A diff tool for IDSs: :py:func:`imas.util.idsdiff`. - Implement ``==`` equality checking for IDS Structures and Arrays of Structures