From 9c56a3ec7036c1f753266ccd59e23e44e1d0b2ee Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 17 Dec 2025 09:11:17 +0100 Subject: [PATCH 1/9] Properly unpack 0D data when reading an IDS from a netCDF file 0D IDS Data is expected in the native python data types (int, float, complex, string). Before this fix, 0D numerical data would be stored as numpy.int32, numpy.float64 or numpy.complex128 when reading from a netCDF file. Fixes #89 --- imas/backends/netcdf/nc2ids.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/imas/backends/netcdf/nc2ids.py b/imas/backends/netcdf/nc2ids.py index 1b1dbfe8..564d5210 100644 --- a/imas/backends/netcdf/nc2ids.py +++ b/imas/backends/netcdf/nc2ids.py @@ -157,6 +157,8 @@ def run(self, lazy: bool) -> None: for index, node in indexed_tree_iter(self.ids, target_metadata): value = data[index] if value != getattr(var, "_FillValue", None): + if isinstance(value, np.generic): + value = value.item() # NOTE: bypassing IDSPrimitive.value.setter logic node._IDSPrimitive__value = value @@ -166,10 +168,16 @@ def run(self, lazy: bool) -> None: # here, we'll let IDSPrimitive.value.setter take care of it: self.ids[target_metadata.path].value = data - else: + # We need to unpack 0D ints, floats and complex numbers. 
For better + # performance this check is done outside the for-loop: + elif metadata.ndim or metadata.data_type is IDSDataType.STR: for index, node in indexed_tree_iter(self.ids, target_metadata): # NOTE: bypassing IDSPrimitive.value.setter logic node._IDSPrimitive__value = data[index] + else: + for index, node in indexed_tree_iter(self.ids, target_metadata): + # NOTE: bypassing IDSPrimitive.value.setter logic + node._IDSPrimitive__value = data[index].item() # Unpack 0D value def validate_variables(self) -> None: """Validate that all variables in the netCDF Group exist and match the DD.""" @@ -365,7 +373,7 @@ def get_child(self, child): value = var[self.index] if value is not None: - if isinstance(value, np.ndarray): + if isinstance(value, (np.ndarray, np.generic)): if value.ndim == 0: # Unpack 0D numpy arrays: value = value.item() else: From 28a35f79ffe76dff347f4696b6615459fa78e2ca Mon Sep 17 00:00:00 2001 From: Prasad Date: Thu, 8 Jan 2026 17:51:27 +0100 Subject: [PATCH 2/9] Cleanup unit tests: remove checks for imas-core (#92) --- conftest.py | 34 +------ imas/backends/imas_core/db_entry_al.py | 13 +-- imas/backends/imas_core/imas_interface.py | 31 ++---- imas/exception.py | 6 +- imas/ids_defs.py | 114 +++++++--------------- imas/ids_toplevel.py | 8 -- imas/test/test_cli.py | 4 +- imas/test/test_dbentry.py | 6 +- imas/test/test_exception.py | 2 +- imas/test/test_ids_ascii_data.py | 2 +- imas/test/test_ids_toplevel.py | 2 +- imas/test/test_ids_validate.py | 2 +- imas/test/test_latest_dd_autofill.py | 4 +- imas/test/test_lazy_loading.py | 10 +- imas/test/test_nbc_change.py | 2 +- imas/test/test_snippets.py | 2 +- imas/test/test_static_ids.py | 2 +- imas/test/test_to_xarray.py | 2 +- imas/test/test_util.py | 6 +- 19 files changed, 73 insertions(+), 179 deletions(-) diff --git a/conftest.py b/conftest.py index 51aaa4d4..2d2cd835 100644 --- a/conftest.py +++ b/conftest.py @@ -9,7 +9,6 @@ import functools import logging import os -import sys from copy import deepcopy 
from pathlib import Path @@ -22,7 +21,6 @@ import pytest from packaging.version import Version -from imas.backends.imas_core.imas_interface import has_imas as _has_imas from imas.backends.imas_core.imas_interface import ll_interface, lowlevel from imas.dd_zip import dd_etree, dd_xml_versions, latest_dd_version from imas.ids_defs import ( @@ -39,17 +37,7 @@ os.environ["IMAS_AL_DISABLE_VALIDATE"] = "1" - -try: - import imas # noqa -except ImportError: - - class SkipOnIMASAccess: - def __getattr__(self, attr): - pytest.skip("This test requires the `imas` HLI, which is not available.") - - # Any test that tries to access an attribute from the `imas` package will be skipped - sys.modules["imas"] = SkipOnIMASAccess() +import imas # noqa def pytest_addoption(parser): @@ -78,7 +66,6 @@ def pytest_addoption(parser): if "not available" in str(iex.message): _BACKENDS.pop("mdsplus") - try: import pytest_xdist except ImportError: @@ -91,28 +78,11 @@ def worker_id(): @pytest.fixture(params=_BACKENDS) def backend(pytestconfig: pytest.Config, request: pytest.FixtureRequest): backends_provided = any(map(pytestconfig.getoption, _BACKENDS)) - if not _has_imas: - if backends_provided: - raise RuntimeError( - "Explicit backends are provided, but IMAS is not available." 
- ) - pytest.skip("No IMAS available, skip tests using a backend") if backends_provided and not pytestconfig.getoption(request.param): pytest.skip(f"Tests for {request.param} backend are skipped.") return _BACKENDS[request.param] -@pytest.fixture() -def has_imas(): - return _has_imas - - -@pytest.fixture() -def requires_imas(): - if not _has_imas: - pytest.skip("No IMAS available") - - def pytest_generate_tests(metafunc): if "ids_name" in metafunc.fixturenames: if metafunc.config.getoption("ids"): @@ -214,7 +184,7 @@ def wrapper(*args, **kwargs): @pytest.fixture -def log_lowlevel_calls(monkeypatch, requires_imas): +def log_lowlevel_calls(monkeypatch): """Debugging fixture to log calls to the imas lowlevel module.""" for al_function in dir(lowlevel): if al_function.startswith("ual_") or al_function.startswith("al"): diff --git a/imas/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py index dad5019b..b9d118dd 100644 --- a/imas/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -38,7 +38,7 @@ from .al_context import ALContext, LazyALContext from .db_entry_helpers import delete_children, get_children, put_children -from .imas_interface import LLInterfaceError, has_imas, ll_interface +from .imas_interface import LLInterfaceError, ll_interface from .mdsplus_model import mdsplus_model_dir from .uda_support import extract_idsdef, get_dd_version_from_idsdef_xml @@ -52,14 +52,6 @@ logger = logging.getLogger(__name__) -def require_imas_available(): - if not has_imas: - raise RuntimeError( - "The IMAS Core library is not available. Please install 'imas_core', " - "or load a supported IMAS module if you use an HPC environment." 
- ) - - class ALDBEntryImpl(DBEntryImpl): """DBEntry implementation using imas_core as a backend.""" @@ -86,7 +78,6 @@ def __init__(self, uri: str, mode: int, factory: IDSFactory): @classmethod def from_uri(cls, uri: str, mode: str, factory: IDSFactory) -> "ALDBEntryImpl": - require_imas_available() if mode not in _OPEN_MODES: modes = list(_OPEN_MODES) raise ValueError(f"Unknown mode {mode!r}, was expecting any of {modes}") @@ -105,8 +96,6 @@ def from_pulse_run( options: Any, factory: IDSFactory, ) -> "ALDBEntryImpl": - # Raise an error if imas is not available - require_imas_available() # Set defaults user_name = user_name or getpass.getuser() diff --git a/imas/backends/imas_core/imas_interface.py b/imas/backends/imas_core/imas_interface.py index 8fa3963b..c9d69a02 100644 --- a/imas/backends/imas_core/imas_interface.py +++ b/imas/backends/imas_core/imas_interface.py @@ -12,30 +12,17 @@ from packaging.version import Version -logger = logging.getLogger(__name__) +# Import the Access Layer module +# First try to import imas_core, which is available since AL 5.2 +from imas_core import _al_lowlevel as lowlevel +from imas_core import imasdef # noqa: F401 +logger = logging.getLogger(__name__) -# Import the Access Layer module -has_imas = True -try: - # First try to import imas_core, which is available since AL 5.2 - from imas_core import _al_lowlevel as lowlevel - from imas_core import imasdef - - # Enable throwing exceptions from the _al_lowlevel interface - enable_exceptions = getattr(lowlevel, "imas_core_config_enable_exceptions", None) - if enable_exceptions: - enable_exceptions() - -except ImportError as exc: - imas = None - has_imas = False - imasdef = None - lowlevel = None - logger.warning( - "Could not import 'imas_core': %s. 
Some functionality is not available.", - exc, - ) +# Enable throwing exceptions from the _al_lowlevel interface +enable_exceptions = getattr(lowlevel, "imas_core_config_enable_exceptions", None) +if enable_exceptions: + enable_exceptions() class LLInterfaceError(RuntimeError): diff --git a/imas/exception.py b/imas/exception.py index 737680c2..737284d8 100644 --- a/imas/exception.py +++ b/imas/exception.py @@ -20,10 +20,8 @@ # Expose ALException, which may be thrown by the lowlevel -if _imas_interface.has_imas: - ALException = _imas_interface.lowlevel.ALException -else: - ALException = None + +ALException = _imas_interface.lowlevel.ALException class IDSNameError(ValueError): diff --git a/imas/ids_defs.py b/imas/ids_defs.py index af4ed45c..3ac3c6be 100644 --- a/imas/ids_defs.py +++ b/imas/ids_defs.py @@ -86,86 +86,46 @@ Identifier for the default serialization protocol. """ -import functools import logging -from imas.backends.imas_core.imas_interface import has_imas, imasdef +from imas.backends.imas_core.imas_interface import imasdef logger = logging.getLogger(__name__) -if has_imas: - ASCII_BACKEND = imasdef.ASCII_BACKEND - CHAR_DATA = imasdef.CHAR_DATA - CLOSE_PULSE = imasdef.CLOSE_PULSE - CLOSEST_INTERP = imasdef.CLOSEST_INTERP - CREATE_PULSE = imasdef.CREATE_PULSE - DOUBLE_DATA = imasdef.DOUBLE_DATA - COMPLEX_DATA = imasdef.COMPLEX_DATA - EMPTY_COMPLEX = imasdef.EMPTY_COMPLEX - EMPTY_FLOAT = imasdef.EMPTY_FLOAT - EMPTY_INT = imasdef.EMPTY_INT - ERASE_PULSE = imasdef.ERASE_PULSE - FORCE_CREATE_PULSE = imasdef.FORCE_CREATE_PULSE - FORCE_OPEN_PULSE = imasdef.FORCE_OPEN_PULSE - HDF5_BACKEND = imasdef.HDF5_BACKEND - IDS_TIME_MODE_HETEROGENEOUS = imasdef.IDS_TIME_MODE_HETEROGENEOUS - IDS_TIME_MODE_HOMOGENEOUS = imasdef.IDS_TIME_MODE_HOMOGENEOUS - IDS_TIME_MODE_INDEPENDENT = imasdef.IDS_TIME_MODE_INDEPENDENT - IDS_TIME_MODE_UNKNOWN = imasdef.IDS_TIME_MODE_UNKNOWN - IDS_TIME_MODES = imasdef.IDS_TIME_MODES - INTEGER_DATA = imasdef.INTEGER_DATA - LINEAR_INTERP = 
imasdef.LINEAR_INTERP - MDSPLUS_BACKEND = imasdef.MDSPLUS_BACKEND - MEMORY_BACKEND = imasdef.MEMORY_BACKEND - NODE_TYPE_STRUCTURE = imasdef.NODE_TYPE_STRUCTURE - OPEN_PULSE = imasdef.OPEN_PULSE - PREVIOUS_INTERP = imasdef.PREVIOUS_INTERP - READ_OP = imasdef.READ_OP - UDA_BACKEND = imasdef.UDA_BACKEND - UNDEFINED_INTERP = imasdef.UNDEFINED_INTERP - UNDEFINED_TIME = imasdef.UNDEFINED_TIME - WRITE_OP = imasdef.WRITE_OP - ASCII_SERIALIZER_PROTOCOL = getattr(imasdef, "ASCII_SERIALIZER_PROTOCOL", 60) - FLEXBUFFERS_SERIALIZER_PROTOCOL = getattr( - imasdef, "FLEXBUFFERS_SERIALIZER_PROTOCOL", None - ) - DEFAULT_SERIALIZER_PROTOCOL = getattr(imasdef, "DEFAULT_SERIALIZER_PROTOCOL", 60) - -else: - # Preset some constants which are used elsewhere - # this is a bit ugly, perhaps reuse the list of imports from above? - # it seems no problem to use None, since the use of the values should not - # be allowed, they are only used in operations which use the backend, - # which we (should) gate - ASCII_BACKEND = CHAR_DATA = CLOSE_PULSE = CLOSEST_INTERP = DOUBLE_DATA = None - FORCE_OPEN_PULSE = CREATE_PULSE = ERASE_PULSE = None - COMPLEX_DATA = FORCE_CREATE_PULSE = HDF5_BACKEND = None - INTEGER_DATA = LINEAR_INTERP = MDSPLUS_BACKEND = MEMORY_BACKEND = None - NODE_TYPE_STRUCTURE = OPEN_PULSE = PREVIOUS_INTERP = READ_OP = None - UDA_BACKEND = UNDEFINED_INTERP = UNDEFINED_TIME = WRITE_OP = None - # These constants are also useful when not working with the AL - EMPTY_FLOAT = -9e40 - EMPTY_INT = -999_999_999 - EMPTY_COMPLEX = complex(EMPTY_FLOAT, EMPTY_FLOAT) - IDS_TIME_MODE_UNKNOWN = EMPTY_INT - IDS_TIME_MODE_HETEROGENEOUS = 0 - IDS_TIME_MODE_HOMOGENEOUS = 1 - IDS_TIME_MODE_INDEPENDENT = 2 - IDS_TIME_MODES = [0, 1, 2] - ASCII_SERIALIZER_PROTOCOL = 60 - FLEXBUFFERS_SERIALIZER_PROTOCOL = None - DEFAULT_SERIALIZER_PROTOCOL = 60 - - -def needs_imas(func): - if has_imas: - return func - - @functools.wraps(func) - def wrapper(*args, **kwargs): - raise RuntimeError( - f"Function {func.__name__} 
requires IMAS, but IMAS is not available." - ) - - return wrapper +ASCII_BACKEND = imasdef.ASCII_BACKEND +CHAR_DATA = imasdef.CHAR_DATA +CLOSE_PULSE = imasdef.CLOSE_PULSE +CLOSEST_INTERP = imasdef.CLOSEST_INTERP +CREATE_PULSE = imasdef.CREATE_PULSE +DOUBLE_DATA = imasdef.DOUBLE_DATA +COMPLEX_DATA = imasdef.COMPLEX_DATA +EMPTY_COMPLEX = imasdef.EMPTY_COMPLEX +EMPTY_FLOAT = imasdef.EMPTY_FLOAT +EMPTY_INT = imasdef.EMPTY_INT +ERASE_PULSE = imasdef.ERASE_PULSE +FORCE_CREATE_PULSE = imasdef.FORCE_CREATE_PULSE +FORCE_OPEN_PULSE = imasdef.FORCE_OPEN_PULSE +HDF5_BACKEND = imasdef.HDF5_BACKEND +IDS_TIME_MODE_HETEROGENEOUS = imasdef.IDS_TIME_MODE_HETEROGENEOUS +IDS_TIME_MODE_HOMOGENEOUS = imasdef.IDS_TIME_MODE_HOMOGENEOUS +IDS_TIME_MODE_INDEPENDENT = imasdef.IDS_TIME_MODE_INDEPENDENT +IDS_TIME_MODE_UNKNOWN = imasdef.IDS_TIME_MODE_UNKNOWN +IDS_TIME_MODES = imasdef.IDS_TIME_MODES +INTEGER_DATA = imasdef.INTEGER_DATA +LINEAR_INTERP = imasdef.LINEAR_INTERP +MDSPLUS_BACKEND = imasdef.MDSPLUS_BACKEND +MEMORY_BACKEND = imasdef.MEMORY_BACKEND +NODE_TYPE_STRUCTURE = imasdef.NODE_TYPE_STRUCTURE +OPEN_PULSE = imasdef.OPEN_PULSE +PREVIOUS_INTERP = imasdef.PREVIOUS_INTERP +READ_OP = imasdef.READ_OP +UDA_BACKEND = imasdef.UDA_BACKEND +UNDEFINED_INTERP = imasdef.UNDEFINED_INTERP +UNDEFINED_TIME = imasdef.UNDEFINED_TIME +WRITE_OP = imasdef.WRITE_OP +ASCII_SERIALIZER_PROTOCOL = getattr(imasdef, "ASCII_SERIALIZER_PROTOCOL", 60) +FLEXBUFFERS_SERIALIZER_PROTOCOL = getattr( + imasdef, "FLEXBUFFERS_SERIALIZER_PROTOCOL", None +) +DEFAULT_SERIALIZER_PROTOCOL = getattr(imasdef, "DEFAULT_SERIALIZER_PROTOCOL", 60) diff --git a/imas/ids_toplevel.py b/imas/ids_toplevel.py index 947bf72f..fcda5f0d 100644 --- a/imas/ids_toplevel.py +++ b/imas/ids_toplevel.py @@ -22,7 +22,6 @@ IDS_TIME_MODE_INDEPENDENT, IDS_TIME_MODE_UNKNOWN, IDS_TIME_MODES, - needs_imas, ) from imas.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata from imas.ids_structure import IDSStructure @@ -99,7 +98,6 @@ def 
default_serializer_protocol(): """Return the default serializer protocol.""" return DEFAULT_SERIALIZER_PROTOCOL - @needs_imas def serialize(self, protocol=None) -> bytes: """Serialize this IDS to a data buffer. @@ -169,7 +167,6 @@ def serialize(self, protocol=None) -> bytes: return bytes(buffer) raise ValueError(f"Unrecognized serialization protocol: {protocol}") - @needs_imas def deserialize(self, data: bytes) -> None: """Deserialize the data buffer into this IDS. @@ -289,7 +286,6 @@ def _validate(self): for child in self.iter_nonempty_(accept_lazy=True): child._validate() - @needs_imas def get(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None: """Get data from AL backend storage format. @@ -300,7 +296,6 @@ def get(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None raise NotImplementedError() db_entry.get(self.metadata.name, occurrence, destination=self) - @needs_imas def getSlice( self, time_requested: float, @@ -323,7 +318,6 @@ def getSlice( destination=self, ) - @needs_imas def putSlice( self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None ) -> None: @@ -336,7 +330,6 @@ def putSlice( raise NotImplementedError() db_entry.put_slice(self, occurrence) - @needs_imas def deleteData( self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None ) -> None: @@ -349,7 +342,6 @@ def deleteData( raise NotImplementedError() db_entry.delete_data(self, occurrence) - @needs_imas def put(self, occurrence: int = 0, db_entry: Optional["DBEntry"] = None) -> None: """Put this IDS to the backend. 
diff --git a/imas/test/test_cli.py b/imas/test/test_cli.py index 0f4b305e..130aa287 100644 --- a/imas/test/test_cli.py +++ b/imas/test/test_cli.py @@ -17,7 +17,7 @@ def test_imas_version(): @pytest.mark.cli -def test_db_analysis(tmp_path, requires_imas): +def test_db_analysis(tmp_path): # This only tests the happy flow, error handling is not tested db_path = tmp_path / "test_db_analysis" with DBEntry(f"imas:hdf5?path={db_path}", "w") as entry: @@ -42,7 +42,7 @@ def test_db_analysis(tmp_path, requires_imas): @pytest.mark.cli -def test_db_analysis_csv(tmp_path, requires_imas): +def test_db_analysis_csv(tmp_path): with DBEntry(f"imas:hdf5?path={tmp_path}/entry1", "w") as entry: eq = entry.factory.equilibrium() eq.ids_properties.homogeneous_time = 2 diff --git a/imas/test/test_dbentry.py b/imas/test/test_dbentry.py index e13d82a4..f014eb9b 100644 --- a/imas/test/test_dbentry.py +++ b/imas/test/test_dbentry.py @@ -6,7 +6,7 @@ from imas.test.test_helpers import compare_children, open_dbentry -def test_dbentry_contextmanager(requires_imas): +def test_dbentry_contextmanager(): entry = imas.DBEntry(imas.ids_defs.MEMORY_BACKEND, "test", 1, 1) entry.create() ids = entry.factory.core_profiles() @@ -22,7 +22,7 @@ def test_dbentry_contextmanager(requires_imas): assert entry2._dbe_impl is None -def test_dbentry_contextmanager_uri(tmp_path, requires_imas): +def test_dbentry_contextmanager_uri(tmp_path): entry = imas.DBEntry(f"imas:ascii?path={tmp_path}/testdb", "w") ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 @@ -77,7 +77,7 @@ def test_dbentry_constructor(): assert get_entry_attrs(entry) == (1, 2, 3, 4, None, 6) -def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path, requires_imas): +def test_ignore_unknown_dd_version(monkeypatch, worker_id, tmp_path): entry = open_dbentry(imas.ids_defs.MEMORY_BACKEND, "w", worker_id, tmp_path) ids = entry.factory.core_profiles() ids.ids_properties.homogeneous_time = 0 diff --git 
a/imas/test/test_exception.py b/imas/test/test_exception.py index 37bebfce..c0b66230 100644 --- a/imas/test/test_exception.py +++ b/imas/test/test_exception.py @@ -4,7 +4,7 @@ from imas.backends.imas_core.imas_interface import ll_interface -def test_catch_al_exception(requires_imas): +def test_catch_al_exception(): # Do something which lets the lowlevel Cython interface throw an ALException # Ensure we can catch it: with pytest.raises(imas.exception.ALException): diff --git a/imas/test/test_ids_ascii_data.py b/imas/test/test_ids_ascii_data.py index d15fecf1..20ae8a66 100644 --- a/imas/test/test_ids_ascii_data.py +++ b/imas/test/test_ids_ascii_data.py @@ -18,7 +18,7 @@ def test_data_exists(): @pytest.fixture -def test_data(requires_imas): +def test_data(): db_entry = imas.training.get_training_db_entry() yield db_entry db_entry.close() diff --git a/imas/test/test_ids_toplevel.py b/imas/test/test_ids_toplevel.py index a5855817..e55bac4d 100644 --- a/imas/test/test_ids_toplevel.py +++ b/imas/test/test_ids_toplevel.py @@ -46,7 +46,7 @@ def test_pretty_print(ids): assert pprint.pformat(ids) == "" -def test_serialize_nondefault_dd_version(requires_imas): +def test_serialize_nondefault_dd_version(): ids = IDSFactory("3.31.0").core_profiles() fill_with_random_data(ids) data = ids.serialize() diff --git a/imas/test/test_ids_validate.py b/imas/test/test_ids_validate.py index 7970c7e2..c3f8f157 100644 --- a/imas/test/test_ids_validate.py +++ b/imas/test/test_ids_validate.py @@ -245,7 +245,7 @@ def test_validate_coordinate_same_as(): (None, True), ], ) -def test_validate_on_put(monkeypatch, env_value, should_validate, requires_imas): +def test_validate_on_put(monkeypatch, env_value, should_validate): dbentry = DBEntry(MEMORY_BACKEND, "test", 1, 1) dbentry.create() ids = dbentry.factory.core_profiles() diff --git a/imas/test/test_latest_dd_autofill.py b/imas/test/test_latest_dd_autofill.py index 6d34b766..6b7fbb6a 100644 --- a/imas/test/test_latest_dd_autofill.py +++ 
b/imas/test/test_latest_dd_autofill.py @@ -55,7 +55,7 @@ def test_latest_dd_autofill(ids_name, backend, worker_id, tmp_path): @pytest.mark.parametrize( "serializer", [ASCII_SERIALIZER_PROTOCOL, FLEXBUFFERS_SERIALIZER_PROTOCOL] ) -def test_latest_dd_autofill_serialize(serializer, ids_name, has_imas): +def test_latest_dd_autofill_serialize(serializer, ids_name): """Serialize and then deserialize again all IDSToplevels""" if serializer is None: pytest.skip("Unsupported serializer") @@ -64,8 +64,6 @@ def test_latest_dd_autofill_serialize(serializer, ids_name, has_imas): ids = factory.new(ids_name) fill_with_random_data(ids) - if not has_imas: - return # rest of the test requires an IMAS install data = ids.serialize(serializer) ids2 = factory.new(ids_name) diff --git a/imas/test/test_lazy_loading.py b/imas/test/test_lazy_loading.py index 4a7c65ca..1dcd0bff 100644 --- a/imas/test/test_lazy_loading.py +++ b/imas/test/test_lazy_loading.py @@ -94,7 +94,7 @@ def iterate(structure): dbentry.close() -def test_lazy_load_close_dbentry(requires_imas): +def test_lazy_load_close_dbentry(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1) dbentry.create() @@ -109,7 +109,7 @@ def test_lazy_load_close_dbentry(requires_imas): print(lazy_ids.time) -def test_lazy_load_readonly(requires_imas): +def test_lazy_load_readonly(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1) dbentry.create() run_lazy_load_readonly(dbentry) @@ -151,7 +151,7 @@ def run_lazy_load_readonly(dbentry): dbentry.close() -def test_lazy_load_no_put(requires_imas): +def test_lazy_load_no_put(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1) dbentry.create() @@ -169,7 +169,7 @@ def test_lazy_load_no_put(requires_imas): dbentry.close() -def test_lazy_load_with_new_aos(requires_imas): +def test_lazy_load_with_new_aos(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1, dd_version="3.30.0") dbentry.create() et = dbentry.factory.edge_transport() @@ -214,7 +214,7 @@ def test_lazy_load_with_new_aos_netcdf(tmp_path): assert 
len(lazy_et.model[0].ggd[0].electrons.particles.d_radial) == 0 -def test_lazy_load_with_new_structure(requires_imas): +def test_lazy_load_with_new_structure(): dbentry = DBEntry(MEMORY_BACKEND, "ITER", 1, 1, dd_version="3.30.0") dbentry.create() diff --git a/imas/test/test_nbc_change.py b/imas/test/test_nbc_change.py index 91ede0e3..b34949df 100644 --- a/imas/test/test_nbc_change.py +++ b/imas/test/test_nbc_change.py @@ -49,7 +49,7 @@ def test_nbc_structure_to_aos(caplog): assert caplog.record_tuples[0][:2] == ("imas.ids_convert", logging.WARNING) -def test_nbc_0d_to_1d(caplog, requires_imas): +def test_nbc_0d_to_1d(caplog): # channel/filter_spectrometer/radiance_calibration in spectrometer visible changed # from FLT_0D to FLT_1D in DD 3.39.0 ids = IDSFactory("3.32.0").spectrometer_visible() diff --git a/imas/test/test_snippets.py b/imas/test/test_snippets.py index 0574b185..8ed49a83 100644 --- a/imas/test/test_snippets.py +++ b/imas/test/test_snippets.py @@ -13,7 +13,7 @@ @pytest.mark.skip(reason="skipping hli test") @pytest.mark.filterwarnings("ignore:The input coordinates to pcolormesh:UserWarning") @pytest.mark.parametrize("snippet", course_snippets) -def test_script_execution(snippet, monkeypatch, tmp_path, requires_imas): +def test_script_execution(snippet, monkeypatch, tmp_path): monkeypatch.chdir(tmp_path) # Prevent showing plots in a GUI monkeypatch.delenv("DISPLAY", raising=False) diff --git a/imas/test/test_static_ids.py b/imas/test/test_static_ids.py index 2c66811d..05133615 100644 --- a/imas/test/test_static_ids.py +++ b/imas/test/test_static_ids.py @@ -21,7 +21,7 @@ def test_ids_valid_type(): assert ids_types in ({IDSType.NONE}, {IDSType.CONSTANT, IDSType.DYNAMIC}) -def test_constant_ids(caplog, requires_imas): +def test_constant_ids(caplog): ids = imas.IDSFactory().new("amns_data") if ids.metadata.type is IDSType.NONE: pytest.skip("IDS definition has no constant IDSs") diff --git a/imas/test/test_to_xarray.py b/imas/test/test_to_xarray.py index 
1767a6d9..a5df6a1e 100644 --- a/imas/test/test_to_xarray.py +++ b/imas/test/test_to_xarray.py @@ -9,7 +9,7 @@ @pytest.fixture -def entry(requires_imas, monkeypatch): +def entry(monkeypatch): monkeypatch.setenv("IMAS_VERSION", "3.39.0") # Use fixed DD version return imas.training.get_training_db_entry() diff --git a/imas/test/test_util.py b/imas/test/test_util.py index 15a2a8c0..1834af9c 100644 --- a/imas/test/test_util.py +++ b/imas/test/test_util.py @@ -54,7 +54,7 @@ def test_inspect(): inspect(cp.profiles_1d[1].grid.rho_tor_norm) # IDSPrimitive -def test_inspect_lazy(requires_imas): +def test_inspect_lazy(): with get_training_db_entry() as entry: cp = entry.get("core_profiles", lazy=True) inspect(cp) @@ -141,7 +141,7 @@ def test_idsdiffgen(): assert diff[0] == ("profiles_1d/time", -1, 0) -def test_idsdiff(requires_imas): +def test_idsdiff(): # Test the diff rendering for two sample IDSs with get_training_db_entry() as entry: imas.util.idsdiff(entry.get("core_profiles"), entry.get("equilibrium")) @@ -179,7 +179,7 @@ def test_get_toplevel(): assert get_toplevel(cp) is cp -def test_is_lazy_loaded(requires_imas): +def test_is_lazy_loaded(): with get_training_db_entry() as entry: assert is_lazy_loaded(entry.get("core_profiles")) is False assert is_lazy_loaded(entry.get("core_profiles", lazy=True)) is True From 0de0782573cc6c6bbca40bf78de1c5952c4d9e26 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> Date: Thu, 15 Jan 2026 08:36:34 +0100 Subject: [PATCH 3/9] Defer loading the default DD definitions (#95) --- imas/backends/db_entry_impl.py | 2 +- imas/db_entry.py | 6 +++--- imas/ids_factory.py | 19 ++++++++++++++++++- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/imas/backends/db_entry_impl.py b/imas/backends/db_entry_impl.py index df1e4638..0c1b2cd6 100644 --- a/imas/backends/db_entry_impl.py +++ b/imas/backends/db_entry_impl.py @@ -78,7 +78,7 @@ def get( destination: IDSToplevel, lazy: bool, nbc_map: 
Optional[NBCPathMap], - ) -> None: + ) -> IDSToplevel: """Implement DBEntry.get/get_slice/get_sample. Load data from the data source. Args: diff --git a/imas/db_entry.py b/imas/db_entry.py index 471a50ad..5a470641 100644 --- a/imas/db_entry.py +++ b/imas/db_entry.py @@ -160,7 +160,7 @@ def __init__( legacy = True except TypeError as exc2: raise TypeError( - f"Incorrect arguments to {__class__.__name__}.__init__(): " + "Incorrect arguments to DBEntry.__init__(): " f"{exc1.args[0]}, {exc2.args[0]}" ) from None @@ -561,7 +561,7 @@ def _get( raise RuntimeError("Database entry is not open.") if lazy and destination: raise ValueError("Cannot supply a destination IDS when lazy loading.") - if not self._ids_factory.exists(ids_name): + if autoconvert and not self._ids_factory.exists(ids_name): raise IDSNameError(ids_name, self._ids_factory) # Note: this will raise an exception when the ids/occurrence is not filled: @@ -577,7 +577,7 @@ def _get( ids_name, occurrence, ) - elif dd_version != self.dd_version and dd_version not in dd_xml_versions(): + elif dd_version not in dd_xml_versions() and dd_version != self.dd_version: # We don't know the DD version that this IDS was written with if ignore_unknown_dd_version: # User chooses to ignore this problem, load as if it was stored with diff --git a/imas/ids_factory.py b/imas/ids_factory.py index b840d8a8..5a8209db 100644 --- a/imas/ids_factory.py +++ b/imas/ids_factory.py @@ -41,6 +41,17 @@ def __init__( version: DD version string, e.g. "3.38.1". xml_path: XML file containing data dictionary definition. 
""" + if version is None and xml_path is None: + # Defer loading the DD definitions until we really need them + self.__deferred_init = True + else: + # If a specific version or xml_path is requested, we still load immediately + # so any exceptions are raise when creating the IDSfactory + self.__do_init(version, xml_path) + self.__deferred_init = False + + def __do_init(self, version: str | None, xml_path: str | pathlib.Path | None): + """Actual initialization logic""" self._xml_path = xml_path self._etree = dd_zip.dd_etree(version, xml_path) self._ids_elements = { @@ -71,10 +82,16 @@ def __dir__(self) -> Iterable[str]: return sorted(set(object.__dir__(self)).union(self._ids_elements)) def __getattr__(self, name: str) -> Any: + # Actually initialize when we deferred it before + if self.__deferred_init: + self.__do_init(None, None) + self.__deferred_init = False + return getattr(self, name) + # Check if the name matches any IDS and return a 'constructor' for it if name in self._ids_elements: # Note: returning a partial to mimic AL HLI, e.g. 
factory.core_profiles() return partial(IDSToplevel, self, self._ids_elements[name]) - raise AttributeError(f"{type(self)!r} object has no attribute {name!r}") + raise AttributeError(f"'IDSFactory' has no attribute {name!r}") def __iter__(self) -> Iterator[str]: """Iterate over the IDS names defined by the loaded Data Dictionary""" From fc77ae5c676d0ee98fb2a6cf766427b62439a77c Mon Sep 17 00:00:00 2001 From: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> Date: Fri, 16 Jan 2026 12:35:43 +0100 Subject: [PATCH 4/9] Convert core/edge to plasma IDS (#87) --- docs/source/api.rst | 16 ++- imas/__init__.py | 58 +++++++--- imas/command/cli.py | 71 +++++++++++- imas/convert_core_edge_plasma.py | 124 +++++++++++++++++++++ imas/test/test_cli.py | 37 +++++- imas/test/test_convert_core_edge_plasma.py | 68 +++++++++++ setup.cfg | 3 - 7 files changed, 351 insertions(+), 26 deletions(-) create mode 100644 imas/convert_core_edge_plasma.py create mode 100644 imas/test/test_convert_core_edge_plasma.py diff --git a/docs/source/api.rst b/docs/source/api.rst index 5df6e579..63e8af41 100644 --- a/docs/source/api.rst +++ b/docs/source/api.rst @@ -7,16 +7,24 @@ This page provides an auto-generated summary of IMAS-Python's API. For more deta and examples, refer to the relevant chapters in the main part of the documentation. -IMAS-Python IDS manipulation ----------------------------- +IMAS-Python public API +---------------------- .. currentmodule:: imas .. 
autosummary:: + convert_core_edge_plasma.convert_to_plasma_profiles + convert_core_edge_plasma.convert_to_plasma_sources + convert_core_edge_plasma.convert_to_plasma_transport db_entry.DBEntry + ids_convert.convert_ids + ids_data_type.IDSDataType ids_factory.IDSFactory - ids_toplevel.IDSToplevel + ids_identifiers.identifiers + ids_metadata.IDSMetadata + ids_metadata.IDSType ids_primitive.IDSPrimitive - ids_structure.IDSStructure ids_struct_array.IDSStructArray + ids_structure.IDSStructure + ids_toplevel.IDSToplevel \ No newline at end of file diff --git a/imas/__init__.py b/imas/__init__.py index 58a66994..4154b9f6 100644 --- a/imas/__init__.py +++ b/imas/__init__.py @@ -1,30 +1,58 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. -# isort: skip_file - from packaging.version import Version as _V -from ._version import version as __version__ # noqa: F401 -from ._version import version_tuple # noqa: F401 - # Import logging _first_ -from . import setup_logging +# isort: off +from . import setup_logging # noqa: F401 + +# isort: on -# Import main user API objects in the imas module +# Ensure that `imas.util` is loaded when importing imas +from . import util # noqa: F401 + +# Public API: +from ._version import version as __version__ +from ._version import version_tuple +from .convert_core_edge_plasma import ( + convert_to_plasma_profiles, + convert_to_plasma_sources, + convert_to_plasma_transport, +) from .db_entry import DBEntry -from .ids_factory import IDSFactory from .ids_convert import convert_ids +from .ids_data_type import IDSDataType +from .ids_factory import IDSFactory from .ids_identifiers import identifiers - -# Load the IMAS-Python IMAS AL/DD core -from . 
import ( - db_entry, - dd_zip, - util, -) +from .ids_metadata import IDSMetadata, IDSType +from .ids_primitive import IDSPrimitive +from .ids_struct_array import IDSStructArray +from .ids_structure import IDSStructure +from .ids_toplevel import IDSToplevel PUBLISHED_DOCUMENTATION_ROOT = "https://imas-python.readthedocs.io/en/latest/" """URL to the published documentation.""" OLDEST_SUPPORTED_VERSION = _V("3.22.0") """Oldest Data Dictionary version that is supported by IMAS-Python.""" + +__all__ = [ + "__version__", + "version_tuple", + "DBEntry", + "IDSDataType", + "IDSFactory", + "IDSMetadata", + "IDSPrimitive", + "IDSStructure", + "IDSStructArray", + "IDSToplevel", + "IDSType", + "convert_ids", + "convert_to_plasma_profiles", + "convert_to_plasma_sources", + "convert_to_plasma_transport", + "identifiers", + "PUBLISHED_DOCUMENTATION_ROOT", + "OLDEST_SUPPORTED_VERSION", +] diff --git a/imas/command/cli.py b/imas/command/cli.py index a270d834..da921973 100644 --- a/imas/command/cli.py +++ b/imas/command/cli.py @@ -1,6 +1,6 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. 
-""" Main CLI entry point """ +"""Main CLI entry point""" import logging import sys @@ -22,7 +22,13 @@ import imas import imas.backends.imas_core.imas_interface -from imas import DBEntry, dd_zip +from imas import ( + DBEntry, + dd_zip, + convert_to_plasma_profiles, + convert_to_plasma_sources, + convert_to_plasma_transport, +) from imas.backends.imas_core.imas_interface import ll_interface from imas.command.db_analysis import analyze_db, process_db_analysis from imas.command.helpers import min_version_guard, setup_rich_log_handler @@ -109,6 +115,23 @@ def print_ids(uri, ids, occurrence, print_all): imas.util.print_tree(ids_obj, not print_all) +def _check_convert_to_plasma_ids(idss_with_occurrences): + """Check if no plasma_ IDS is present when converting a core_ or edge_ IDS.""" + idsnames = {ids_name for ids_name, _ in idss_with_occurrences} + for suffix in ("_profiles", "_sources", "_transport"): + if f"plasma{suffix}" in idsnames: + if f"core{suffix}" in idsnames: + overlap = "core" + elif f"edge{suffix}" in idsnames: + overlap = "edge" + else: + continue + raise RuntimeError( + f"Cannot convert {overlap}{suffix} IDS to plasma{suffix}: " + f"there already exists a plasma{suffix} IDS in the data source." + ) + + @cli.command("convert", no_args_is_help=True) @click.argument("uri_in") @click.argument("dd_version") @@ -127,8 +150,21 @@ def print_ids(uri, ids, occurrence, print_all): is_flag=True, help="Don't add provenance metadata to the converted IDS.", ) +@click.option( + "--convert-to-plasma-ids", + is_flag=True, + help="Convert core/edge profiles/transport/sources to the corresponding plasma IDS", +) def convert_ids( - uri_in, dd_version, uri_out, ids, occurrence, quiet, timeit, no_provenance + uri_in: str, + dd_version: str, + uri_out: str, + ids: str, + occurrence: int, + quiet: bool, + timeit: bool, + no_provenance: bool, + convert_to_plasma_ids: bool, ): """Convert a Data Entry (or a single IDS) to the target DD version. 
@@ -174,6 +210,10 @@ def convert_ids( else: idss_with_occurrences.append((ids_name, occurrence)) + if convert_to_plasma_ids: # Sanity checks for conversion to plasma IDSs + _check_convert_to_plasma_ids(idss_with_occurrences) + next_plasma_occurrence = {"_profiles": 0, "_transport": 0, "_sources": 0} + # Create progress bar and task columns = ( TimeElapsedColumn(), @@ -209,6 +249,31 @@ def convert_ids( provenance_origin_uri=provenance_origin_uri, ) + # Convert to plasma_profiles/plasma_sources/plasma_transport IDS + if convert_to_plasma_ids and ids_name.startswith(("core", "edge")): + suffix = ids_name[4:] + # This branch also matches core_instant_changes: check that suffix is ok + if suffix in next_plasma_occurrence: + logger.info( + "Storing IDS %s/%d as plasma%s/%d", + ids_name, + occurrence, + suffix, + next_plasma_occurrence[suffix], + ) + occurrence = next_plasma_occurrence[suffix] + next_plasma_occurrence[suffix] += 1 + + name2 = f"[bold green]plasma{suffix}[/][green]/{occurrence}[/]" + progress.update(task, description=f"Converting {name} to {name2}") + if suffix == "_profiles": + ids2 = convert_to_plasma_profiles(ids2) + elif suffix == "_sources": + ids2 = convert_to_plasma_sources(ids2) + elif suffix == "_transport": + ids2 = convert_to_plasma_transport(ids2) + name = name2 + # Store in output entry: progress.update(task, description=f"Storing {name}", advance=1) with timer("Put", name): diff --git a/imas/convert_core_edge_plasma.py b/imas/convert_core_edge_plasma.py new file mode 100644 index 00000000..5a13c5a7 --- /dev/null +++ b/imas/convert_core_edge_plasma.py @@ -0,0 +1,124 @@ +# This file is part of IMAS-Python. +# You should have received the IMAS-Python LICENSE file with this project. 
+"""Logic to convert core/edge IDSs to their corresponding plasma ID.""" + +from packaging.version import Version + +from imas.ids_toplevel import IDSToplevel +from imas.ids_factory import IDSFactory +from imas.exception import IDSNameError +from imas.ids_convert import DDVersionMap, NBCPathMap, _copy_structure + + +def convert_to_plasma_profiles( + core_or_edge_profiles: IDSToplevel, *, deepcopy: bool = False +) -> IDSToplevel: + """Convert a core_profiles or edge_profiles IDS to a plasma_profiles IDS. + + The input IDS must use a Data Dictionary version for which the plasma_profiles IDS + exists (3.42.0 or newer). + + Args: + core_or_edge_profiles: The core_profiles or edge_profiles IDS to be converted. + + Keyword Args: + deepcopy: When True, performs a deep copy of all data. When False (default), + numpy arrays are not copied and the converted IDS shares the same underlying + data buffers. + """ + return _convert_to_plasma(core_or_edge_profiles, "profiles", deepcopy) + + +def convert_to_plasma_sources( + core_or_edge_sources: IDSToplevel, *, deepcopy: bool = False +) -> IDSToplevel: + """Convert a core_sources or edge_sources IDS to a plasma_sources IDS. + + The input IDS must use a Data Dictionary version for which the plasma_sources IDS + exists (3.42.0 or newer). + + Args: + core_or_edge_sources: The core_sources or edge_sources IDS to be converted. + + Keyword Args: + deepcopy: When True, performs a deep copy of all data. When False (default), + numpy arrays are not copied and the converted IDS shares the same underlying + data buffers. + """ + return _convert_to_plasma(core_or_edge_sources, "sources", deepcopy) + + +def convert_to_plasma_transport( + core_or_edge_transport: IDSToplevel, *, deepcopy: bool = False +) -> IDSToplevel: + """Convert a core_transport or edge_transport IDS to a plasma_transport IDS. + + The input IDS must use a Data Dictionary version for which the plasma_transport IDS + exists (3.42.0 or newer). 
+ + Args: + core_or_edge_transport: The core_transport or edge_transport IDS to be + converted. + + Keyword Args: + deepcopy: When True, performs a deep copy of all data. When False (default), + numpy arrays are not copied and the converted IDS shares the same underlying + data buffers. + """ + return _convert_to_plasma(core_or_edge_transport, "transport", deepcopy) + + +class _CoreEdgePlasmaMap(DDVersionMap): + """Subclass of DDVersionMap to generate an NBCPathMap that is suitable to copy + between a core/edge IDS and the corresponding plasma IDS.""" + + def __init__(self, source, target, factory): + self.ids_name = source + self.old_version = factory._etree + self.new_version = factory._etree + self.version_old = Version(factory.version) + + self.old_to_new = NBCPathMap() + self.new_to_old = NBCPathMap() + + old_ids_object = factory._etree.find(f"IDS[@name='{source}']") + new_ids_object = factory._etree.find(f"IDS[@name='{target}']") + self._build_map(old_ids_object, new_ids_object) + + +def _convert_to_plasma(source: IDSToplevel, suffix: str, deepcopy: bool) -> IDSToplevel: + # Sanity checks for input data + if not isinstance(source, IDSToplevel): + raise TypeError( + f"First argument to convert_to_plasma_{suffix} must be a core_{suffix} or " + f"edge_{suffix} of type IDSToplevel. Got a type {type(source)} instead." + ) + if source.metadata.name not in [f"core_{suffix}", f"edge_{suffix}"]: + raise ValueError( + f"First argument to convert_to_plasma_{suffix} must be a core_{suffix} or " + f"edge_{suffix} IDS. Got a {source.metadata.name} IDS instead." 
+ ) + if source._lazy: + raise NotImplementedError( + "IDS conversion is not implemented for lazy-loaded IDSs" + ) + + # Construct target plasma_{suffix} IDS + factory: IDSFactory = source._parent + try: + target = factory.new(f"plasma_{suffix}") + except IDSNameError: + raise ValueError( + f"Cannot convert {source.metadata.name} IDS to plasma_{suffix}: the source " + f"IDS uses Data Dictionary version {factory.dd_version} which doesn't have " + f"a plasma_{suffix} IDS. Please convert the source IDS to a supported Data " + "Dictionary version using `imas.convert_ids` and try again." + ) from None + + # Leverage existing logic from ids_convert to do the copying + # First construct a map (to handle missing items in the target IDS) + data_map = _CoreEdgePlasmaMap(source.metadata.name, target.metadata.name, factory) + path_map = data_map.old_to_new # old = core/edge, new = plasma IDS + _copy_structure(source, target, deepcopy, path_map) + + return target diff --git a/imas/test/test_cli.py b/imas/test/test_cli.py index 130aa287..e696c056 100644 --- a/imas/test/test_cli.py +++ b/imas/test/test_cli.py @@ -3,7 +3,7 @@ import pytest from click.testing import CliRunner -from imas.command.cli import print_version +from imas.command.cli import print_version, convert_ids from imas.command.db_analysis import analyze_db, process_db_analysis from imas.db_entry import DBEntry from imas.test.test_helpers import fill_with_random_data @@ -100,3 +100,38 @@ def test_db_analysis_csv(tmp_path): wall,ids_properties/version_put/data_dictionary,,1.0,1.0 """ # noqa: E501 (line too long) ) + + +def test_imas_convert_with_plasma(tmp_path): + in_db = tmp_path / "in" + out_db = tmp_path / "out" + with DBEntry(f"imas:hdf5?path={in_db}", "w", dd_version="3.39.0") as entry: + for core_edge in ("core", "edge"): + for suffix in ("profiles", "sources", "transport"): + ids = entry.factory.new(f"{core_edge}_{suffix}") + ids.ids_properties.homogeneous_time = 2 + for i in range(4): + 
ids.ids_properties.comment = f"{core_edge}_{suffix} occurrence {i}" + entry.put(ids, i) + + runner = CliRunner() + with runner.isolated_filesystem(tmp_path): + convert_result = runner.invoke( + convert_ids, + [ + "--convert-to-plasma-ids", + f"imas:hdf5?path={in_db}", + "4.1.0", + f"imas:hdf5?path={out_db}", + ], + ) + assert convert_result.exit_code == 0 + + with DBEntry(f"imas:hdf5?path={out_db}", "r", dd_version="4.1.0") as entry: + for suffix in ("profiles", "sources", "transport"): + for i in range(8): + # We expect 8 occurrences, first 4 core, then 4 edge ones + core_edge = "core" if i < 4 else "edge" + expected_comment = f"{core_edge}_{suffix} occurrence {i % 4}" + ids = entry.get(f"plasma_{suffix}", i) + assert ids.ids_properties.comment == expected_comment diff --git a/imas/test/test_convert_core_edge_plasma.py b/imas/test/test_convert_core_edge_plasma.py new file mode 100644 index 00000000..08d8ca91 --- /dev/null +++ b/imas/test/test_convert_core_edge_plasma.py @@ -0,0 +1,68 @@ +import pytest + +import imas.training +from imas.util import idsdiffgen +from imas.test.test_helpers import fill_with_random_data + + +def assert_equal(core_edge, plasma): + # We only expect the IDS name to be different: + difflist = list(idsdiffgen(core_edge, plasma)) + assert difflist == [("IDS name", core_edge.metadata.name, plasma.metadata.name)] + + +def test_convert_training_core_profiles(): + with imas.training.get_training_db_entry() as entry: + cp = entry.get("core_profiles") + + pp = imas.convert_to_plasma_profiles(cp) + assert_equal(cp, pp) + + +def test_convert_missing_qty(): + cp = imas.IDSFactory("4.1.0").core_profiles() + cp.profiles_1d.resize(1) + cp.profiles_1d[0].ion.resize(1) + cp.profiles_1d[0].ion[0].state.resize(1) + cp.profiles_1d[0].ion[0].state[0].ionization_potential = 0.5 + + pp = imas.convert_to_plasma_profiles(cp) + # check that state[0] is copied, but that it's empty + assert not pp.profiles_1d[0].ion[0].state[0].has_value + + 
+@pytest.mark.parametrize("idsname", ["core_profiles", "edge_profiles"]) +def test_convert_randomly_filled_profiles(idsname): + ids = imas.IDSFactory("4.1.0").new(idsname) + fill_with_random_data(ids) + + if idsname == "core_profiles": + # ionization_potential doesn't exist in plasma_profiles in DD 4.1.0. This case + # is tested in test_convert_missing_qty. Unset these variables to avoid a diff: + for profiles in list(ids.profiles_1d) + list(ids.profiles_2d): + for ion in profiles.ion: + for state in ion.state: + del state.ionization_potential + del state.ionization_potential_error_upper + del state.ionization_potential_error_lower + + plasma = imas.convert_to_plasma_profiles(ids) + assert_equal(ids, plasma) + + +@pytest.mark.parametrize("idsname", ["core_sources", "edge_sources"]) +def test_convert_randomly_filled_sources(idsname): + ids = imas.IDSFactory("4.1.0").new(idsname) + fill_with_random_data(ids) + + plasma = imas.convert_to_plasma_sources(ids) + assert_equal(ids, plasma) + + +@pytest.mark.parametrize("idsname", ["core_transport", "edge_transport"]) +def test_convert_randomly_filled_transport(idsname): + ids = imas.IDSFactory("4.1.0").new(idsname) + fill_with_random_data(ids) + + plasma = imas.convert_to_plasma_transport(ids) + assert_equal(ids, plasma) diff --git a/setup.cfg b/setup.cfg index 8e5dd292..fe8ea370 100644 --- a/setup.cfg +++ b/setup.cfg @@ -11,9 +11,6 @@ exclude= docs max-line-length = 88 per-file-ignores= - # Ignore import errors in __init__.py (import not at top of file; imported but - # unused) - imas/__init__.py:E402,F401 # Lots of CLASSPATHS in this test file: adhering to line length would be less # readable imas/test/test_dd_helpers.py:E501 From 8dc5b34596f29acf5f5778b049c86ac09fad3bd4 Mon Sep 17 00:00:00 2001 From: Olivier Hoenen Date: Mon, 26 Jan 2026 08:09:21 +0100 Subject: [PATCH 5/9] update MDSplus models config w.r.t IMAS-Core/5.6.0 change (#98) --- imas/backends/imas_core/db_entry_al.py | 4 ++-- 
imas/backends/imas_core/mdsplus_model.py | 19 ++++++++++++++++++-
 2 files changed, 20 insertions(+), 3 deletions(-)

diff --git a/imas/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py
index b9d118dd..8559b0c0 100644
--- a/imas/backends/imas_core/db_entry_al.py
+++ b/imas/backends/imas_core/db_entry_al.py
@@ -39,7 +39,7 @@
 from .al_context import ALContext, LazyALContext
 from .db_entry_helpers import delete_children, get_children, put_children
 from .imas_interface import LLInterfaceError, ll_interface
-from .mdsplus_model import mdsplus_model_dir
+from .mdsplus_model import mdsplus_model_dir, get_mdsplus_model_var
 from .uda_support import extract_idsdef, get_dd_version_from_idsdef_xml
 
 _OPEN_MODES = {
@@ -120,7 +120,7 @@ def _setup_backend(cls, backend: str, mode: int, factory: IDSFactory) -> None:
             # Building the MDS+ models is required when creating a new Data Entry
             ids_path = mdsplus_model_dir(factory)
             if ids_path:
-                os.environ["ids_path"] = ids_path
+                os.environ[get_mdsplus_model_var()] = ids_path
 
         elif backend == "uda":
             # Set IDSDEF_PATH to point the UDA backend to the selected DD version
diff --git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py
index c5f09e29..d5f667ee 100644
--- a/imas/backends/imas_core/mdsplus_model.py
+++ b/imas/backends/imas_core/mdsplus_model.py
@@ -14,6 +14,7 @@
 from pathlib import Path
 from subprocess import CalledProcessError, check_output
 from zlib import crc32
+from packaging.version import Version
 
 try:
     from importlib.resources import as_file, files
@@ -24,6 +25,8 @@
 from imas.exception import MDSPlusModelError
 from imas.ids_factory import IDSFactory
 
+from .imas_interface import ll_interface
+
 logger = logging.getLogger(__name__)
 
 
@@ -292,6 +295,20 @@ def create_model_ids_xml(cache_dir_path, fname, version):
         raise e
 
 
+def get_mdsplus_model_var() -> str:
+    """
+    Return the environment variable name used by IMAS-Core to locate models:
+
+    - 'ids_path' for IMAS-Core<5.6
+    - 
'MDSPLUS_MODELS_PATH' for IMAS-Core>=5.6 + """ + return ( + "ids_path" + if ll_interface._al_version < Version("5.6.0") + else "MDSPLUS_MODELS_PATH" + ) + + def create_mdsplus_model(cache_dir_path: Path) -> None: """Use jtraverser to compile a valid MDS model file.""" try: @@ -322,7 +339,7 @@ def create_mdsplus_model(cache_dir_path: Path) -> None: env={ "PATH": os.environ.get("PATH", ""), "LD_LIBRARY_PATH": os.environ.get("LD_LIBRARY_PATH", ""), - "ids_path": str(cache_dir_path), + get_mdsplus_model_var(): str(cache_dir_path), }, ) # Touch a file to show that we have finished the model From c993495332e942ce2af5db59a6e444d142e19753 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 27 Jan 2026 13:53:15 +0100 Subject: [PATCH 6/9] Fix some incorrect type hints --- imas/_to_xarray.py | 2 +- imas/backends/imas_core/al_context.py | 6 +++--- imas/backends/imas_core/db_entry_al.py | 10 ++++++---- imas/backends/imas_core/mdsplus_model.py | 2 +- imas/backends/imas_core/uda_support.py | 4 ++-- imas/backends/netcdf/db_entry_nc.py | 2 +- imas/backends/netcdf/nc_metadata.py | 7 +++---- imas/ids_base.py | 6 +++--- imas/ids_convert.py | 8 +++----- imas/ids_metadata.py | 12 ++++++------ imas/ids_toplevel.py | 4 +++- 11 files changed, 32 insertions(+), 31 deletions(-) diff --git a/imas/_to_xarray.py b/imas/_to_xarray.py index 6caec501..13525c82 100644 --- a/imas/_to_xarray.py +++ b/imas/_to_xarray.py @@ -27,7 +27,7 @@ def to_xarray(ids: IDSToplevel, *paths: str) -> xarray.Dataset: # block checks if the paths are valid, and by using "metadata.path_string" we ensure # that / are used as separator. 
try: - paths = [ids.metadata[path].path_string for path in paths] + paths: list[str] = [ids.metadata[path].path_string for path in paths] except KeyError as exc: raise ValueError(str(exc)) from None diff --git a/imas/backends/imas_core/al_context.py b/imas/backends/imas_core/al_context.py index ede33bac..d3d2f620 100644 --- a/imas/backends/imas_core/al_context.py +++ b/imas/backends/imas_core/al_context.py @@ -115,7 +115,7 @@ def timerange_action( tmin: float, tmax: float, dtime: Optional[numpy.ndarray], - interpolation_method: int, + interpolation_method: Optional[int], ) -> "ALContext": """Begin a new timerange action for use in a ``with`` context.""" ctx = ll_interface.begin_timerange_action( @@ -163,7 +163,7 @@ def write_data(self, path: str, timebasepath: str, data: Any) -> None: """Call ual_write_data with this context.""" status = ll_interface.write_data(self.ctx, path, timebasepath, data) if status != 0: - raise LowlevelError(f"write data at {path!r}: {status=}") + raise LowlevelError(f"write data at {path!r}", status) def list_all_occurrences(self, ids_name: str) -> List[int]: """List all occurrences of this IDS.""" @@ -359,7 +359,7 @@ def timerange_action( tmin: float, tmax: float, dtime: Optional[numpy.ndarray], - interpolation_method: int, + interpolation_method: Optional[int], ) -> Iterator["LazyALContext"]: """Lazily start a lowlevel timerange action, see :meth:`ALContext.timerange_action`. 
diff --git a/imas/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py index 8559b0c0..167d04b5 100644 --- a/imas/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -96,7 +96,6 @@ def from_pulse_run( options: Any, factory: IDSFactory, ) -> "ALDBEntryImpl": - # Set defaults user_name = user_name or getpass.getuser() data_version = data_version or factory.dd_version @@ -138,7 +137,7 @@ def _setup_backend(cls, backend: str, mode: int, factory: IDSFactory) -> None: if idsdef_path is None: # Extract XML from the DD zip and point UDA to it idsdef_path = extract_idsdef(factory.version) - os.environ["IDSDEF_PATH"] = idsdef_path + os.environ["IDSDEF_PATH"] = str(idsdef_path) elif backend in ["hdf5", "memory", "ascii", "flexbuffers"]: pass # nothing to set up @@ -173,7 +172,7 @@ def get( destination: IDSToplevel, lazy: bool, nbc_map: Optional[NBCPathMap], - ) -> None: + ) -> IDSToplevel: if self._db_ctx is None: raise RuntimeError("Database entry is not open.") if lazy and self.backend == "ascii": @@ -333,9 +332,12 @@ def delete_data(self, ids_name: str, occurrence: int) -> None: ll_path += f"/{occurrence}" ids = self._ids_factory.new(ids_name) with self._db_ctx.global_action(ll_path, WRITE_OP) as write_ctx: - delete_children(ids.metadata, write_ctx, "") + delete_children(ids.metadata, write_ctx) def list_all_occurrences(self, ids_name: str) -> List[int]: + if self._db_ctx is None: + raise RuntimeError("Database entry is not open.") + try: occurrence_list = self._db_ctx.list_all_occurrences(ids_name) except LLInterfaceError: diff --git a/imas/backends/imas_core/mdsplus_model.py b/imas/backends/imas_core/mdsplus_model.py index d5f667ee..df813e8e 100644 --- a/imas/backends/imas_core/mdsplus_model.py +++ b/imas/backends/imas_core/mdsplus_model.py @@ -366,7 +366,7 @@ def jTraverser_jar() -> Path: for component in os.environ.get("CLASSPATH", "").split(":"): if component.endswith(".jar"): if re.search(".*jTraverser.jar", 
component): - return component + return Path(component) else: # assume its a directory (strip any '*' suffix) search_dirs.append(component.rstrip("*")) diff --git a/imas/backends/imas_core/uda_support.py b/imas/backends/imas_core/uda_support.py index f051f549..299d6381 100644 --- a/imas/backends/imas_core/uda_support.py +++ b/imas/backends/imas_core/uda_support.py @@ -1,6 +1,6 @@ import logging from pathlib import Path -from typing import Union +from typing import Union, Optional from xml.etree import ElementTree as ET from imas import dd_zip @@ -10,7 +10,7 @@ logger = logging.getLogger(__name__) -def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> str: +def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> Optional[str]: """Parse the IDSDef.xml up to the point where the Data Dictionary version is set. Returns: diff --git a/imas/backends/netcdf/db_entry_nc.py b/imas/backends/netcdf/db_entry_nc.py index e6ee32cb..0776c47b 100644 --- a/imas/backends/netcdf/db_entry_nc.py +++ b/imas/backends/netcdf/db_entry_nc.py @@ -100,7 +100,7 @@ def get( destination: IDSToplevel, lazy: bool, nbc_map: Optional[NBCPathMap], - ) -> None: + ) -> IDSToplevel: # Feature compatibility checks if parameters is not None: if isinstance(parameters, GetSliceParameters): diff --git a/imas/backends/netcdf/nc_metadata.py b/imas/backends/netcdf/nc_metadata.py index 94929957..246376b2 100644 --- a/imas/backends/netcdf/nc_metadata.py +++ b/imas/backends/netcdf/nc_metadata.py @@ -1,7 +1,6 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. -"""NetCDF metadata for dimensions and tensorization of IDSs. 
-""" +"""NetCDF metadata for dimensions and tensorization of IDSs.""" from functools import lru_cache from typing import Dict, List, Optional, Set, Tuple @@ -89,7 +88,7 @@ def __init__(self, ids_metadata: IDSMetadata) -> None: # Add cache for public API self.get_dimensions = lru_cache(maxsize=None)(self.get_dimensions) - def get_coordinates(self, path: str, homogeneous_time: bool) -> Tuple[str]: + def get_coordinates(self, path: str, homogeneous_time: bool) -> Tuple[str, ...]: """Get the coordinates (adhering to CF conventions) for a netCDF variable. Args: @@ -109,7 +108,7 @@ def get_coordinates(self, path: str, homogeneous_time: bool) -> Tuple[str]: for coord in self.coordinates[path] ) - def get_dimensions(self, path: str, homogeneous_time: bool) -> Tuple[str]: + def get_dimensions(self, path: str, homogeneous_time: bool) -> Tuple[str, ...]: """Get the dimensions for a netCDF variable. Args: diff --git a/imas/ids_base.py b/imas/ids_base.py index 5c74bf5c..7d466fb8 100644 --- a/imas/ids_base.py +++ b/imas/ids_base.py @@ -1,7 +1,6 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. -"""Base class for all IDS nodes. 
-""" +"""Base class for all IDS nodes.""" import logging from typing import TYPE_CHECKING, Optional, Type @@ -12,6 +11,7 @@ if TYPE_CHECKING: from imas.ids_toplevel import IDSToplevel + from imas.ids_primitive import IDSInt0D logger = logging.getLogger(__name__) @@ -35,7 +35,7 @@ class IDSBase: """True iff this IDS lazy-loads its data""" @property - def _time_mode(self) -> int: + def _time_mode(self) -> "IDSInt0D": """Retrieve the time mode from `/ids_properties/homogeneous_time`""" return self._parent._time_mode diff --git a/imas/ids_convert.py b/imas/ids_convert.py index a1631b83..a1707c16 100644 --- a/imas/ids_convert.py +++ b/imas/ids_convert.py @@ -7,7 +7,7 @@ import logging from functools import lru_cache, partial from pathlib import Path -from typing import Callable, Dict, Iterator, List, Optional, Set, Tuple +from typing import Callable, Dict, Iterator, List, Optional, Set, Tuple, Any from xml.etree.ElementTree import Element, ElementTree import numpy @@ -70,7 +70,7 @@ def __init__(self) -> None: self.ctxpath: Dict[str, str] = {} """Map providing the lowlevel context path for renamed elements.""" - self.type_change: Dict[str, Optional[Callable[[IDSBase, IDSBase], None]]] = {} + self.type_change: Dict[str, Optional[Callable[[IDSBase, IDSBase], Any]]] = {} """Dictionary of paths that had a type change. Type changes are mapped to None in :py:attr:`path`, this ``dict`` allows to @@ -1001,9 +1001,7 @@ def _repeat_first_point(node: IDSBase) -> None: child.value = numpy.concatenate((child.value, [child.value[0]])) -def _remove_last_point_conditional( - source_node: IDSStructure, target_node: IDSStructure -) -> None: +def _remove_last_point_conditional(source_node: IDSBase, target_node: IDSBase) -> None: """Type change method for nbc_description=repeat_children_first_point_conditional*. This method handles converting from new (DDv4) to old (DDv3). 
diff --git a/imas/ids_metadata.py b/imas/ids_metadata.py index 4d2d5dbb..c1e29f43 100644 --- a/imas/ids_metadata.py +++ b/imas/ids_metadata.py @@ -1,7 +1,7 @@ # This file is part of IMAS-Python. # You should have received the IMAS-Python LICENSE file with this project. -"""Core of the IMAS-Python interpreted IDS metadata -""" +"""Core of the IMAS-Python interpreted IDS metadata""" + import re import types from enum import Enum @@ -77,7 +77,7 @@ def get_toplevel_metadata(structure_xml: Element) -> "IDSMetadata": IDSMetadata.__setattr__ = orig_setattr -_type_map: Dict[Tuple[IDSDataType, int], Type] = {} +_type_map: Dict[Tuple[Optional[IDSDataType], int], Type] = {} """Map of IDSDataType and ndim to IDSBase implementation class.""" @@ -205,11 +205,11 @@ def __init__( if self._parent is not None: self._is_dynamic = self.type.is_dynamic or self._parent._is_dynamic - self.coordinates: "tuple[IDSCoordinate]" + self.coordinates: "tuple[IDSCoordinate, ...]" """Tuple of coordinates of this node. ``coordinates[0]`` is the coordinate of the first dimension, etc.""" - self.coordinates_same_as: "tuple[IDSCoordinate]" + self.coordinates_same_as: "tuple[IDSCoordinate, ...]" """Indicates quantities which share the same coordinate in a given dimension, but the coordinate is not explicitly stored in the IDS.""" if self.ndim == 0: @@ -231,7 +231,7 @@ def __init__( self.coordinates_same_as = tuple(coors_same_as) # Parse alternative coordinates - self.alternative_coordinates: "tuple[IDSPath]" = () + self.alternative_coordinates: "tuple[IDSPath, ...]" = () """Quantities that can be used as coordinate instead of this node.""" if "alternative_coordinate1" in attrib: self.alternative_coordinates = tuple( diff --git a/imas/ids_toplevel.py b/imas/ids_toplevel.py index fcda5f0d..67e53c0f 100644 --- a/imas/ids_toplevel.py +++ b/imas/ids_toplevel.py @@ -25,6 +25,7 @@ ) from imas.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata from imas.ids_structure import IDSStructure +from 
imas.ids_primitive import IDSInt0D if TYPE_CHECKING: from imas.db_entry import DBEntry @@ -61,6 +62,7 @@ class IDSToplevel(IDSStructure): __doc__ = IDSDoc(__doc__) _path = "" # Path to ourselves without the IDS name and slashes + _parent: "IDSFactory" # In contrast to IDSBase, our parent is the IDSFactory def __init__(self, parent: "IDSFactory", structure_xml, lazy=False): """Save backend_version and backend_xml and build translation layer. @@ -89,7 +91,7 @@ def _dd_version(self) -> str: return self._version @property - def _time_mode(self) -> int: + def _time_mode(self) -> IDSInt0D: """Retrieve the time mode from `/ids_properties/homogeneous_time`""" return self.ids_properties.homogeneous_time From 855fb9491f024bb7d4d4e179e8e757ffc1d7749b Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Tue, 27 Jan 2026 13:54:19 +0100 Subject: [PATCH 7/9] Remove legacy tool `tools/extract_test_data.py` depended on the legacy AL-Python (`imas.imasdef.*`) --- tools/extract_test_data.py | 57 -------------------------------------- 1 file changed, 57 deletions(-) delete mode 100644 tools/extract_test_data.py diff --git a/tools/extract_test_data.py b/tools/extract_test_data.py deleted file mode 100644 index c17f8ec8..00000000 --- a/tools/extract_test_data.py +++ /dev/null @@ -1,57 +0,0 @@ -# This file is part of IMAS-Python. -# You should have received the IMAS-Python LICENSE file with this project. 
-import os - -import imas - -# Open input datafile -pulse, run, user, database = 134173, 106, "public", "ITER" -input = imas.DBEntry(imas.imasdef.MDSPLUS_BACKEND, database, pulse, run, user) -input.open() - -# Read Te profile and the associated normalised toroidal flux coordinate -get_these_idss = ["equilibrium", "core_profiles"] -idss = {} -# The reference has 871 timepoints -for time_index in [0, 433, 871]: - for ids_name in get_these_idss: - if ids_name not in idss: - idss[ids_name] = [] - idss[ids_name].append( - input.get_slice( - ids_name, - time_index, - imas.imasdef.PREVIOUS_INTERP, - occurrence=0, - ) - ) - -# Close the datafile -input.close() - -# Dump the data to ASCII -# Create output datafile -temp = imas.DBEntry(imas.imasdef.MEMORY_BACKEND, database, pulse, run, user) -temp.create() -for ids_name, ids_list in idss.items(): - for ids_slice in ids_list: - temp.put_slice(ids_slice) - -uber_idss = {} -for ids_name in idss: - uber_idss[ids_name] = temp.get(ids_name) -temp.close() - - -user = os.getenv("USER") -# Because we use the ASCII backend, this results in a .ids file in the cwd -output = imas.DBEntry(imas.imasdef.ASCII_BACKEND, database, pulse, run, user) -output.create() - -# Save the IDS -for ids_name, ids in uber_idss.items(): - print(f"Putting {ids_name}") - output.put(ids) - -# Close the output datafile -output.close() From 244ad3bc93513a2576ef46cae8a9570f3c1c6ab9 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts <110895564+maarten-ic@users.noreply.github.com> Date: Wed, 28 Jan 2026 17:51:41 +0100 Subject: [PATCH 8/9] Add a RuntimeError in some cases when using the UDA backend (#99) --- docs/source/multi-dd.rst | 113 +++++++++++++++++++++---- imas/backends/imas_core/db_entry_al.py | 16 ++++ imas/test/test_dbentry_uda.py | 80 +++++++++++++++++ 3 files changed, 194 insertions(+), 15 deletions(-) create mode 100644 imas/test/test_dbentry_uda.py diff --git a/docs/source/multi-dd.rst b/docs/source/multi-dd.rst index ae1175fd..ea0e9f1b 100644 --- 
a/docs/source/multi-dd.rst +++ b/docs/source/multi-dd.rst @@ -276,27 +276,110 @@ You need to explicitly convert the data, which you can do as follows: entry.put(imas.convert_ids(equilibrium, entry.dd_version)) +.. _`UDA backend and DD versions`: -.. _`DD background`: +UDA backend caching and Data Dictionary versions +------------------------------------------------ -Background information ----------------------- +If you try to load data from a different Data Dictionary version with the UDA backend, +you may see the following error: + +.. code-block:: text + + The Data Dictionary version of the data (3.38.1) is different from the Data + Dictionary version of the DBEntry (3.42.0). This is not supported when using the + UDA backend. + +There are three possible workarounds. The first two require passing an additional option +in the IMAS UDA URI: please see the `imas-core documentation +`__ +for more details on these URI options. + +1. Use UDA fetch to bypass the cache problem. You can do this by appending ``&fetch=1`` + to the URI when you create the :py:class:`~imas.db_entry.DBEntry`. + + Note that this will download the entire IDS files from the remote server, this may + not be desired if you only want to read a single time slice. +2. Disable the UDA cache. You can do this by appending ``&cache_mode=none`` to the URI + when you create the :py:class:`~imas.db_entry.DBEntry`. + + Note that this may make the ``get()`` (a lot) slower, since a separate request needs + to be sent to the remote UDA server for every data variable. However, this may still + be the best performing option if you are only interested in a subset of all the data + in an IDS (and use :ref:`lazy loading`). +3. Explicitly provide the data dictionary version when you create the + :py:class:`~imas.db_entry.DBEntry`, setting it to match the Data Dictionary version + of the data you want to load. 
To obtain the version of the data on the remote server,
+   read the field ``ids_properties.version_put.data_dictionary`` via a *lazy* ``get()``
+   with the ``autoconvert=False`` option, using the ``&cache_mode=none`` query in the URI.
code-block:: python + + import imas + + URI = ( + "imas://uda.iter.org:56565/uda?backend=hdf5" + "&path=/work/imas/shared/imasdb/ITER/3/121013/50&cache_mode=none" + ) + with imas.DBEntry(URI, "r") as entry: + cp = entry.get("core_profiles") + + .. md-tab-item:: 3. Explicitly provide the DD version + + .. code-block:: python + + import imas + + URI = ( + "imas://uda.iter.org:56565/uda?backend=hdf5" + "&path=/work/imas/shared/imasdb/ITER/3/121013/50" + ) + with imas.DBEntry(URI, "r", dd_version="3.38.1") as entry: + cp = entry.get("core_profiles") + + # Optional: convert the IDS to your desired DD version + cp = imas.convert_ids(cp, "3.42.0") + + +.. _`DD background`: + +Background information +---------------------- Data Dictionary definitions ''''''''''''''''''''''''''' diff --git a/imas/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py index 167d04b5..c9e27691 100644 --- a/imas/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -256,6 +256,22 @@ def read_dd_version(self, ids_name: str, occurrence: int) -> str: raise DataEntryException( f"IDS {ids_name!r}, occurrence {occurrence} is empty." ) + + # UDA caching doesn't play well when the DD version of the on-disk IDS doesn't + # match the DD version of this DBEntry. See GH#97 + if self.backend == "uda" and dd_version != self._ids_factory.dd_version: + cache_mode = self._querydict.get("cache_mode") + fetch = self._querydict.get("fetch") + if cache_mode != "none" and fetch not in ("1", "true"): + raise RuntimeError( + f"The Data Dictionary version of the data ({dd_version}) is " + "different from the Data Dictionary version of the DBEntry " + f"({self._ids_factory.dd_version}). This is not supported when " + f"using the UDA backend. See {imas.PUBLISHED_DOCUMENTATION_ROOT}" + "multi-dd.html#uda-backend-caching-and-data-dictionary-versions " + "for more details and workarounds." 
+    """Mock the IMAS lowlevel interface so we can still test our UDA-specific logic.
Path(os.environ["IDSDEF_PATH"]) + assert path2.exists() + assert path1 != path2 + + +def test_uda_datapath(mock_ll_interface): + # Check that datapath is set when requesting the dd version + with DBEntry("imas:uda?mock", "r", dd_version="4.0.0") as entry: + mock_ll_interface.begin_global_action.assert_not_called() + entry.get("mhd", lazy=True) + # pulseCtx=0, dataobjectname="mhd", rwmode=READ_OP, datapath="ids_properties" + mock_ll_interface.begin_global_action.assert_called_with( + 0, "mhd", READ_OP, "ids_properties" + ) + + +def test_uda_version_mismatch_exception(mock_ll_interface): + # Check that we get an exception when versions mismatch + with pytest.raises(RuntimeError, match="Data Dictionary version"): + DBEntry("imas:uda?path=mock", "r", dd_version="4.1.0").get("mhd") + # No exceptions when using cache_mode=none + DBEntry("imas:uda?path=mock&cache_mode=none", "r", dd_version="4.1.0").get("mhd") + # Or when using fetch + DBEntry("imas:uda?path=mock&fetch=true", "r", dd_version="4.1.0").get("mhd") + DBEntry("imas:uda?path=mock&fetch=1", "r", dd_version="4.1.0").get("mhd") + # Or when using the exact same DD version + DBEntry("imas:uda?path=mock", "r", dd_version="4.0.0").get("mhd") From 60194cc7b09c00a83d304beb42060c001d73a542 Mon Sep 17 00:00:00 2001 From: Maarten Sebregts Date: Wed, 11 Feb 2026 15:52:56 +0100 Subject: [PATCH 9/9] Minor performance improvement on put Convert `ids_properties.homogeneous_time` to an `int` before (potentially many) comparisons are executed --- imas/backends/imas_core/db_entry_al.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/imas/backends/imas_core/db_entry_al.py b/imas/backends/imas_core/db_entry_al.py index c9e27691..fc58270d 100644 --- a/imas/backends/imas_core/db_entry_al.py +++ b/imas/backends/imas_core/db_entry_al.py @@ -303,7 +303,7 @@ def put(self, ids: IDSToplevel, occurrence: int, is_slice: bool) -> None: if occurrence != 0: ll_path += f"/{occurrence}" - time_mode = 
ids.ids_properties.homogeneous_time + time_mode = int(ids.ids_properties.homogeneous_time) if is_slice: with self._db_ctx.global_action(ll_path, READ_OP) as read_ctx: db_time_mode = read_ctx.read_data(