2 changes: 1 addition & 1 deletion imas/_to_xarray.py
@@ -27,7 +27,7 @@ def to_xarray(ids: IDSToplevel, *paths: str) -> xarray.Dataset:
     # block checks if the paths are valid, and by using "metadata.path_string" we ensure
     # that / are used as separator.
     try:
-        paths = [ids.metadata[path].path_string for path in paths]
+        paths: list[str] = [ids.metadata[path].path_string for path in paths]
     except KeyError as exc:
         raise ValueError(str(exc)) from None

6 changes: 3 additions & 3 deletions imas/backends/imas_core/al_context.py
@@ -115,7 +115,7 @@ def timerange_action(
         tmin: float,
         tmax: float,
         dtime: Optional[numpy.ndarray],
-        interpolation_method: int,
+        interpolation_method: Optional[int],
     ) -> "ALContext":
         """Begin a new timerange action for use in a ``with`` context."""
         ctx = ll_interface.begin_timerange_action(
@@ -163,7 +163,7 @@ def write_data(self, path: str, timebasepath: str, data: Any) -> None:
         """Call ual_write_data with this context."""
         status = ll_interface.write_data(self.ctx, path, timebasepath, data)
         if status != 0:
-            raise LowlevelError(f"write data at {path!r}: {status=}")
+            raise LowlevelError(f"write data at {path!r}", status)

     def list_all_occurrences(self, ids_name: str) -> List[int]:
         """List all occurrences of this IDS."""
@@ -359,7 +359,7 @@ def timerange_action(
         tmin: float,
         tmax: float,
         dtime: Optional[numpy.ndarray],
-        interpolation_method: int,
+        interpolation_method: Optional[int],
     ) -> Iterator["LazyALContext"]:
         """Lazily start a lowlevel timerange action, see
         :meth:`ALContext.timerange_action`.
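Note (illustrative, not part of the diff): Optional[int] documents that callers may pass None for interpolation_method when no interpolation is requested, and the changed raise passes the status code to LowlevelError as a separate argument instead of baking it into the message. The real constructor is not shown in this diff; the sketch below uses a hypothetical exception class that composes its message from an operation description and a status code.

from typing import Optional


class HypotheticalLowlevelError(RuntimeError):
    # Assumed behaviour (not the real LowlevelError): append the status to the message.
    def __init__(self, operation: str, status: Optional[int] = None) -> None:
        super().__init__(operation if status is None else f"{operation}: {status=}")
        self.status = status


def write_data(path: str, status: int) -> None:
    # Mirrors the pattern in the diff: pass the raw status to the exception.
    if status != 0:
        raise HypotheticalLowlevelError(f"write data at {path!r}", status)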
10 changes: 6 additions & 4 deletions imas/backends/imas_core/db_entry_al.py
@@ -96,7 +96,6 @@ def from_pulse_run(
         options: Any,
         factory: IDSFactory,
     ) -> "ALDBEntryImpl":
-
         # Set defaults
         user_name = user_name or getpass.getuser()
         data_version = data_version or factory.dd_version
@@ -138,7 +137,7 @@ def _setup_backend(cls, backend: str, mode: int, factory: IDSFactory) -> None:
             if idsdef_path is None:
                 # Extract XML from the DD zip and point UDA to it
                 idsdef_path = extract_idsdef(factory.version)
-                os.environ["IDSDEF_PATH"] = idsdef_path
+                os.environ["IDSDEF_PATH"] = str(idsdef_path)

         elif backend in ["hdf5", "memory", "ascii", "flexbuffers"]:
             pass  # nothing to set up
@@ -173,7 +172,7 @@ def get(
         destination: IDSToplevel,
         lazy: bool,
         nbc_map: Optional[NBCPathMap],
-    ) -> None:
+    ) -> IDSToplevel:
         if self._db_ctx is None:
             raise RuntimeError("Database entry is not open.")
         if lazy and self.backend == "ascii":
@@ -333,9 +332,12 @@ def delete_data(self, ids_name: str, occurrence: int) -> None:
             ll_path += f"/{occurrence}"
         ids = self._ids_factory.new(ids_name)
         with self._db_ctx.global_action(ll_path, WRITE_OP) as write_ctx:
-            delete_children(ids.metadata, write_ctx, "")
+            delete_children(ids.metadata, write_ctx)

     def list_all_occurrences(self, ids_name: str) -> List[int]:
+        if self._db_ctx is None:
+            raise RuntimeError("Database entry is not open.")
+
         try:
             occurrence_list = self._db_ctx.list_all_occurrences(ids_name)
         except LLInterfaceError:
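Note (illustrative, not part of the diff): the guard added to list_all_occurrences fails fast with a clear error when the entry is closed, and it also narrows the Optional type of the context attribute so the call on the following line type-checks. The same pattern in isolation, with made-up names:

from typing import List, Optional


class _Context:
    def list_all_occurrences(self, ids_name: str) -> List[int]:
        return [0]


class Entry:
    def __init__(self) -> None:
        self._ctx: Optional[_Context] = None  # None until the entry is opened

    def list_all_occurrences(self, ids_name: str) -> List[int]:
        if self._ctx is None:
            # Clear error instead of an AttributeError; also narrows Optional[_Context].
            raise RuntimeError("Database entry is not open.")
        return self._ctx.list_all_occurrences(ids_name)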
2 changes: 1 addition & 1 deletion imas/backends/imas_core/mdsplus_model.py
@@ -366,7 +366,7 @@ def jTraverser_jar() -> Path:
     for component in os.environ.get("CLASSPATH", "").split(":"):
         if component.endswith(".jar"):
             if re.search(".*jTraverser.jar", component):
-                return component
+                return Path(component)
         else:  # assume its a directory (strip any '*' suffix)
             search_dirs.append(component.rstrip("*"))

4 changes: 2 additions & 2 deletions imas/backends/imas_core/uda_support.py
@@ -1,6 +1,6 @@
 import logging
 from pathlib import Path
-from typing import Union
+from typing import Union, Optional
 from xml.etree import ElementTree as ET

 from imas import dd_zip
@@ -10,7 +10,7 @@
 logger = logging.getLogger(__name__)


-def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> str:
+def get_dd_version_from_idsdef_xml(path: Union[str, Path]) -> Optional[str]:
     """Parse the IDSDef.xml up to the point where the Data Dictionary version is set.

     Returns:
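Note (illustrative, not part of the diff): the docstring says the function parses IDSDef.xml only up to the point where the Data Dictionary version is set, so it can come up empty; Optional[str] makes the None case explicit for callers. A minimal sketch of such an early-exit parse (the element name "version" is an assumption, not taken from the real IDSDef.xml):

from pathlib import Path
from typing import Optional, Union
from xml.etree import ElementTree as ET


def get_version(path: Union[str, Path]) -> Optional[str]:
    # Stream the file and stop at the first matching element instead of
    # parsing the whole document.
    for _event, element in ET.iterparse(path, events=("end",)):
        if element.tag == "version":
            return element.text
    return None  # no version element found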
2 changes: 1 addition & 1 deletion imas/backends/netcdf/db_entry_nc.py
@@ -100,7 +100,7 @@ def get(
         destination: IDSToplevel,
         lazy: bool,
         nbc_map: Optional[NBCPathMap],
-    ) -> None:
+    ) -> IDSToplevel:
         # Feature compatibility checks
         if parameters is not None:
             if isinstance(parameters, GetSliceParameters):
7 changes: 3 additions & 4 deletions imas/backends/netcdf/nc_metadata.py
@@ -1,7 +1,6 @@
 # This file is part of IMAS-Python.
 # You should have received the IMAS-Python LICENSE file with this project.
-"""NetCDF metadata for dimensions and tensorization of IDSs.
-"""
+"""NetCDF metadata for dimensions and tensorization of IDSs."""

 from functools import lru_cache
 from typing import Dict, List, Optional, Set, Tuple
@@ -89,7 +88,7 @@ def __init__(self, ids_metadata: IDSMetadata) -> None:
         # Add cache for public API
         self.get_dimensions = lru_cache(maxsize=None)(self.get_dimensions)

-    def get_coordinates(self, path: str, homogeneous_time: bool) -> Tuple[str]:
+    def get_coordinates(self, path: str, homogeneous_time: bool) -> Tuple[str, ...]:
         """Get the coordinates (adhering to CF conventions) for a netCDF variable.

         Args:
@@ -109,7 +108,7 @@ def get_coordinates(self, path: str, homogeneous_time: bool) -> Tuple[str]:
             for coord in self.coordinates[path]
         )

-    def get_dimensions(self, path: str, homogeneous_time: bool) -> Tuple[str]:
+    def get_dimensions(self, path: str, homogeneous_time: bool) -> Tuple[str, ...]:
         """Get the dimensions for a netCDF variable.

         Args:
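Note (illustrative, not part of the diff): Tuple[str] is the type of a tuple with exactly one string, while Tuple[str, ...] is a variable-length tuple of strings; the new annotations match what these methods actually return. The distinction in isolation:

from typing import Tuple

one: Tuple[str] = ("time",)  # exactly one element
many: Tuple[str, ...] = ("time", "rho_tor_norm")  # any number of elements

# Rejected by a type checker: a two-element tuple is not Tuple[str].
# wrong: Tuple[str] = ("time", "rho_tor_norm")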
6 changes: 3 additions & 3 deletions imas/ids_base.py
@@ -1,7 +1,6 @@
 # This file is part of IMAS-Python.
 # You should have received the IMAS-Python LICENSE file with this project.
-"""Base class for all IDS nodes.
-"""
+"""Base class for all IDS nodes."""

 import logging
 from typing import TYPE_CHECKING, Optional, Type
@@ -12,6 +11,7 @@

 if TYPE_CHECKING:
     from imas.ids_toplevel import IDSToplevel
+    from imas.ids_primitive import IDSInt0D

 logger = logging.getLogger(__name__)

@@ -35,7 +35,7 @@ class IDSBase:
     """True iff this IDS lazy-loads its data"""

     @property
-    def _time_mode(self) -> int:
+    def _time_mode(self) -> "IDSInt0D":
         """Retrieve the time mode from `/ids_properties/homogeneous_time`"""
         return self._parent._time_mode

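Note (illustrative, not part of the diff): the IDSInt0D import lives under if TYPE_CHECKING: and the annotation is quoted, so the return type is precise for static analysis without importing imas.ids_primitive at runtime, which is the usual way to avoid a circular import. The pattern in isolation, with made-up module and class names:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only evaluated by type checkers; never imported at runtime.
    from mypackage.primitives import Int0D


class Node:
    @property
    def _time_mode(self) -> "Int0D":
        # The quoted annotation is resolved lazily by the type checker,
        # so no runtime import of the primitives module is needed.
        raise NotImplementedError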
8 changes: 3 additions & 5 deletions imas/ids_convert.py
@@ -7,7 +7,7 @@
 import logging
 from functools import lru_cache, partial
 from pathlib import Path
-from typing import Callable, Dict, Iterator, List, Optional, Set, Tuple
+from typing import Callable, Dict, Iterator, List, Optional, Set, Tuple, Any
 from xml.etree.ElementTree import Element, ElementTree

 import numpy
@@ -70,7 +70,7 @@ def __init__(self) -> None:
         self.ctxpath: Dict[str, str] = {}
         """Map providing the lowlevel context path for renamed elements."""

-        self.type_change: Dict[str, Optional[Callable[[IDSBase, IDSBase], None]]] = {}
+        self.type_change: Dict[str, Optional[Callable[[IDSBase, IDSBase], Any]]] = {}
         """Dictionary of paths that had a type change.

         Type changes are mapped to None in :py:attr:`path`, this ``dict`` allows to
@@ -1001,9 +1001,7 @@ def _repeat_first_point(node: IDSBase) -> None:
         child.value = numpy.concatenate((child.value, [child.value[0]]))


-def _remove_last_point_conditional(
-    source_node: IDSStructure, target_node: IDSStructure
-) -> None:
+def _remove_last_point_conditional(source_node: IDSBase, target_node: IDSBase) -> None:
     """Type change method for nbc_description=repeat_children_first_point_conditional*.

     This method handles converting from new (DDv4) to old (DDv3).
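Note (illustrative, not part of the diff): callable return types are covariant, so a conversion callback that returns a value is not assignable to Callable[..., None]; widening the registry's value type to Callable[..., Any] accepts callbacks regardless of what they return. In isolation:

from typing import Any, Callable, Dict, Optional


def drops_point(source: object, target: object) -> bool:  # returns a value
    return True


strict: Dict[str, Optional[Callable[[object, object], None]]] = {}
# strict["x"] = drops_point  # rejected: "bool" is incompatible with "None"

relaxed: Dict[str, Optional[Callable[[object, object], Any]]] = {}
relaxed["x"] = drops_point  # accepted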
12 changes: 6 additions & 6 deletions imas/ids_metadata.py
@@ -1,7 +1,7 @@
 # This file is part of IMAS-Python.
 # You should have received the IMAS-Python LICENSE file with this project.
-"""Core of the IMAS-Python interpreted IDS metadata
-"""
+"""Core of the IMAS-Python interpreted IDS metadata"""
+
 import re
 import types
 from enum import Enum
@@ -77,7 +77,7 @@ def get_toplevel_metadata(structure_xml: Element) -> "IDSMetadata":
         IDSMetadata.__setattr__ = orig_setattr


-_type_map: Dict[Tuple[IDSDataType, int], Type] = {}
+_type_map: Dict[Tuple[Optional[IDSDataType], int], Type] = {}
 """Map of IDSDataType and ndim to IDSBase implementation class."""


@@ -205,11 +205,11 @@ def __init__(
         if self._parent is not None:
             self._is_dynamic = self.type.is_dynamic or self._parent._is_dynamic

-        self.coordinates: "tuple[IDSCoordinate]"
+        self.coordinates: "tuple[IDSCoordinate, ...]"
         """Tuple of coordinates of this node.

         ``coordinates[0]`` is the coordinate of the first dimension, etc."""
-        self.coordinates_same_as: "tuple[IDSCoordinate]"
+        self.coordinates_same_as: "tuple[IDSCoordinate, ...]"
         """Indicates quantities which share the same coordinate in a given dimension,
         but the coordinate is not explicitly stored in the IDS."""
         if self.ndim == 0:
@@ -231,7 +231,7 @@ def __init__(
             self.coordinates_same_as = tuple(coors_same_as)

         # Parse alternative coordinates
-        self.alternative_coordinates: "tuple[IDSPath]" = ()
+        self.alternative_coordinates: "tuple[IDSPath, ...]" = ()
         """Quantities that can be used as coordinate instead of this node."""
         if "alternative_coordinate1" in attrib:
             self.alternative_coordinates = tuple(
4 changes: 3 additions & 1 deletion imas/ids_toplevel.py
@@ -25,6 +25,7 @@
 )
 from imas.ids_metadata import IDSMetadata, IDSType, get_toplevel_metadata
 from imas.ids_structure import IDSStructure
+from imas.ids_primitive import IDSInt0D

 if TYPE_CHECKING:
     from imas.db_entry import DBEntry
@@ -61,6 +62,7 @@ class IDSToplevel(IDSStructure):

     __doc__ = IDSDoc(__doc__)
     _path = ""  # Path to ourselves without the IDS name and slashes
+    _parent: "IDSFactory"  # In contrast to IDSBase, our parent is the IDSFactory

     def __init__(self, parent: "IDSFactory", structure_xml, lazy=False):
         """Save backend_version and backend_xml and build translation layer.
@@ -89,7 +91,7 @@ def _dd_version(self) -> str:
         return self._version

     @property
-    def _time_mode(self) -> int:
+    def _time_mode(self) -> IDSInt0D:
         """Retrieve the time mode from `/ids_properties/homogeneous_time`"""
         return self.ids_properties.homogeneous_time

57 changes: 0 additions & 57 deletions tools/extract_test_data.py

This file was deleted.
