From ac7c7cfe24f8df28cad2be62e1bfd2aa7bcf955e Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Wed, 21 Jan 2026 14:50:39 +0000 Subject: [PATCH 01/77] SPEC0 - support Python 3.12 to 3.14 (#6816) * matplotlib/matplotlib#30198: Python 3.14 compatible deepcopy. * Remove Python 3.14 pin. * Support Python 3.14. * Fix dumb versioning in test_python_versions. * Drop support for Python 3.11. * Update lock-files. * Removed `geovista` from py314 requirements. * pytest skips added to geovista experimental tests if import missing. * Refreshed lockfiles (no geovista in py314 lock) * Fixed lockfiles (were using JFrog Artifactory URL) * Added optional skips for doctests * Better geovista check in conf.py and reinstate missing doctest import * Updated whatsnew * Reference #6902 (reinstate goevista) in py314 YML file * Link `iris.yml` to `py314.yml` (and by association `readthedocs.yml`) --------- Co-authored-by: ukmo-ccbunney Co-authored-by: Chris Bunney <48915820+ukmo-ccbunney@users.noreply.github.com> --- .github/workflows/ci-tests.yml | 8 +- .github/workflows/ci-wheels.yml | 2 +- benchmarks/bm_runner.py | 2 +- docs/src/conf.py | 16 +- docs/src/further_topics/ugrid/operations.rst | 1 + docs/src/whatsnew/latest.rst | 8 +- lib/iris/experimental/geovista.py | 193 +++++++----- .../experimental/geovista/__init__.py | 8 + lib/iris/tests/test_coding_standards.py | 6 +- .../unit/experimental/geovista/__init__.py | 7 + noxfile.py | 2 +- pyproject.toml | 4 +- requirements/iris.yml | 2 +- requirements/locks/py312-linux-64.lock | 26 +- requirements/locks/py313-linux-64.lock | 26 +- requirements/locks/py314-linux-64.lock | 283 ++++++++++++++++++ requirements/{py311.yml => py314.yml} | 4 +- 17 files changed, 473 insertions(+), 125 deletions(-) create mode 100644 requirements/locks/py314-linux-64.lock rename requirements/{py311.yml => py314.yml} (91%) diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index 
adc4892b10..9088aef8f1 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -35,18 +35,18 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest"] - python-version: ["3.13"] + python-version: ["3.14"] session: ["doctest", "gallery"] include: - os: "ubuntu-latest" - python-version: "3.13" + python-version: "3.14" session: "tests" coverage: "--coverage" - os: "ubuntu-latest" - python-version: "3.12" + python-version: "3.13" session: "tests" - os: "ubuntu-latest" - python-version: "3.11" + python-version: "3.12" session: "tests" env: diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index d2bf28aa8e..e17e2ef983 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -52,7 +52,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.11", "3.12", "3.13"] + python-version: ["3.12", "3.13", "3.14"] session: ["wheel"] env: ENV_NAME: "ci-wheels" diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 7623b4458b..79cb0798fb 100755 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -67,7 +67,7 @@ def _check_requirements(package: str) -> None: def _prep_data_gen_env() -> None: """Create or access a separate, unchanging environment for generating test data.""" - python_version = "3.13" + python_version = "3.14" data_gen_var = "DATA_GEN_PYTHON" if data_gen_var in environ: echo("Using existing data generation environment.") diff --git a/docs/src/conf.py b/docs/src/conf.py index fa896aba69..7d4ad5d9e1 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -20,6 +20,7 @@ """Config for sphinx.""" import datetime +import importlib from importlib.metadata import version as get_version from inspect import getsource import ntpath @@ -219,6 +220,11 @@ def _dotv(version): autoclass_content = "both" modindex_common_prefix = ["iris"] +# if geovista is not installed we need to mock the imports so the autodoc build works: +if importlib.util.find_spec("geovista") is 
None: + autodoc_mock_imports = ["geovista", "pyvista"] + + # -- apidoc extension --------------------------------------------------------- # See https://github.com/sphinx-contrib/apidoc source_code_root = (Path(__file__).parents[2]).absolute() @@ -283,7 +289,15 @@ def _dotv(version): # -- Doctest ("make doctest")-------------------------------------------------- -doctest_global_setup = "import iris" +doctest_global_setup = """ +import iris + +# To handle conditional doctest skipping if geovista is not installed: +try: + import geovista as gv +except ImportError: + gv = None +""" # -- Options for HTML output -------------------------------------------------- diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/further_topics/ugrid/operations.rst index 14e746352f..59b178559e 100644 --- a/docs/src/further_topics/ugrid/operations.rst +++ b/docs/src/further_topics/ugrid/operations.rst @@ -583,6 +583,7 @@ below: :icon: code .. doctest:: ugrid_operations + :skipif: gv is None >>> from geovista.geodesic import BBox >>> from iris import load_cube, sample_data_path diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index ed0dff6197..3fbd65c1ae 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -98,13 +98,19 @@ This document explains the changes made to Iris for this release #. `@tkknight`_ removed flake8, we have ruff now instead. (:pull:`6889`) +#. `@trexfeathers`_ and `@ukmo-ccbunney`_ updated CI to support Python 3.14 + inline with `SPEC0 Minimum Supported Dependencies`_. Note: `pyvista` (and + hence `geovista`) is not yet compatible with Python 3.14, so + `:module:~iris.experimental.geovista` is currently only available for + Python \<3.14. (:pull:`6816`, :issue:`6775`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: .. _@hdyson: https://github.com/hdyson - +.. 
_SPEC0 Minimum Supported Dependencies: https://scientific-python.org/specs/spec-0000/ .. comment Whatsnew resources in alphabetical order: diff --git a/lib/iris/experimental/geovista.py b/lib/iris/experimental/geovista.py index 57cbded2c2..a8e2c25c5d 100644 --- a/lib/iris/experimental/geovista.py +++ b/lib/iris/experimental/geovista.py @@ -64,64 +64,79 @@ def cube_to_polydata(cube, **kwargs): cube_w_time = load_cube(sample_data_path("A1B_north_america.nc")) cube_mesh = load_cube(sample_data_path("mesh_C4_synthetic_float.nc")) - >>> from iris.experimental.geovista import cube_to_polydata + .. doctest:: + :skipif: gv is None + + >>> from iris.experimental.geovista import cube_to_polydata Converting a standard 2-dimensional :class:`~iris.cube.Cube` with 1-dimensional coordinates: - >>> print(cube.summary(shorten=True)) - air_temperature / (K) (latitude: 73; longitude: 96) - >>> print(cube_to_polydata(cube)) - PolyData (... - N Cells: 7008 - N Points: 7178 - N Strips: 0 - X Bounds: -9.992e-01, 9.992e-01 - Y Bounds: -9.992e-01, 9.992e-01 - Z Bounds: -1.000e+00, 1.000e+00 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube.summary(shorten=True)) + air_temperature / (K) (latitude: 73; longitude: 96) + >>> print(cube_to_polydata(cube)) + PolyData (... + N Cells: 7008 + N Points: 7178 + N Strips: 0 + X Bounds: -9.992e-01, 9.992e-01 + Y Bounds: -9.992e-01, 9.992e-01 + Z Bounds: -1.000e+00, 1.000e+00 + N Arrays: 4 Configure the conversion by passing additional keyword arguments: - >>> print(cube_to_polydata(cube, radius=2)) - PolyData (... - N Cells: 7008 - N Points: 7178 - N Strips: 0 - X Bounds: -1.998e+00, 1.998e+00 - Y Bounds: -1.998e+00, 1.998e+00 - Z Bounds: -2.000e+00, 2.000e+00 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube_to_polydata(cube, radius=2)) + PolyData (... 
+ N Cells: 7008 + N Points: 7178 + N Strips: 0 + X Bounds: -1.998e+00, 1.998e+00 + Y Bounds: -1.998e+00, 1.998e+00 + Z Bounds: -2.000e+00, 2.000e+00 + N Arrays: 4 Converting a :class:`~iris.cube.Cube` that has a :attr:`~iris.cube.Cube.mesh` describing its horizontal space: - >>> print(cube_mesh.summary(shorten=True)) - synthetic / (1) (-- : 96) - >>> print(cube_to_polydata(cube_mesh)) - PolyData (... - N Cells: 96 - N Points: 98 - N Strips: 0 - X Bounds: -1.000e+00, 1.000e+00 - Y Bounds: -1.000e+00, 1.000e+00 - Z Bounds: -1.000e+00, 1.000e+00 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube_mesh.summary(shorten=True)) + synthetic / (1) (-- : 96) + >>> print(cube_to_polydata(cube_mesh)) + PolyData (... + N Cells: 96 + N Points: 98 + N Strips: 0 + X Bounds: -1.000e+00, 1.000e+00 + Y Bounds: -1.000e+00, 1.000e+00 + Z Bounds: -1.000e+00, 1.000e+00 + N Arrays: 4 Remember to reduce the dimensionality of your :class:`~iris.cube.Cube` to just be the horizontal space: - >>> print(cube_w_time.summary(shorten=True)) - air_temperature / (K) (time: 240; latitude: 37; longitude: 49) - >>> print(cube_to_polydata(cube_w_time[0, :, :])) - PolyData (... - N Cells: 1813 - N Points: 1900 - N Strips: 0 - X Bounds: -6.961e-01, 6.961e-01 - Y Bounds: -9.686e-01, -3.411e-01 - Z Bounds: 2.483e-01, 8.714e-01 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube_w_time.summary(shorten=True)) + air_temperature / (K) (time: 240; latitude: 37; longitude: 49) + >>> print(cube_to_polydata(cube_w_time[0, :, :])) + PolyData (... + N Cells: 1813 + N Points: 1900 + N Strips: 0 + X Bounds: -6.961e-01, 6.961e-01 + Y Bounds: -9.686e-01, -3.411e-01 + Z Bounds: 2.483e-01, 8.714e-01 + N Arrays: 4 """ if cube.mesh: @@ -227,57 +242,69 @@ def extract_unstructured_region(cube, polydata, region, **kwargs): The parameters of :func:`extract_unstructured_region` have been designed with flexibility and reuse in mind. This is demonstrated below. 
- >>> from geovista.geodesic import BBox - >>> from iris.experimental.geovista import cube_to_polydata, extract_unstructured_region - >>> print(cube_w_mesh.shape) - (72, 96) - >>> # The mesh dimension represents the horizontal space of the cube. - >>> print(cube_w_mesh.shape[cube_w_mesh.mesh_dim()]) - 96 - >>> cube_polydata = cube_to_polydata(cube_w_mesh[0, :]) - >>> extracted_cube = extract_unstructured_region( - ... cube=cube_w_mesh, - ... polydata=cube_polydata, - ... region=BBox(lons=[0, 70, 70, 0], lats=[-25, -25, 45, 45]), - ... ) - >>> print(extracted_cube.shape) - (72, 11) + .. doctest:: + :skipif: gv is None + + >>> from geovista.geodesic import BBox + >>> from iris.experimental.geovista import cube_to_polydata, extract_unstructured_region + >>> print(cube_w_mesh.shape) + (72, 96) + >>> # The mesh dimension represents the horizontal space of the cube. + >>> print(cube_w_mesh.shape[cube_w_mesh.mesh_dim()]) + 96 + >>> cube_polydata = cube_to_polydata(cube_w_mesh[0, :]) + >>> extracted_cube = extract_unstructured_region( + ... cube=cube_w_mesh, + ... polydata=cube_polydata, + ... region=BBox(lons=[0, 70, 70, 0], lats=[-25, -25, 45, 45]), + ... ) + >>> print(extracted_cube.shape) + (72, 11) Now reuse the same `cube` and `polydata` to extract a different region: - >>> new_region = BBox(lons=[0, 35, 35, 0], lats=[-25, -25, 45, 45]) - >>> extracted_cube = extract_unstructured_region( - ... cube=cube_w_mesh, - ... polydata=cube_polydata, - ... region=new_region, - ... ) - >>> print(extracted_cube.shape) - (72, 6) + .. doctest:: + :skipif: gv is None + + >>> new_region = BBox(lons=[0, 35, 35, 0], lats=[-25, -25, 45, 45]) + >>> extracted_cube = extract_unstructured_region( + ... cube=cube_w_mesh, + ... polydata=cube_polydata, + ... region=new_region, + ... 
) + >>> print(extracted_cube.shape) + (72, 6) Now apply the same region extraction to a different `cube` that has the same horizontal shape: - >>> print(other_cube_w_mesh.shape) - (20, 96) - >>> extracted_cube = extract_unstructured_region( - ... cube=other_cube_w_mesh, - ... polydata=cube_polydata, - ... region=new_region, - ... ) - >>> print(extracted_cube.shape) - (20, 6) + .. doctest:: + :skipif: gv is None + + >>> print(other_cube_w_mesh.shape) + (20, 96) + >>> extracted_cube = extract_unstructured_region( + ... cube=other_cube_w_mesh, + ... polydata=cube_polydata, + ... region=new_region, + ... ) + >>> print(extracted_cube.shape) + (20, 6) Arbitrary keywords can be passed down to :meth:`geovista.geodesic.BBox.enclosed` (``outside`` in this example): - >>> extracted_cube = extract_unstructured_region( - ... cube=other_cube_w_mesh, - ... polydata=cube_polydata, - ... region=new_region, - ... outside=True, - ... ) - >>> print(extracted_cube.shape) - (20, 90) + .. doctest:: + :skipif: gv is None + + >>> extracted_cube = extract_unstructured_region( + ... cube=other_cube_w_mesh, + ... polydata=cube_polydata, + ... region=new_region, + ... outside=True, + ... ) + >>> print(extracted_cube.shape) + (20, 90) """ if cube.mesh: diff --git a/lib/iris/tests/integration/experimental/geovista/__init__.py b/lib/iris/tests/integration/experimental/geovista/__init__.py index 6a56e09db4..4ac6f3b36e 100644 --- a/lib/iris/tests/integration/experimental/geovista/__init__.py +++ b/lib/iris/tests/integration/experimental/geovista/__init__.py @@ -3,3 +3,11 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
"""Integration tests for the :mod:`iris.experimental.geovista` module.""" + +import pytest + +# Skip this whole package if geovista (and by extension pyvista) is not available: +pytest.importorskip( + "geovista", + reason="Skipping geovista integration tests as `geovista` is not installed", +) diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index f51a531721..59c1b40baf 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -12,6 +12,7 @@ import subprocess from typing import List, Tuple +from packaging.version import Version import pytest import iris @@ -70,8 +71,9 @@ def test_python_versions(): Test is designed to fail whenever Iris' supported Python versions are updated, insisting that versions are updated EVERYWHERE in-sync. """ - latest_supported = "3.13" - all_supported = ["3.11", "3.12", latest_supported] + all_supported = ["3.12", "3.13", "3.14"] + _parsed = [Version(v) for v in all_supported] + latest_supported = str(max(_parsed)) root_dir = Path(__file__).parents[3] workflows_dir = root_dir / ".github" / "workflows" diff --git a/lib/iris/tests/unit/experimental/geovista/__init__.py b/lib/iris/tests/unit/experimental/geovista/__init__.py index b2024ce97d..731c8736ad 100644 --- a/lib/iris/tests/unit/experimental/geovista/__init__.py +++ b/lib/iris/tests/unit/experimental/geovista/__init__.py @@ -3,3 +3,10 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.experimental.geovista` module.""" + +import pytest + +# Skip this whole package if geovista (and by extension pyvista) is not available: +pytest.importorskip( + "geovista", reason="Skipping geovista unit tests as `geovista` is not installed" +) diff --git a/noxfile.py b/noxfile.py index 415e4fc3d5..4d733fa2d4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -15,7 +15,7 @@ nox.options.reuse_existing_virtualenvs = True #: Python versions we can run sessions under -_PY_VERSIONS_ALL = ["3.11", "3.12", "3.13"] +_PY_VERSIONS_ALL = ["3.12", "3.13", "3.14"] _PY_VERSION_LATEST = _PY_VERSIONS_ALL[-1] #: One specific python version for docs builds diff --git a/pyproject.toml b/pyproject.toml index e2b87467a5..fb9cdb3983 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,9 +21,9 @@ classifiers = [ "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Atmospheric Science", @@ -50,7 +50,7 @@ keywords = [ license = "BSD-3-Clause" license-files = ["LICENSE"] name = "scitools-iris" -requires-python = ">=3.11" +requires-python = ">=3.12" [project.urls] Code = "https://github.com/SciTools/iris" diff --git a/requirements/iris.yml b/requirements/iris.yml index 331a25f10d..053725666f 120000 --- a/requirements/iris.yml +++ b/requirements/iris.yml @@ -1 +1 @@ -py312.yml \ No newline at end of file +py314.yml \ No newline at end of file diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 938dcba4ca..a74f3820d1 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -21,7 +21,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.2-hb03c661_0.conda#ada39f5726bc5481e9dce293709dfabc +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 @@ -74,7 +74,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172b https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.53-h421ea60_0.conda#00d4e66b1f746cb14944cad23fffb405 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 
@@ -105,12 +105,12 @@ https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f4 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_5.conda#82954a6f42e3fba59628741dca105c98 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_7.conda#3a29a37b34dbd06672bdccb63829ec14 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h46dd2a8_20.conda#df81fd57eacf341588d728c97920e86d -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.1-hf4e2dac_1.conda#b1f35e70f047918b49fb4b181e40300e +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda#da5be73701eecd0e8454423fd6ffcf30 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.1-h9d88235_1.conda#cd5a90476766d53e901500df9215e927 https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda#3fdd8d99683da9fe279c2f4cecd1e048 @@ -132,7 +132,7 @@ https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ec https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff 
https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda#000e85703f0fd9594c81710dd5066471 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 @@ -142,7 +142,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd https://conda.anaconda.org/conda-forge/linux-64/python-3.12.12-hd63d673_1_cpython.conda#5c00c8cea14ee8d02941cab9121dce41 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.1-h04a0ce9_1.conda#941ee610ebf7a8047140831091dcb1f7 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d @@ -166,7 +166,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py312h68e6be4_0.con 
https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.2-pyhd8ed1ab_0.conda#7e7cf4d6c2be6991e6ae2b3f4331701c +https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda#63e20cf7b7460019b423fc06abb96c60 https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea @@ -219,7 +219,7 @@ https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e -https://conda.anaconda.org/conda-forge/noarch/tomli-2.3.0-pyhcf101f3_0.conda#d2732eb636c264dc9aa4cbee404b1a53 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py312h4c3975b_0.conda#e03a4bf52d2170d64c816b2a52972097 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d @@ -260,14 +260,14 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87 https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda#372a62464d47d9e966b630ffae3abe73 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.0-py312h33ff503_0.conda#4ba148299453b88d8fa9b6351eaa0df8 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.1-py312h33ff503_0.conda#ba7e6cb06c372eae6f164623e6e06db8 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh8b19718_0.conda#c55515ca43c6444d2572e0f0d93cb6b9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py312h9b6a7d9_2.conda#573b9a879a3a42990f9c51d7376dce6b https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.0-pyhd8ed1ab_0.conda#c9a9b6e144b880308f5eedc905fe503d +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.1-pyhd8ed1ab_0.conda#6b0259cea8ffa6b66b35bae0ca01c447 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py312h8a5da7c_0.conda#6a3fd177315aaafd4366930d440e4430 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py312h5d8c7f2_0.conda#7ee12bbdb2e989618c080c7c611048db @@ -290,7 +290,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.conda#1af24b0eb99b1158c0d8c64a4d6c5d79 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py312h4f23490_2.conda#ab856c36638ab1acf90e70349c525cf9 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.3-py312h54fa4ab_2.conda#e82683871cbc4bb257b7694f31a91327 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py312h54fa4ab_0.conda#9faccce05511d05f22001ecc2dfe78de https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py312h383787d_2.conda#69e400d3deca12ee7afd4b73a5596905 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c @@ -303,11 +303,11 @@ https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.15-pyhd8ed1ab_0.conda#25f954b7dae6dd7b0dc004dab74f1ce9 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py312he3d6523_0.conda#b8dc157bbbb69c1407478feede8b7b42 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.3-nompi_py312hf6400b3_100.conda#ed7ab4073fe4c48d0f9d3a80b6a17f74 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py312h25f8dc5_101.conda#b1c45859b7cfc04b81362fe7f0b75fa2 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index fdd605632c..9985e5375f 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -21,7 +21,7 @@ https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.2-hb03c661_0.conda#ada39f5726bc5481e9dce293709dfabc +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 @@ -75,7 +75,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172b https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.53-h421ea60_0.conda#00d4e66b1f746cb14944cad23fffb405 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -106,12 +106,12 @@ https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f4 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_5.conda#82954a6f42e3fba59628741dca105c98 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_7.conda#3a29a37b34dbd06672bdccb63829ec14 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 
https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h46dd2a8_20.conda#df81fd57eacf341588d728c97920e86d -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.1-hf4e2dac_1.conda#b1f35e70f047918b49fb4b181e40300e +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda#da5be73701eecd0e8454423fd6ffcf30 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.1-h9d88235_1.conda#cd5a90476766d53e901500df9215e927 https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda#3fdd8d99683da9fe279c2f4cecd1e048 @@ -133,7 +133,7 @@ https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ec https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda#000e85703f0fd9594c81710dd5066471 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 @@ -143,7 +143,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda 
https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd https://conda.anaconda.org/conda-forge/linux-64/python-3.13.11-hc97d973_100_cp313.conda#0cbb0010f1d8ecb64a428a8d4214609e -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.1-h04a0ce9_1.conda#941ee610ebf7a8047140831091dcb1f7 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d @@ -167,7 +167,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py313hc80a56d_0.con https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.2-pyhd8ed1ab_0.conda#7e7cf4d6c2be6991e6ae2b3f4331701c +https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py313h6b9daa2_0.conda#3a0be7abedcbc2aee92ea228efea8eba https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea @@ -221,7 +221,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_ https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e -https://conda.anaconda.org/conda-forge/noarch/tomli-2.3.0-pyhcf101f3_0.conda#d2732eb636c264dc9aa4cbee404b1a53 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py313h07c4f96_0.conda#82da2dcf1ea3e298f2557b50459809e0 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d @@ -260,14 +260,14 @@ https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87 https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda#372a62464d47d9e966b630ffae3abe73 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.0-py313hf6604e3_0.conda#07963f5dbb5351201035e1f8815ed8da +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.1-py313hf6604e3_0.conda#7d51e3bef1a4b00bde1861d85ba2f874 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 
https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py313h77f6078_2.conda#42d11c7d1ac21ae2085f58353641e71c https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.0-pyhd8ed1ab_0.conda#c9a9b6e144b880308f5eedc905fe503d +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.1-pyhd8ed1ab_0.conda#6b0259cea8ffa6b66b35bae0ca01c447 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py313h3dea7bd_0.conda#e9415b0f7b43d2e32a3f24fd889c9e70 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py313hd6074c6_0.conda#684fb9c78db5024b939a1ed0a107f464 @@ -289,7 +289,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py313h08cd8bf_2.con https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py313h29aa505_2.conda#e3a598d20bf2fa7b03a771e9e4471be9 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py313h29aa505_2.conda#60f5d1c039da10fe89a530cc93ea50ac -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.3-py313h4b8bb8b_2.conda#0be9bd58abfb3e8f97260bd0176d5331 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py313h4b8bb8b_0.conda#6cf603754566f66ff2be27f7f038b83a https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py313had47c43_2.conda#6e550dd748e9ac9b2925411684e076a1 
https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c @@ -303,11 +303,11 @@ https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.15-pyhd8ed1ab_0.conda#25f954b7dae6dd7b0dc004dab74f1ce9 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py313h683a580_0.conda#ffe67570e1a9192d2f4c189b27f75f89 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.3-nompi_py313hfae5b86_100.conda#d5247c4087289475a8c324bbe03a71ce +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py313h16051e2_101.conda#2e949a2692351a2f83077d46c3e9835e https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock new file mode 100644 index 0000000000..8da0ab061e --- /dev/null +++ b/requirements/locks/py314-linux-64.lock @@ -0,0 +1,283 @@ +# Generated by conda-lock. 
+# platform: linux-64 +# input_hash: 62fdb838057aebf9fb2eeac194a9cb32933e3e85f114e5b0dc5bdd6f4a969910 +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2#bbf6f174dcd3254e19a2f5d2295ce808 +https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda#0539938c55b6b1a59b560e843ad864a4 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 +https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 +https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda#26c46f90d0e727e95c6c9498a33a09f3 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 
+https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.16-hb03c661_0.conda#f9f81ea472684d75b9dd8d0b328cf655 +https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f +https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda#b38117a3c920364aff79f870c984b4a3 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda#35f29eec58405aaf55e01cb470d8c26a +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda#1a580f7796c7bf6393fddb8bbbde58dc +https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb9d3cd8_0.conda#c7e925f37e3b40d893459e625f6a53f1 
+https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 +https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda#68f68355000ec3f1d6f26ea13e8f525f +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 +https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h280c20c_1002.conda#45161d96307e3a447cc3eb5896cf6f8c +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.0-h26f9b46_0.conda#9ee58d5c534af06558933af3c845a780 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda#b2895afaf55bf96a8c8282a2e47a5de0 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda#1dafce8548e38671bea82e3f5c6ce22f +https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2025.1-hb03c661_0.conda#aa8d21be4b461ce612d8f5fb791decae +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.3-hb47aa4a_0.conda#607e13a8caac17f9a664bcab5302ce06 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda#a77f85f77be52ff59391544bfe73390a +https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda#4d4efd0645cd556fab54617c4ad477ef 
+https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3bf7b9fd5a7136126e0234db4b87c8b6 +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.conda#2cd94587f3a401ae05e03a6caf09539d +https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda#186a18e3ba246eccfc7cff00cd19a870 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda#9344155d33912347b37f0ae6c410a835 +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.4-h3f801dc_0.conda#01ba04e414e47f95c03d6ddd81fd37be +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.2.0-hb03c661_1.conda#366b40a69f0ad6072561c1d09301c886 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.conda#4ffbb341c8b616aa2494b6afb26a0c5f +https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 +https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae +https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 
+https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 +https://conda.anaconda.org/conda-forge/linux-64/muparser-2.3.5-h5888daf_0.conda#ab3e3db511033340e75e7002e80ce8c0 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda#7a3bff861a6583f1889021facefc08b1 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda#c01af13bdc553d1a8fbfff6e8db075f0 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda#d7d95fc8287ea7bf33e0e7116d2b95ec +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda#98b6c9dc80eb87b2519b97bcf7e578dd +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_ha0e22de_103.conda#86bc20552bf46075e3d92b67f089172d +https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.2-hceb46e0_1.conda#40feea2979654ed579f1cda7c63ccb94 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13eeac0b5c8e5b8ab496e6c4ddd829 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 
+https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 +https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_7.conda#3a29a37b34dbd06672bdccb63829ec14 +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 +https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h46dd2a8_20.conda#df81fd57eacf341588d728c97920e86d +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda#da5be73701eecd0e8454423fd6ffcf30 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.1-h9d88235_1.conda#cd5a90476766d53e901500df9215e927 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda#3fdd8d99683da9fe279c2f4cecd1e048 +https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.10-h05a5f5f_0.conda#da01bb40572e689bd1535a5cee6b1d68 +https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 +https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 +https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda#66a1db55ecdb7377d2b91f54cd56eafa +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda#db038ce880f100acc74dba10302b5630 
+https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 +https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 +https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.1-ha770c72_0.conda#f4084e4e6577797150f9b04a4560ceb0 +https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c +https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd +https://conda.anaconda.org/conda-forge/linux-64/python-3.14.2-h32b2ec7_100_cp314.conda#1cef1236a05c3a98f68c33ae9425f656 
+https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e +https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda#8c4061f499edec6b8ac7000f6d586829 +https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a +https://conda.anaconda.org/conda-forge/noarch/attrs-25.4.0-pyhcf101f3_1.conda#537296d57ea995666c68c821b00e360b +https://conda.anaconda.org/conda-forge/noarch/backports.zstd-1.3.0-py314h680f03e_0.conda#a2ac7763a9ac75055b68f325d3255265 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.2.0-py314h3de4e8d_1.conda#8910d2c46f7e7b519129f486e0fe927a +https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda#eacc711330cd46939f66cd401ff9c44b +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda#381bd45fb7aa032691f3063aff47e3a1 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.4-pyhd8ed1ab_0.conda#a22d1fd9bf98827e280a02875d9a007a +https://conda.anaconda.org/conda-forge/noarch/click-8.3.1-pyh8f84b5b_1.conda#ea8a6c3256897cc31263de9f455e25d9 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda#61b8078a0905b12529abc622406cb62c +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 
+https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py314h1807b08_0.conda#866fd3d25b767bccb4adc8476f4035cd +https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 +https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc +https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_104.conda#0857f4d157820dcd5625f61fdfefb780 +https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac +https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda#9614359868482abba1bd15ce465e3c42 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.2-pyhd8ed1ab_0.conda#895f6625dd8a246fece9279fcc12c1de +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py314h97ea11e_2.conda#57f1ce4f7ba6bcd460be8f83c8f04c69 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.8.5-gpl_hc2c16d8_100.conda#5fdaa8b856683a5598459dead3976578 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-5_h0358290_openblas.conda#6636a2b6f1a87572df2970d3ebc87cc0 
+https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a +https://conda.anaconda.org/conda-forge/linux-64/libglx-devel-1.7.0-ha4b6fd6_2.conda#27ac5ae872a21375d980bd4a6f99edf3 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/noarch/markupsafe-3.0.3-pyh7db6752_0.conda#fab1be106a50e20f10fe5228fd1d1651 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py314h9891dd4_1.conda#c6752022dcdbf4b9ef94163de1ab7f03 +https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 +https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda#58335b26c38bf4a20f399384c33cbcf9 +https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py314h8ec4b1a_0.conda#f9b6a8fbb8dcb840a0c1c052dc5092e4 +https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh145f28c_0.conda#bf47878473e5ab9fdb4115735230e191 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-h99ae125_0.conda#8bbc19a6e87fbe8b97796e9a42a47a30 +https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.1-py314h0f05182_0.conda#28af9719e28f0054e9aee68153899293 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef 
+https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.1-pyhcf101f3_0.conda#d837065e4e0de4962c3462079c23f969 +https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py314he82b845_1.conda#21dce7c80bbdb9785633011ad348e530 +https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 +https://conda.anaconda.org/conda-forge/noarch/pyyaml-6.0.3-pyh7db6752_0.conda#b12f41c0d7fb5ab81709fcc86579688f +https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda#4de79c071274a53dcaf2a8c749d1499e +https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb +https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e +https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b +https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 
+https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py314h5bd0f2a_0.conda#e35f08043f54d26a1be93fdbf90d30c3 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.0-py314h5bd0f2a_1.conda#58e2ee530005067c5db23f33c6ab43d2 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda#5e2eb9bf77394fc2e5918beefec9f9ab +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e +https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 +https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a01c169f0ab0f91b26e77a3301fbfe4 +https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py314h4a8dc5f_1.conda#cf45f4278afd6f4e6d03eda0f435d527 +https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 
+https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.1-py314h67df5f8_0.conda#a4525263f2fa741bffa4af1e40aec245 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_1.conda#51b0391b0ce96be49b1174e9a3e4a279 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee +https://conda.anaconda.org/conda-forge/noarch/fonttools-4.61.1-pyh7db6752_0.conda#d5da976e963e70364b9e3ff270842b9f +https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d +https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 +https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.1-py314h2b28147_0.conda#9536e29f857e5d0565e92fd1b54de16a +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 +https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py314h24aeaa0_2.conda#b46a7e6a2b8c064488576c3e42d85df0 
+https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.1-pyhd8ed1ab_0.conda#6b0259cea8ffa6b66b35bae0ca01c447 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f +https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 +https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py314hc02f841_0.conda#02e3559b6260b408fc1668c1bd26df10 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py314h9891dd4_3.conda#72d57382d0f63c20a16b1d514fcde6ff +https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.12.0-pyhcf101f3_1.conda#cc7b371edd70319942c802c7d828a428 +https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.conda#6ce4ad29c3ae0b74df813409433457ff +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py314hc02f841_2.conda#55ac6d85f5dd8ec5e9919e7762fcb31a +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 
+https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py314ha0b5721_2.conda#fe3a5c8be07a7b82058bdeb39d33d93b +https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py314hc02f841_2.conda#5be92985870940eac3f3b8cda57002cc +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py314hf07bd8e_0.conda#2d82ddc8e7a74d27382410462df062a2 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py314hbe3edd8_2.conda#5963e6ee81772d450a35e6bc95522761 +https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py314h9891dd4_6.conda#28303a78c48916ab07b95ffdbffdfd6c +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 +https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py314hc02f841_1.conda#258046835c9f2ecef87c1f11f387f72a +https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1.conda#613cea9275c4773d0b53c879838ac0ad +https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 
+https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py314h1194b4b_0.conda#b8683e6068099b69c10dbfcf7204203f +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py314h4ae7121_101.conda#31395db7aeae4be8307bcd81f1e58e53 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py314h5bd0f2a_2.conda#78071b0c9e55392e9ec1b22b18e80cdf +https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py314ha1f92a4_0.conda#15b1e205270451c078c79d0480438e8e +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py314ha0b5721_1.conda#fe89c5fa422f215b0d75046ecd4667de +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 +https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf +https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e 
+https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.0-h8b86629_0.conda#39dcf8bb370df27fd81dbe41d4cb605e +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/py311.yml b/requirements/py314.yml similarity index 91% rename from requirements/py311.yml rename to requirements/py314.yml index a128b77eee..d1fdd8938f 100644 --- a/requirements/py311.yml +++ b/requirements/py314.yml @@ -4,7 +4,7 @@ channels: - conda-forge dependencies: - - python =3.11 + - python =3.14 # Setup dependencies. - setuptools >=77.0.3 @@ -26,7 +26,7 @@ dependencies: # Optional dependencies. 
- esmpy >=7.0 - - geovista +# - geovista # Temporarily removed until pyvista is py3.14 compatible; see #6902 - graphviz - iris-sample-data >=2.4.0 - mo_pack From fe7a4d799575ad162c04fa02eaed4078dc97e4f7 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Sat, 24 Jan 2026 09:46:53 +0000 Subject: [PATCH 02/77] DOCS: Add gallery carousel to docs homepage (#6884) * add sphinx carousel * removed flake8. We have ruff instead. * added comment about pin * added whatsnew * reinstate flake8 * remove commented out code * added copyright notice for GeoVista * ensure make-noplot works --- docs/src/_static/theme_override.css | 4 + docs/src/conf.py | 139 +++++++++++++++++++++++++++- docs/src/index.rst | 4 + docs/src/whatsnew/latest.rst | 2 +- requirements/py312.yml | 3 +- requirements/py313.yml | 3 +- requirements/py314.yml | 3 +- 7 files changed, 152 insertions(+), 6 deletions(-) diff --git a/docs/src/_static/theme_override.css b/docs/src/_static/theme_override.css index 355119f8a5..be9715ff2c 100644 --- a/docs/src/_static/theme_override.css +++ b/docs/src/_static/theme_override.css @@ -26,3 +26,7 @@ ul.squarelist { text-indent: 1em; padding-left: 5em; } + +.center { + text-align: center; +} \ No newline at end of file diff --git a/docs/src/conf.py b/docs/src/conf.py index 7d4ad5d9e1..4669b423fd 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -19,6 +19,8 @@ """Config for sphinx.""" +import ast +import contextlib import datetime import importlib from importlib.metadata import version as get_version @@ -30,6 +32,7 @@ from subprocess import run import sys from tempfile import gettempdir +import textwrap from urllib.parse import quote import warnings @@ -426,12 +429,14 @@ def reset_modules(gallery_conf, fname): ) sys.path.insert(0, str(reset_modules_dir)) +GALLERY_CODE: str = "../gallery_code" +GALLERY_DIRS: str = "generated/gallery" sphinx_gallery_conf = { # path to your example scripts - "examples_dirs": ["../gallery_code"], + 
"examples_dirs": GALLERY_CODE, # path to where to save gallery generated output - "gallery_dirs": ["generated/gallery"], + "gallery_dirs": GALLERY_DIRS, # filename pattern for the files in the gallery "filename_pattern": "/plot_", # filename pattern to ignore in the gallery @@ -455,3 +460,133 @@ def reset_modules(gallery_conf, fname): "section": "Section %s", "table": "Table %s", } + +# ============================================================================ +# | Copyright GeoVista | +# | Code from this point unto the termination banner is copyright GeoVista. | +# | Minimal code changes made to make it generic. | +# | | +# | License details can be found at: | +# | https://github.com/bjlittle/geovista/blob/main/LICENSE | +# ============================================================================ + +# Source: https://github.com/bjlittle/geovista/blob/main/docs/src/conf.py + + +def _bool_eval(*, arg: str | bool) -> bool: + """Sanitise to a boolean only configuration.""" + if isinstance(arg, str): + with contextlib.suppress(TypeError): + arg = ast.literal_eval(arg.capitalize()) + + return bool(arg) + + +def generate_carousel( + app: Sphinx, + fname: Path, + ncards: int | None = None, + margin: int | None = None, + width: int | None = None, +) -> None: + """Generate and write the gallery carousel RST file.""" + if ncards is None: + ncards = 3 + + if margin is None: + margin = 4 + + if width is None: + width = "25%" + + base = Path(app.srcdir, *GALLERY_DIRS.split("/")) + cards_by_link = {} + + card = r""".. 
card:: + :img-background: {image} + :link: {link} + :link-type: ref + :width: {width} + :margin: {margin} + :class-card: align-self-center +""" + + # TODO @bjlittle: use Path.walk when python >=3.12 + for root, _, files in os.walk(str(base)): + root = Path(root) # noqa: PLW2901 + if root.name == "images": + root_relative = root.relative_to(app.srcdir) + link_relative = root.parent.relative_to(app.srcdir) + + for file in files: + path = Path(file) + if path.suffix == ".png": + # generate the card "img-background" filename + image = root_relative / path + + # generate the card "link" reference + # remove numeric gallery image index e.g., "001" + parts = path.stem.split("_")[:-1] + link = parts[:2] + list(link_relative.parts) + parts[2:] + link = f"{'_'.join(link)}.py" + + # needed in case a gallery filename has mixed case + link = link.lower() + + kwargs = { + "image": image, + "link": link, + "width": width, + "margin": margin, + } + + cards_by_link[link] = card.format(**kwargs) + + # sort the cards by their link + cards = [cards_by_link[link] for link in sorted(cards_by_link.keys())] + cards = textwrap.indent("\n".join(cards), prefix=" " * 4) + + # now, create the card carousel + carousel = f""".. card-carousel:: {ncards} + +{cards} + +.. 
rst-class:: center + + :fa:`images` Gallery Carousel + +""" + + # finally, write the rst for the gallery carousel + Path(app.srcdir, fname).write_text(carousel) + + +def gallery_carousel( + app: Sphinx, + env: BuildEnvironment, # noqa: ARG001 + docnames: list[str], # noqa: ARG001 +) -> None: + """Create the gallery carousel.""" + # create empty or truncate existing file + fname = Path(app.srcdir, "gallery_carousel.txt") + + with fname.open("w"): + pass + + if _bool_eval(arg=app.builder.config.plot_gallery): + # only generate the carousel if we have a gallery + generate_carousel(app, fname) + + +# ============================================================================ +# | END GeoVista copyright | +# ============================================================================ + + +def setup(app: Sphinx) -> None: + """Configure sphinx application.""" + # we require the output of this extension + app.setup_extension("sphinx_gallery.gen_gallery") + + # register callback to generate gallery carousel + app.connect("env-before-read-docs", gallery_carousel) diff --git a/docs/src/index.rst b/docs/src/index.rst index 139e54cee0..e771239503 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -15,6 +15,7 @@ representations become unwieldy and inefficient. For more information see :ref:`why_iris`. + .. grid:: 3 .. grid-item-card:: @@ -128,6 +129,9 @@ For more information see :ref:`why_iris`. Icons made by FreePik from `Flaticon `_ +.. include:: gallery_carousel.txt + + .. _iris_support: Support diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 3fbd65c1ae..2ec0088381 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -81,7 +81,7 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ -#. N/A +#. `@tkknight`_ added a gallery carousel to the documentation homepage. 
(:pull:`6884`) 💼 Internal diff --git a/requirements/py312.yml b/requirements/py312.yml index fbb1e8aea5..454bfd7e79 100644 --- a/requirements/py312.yml +++ b/requirements/py312.yml @@ -56,7 +56,8 @@ dependencies: - sphinx-copybutton - sphinx-gallery >=0.11.0 - sphinx-design - - pydata-sphinx-theme >=0.13.0 + # Pinned reason: https://github.com/SciTools/iris/issues/6885 + - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/py313.yml b/requirements/py313.yml index a76d819e75..32b606c02a 100644 --- a/requirements/py313.yml +++ b/requirements/py313.yml @@ -56,7 +56,8 @@ dependencies: - sphinx-copybutton - sphinx-gallery >=0.11.0 - sphinx-design - - pydata-sphinx-theme >=0.13.0 + # Pinned reason: https://github.com/SciTools/iris/issues/6885 + - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/py314.yml b/requirements/py314.yml index d1fdd8938f..706f697e20 100644 --- a/requirements/py314.yml +++ b/requirements/py314.yml @@ -56,7 +56,8 @@ dependencies: - sphinx-copybutton - sphinx-gallery >=0.11.0 - sphinx-design - - pydata-sphinx-theme >=0.13.0 + # Pinned reason: https://github.com/SciTools/iris/issues/6885 + - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 # Temporary minimum pins. 
# See https://github.com/SciTools/iris/pull/5051 From 5e73fb2a3784f460d3ed7d14759ca87f6a5102cb Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Mon, 26 Jan 2026 11:21:37 +0000 Subject: [PATCH 03/77] DOCS: Smaller cards on homepage (#6886) * used smaller cards on homepage * added whatsnew * corrected api link * merge tidy --- docs/src/_static/theme_override.css | 25 +++++ docs/src/index.rst | 141 ++++++++++------------------ docs/src/whatsnew/latest.rst | 3 + 3 files changed, 78 insertions(+), 91 deletions(-) diff --git a/docs/src/_static/theme_override.css b/docs/src/_static/theme_override.css index be9715ff2c..1d4d7bcb79 100644 --- a/docs/src/_static/theme_override.css +++ b/docs/src/_static/theme_override.css @@ -27,6 +27,31 @@ ul.squarelist { padding-left: 5em; } +/* custom css for the cards on the homepage */ +.sd-card-img-top { + width: 15% !important; + position: absolute !important; + padding-left: 10px; + min-width: 50px; + top: 50%; + transform: translateY(-50%); +} + +.sd-card-img { + height: auto; +} + +.custom-title { + font-weight: bold; + color: #1B8FB7 !important; + text-align: left; +} + +.custom-body { + text-align: left; + margin-left: max(45px, 15%); +} + .center { text-align: center; } \ No newline at end of file diff --git a/docs/src/index.rst b/docs/src/index.rst index e771239503..2854e2e214 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -16,114 +16,73 @@ representations become unwieldy and inefficient. For more information see :ref:`why_iris`. -.. grid:: 3 - - .. grid-item-card:: - :text-align: center +.. grid:: 1 1 2 2 + :gutter: 2 + + .. grid-item-card:: Getting Started + :class-title: custom-title + :class-body: custom-body + :link: getting_started_index + :link-type: ref :img-top: _static/icon_shuttle.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Information on Iris, how to install and a gallery of examples that - create plots. 
+ :class-img-top: dark-light + :class-card: sd-rounded-3 - +++ - .. button-ref:: getting_started_index - :ref-type: ref - :color: primary - :outline: - :expand: + Installing and gallery examples. - Getting Started - - .. grid-item-card:: - :text-align: center + .. grid-item-card:: User Guide + :class-title: custom-title + :class-body: custom-body + :link: getting_started_index + :link-type: ref :img-top: _static/icon_instructions.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Learn how to use Iris, including loading, navigating, saving, - plotting and more. + :class-img-top: dark-light + :class-card: sd-rounded-3 - +++ - .. button-ref:: user_guide_index - :ref-type: ref - :color: primary - :outline: - :expand: + Learn how to use Iris. - User Guide - - .. grid-item-card:: - :text-align: center + .. grid-item-card:: Developers Guide + :class-title: custom-title + :class-body: custom-body + :link: development_where_to_start + :link-type: ref :img-top: _static/icon_development.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Information on how you can contribute to Iris as a developer. - - +++ - .. button-ref:: development_where_to_start - :ref-type: ref - :color: primary - :outline: - :expand: - - Developers Guide + :class-img-top: dark-light + :class-card: sd-rounded-3 + Contribute to Iris as a developer. -.. grid:: 3 - - .. grid-item-card:: - :text-align: center + .. grid-item-card:: Iris API + :class-title: custom-title + :class-body: custom-body + :link: generated/api/iris.html :img-top: _static/icon_api.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Browse full Iris functionality by module. + :class-img-top: dark-light + :class-card: sd-rounded-3 - +++ - .. button-ref:: generated/api/iris - :ref-type: doc - :color: primary - :outline: - :expand: + Iris functionality by module. - Iris API - - .. grid-item-card:: - :text-align: center + .. 
grid-item-card:: What's New + :class-title: custom-title + :class-body: custom-body + :link: iris_whatsnew + :link-type: ref :img-top: _static/icon_new_product.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Find out what has recently changed in Iris. + :class-img-top: dark-light + :class-card: sd-rounded-3 - +++ - .. button-ref:: iris_whatsnew - :ref-type: ref - :color: primary - :outline: - :expand: + Recent changes in Iris. - What's New - - .. grid-item-card:: - :text-align: center + .. grid-item-card:: Voted Issues + :class-title: custom-title + :class-body: custom-body + :link: voted_issues_top + :link-type: ref :img-top: _static/icon_thumb.png - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Raise the profile of issues by voting on them. - - +++ - .. button-ref:: voted_issues_top - :ref-type: ref - :color: primary - :outline: - :expand: + :class-img-top: dark-light + :class-card: sd-rounded-3 - Voted Issues + Raise the profile of issues by voting. Icons made by FreePik from `Flaticon `_ diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 2ec0088381..730880b368 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -81,6 +81,9 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ +#. `@tkknight`_ reduced the space used on the documentation homepage by the quick + link cards to allow for easier reading. (:pull:`6886`) + #. `@tkknight`_ added a gallery carousel to the documentation homepage. 
(:pull:`6884`) From 2ef982bdbcbbfa6259a2ea28b6a9eceacb329b24 Mon Sep 17 00:00:00 2001 From: tkknight <2108488+tkknight@users.noreply.github.com> Date: Mon, 26 Jan 2026 16:30:28 +0000 Subject: [PATCH 04/77] enable fail_on_warning for readthedocs (#6909) --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index d82bd513ca..6b0c699f21 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -23,7 +23,7 @@ conda: sphinx: configuration: docs/src/conf.py - fail_on_warning: false + fail_on_warning: true python: install: From aebae6d1b951a861592d0571f82c380ed63c4d46 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 29 Jan 2026 12:43:55 +0000 Subject: [PATCH 05/77] Bump the gha group across 1 directory with 3 updates (#6912) Bumps the gha group with 3 updates in the / directory: [scitools/workflows/.github/workflows/ci-manifest.yml](https://github.com/scitools/workflows), [scitools/workflows/.github/workflows/ci-template-check.yml](https://github.com/scitools/workflows) and [scitools/workflows/.github/workflows/refresh-lockfiles.yml](https://github.com/scitools/workflows). 
Updates `scitools/workflows/.github/workflows/ci-manifest.yml` from 2026.01.0 to 2026.01.1 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.01.0...2026.01.1) Updates `scitools/workflows/.github/workflows/ci-template-check.yml` from 2026.01.0 to 2026.01.1 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.01.0...2026.01.1) Updates `scitools/workflows/.github/workflows/refresh-lockfiles.yml` from 2026.01.0 to 2026.01.1 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.01.0...2026.01.1) --- updated-dependencies: - dependency-name: scitools/workflows/.github/workflows/ci-manifest.yml dependency-version: 2026.01.1 dependency-type: direct:production dependency-group: gha - dependency-name: scitools/workflows/.github/workflows/ci-template-check.yml dependency-version: 2026.01.1 dependency-type: direct:production dependency-group: gha - dependency-name: scitools/workflows/.github/workflows/refresh-lockfiles.yml dependency-version: 2026.01.1 dependency-type: direct:production dependency-group: gha ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/ci-template-check.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 6450a2d13a..9596af3b63 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.01.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.01.1 diff --git a/.github/workflows/ci-template-check.yml b/.github/workflows/ci-template-check.yml index cc9f85ae44..6f97bdd1d2 100644 --- a/.github/workflows/ci-template-check.yml +++ b/.github/workflows/ci-template-check.yml @@ -10,7 +10,7 @@ on: jobs: prompt-share: - uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.01.0 + uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.01.1 secrets: inherit with: pr_number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index e7d743311c..e4b500fd26 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.01.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.01.1 secrets: inherit From ba57f5828a707fa5c9eb6a56765000d7e05fcfed Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Thu, 29 Jan 2026 16:52:36 +0000 Subject: [PATCH 06/77] quick fix for rules (#6897) --- lib/iris/tests/unit/fileformats/rules/test__make_cube.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git 
a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py index c14a5df705..75d68cdbee 100644 --- a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py +++ b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.rules._make_cube`.""" -from unittest import mock - import numpy as np import pytest @@ -13,7 +11,7 @@ class Test: - def test_invalid_units(self): + def test_invalid_units(self, mocker): # Mock converter() function that returns an invalid # units string amongst the collection of other elements. factories = None @@ -36,10 +34,10 @@ def test_invalid_units(self): dim_coords_and_dims, aux_coords_and_dims, ) - converter = mock.Mock(return_value=metadata) + converter = mocker.Mock(return_value=metadata) data = np.arange(3.0) - field = mock.Mock( + field = mocker.Mock( core_data=lambda: data, bmdi=9999.0, realised_dtype=data.dtype ) From 08479dfb54565da11439ebe8bb85180ed62ca3dc Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 3 Feb 2026 09:44:37 +0000 Subject: [PATCH 07/77] chore: update pre-commit hooks (#6926) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.14.10 → v0.14.14](https://github.com/astral-sh/ruff-pre-commit/compare/v0.14.10...v0.14.14) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2c7c6819a8..f3ef97b424 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,7 +35,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.14.10" + rev: "v0.14.14" 
hooks: - id: ruff types: [file, python] From 850a78459e0870a3ee91048bd690ddd045fe44c1 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Tue, 3 Feb 2026 14:29:51 +0000 Subject: [PATCH 08/77] Pin pandas<3 and update lock files. (#6918) --- requirements/locks/py312-linux-64.lock | 89 +++++++++++++------------- requirements/locks/py313-linux-64.lock | 83 ++++++++++++------------ requirements/locks/py314-linux-64.lock | 69 ++++++++++---------- requirements/py312.yml | 2 +- requirements/py313.yml | 2 +- requirements/py314.yml | 2 +- 6 files changed, 125 insertions(+), 122 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index a74f3820d1..9438e5dee7 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: c1404b61114d4a2769a5146aba7f4917724cd621278e6d44175768c07bf5a6b7 +# input_hash: f6f5de785dfa266ec64d091b66f6ab12432b446820ea95baba9f63fee66c3ce4 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 @@ -31,12 +31,12 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda#35f29eec58405aaf55e01cb470d8c26a 
+https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a -https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda#1a580f7796c7bf6393fddb8bbbde58dc +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b @@ -47,7 +47,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h280c20c_1002.conda#45161d96307e3a447cc3eb5896cf6f8c https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.0-h26f9b46_0.conda#9ee58d5c534af06558933af3c845a780 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda#f61eb8cd60ff9057122a3d338b99c00f https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda#b2895afaf55bf96a8c8282a2e47a5de0 @@ -65,7 +65,7 @@ https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.cond https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda#186a18e3ba246eccfc7cff00cd19a870 https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda#9344155d33912347b37f0ae6c410a835 -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.4-h3f801dc_0.conda#01ba04e414e47f95c03d6ddd81fd37be +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda#86f7414544ae606282352fa1e116b41f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.2.0-hb03c661_1.conda#366b40a69f0ad6072561c1d09301c886 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.conda#4ffbb341c8b616aa2494b6afb26a0c5f https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 @@ -89,8 +89,8 @@ https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda#c https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.15-h3f63f65_0.conda#b11a4c6bf6f6f44e5e143f759ffa2087 https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda#d7d95fc8287ea7bf33e0e7116d2b95ec https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda#98b6c9dc80eb87b2519b97bcf7e578dd -https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.4-hb700be7_0.conda#aace50912e0f7361d0d223e7f7cfa6e5 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_ha0e22de_103.conda#86bc20552bf46075e3d92b67f089172d 
+https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.5-hb700be7_0.conda#058d5f16eaa3018be91aa3508df00d7c +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda#cffd3bdd58090148f4cfcd831f4b26ab https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda#6a0eb48e58684cca4d7acc8b7a0fd3c7 @@ -105,7 +105,7 @@ https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f4 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_7.conda#3a29a37b34dbd06672bdccb63829ec14 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 @@ -141,11 +141,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f 
https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.12.12-hd63d673_1_cpython.conda#5c00c8cea14ee8d02941cab9121dce41 +https://conda.anaconda.org/conda-forge/linux-64/python-3.12.12-hd63d673_2_cpython.conda#c4540d3de3fa228d9fa95e31f8e97f89 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda#8c4061f499edec6b8ac7000f6d586829 @@ -170,7 +170,7 @@ https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda#63e20cf7b7460019b423fc06abb96c60 https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_104.conda#0857f4d157820dcd5625f61fdfefb780 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_105.conda#d58cd79121dd51128f2a5dab44edf1ea 
https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 @@ -195,16 +195,16 @@ https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py312hd9148 https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py312h8a5da7c_0.conda#9fe4c848dd01cde9b8d0073744d4eef8 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 -https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda#58335b26c38bf4a20f399384c33cbcf9 +https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py312h50c33e8_0.conda#923b06ad75b7acc888fa20a22dc397cd https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-h99ae125_0.conda#8bbc19a6e87fbe8b97796e9a42a47a30 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py312h178313f_0.conda#0cf580c1b73146bb9ff1bbdb4d4c8cf9 -https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.1-py312h5253ce2_0.conda#ff09ba570ce66446db523ea21c12b765 +https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py312h5253ce2_0.conda#dd94c506b119130aef5a9382aed648e7 
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.1-pyhcf101f3_0.conda#d837065e4e0de4962c3462079c23f969 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 @@ -212,11 +212,11 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py312h0d868a https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py312h8a5da7c_0.conda#fba10c2007c8b06f77c5a23ce3a635ad https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda#4de79c071274a53dcaf2a8c749d1499e +https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b 
+https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.3-pyhd8ed1ab_0.conda#18de09b20462742fe093ba39185d9bac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b @@ -224,15 +224,14 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py312h4c3975b_0.conda#e03a4bf52d2170d64c816b2a52972097 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.0-py312h4c3975b_1.conda#a0b8efbe73c90f810a171a6c746be087 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda#4d1fc190b99912ed557a8236e958c559 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda#5e2eb9bf77394fc2e5918beefec9f9ab -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.6-hecca717_0.conda#93f5d4b5c17c8540479ad65f206fea51 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda#e192019153591938acf7322b6459d36e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda#665d152b9c6e78da404086088077c844 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 @@ -242,7 +241,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py312h460c074_1.conda https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.1-py312h8a5da7c_0.conda#eafe0b486a7910e4a6973029c80d437f +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.2-py312h8a5da7c_0.conda#3935daadad011d007deb379b8188588d https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_1.conda#693cda60b9223f55d0836c885621611b https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -251,8 +250,8 @@ https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea9 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 
https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_1.conda#e933f92cedca212eb2916f24823cf90b -https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_1.conda#e00afd65b88a3258212661b32c1469cb +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_2.conda#3c71daed530c0c26671a1b1b7010e746 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_2.conda#0ad9019bb10eda915fb0ce5f78fef13b https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d @@ -262,12 +261,12 @@ https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda# https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.1-py312h33ff503_0.conda#ba7e6cb06c372eae6f164623e6e06db8 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh8b19718_0.conda#c55515ca43c6444d2572e0f0d93cb6b9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py312h9b6a7d9_2.conda#573b9a879a3a42990f9c51d7376dce6b https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 
https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.1-pyhd8ed1ab_0.conda#6b0259cea8ffa6b66b35bae0ca01c447 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda#bdbd7385b4a67025ac2dba4ef8cb6a8f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py312h8a5da7c_0.conda#6a3fd177315aaafd4366930d440e4430 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py312h5d8c7f2_0.conda#7ee12bbdb2e989618c080c7c611048db @@ -275,10 +274,10 @@ https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py312h4f23490_0.conda#acb46785d4866cec0a88b4d6e991c33f +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py312h4f23490_1.conda#84bf349fad55056ed326fc550671b65c https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_3.conda#86cf7a7d861b79d38e3f0e5097e4965b -https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.12.0-pyhcf101f3_1.conda#cc7b371edd70319942c802c7d828a428 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312h0a2e395_4.conda#43c2bc96af3ae5ed9e8a10ded942aa50 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.1-pyhcf101f3_1.conda#91e3b2a0d014ac032c066a2e18051686 
https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 @@ -286,35 +285,35 @@ https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.c https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h4f23490_2.conda#cec5bc5f7d374f8f8095f8e28e31f6cb https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py312hf79963d_1.conda#e597b3e812d9613f659b7d87ad252d18 -https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 +https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh8b19718_0.conda#c55515ca43c6444d2572e0f0d93cb6b9 https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.conda#1af24b0eb99b1158c0d8c64a4d6c5d79 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py312h4f23490_2.conda#ab856c36638ab1acf90e70349c525cf9 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py312h54fa4ab_0.conda#9faccce05511d05f22001ecc2dfe78de +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py312h54fa4ab_1.conda#828eb07c4c87c38ed8c6560c25893280 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py312h383787d_2.conda#69e400d3deca12ee7afd4b73a5596905 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c 
-https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312hd9148b4_6.conda#f30ece80e76f9cc96e30cc5c71d2818e +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py312hd9148b4_0.conda#55fd03988b1b1bc6faabbfb5b481ecd7 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 -https://conda.anaconda.org/conda-forge/linux-64/viskores-1.0.0-hca82ae8_3.conda#efbc53222863d0f89c123cc3f9ccdc01 +https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312h4f23490_1.conda#ac0a1a874ce9e3f8940a3a908ff74da9 -https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1.conda#613cea9275c4773d0b53c879838ac0ad +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py312h4f23490_0.conda#6aef45ba3c0123547eb7b0f15852cac9 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.1-pyhcf101f3_1.conda#c15e359a982395be86a7576a91f9c5f5 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py312he3d6523_0.conda#b8dc157bbbb69c1407478feede8b7b42 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py312h25f8dc5_101.conda#b1c45859b7cfc04b81362fe7f0b75fa2 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py312h25f8dc5_102.conda#99217b58c029977345b72bb36a1f6596 +https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py312h4c3975b_2.conda#1d14b28fa4825ee30fd08e46bbcb5d63 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py312h4c3975b_3.conda#b0610b4174af97290f5f466a72583071 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py312hcedc861_0.conda#f0d110978a87b200a06412b56b26407c https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py312hf79963d_1.conda#6c913a686cb4060cbd7639a36fa144f0 https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e @@ -323,17 +322,18 @@ https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.co https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 
https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_3.conda#d2bbbd293097e664ffb01fc4cdaf5729 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.1-hb82b983_4.conda#f4dfd61ec958d420bebdcefeb805d658 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py312h6fba518_6.conda#24f269a608a9032faf6a1bcaea8d9e21 +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py312h6fba518_7.conda#2edca3790f2a372db44ff1aa159769fc https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.4-pyhd8ed1ab_0.conda#89f80194003ce06e6bdf25fba539d9b1 +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.5-pyhd8ed1ab_0.conda#4454f5c41511ece8a81a177043bc8c3b https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.0-h8b86629_0.conda#39dcf8bb370df27fd81dbe41d4cb605e -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb 
https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff @@ -341,3 +341,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 + diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index 9985e5375f..f8ac96ac02 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: aa998005b5dacd37fb319f00d41becfc0c4eee198d70c2eaba80542b72968540 +# input_hash: 9e22298d3c86ab9a2d785adbe961656f88dda327f4b1b70155fd64231d47d1f3 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 @@ -31,13 +31,13 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda#35f29eec58405aaf55e01cb470d8c26a +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a -https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda#1a580f7796c7bf6393fddb8bbbde58dc -https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb9d3cd8_0.conda#c7e925f37e3b40d893459e625f6a53f1 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 
+https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda#2c21e66f50753a083cbe6b80f38268fa https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b @@ -48,7 +48,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h280c20c_1002.conda#45161d96307e3a447cc3eb5896cf6f8c https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.0-h26f9b46_0.conda#9ee58d5c534af06558933af3c845a780 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda#f61eb8cd60ff9057122a3d338b99c00f https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda#b2895afaf55bf96a8c8282a2e47a5de0 @@ -66,7 +66,7 @@ https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.cond https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda#186a18e3ba246eccfc7cff00cd19a870 https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda#9344155d33912347b37f0ae6c410a835 -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.4-h3f801dc_0.conda#01ba04e414e47f95c03d6ddd81fd37be 
+https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda#86f7414544ae606282352fa1e116b41f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.2.0-hb03c661_1.conda#366b40a69f0ad6072561c1d09301c886 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.conda#4ffbb341c8b616aa2494b6afb26a0c5f https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 @@ -90,8 +90,8 @@ https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda#c https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.15-h3f63f65_0.conda#b11a4c6bf6f6f44e5e143f759ffa2087 https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda#d7d95fc8287ea7bf33e0e7116d2b95ec https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda#98b6c9dc80eb87b2519b97bcf7e578dd -https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.4-hb700be7_0.conda#aace50912e0f7361d0d223e7f7cfa6e5 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_ha0e22de_103.conda#86bc20552bf46075e3d92b67f089172d +https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.5-hb700be7_0.conda#058d5f16eaa3018be91aa3508df00d7c +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda#cffd3bdd58090148f4cfcd831f4b26ab https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda#6a0eb48e58684cca4d7acc8b7a0fd3c7 @@ -106,7 +106,7 @@ https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f4 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 
https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_7.conda#3a29a37b34dbd06672bdccb63829ec14 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 @@ -142,11 +142,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.13.11-hc97d973_100_cp313.conda#0cbb0010f1d8ecb64a428a8d4214609e +https://conda.anaconda.org/conda-forge/linux-64/python-3.13.11-hc97d973_101_cp313.conda#aa23b675b860f2566af2dfb3ffdf3b8c https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda#8c4061f499edec6b8ac7000f6d586829 @@ -171,7 +171,7 @@ https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py313h6b9daa2_0.conda#3a0be7abedcbc2aee92ea228efea8eba https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_104.conda#0857f4d157820dcd5625f61fdfefb780 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_105.conda#d58cd79121dd51128f2a5dab44edf1ea https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 @@ -196,17 +196,17 @@ https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py313h7037e https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py313h3dea7bd_0.conda#d182804a222acc8f2c7e215f344d229f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 
-https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda#58335b26c38bf4a20f399384c33cbcf9 +https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py313h80991f8_0.conda#183fe6b9e99e5c2b464c1573ec78eac8 https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh145f28c_0.conda#bf47878473e5ab9fdb4115735230e191 https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-h99ae125_0.conda#8bbc19a6e87fbe8b97796e9a42a47a30 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py313h8060acc_0.conda#b62867739241368f43f164889b45701b -https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.1-py313h54dd161_0.conda#d362949a1ed1ad4693b3928ad1d32c93 +https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py313h54dd161_0.conda#25fe6e02c2083497b3239e21b49d8093 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.1-pyhcf101f3_0.conda#d837065e4e0de4962c3462079c23f969 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac 
https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 @@ -214,11 +214,11 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py313heab575 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py313h3dea7bd_0.conda#4794ea0adaebd9f844414e594b142cb2 https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda#4de79c071274a53dcaf2a8c749d1499e +https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.3-pyhd8ed1ab_0.conda#18de09b20462742fe093ba39185d9bac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b @@ -230,9 +230,9 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda#5e2eb9bf77394fc2e5918beefec9f9ab -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.6-hecca717_0.conda#93f5d4b5c17c8540479ad65f206fea51 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda#e192019153591938acf7322b6459d36e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda#665d152b9c6e78da404086088077c844 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 @@ -242,7 +242,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py313hf46b229_1.conda https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.1-py313h3dea7bd_0.conda#82315acb438e857f809f556e2dcdb822 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.2-py313h3dea7bd_0.conda#df05169cc886aaf53dc560db634519f8 
https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_1.conda#bcca9afd203fe05d9582249ac12762da https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -251,8 +251,8 @@ https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea9 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_1.conda#e933f92cedca212eb2916f24823cf90b -https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_1.conda#e00afd65b88a3258212661b32c1469cb +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_2.conda#3c71daed530c0c26671a1b1b7010e746 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_2.conda#0ad9019bb10eda915fb0ce5f78fef13b https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d @@ -275,10 +275,10 @@ https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e 
https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py313h29aa505_0.conda#14dc0f64f2e83f7bc2be5153e2ef730b +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py313h29aa505_1.conda#c63d5f9d63fe2f48b0ad75005fcae7ba https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py313h7037e92_3.conda#6186382cb34a9953bf2a18fc763dc346 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.12.0-pyhcf101f3_1.conda#cc7b371edd70319942c802c7d828a428 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py313hc8edb43_4.conda#33639459bc29437315d4bff9ed5bc7a7 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.1-pyhcf101f3_1.conda#91e3b2a0d014ac032c066a2e18051686 https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 @@ -289,29 +289,29 @@ https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py313h08cd8bf_2.con https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py313h29aa505_2.conda#e3a598d20bf2fa7b03a771e9e4471be9 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py313h29aa505_2.conda#60f5d1c039da10fe89a530cc93ea50ac -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py313h4b8bb8b_0.conda#6cf603754566f66ff2be27f7f038b83a +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py313h4b8bb8b_1.conda#2b18fe5b4b2d1611ddf8c2f080a46563 
https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py313had47c43_2.conda#6e550dd748e9ac9b2925411684e076a1 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py313h7037e92_6.conda#1fa8d662361896873a165b051322073e +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py313h7037e92_0.conda#cb423e0853b3dde2b3738db4dedf5ba2 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 -https://conda.anaconda.org/conda-forge/linux-64/viskores-1.0.0-hca82ae8_3.conda#efbc53222863d0f89c123cc3f9ccdc01 +https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py313h29aa505_1.conda#36a704169c6a0b4ce8335d160103e218 -https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1.conda#613cea9275c4773d0b53c879838ac0ad +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py313h29aa505_0.conda#3942b6a86fe92d0888b3373f2c1e1676 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.1-pyhcf101f3_1.conda#c15e359a982395be86a7576a91f9c5f5 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 
+https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py313h683a580_0.conda#ffe67570e1a9192d2f4c189b27f75f89 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py313h16051e2_101.conda#2e949a2692351a2f83077d46c3e9835e +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py313h16051e2_102.conda#20ae46c5e9c7106bdb2cac6b44b7d845 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py313h07c4f96_2.conda#424535b78f522124143393ec02f6318c +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py313h07c4f96_3.conda#b7810803a3481e22968022a94107ed93 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py313h2005660_0.conda#d551bd1d2fcfac36674dbe2be4b0a410 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 @@ -322,17 +322,17 @@ https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.co https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 
https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_3.conda#d2bbbd293097e664ffb01fc4cdaf5729 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.1-hb82b983_4.conda#f4dfd61ec958d420bebdcefeb805d658 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py313hbb97348_6.conda#9f13c027bf4668c4f8a76a7bf10bd63e +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py313hbb97348_7.conda#03c6ddd039b6877278b5c4df20b61f29 https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.4-pyhd8ed1ab_0.conda#89f80194003ce06e6bdf25fba539d9b1 +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.5-pyhd8ed1ab_0.conda#4454f5c41511ece8a81a177043bc8c3b https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.0-h8b86629_0.conda#39dcf8bb370df27fd81dbe41d4cb605e -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 
-https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff @@ -340,3 +340,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 + diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock index 8da0ab061e..c219725369 100644 --- a/requirements/locks/py314-linux-64.lock +++ b/requirements/locks/py314-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 62fdb838057aebf9fb2eeac194a9cb32933e3e85f114e5b0dc5bdd6f4a969910 +# input_hash: 51877f045987ca3eb18cf2b23a50d599952703dc6a6fe8a5f1fcbcdce93433ab @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 @@ -27,13 +27,13 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda#35f29eec58405aaf55e01cb470d8c26a +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a -https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda#1a580f7796c7bf6393fddb8bbbde58dc -https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb9d3cd8_0.conda#c7e925f37e3b40d893459e625f6a53f1 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 
+https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda#2c21e66f50753a083cbe6b80f38268fa https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda#68f68355000ec3f1d6f26ea13e8f525f @@ -42,7 +42,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.co https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h280c20c_1002.conda#45161d96307e3a447cc3eb5896cf6f8c https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.0-h26f9b46_0.conda#9ee58d5c534af06558933af3c845a780 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda#f61eb8cd60ff9057122a3d338b99c00f https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda#b2895afaf55bf96a8c8282a2e47a5de0 @@ -55,7 +55,7 @@ https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3b https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.conda#2cd94587f3a401ae05e03a6caf09539d https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda#186a18e3ba246eccfc7cff00cd19a870 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda#9344155d33912347b37f0ae6c410a835 
-https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.4-h3f801dc_0.conda#01ba04e414e47f95c03d6ddd81fd37be +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda#86f7414544ae606282352fa1e116b41f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.2.0-hb03c661_1.conda#366b40a69f0ad6072561c1d09301c886 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.conda#4ffbb341c8b616aa2494b6afb26a0c5f https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 @@ -75,7 +75,7 @@ https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda#7a3 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda#c01af13bdc553d1a8fbfff6e8db075f0 https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda#d7d95fc8287ea7bf33e0e7116d2b95ec https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda#98b6c9dc80eb87b2519b97bcf7e578dd -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_ha0e22de_103.conda#86bc20552bf46075e3d92b67f089172d +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda#cffd3bdd58090148f4cfcd831f4b26ab https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 @@ -88,7 +88,7 @@ https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f4 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a 
https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_7.conda#3a29a37b34dbd06672bdccb63829ec14 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 @@ -118,10 +118,10 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.14.2-h32b2ec7_100_cp314.conda#1cef1236a05c3a98f68c33ae9425f656 +https://conda.anaconda.org/conda-forge/linux-64/python-3.14.2-h32b2ec7_101_cp314.conda#051f60a9d1e3aae7160d173aeb7029f8 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda#8c4061f499edec6b8ac7000f6d586829 @@ -143,7 +143,7 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_104.conda#0857f4d157820dcd5625f61fdfefb780 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_105.conda#d58cd79121dd51128f2a5dab44edf1ea https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 @@ -162,27 +162,27 @@ https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/markupsafe-3.0.3-pyh7db6752_0.conda#fab1be106a50e20f10fe5228fd1d1651 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py314h9891dd4_1.conda#c6752022dcdbf4b9ef94163de1ab7f03 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 -https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda#58335b26c38bf4a20f399384c33cbcf9 +https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 
https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py314h8ec4b1a_0.conda#f9b6a8fbb8dcb840a0c1c052dc5092e4 https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh145f28c_0.conda#bf47878473e5ab9fdb4115735230e191 https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-h99ae125_0.conda#8bbc19a6e87fbe8b97796e9a42a47a30 -https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.1-py314h0f05182_0.conda#28af9719e28f0054e9aee68153899293 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 +https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py314h0f05182_0.conda#4f225a966cfee267a79c5cb6382bd121 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.1-pyhcf101f3_0.conda#d837065e4e0de4962c3462079c23f969 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py314he82b845_1.conda#21dce7c80bbdb9785633011ad348e530 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 
https://conda.anaconda.org/conda-forge/noarch/pyyaml-6.0.3-pyh7db6752_0.conda#b12f41c0d7fb5ab81709fcc86579688f -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda#4de79c071274a53dcaf2a8c749d1499e +https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.3-pyhd8ed1ab_0.conda#18de09b20462742fe093ba39185d9bac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b @@ -194,9 +194,9 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda#5e2eb9bf77394fc2e5918beefec9f9ab -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca 
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.6-hecca717_0.conda#93f5d4b5c17c8540479ad65f206fea51 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda#e192019153591938acf7322b6459d36e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda#665d152b9c6e78da404086088077c844 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 @@ -204,7 +204,7 @@ https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py314h4a8dc5f_1.conda#cf45f4278afd6f4e6d03eda0f435d527 https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.1-py314h67df5f8_0.conda#a4525263f2fa741bffa4af1e40aec245 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.2-py314h67df5f8_0.conda#ff4ed891a8646b56042ade345ee5c88e https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_1.conda#51b0391b0ce96be49b1174e9a3e4a279 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -228,9 +228,9 @@ https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.cond 
https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py314hc02f841_0.conda#02e3559b6260b408fc1668c1bd26df10 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py314h9891dd4_3.conda#72d57382d0f63c20a16b1d514fcde6ff -https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.12.0-pyhcf101f3_1.conda#cc7b371edd70319942c802c7d828a428 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py314hc02f841_1.conda#552b5d9d8a2a4be882e1c638953e7281 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py314h97ea11e_4.conda#95bede9cdb7a30a4b611223d52a01aa4 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.1-pyhcf101f3_1.conda#91e3b2a0d014ac032c066a2e18051686 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.conda#6ce4ad29c3ae0b74df813409433457ff @@ -239,27 +239,27 @@ https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py314ha0b5721_2.conda#fe3a5c8be07a7b82058bdeb39d33d93b https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py314hc02f841_2.conda#5be92985870940eac3f3b8cda57002cc -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py314hf07bd8e_0.conda#2d82ddc8e7a74d27382410462df062a2 
+https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py314hf07bd8e_1.conda#c7df812186fb1290bc00d9b7b5a50b18 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py314hbe3edd8_2.conda#5963e6ee81772d450a35e6bc95522761 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py314h9891dd4_6.conda#28303a78c48916ab07b95ffdbffdfd6c +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py314h9891dd4_0.conda#5d3c008e54c7f49592fca9c32896a76f https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py314hc02f841_1.conda#258046835c9f2ecef87c1f11f387f72a -https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1.conda#613cea9275c4773d0b53c879838ac0ad +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py314hc02f841_0.conda#de50a60eab348de04809a33e180b4b01 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.1-pyhcf101f3_1.conda#c15e359a982395be86a7576a91f9c5f5 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a 
https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py314h1194b4b_0.conda#b8683e6068099b69c10dbfcf7204203f -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py314h4ae7121_101.conda#31395db7aeae4be8307bcd81f1e58e53 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py314h4ae7121_102.conda#cf495d9fc5e01a2ee10e0867ce957a44 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py314h5bd0f2a_2.conda#78071b0c9e55392e9ec1b22b18e80cdf +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py314h5bd0f2a_3.conda#b55fcaf9e1ad884241180b9c3c94384e https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py314ha1f92a4_0.conda#15b1e205270451c078c79d0480438e8e https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py314ha0b5721_1.conda#fe89c5fa422f215b0d75046ecd4667de @@ -270,10 +270,10 @@ https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.cond https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e 
-https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.0-h8b86629_0.conda#39dcf8bb370df27fd81dbe41d4cb605e -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff @@ -281,3 +281,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 + diff --git a/requirements/py312.yml b/requirements/py312.yml index 454bfd7e79..cd3cfb7033 100644 --- a/requirements/py312.yml +++ b/requirements/py312.yml @@ -31,7 +31,7 @@ dependencies: - iris-sample-data >=2.4.0 - mo_pack - nc-time-axis >=1.4 - - pandas + - pandas <3 - pip - python-stratify - rasterio diff 
--git a/requirements/py313.yml b/requirements/py313.yml index 32b606c02a..eaf556b564 100644 --- a/requirements/py313.yml +++ b/requirements/py313.yml @@ -31,7 +31,7 @@ dependencies: - iris-sample-data >=2.4.0 - mo_pack - nc-time-axis >=1.4 - - pandas + - pandas <3 - pip - python-stratify - rasterio diff --git a/requirements/py314.yml b/requirements/py314.yml index 706f697e20..f1612564ce 100644 --- a/requirements/py314.yml +++ b/requirements/py314.yml @@ -31,7 +31,7 @@ dependencies: - iris-sample-data >=2.4.0 - mo_pack - nc-time-axis >=1.4 - - pandas + - pandas <3 - pip - python-stratify - rasterio From ec2e6afefe865c24776fe70e19b59274ecc7e22c Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Thu, 5 Feb 2026 15:57:12 +0000 Subject: [PATCH 09/77] Convert fileformats/nc_load_rules to pytest (#6896) * converted __init__.py * converted actions/test__grid_mappings * converted actions/test__hybrid_formulae * converted actions/test__latlon_dims * converted actions/test__misc * converted actions/test__time_coords * converted engine/test_engine * test__add_or_capture * converted test__normalise_bounds_units * converted build albers * converted build and add ancil * converted build and add aux * converted test_build_and_add_cell_measure * converted test_build_and_add_cell_methods * converted test_build_and_add_dimesnsion_coordinate * remainder * yield to return --------- Co-authored-by: Henry <84939917+HGWright@users.noreply.github.com> --- .../nc_load_rules/actions/__init__.py | 26 +- .../actions/test__grid_mappings.py | 83 ++--- .../actions/test__hybrid_formulae.py | 22 +- .../actions/test__latlon_dimcoords.py | 57 +--- .../actions/test__miscellaneous.py | 65 ++-- .../actions/test__time_coords.py | 70 ++--- .../nc_load_rules/engine/test_engine.py | 59 ++-- .../nc_load_rules/helpers/__init__.py | 11 + .../helpers/test__add_or_capture.py | 11 +- .../helpers/test__normalise_bounds_units.py | 162 +++++----- 
...ild_albers_equal_area_coordinate_system.py | 17 +- .../helpers/test_build_and_add_ancil_var.py | 16 +- ...test_build_and_add_auxiliary_coordinate.py | 127 ++++---- .../test_build_and_add_cell_measure.py | 16 +- .../test_build_and_add_cell_methods.py | 16 +- ...test_build_and_add_dimension_coordinate.py | 286 ++++++++---------- .../test_build_and_add_global_attributes.py | 12 +- .../helpers/test_build_and_add_names.py | 12 +- .../helpers/test_build_and_add_units.py | 18 +- ...t_build_geostationary_coordinate_system.py | 17 +- ..._azimuthal_equal_area_coordinate_system.py | 17 +- ...ild_lambert_conformal_coordinate_system.py | 17 +- .../test_build_mercator_coordinate_system.py | 42 +-- ...uild_oblique_mercator_coordinate_system.py | 11 +- ...d_polar_stereographic_coordinate_system.py | 42 +-- ...t_build_stereographic_coordinate_system.py | 17 +- ...d_transverse_mercator_coordinate_system.py | 17 +- .../test_build_verticalp_coordinate_system.py | 17 +- .../helpers/test_get_attr_units.py | 42 ++- .../helpers/test_get_cf_bounds_var.py | 21 +- .../nc_load_rules/helpers/test_get_names.py | 28 +- .../test_has_supported_mercator_parameters.py | 71 ++--- ...upported_polar_stereographic_parameters.py | 120 ++++---- .../helpers/test_parse_cell_methods.py | 53 ++-- .../helpers/test_reorder_bounds_data.py | 38 +-- 35 files changed, 669 insertions(+), 987 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 296765f853..0b9caabdf7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -4,15 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`.""" -from pathlib import Path -import shutil -import tempfile import warnings +import pytest + import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf.loader import _load_cube +from iris.tests import _shared_utils from iris.tests.stock.netcdf import ncgen_from_cdl from iris.warnings import IrisLoadWarning @@ -35,11 +35,8 @@ class Mixin__nc_load_actions: """Class to make testcases for rules or actions code, and check results. - Defines standard setUpClass/tearDownClass methods, to create a temporary + Defines standard setup method, to create a temporary directory for intermediate files. - NOTE: owing to peculiarities of unittest, these must be explicitly called - from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the - actual Test_XXX class which also inherits unittest.TestCase. Testcases are manufactured by the '_make_testcase_cdl' method. The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes @@ -55,15 +52,10 @@ class Mixin__nc_load_actions: # "global" test setting : whether to output various debug info debug_info = False - @classmethod - def setUpClass(cls): + @pytest.fixture(autouse=True, scope="class") + def setup_mixin(self, request, tmp_path_factory): # Create a temp directory for temp files. - cls.temp_dirpath = Path(tempfile.mkdtemp()) - - @classmethod - def tearDownClass(cls): - # Destroy a temp directory for temp files. - shutil.rmtree(cls.temp_dirpath) + request.cls.temp_dirpath = tmp_path_factory.mktemp("temp") def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path, mocker=None): """Load the 'phenom' data variable in a CDL testcase, as a cube. 
@@ -135,9 +127,9 @@ def run_testcase(self, warning_regex=None, **testcase_kwargs): print("------\n") if warning_regex is None: - context = self.assertNoWarningsRegexp() + context = _shared_utils.assert_no_warnings_regexp() else: - context = self.assertWarnsRegex(IrisLoadWarning, warning_regex) + context = pytest.warns(IrisLoadWarning, match=warning_regex) with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index 202fb0fa16..f0eca80d32 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -9,12 +9,11 @@ """ -import iris.coord_systems - -import iris.tests as tests # isort: skip +import re import pytest +import iris.coord_systems import iris.coord_systems as ics import iris.fileformats._nc_load_rules.helpers as hh from iris.loading import LOAD_PROBLEMS @@ -267,8 +266,8 @@ def check_result( Various options control the expected things which are tested. 
""" - self.assertEqual(cube.standard_name, "air_temperature") - self.assertEqual(cube.var_name, "phenom") + assert cube.standard_name == "air_temperature" + assert cube.var_name == "phenom" x_coords = cube.coords(dimensions=(1,)) y_coords = cube.coords(dimensions=(0,)) @@ -283,40 +282,40 @@ def check_result( else: expected_dim_coords += x_coords - self.assertEqual(set(expected_dim_coords), set(cube.coords(dim_coords=True))) + assert set(expected_dim_coords) == set(cube.coords(dim_coords=True)) if cube_no_xycoords: - self.assertEqual(expected_dim_coords, []) + assert expected_dim_coords == [] x_coord = None y_coord = None else: - self.assertEqual(len(x_coords), 1) + assert len(x_coords) == 1 (x_coord,) = x_coords - self.assertEqual(len(y_coords), 1) + assert len(y_coords) == 1 (y_coord,) = y_coords - self.assertEqual(set(expected_aux_coords), set(cube.coords(dim_coords=False))) + assert set(expected_aux_coords) == set(cube.coords(dim_coords=False)) if x_coord: if xco_stdname is None: # no check pass elif xco_stdname is True: - self.assertIsNotNone(x_coord.standard_name) + assert x_coord.standard_name is not None elif xco_stdname is False: - self.assertIsNone(x_coord.standard_name) + assert x_coord.standard_name is None else: - self.assertEqual(x_coord.standard_name, xco_stdname) + assert x_coord.standard_name == xco_stdname if y_coord: if yco_stdname is None: # no check pass if yco_stdname is True: - self.assertIsNotNone(y_coord.standard_name) + assert y_coord.standard_name is not None elif yco_stdname is False: - self.assertIsNone(y_coord.standard_name) + assert y_coord.standard_name is None else: - self.assertEqual(y_coord.standard_name, yco_stdname) + assert y_coord.standard_name == yco_stdname cube_cs = cube.coord_system() if cube_no_xycoords: @@ -326,36 +325,29 @@ def check_result( yco_cs = y_coord.coord_system xco_cs = x_coord.coord_system if cube_no_cs: - self.assertIsNone(cube_cs) - self.assertIsNone(yco_cs) - self.assertIsNone(xco_cs) + assert cube_cs is 
None + assert yco_cs is None + assert xco_cs is None else: - self.assertIsNotNone(cube_cs) + assert cube_cs is not None if cube_cstype is not None: - self.assertIsInstance(cube_cs, cube_cstype) + assert isinstance(cube_cs, cube_cstype) if xco_no_cs: - self.assertIsNone(xco_cs) + assert xco_cs is None else: - self.assertEqual(xco_cs, cube_cs) + assert xco_cs == cube_cs if yco_no_cs: - self.assertIsNone(yco_cs) + assert yco_cs is None else: - self.assertEqual(yco_cs, cube_cs) + assert yco_cs == cube_cs if load_problems_regex is not None: load_problem = LOAD_PROBLEMS.problems[-1] - self.assertRegex(str(load_problem.stack_trace), load_problems_regex) + assert re.search(load_problems_regex, str(load_problem.stack_trace)) -class Test__grid_mapping(Mixin__grid_mapping, tests.IrisTest): +class Test__grid_mapping(Mixin__grid_mapping): # Various testcases for translation of grid-mappings - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def test_basic_latlon(self): # A basic reference example with a lat-long grid. @@ -801,15 +793,8 @@ def test_extended_mapping_basic_latlon_missing_coords(self): self.check_result(result, xco_no_cs=True) -class Test__aux_latlons(Mixin__grid_mapping, tests.IrisTest): +class Test__aux_latlons(Mixin__grid_mapping): # Testcases for translating auxiliary latitude+longitude variables - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def test_aux_lon(self): # Change the name of xdim, and put xco on the coords list. 
@@ -933,15 +918,7 @@ def test_extended_grid_mapping_aux_lat_and_lon(self): self.check_result(result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=False) -class Test__nondimcoords(Mixin__grid_mapping, tests.IrisTest): - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - +class Test__nondimcoords(Mixin__grid_mapping): def test_nondim_lats(self): # Fix a coord's values so it cannot be a dim-coord. # @@ -1235,7 +1212,3 @@ def test_one_coord_system_simple(self, osgb_cs, latlon_cs, mocker, tmp_path): # Loading multiple coord systems or using extended grid mapping implies ordered axes: assert cube.extended_grid_mapping is False - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index 65b0ecd94e..838cb8b9c7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -9,21 +9,11 @@ """ -import iris.tests as tests # isort: skip - import iris.fileformats._nc_load_rules.helpers as hh from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions -class Test__formulae_tests(Mixin__nc_load_actions, tests.IrisTest): - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - +class Test__formulae_tests(Mixin__nc_load_actions): def _make_testcase_cdl( self, formula_root_name=None, term_names=None, extra_formula_type=None ): @@ -111,7 +101,7 @@ def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): # replace with our 'default', which is hybrid-height. # N.B. 'None' is different: it means expect *no* factory. 
factory_type = "atmosphere_hybrid_height_coordinate" - self.assertEqual(cube._formula_type_name, factory_type) + assert cube._formula_type_name == factory_type if formula_terms == "_auto": # Set default terms-expected, according to the expected factory @@ -130,12 +120,12 @@ def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): # N.B. the terms dictionary can be missing, if there were none actual_terms = cube._formula_terms_byname or {} - self.assertEqual(sorted(formula_terms), sorted(actual_terms.keys())) + assert sorted(formula_terms) == sorted(actual_terms.keys()) # Check that there is an aux-coord of the expected name for each term for var_name in actual_terms.values(): coords = cube.coords(var_name=var_name, dim_coords=False) - self.assertEqual(len(coords), 1) + assert len(coords) == 1 # # Actual testcase routines @@ -272,7 +262,3 @@ def test_ocean_s_coordinate_g2(self): term_names = hh.CF_COORD_VERTICAL[hybrid_type] result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py index 0694ebe250..9e534521f1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py @@ -10,9 +10,10 @@ """ +import re from typing import Literal -import iris.tests as tests # isort: skip +import pytest from iris.common import LimitedAttributeDict from iris.coord_systems import GeogCS, RotatedGeogCS @@ -27,8 +28,8 @@ class Mixin_latlon_dimcoords(Mixin__nc_load_actions): # Set by inheritor classes, which are actual TestCases. 
lat_1_or_lon_0: Literal[0, 1] - def setUp(self): - super().setUp() + @pytest.fixture(autouse=True) + def _setup(self): # Generate some useful settings : just to generalise operation over # both latitude and longitude. islat = self.lat_1_or_lon_0 @@ -133,9 +134,9 @@ def check_result( # affect the results here, in some cases. coords = cube.coords() # There should be one and only one coord. - self.assertEqual(1, len(coords)) + assert 1 == len(coords) # It should also be a dim-coord - self.assertEqual(1, len(cube.coords(dim_coords=True))) + assert 1 == len(cube.coords(dim_coords=True)) (coord,) = coords if self.debug_info: print() @@ -146,24 +147,24 @@ def check_result( getattr(coord, name) for name in ("standard_name", "long_name", "units", "coord_system") ] - self.assertEqual(standard_name, coord_stdname, context_message) - self.assertEqual(long_name, coord_longname, context_message) - self.assertEqual(units, coord_units, context_message) + assert standard_name == coord_stdname, context_message + assert long_name == coord_longname, context_message + assert units == coord_units, context_message assert crs in (None, "latlon", "rotated") if crs is None: - self.assertEqual(None, coord_crs, context_message) + assert None is coord_crs, context_message elif crs == "latlon": - self.assertIsInstance(coord_crs, GeogCS, context_message) + assert isinstance(coord_crs, GeogCS), context_message elif crs == "rotated": - self.assertIsInstance(coord_crs, RotatedGeogCS, context_message) + assert isinstance(coord_crs, RotatedGeogCS), context_message def check_load_problem(self, setup_kwargs, expected_msg): # Check that the expected load problem is stored. 
_ = self.run_testcase(**setup_kwargs) load_problem = LOAD_PROBLEMS.problems[-1] attributes = load_problem.loaded.attributes[LimitedAttributeDict.IRIS_RAW] - self.assertEqual(attributes["standard_name"], setup_kwargs["standard_name"]) - self.assertRegex("".join(load_problem.stack_trace.format()), expected_msg) + assert attributes["standard_name"] == setup_kwargs["standard_name"] + assert re.search(expected_msg, "".join(load_problem.stack_trace.format())) # # Testcase routines @@ -330,35 +331,9 @@ def test_fail_projected(self): ) -class Test__longitude_coords(Mixin_latlon_dimcoords, tests.IrisTest): +class Test__longitude_coords(Mixin_latlon_dimcoords): lat_1_or_lon_0 = 0 - @classmethod - def setUpClass(cls): - super().setUpClass() - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def setUp(self): - super().setUp() - - -class Test__latitude_coords(Mixin_latlon_dimcoords, tests.IrisTest): +class Test__latitude_coords(Mixin_latlon_dimcoords): lat_1_or_lon_0 = 1 - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def setUp(self): - super().setUp() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py index 7f4c84ec78..283f01920c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -13,22 +13,13 @@ """ -import iris.tests as tests # isort: skip - from iris.coords import AncillaryVariable, AuxCoord, CellMeasure from iris.fileformats.pp import STASH from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions -class Test__ukmo_attributes(Mixin__nc_load_actions, tests.IrisTest): +class Test__ukmo_attributes(Mixin__nc_load_actions): # Tests for handling of the special 
UM-specific data-var attributes. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def _make_testcase_cdl(self, **add_attrs): phenom_attrs_string = "" @@ -55,16 +46,16 @@ def check_result(self, cube, stashcode=None, processflags=None): cube_processflags = cube.attributes.get("ukmo__process_flags") if stashcode is not None: - self.assertIsInstance(cube_stashattr, STASH) - self.assertEqual(str(stashcode), str(cube_stashattr)) + assert isinstance(cube_stashattr, STASH) + assert str(stashcode) == str(cube_stashattr) else: - self.assertIsNone(cube_stashattr) + assert cube_stashattr is None if processflags is not None: - self.assertIsInstance(cube_processflags, tuple) - self.assertEqual(set(cube_processflags), set(processflags)) + assert isinstance(cube_processflags, tuple) + assert set(cube_processflags) == set(processflags) else: - self.assertIsNone(cube_processflags) + assert cube_processflags is None # # Testcase routines @@ -85,8 +76,8 @@ def test_stash_empty(self): ukmo__um_stash_source=value, warning_regex="Invalid content for managed attribute name 'um_stash_source'", ) - self.assertNotIn("STASH", cube.attributes) - self.assertEqual(cube.attributes["ukmo__um_stash_source"], value) + assert "STASH" not in cube.attributes + assert cube.attributes["ukmo__um_stash_source"] == value def test_stash_invalid(self): value = "XXX" @@ -94,8 +85,8 @@ def test_stash_invalid(self): ukmo__um_stash_source="XXX", warning_regex="Invalid content for managed attribute name 'um_stash_source'", ) - self.assertNotIn("STASH", cube.attributes) - self.assertEqual(cube.attributes["ukmo__um_stash_source"], value) + assert "STASH" not in cube.attributes + assert cube.attributes["ukmo__um_stash_source"] == value def test_processflags_single(self): cube = self.run_testcase(ukmo__process_flags="this") @@ -113,17 +104,9 @@ def test_processflags_empty(self): self.check_result(cube, processflags=expected_result) -class 
Test__labels_cellmeasures_ancils(Mixin__nc_load_actions, tests.IrisTest): +class Test__labels_cellmeasures_ancils(Mixin__nc_load_actions): # Tests for some simple rules that translate facts directly into cube data, # with no alternative actions, complications or failure modes to test. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - def _make_testcase_cdl( self, include_label=False, @@ -185,28 +168,28 @@ def check_result( ): label_coords = cube.coords(var_name="v_label") if expect_label: - self.assertEqual(len(label_coords), 1) + assert len(label_coords) == 1 (coord,) = label_coords - self.assertIsInstance(coord, AuxCoord) - self.assertEqual(coord.dtype.kind, "U") + assert isinstance(coord, AuxCoord) + assert coord.dtype.kind == "U" else: - self.assertEqual(len(label_coords), 0) + assert len(label_coords) == 0 cell_measures = cube.cell_measures() if expect_cellmeasure: - self.assertEqual(len(cell_measures), 1) + assert len(cell_measures) == 1 (cellm,) = cell_measures - self.assertIsInstance(cellm, CellMeasure) + assert isinstance(cellm, CellMeasure) else: - self.assertEqual(len(cell_measures), 0) + assert len(cell_measures) == 0 ancils = cube.ancillary_variables() if expect_ancil: - self.assertEqual(len(ancils), 1) + assert len(ancils) == 1 (ancil,) = ancils - self.assertIsInstance(ancil, AncillaryVariable) + assert isinstance(ancil, AncillaryVariable) else: - self.assertEqual(len(ancils), 0) + assert len(ancils) == 0 def test_label(self): cube = self.run_testcase(include_label=True) @@ -219,7 +202,3 @@ def test_ancil(self): def test_cellmeasure(self): cube = self.run_testcase(include_cellmeasure=True) self.check_result(cube, expect_cellmeasure=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 
ab7eedb7e8..6c13c35144 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -9,10 +9,9 @@ """ +import re from typing import ClassVar -import iris.tests as tests # isort: skip - from iris.coords import AuxCoord, DimCoord from iris.loading import LOAD_PROBLEMS from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions @@ -187,39 +186,39 @@ def check_result( period_auxcos = cube.coords(period_name, dim_coords=False) if time_is == "dim": - self.assertEqual(len(time_dimcos), 1) - self.assertEqual(len(time_auxcos), 0) + assert len(time_dimcos) == 1 + assert len(time_auxcos) == 0 elif time_is == "aux": - self.assertEqual(len(time_dimcos), 0) - self.assertEqual(len(time_auxcos), 1) + assert len(time_dimcos) == 0 + assert len(time_auxcos) == 1 else: - self.assertEqual(len(time_dimcos), 0) - self.assertEqual(len(time_auxcos), 0) + assert len(time_dimcos) == 0 + assert len(time_auxcos) == 0 if period_is == "dim": - self.assertEqual(len(period_dimcos), 1) - self.assertEqual(len(period_auxcos), 0) + assert len(period_dimcos) == 1 + assert len(period_auxcos) == 0 elif period_is == "aux": - self.assertEqual(len(period_dimcos), 0) - self.assertEqual(len(period_auxcos), 1) + assert len(period_dimcos) == 0 + assert len(period_auxcos) == 1 else: - self.assertEqual(len(period_dimcos), 0) - self.assertEqual(len(period_auxcos), 0) + assert len(period_dimcos) == 0 + assert len(period_auxcos) == 0 # Also check expected built Coord types. 
if time_is == "dim": - self.assertIsInstance(time_dimcos[0], DimCoord) + assert isinstance(time_dimcos[0], DimCoord) elif time_is == "aux": - self.assertIsInstance(time_auxcos[0], AuxCoord) + assert isinstance(time_auxcos[0], AuxCoord) if period_is == "dim": - self.assertIsInstance(period_dimcos[0], DimCoord) + assert isinstance(period_dimcos[0], DimCoord) elif period_is == "aux": - self.assertIsInstance(period_auxcos[0], AuxCoord) + assert isinstance(period_auxcos[0], AuxCoord) if load_problems_regex is not None: load_problem = LOAD_PROBLEMS.problems[-1] - self.assertRegex(str(load_problem.stack_trace), load_problems_regex) + assert re.search(load_problems_regex, str(load_problem.stack_trace)) class Mixin__singlecoord__tests(Mixin__timecoords__common): @@ -396,43 +395,20 @@ def test_aux_fails_typeident(self): self.check_result(result, "aux") -class Test__time(Mixin__singlecoord__tests, tests.IrisTest): +class Test__time(Mixin__singlecoord__tests): # Run 'time' coord tests which = "time" - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - -class Test__period(Mixin__singlecoord__tests, tests.IrisTest): +class Test__period(Mixin__singlecoord__tests): # Run 'time_period' coord tests which = "period" - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - -class Test__dualcoord(Mixin__timecoords__common, tests.IrisTest): +class Test__dualcoord(Mixin__timecoords__common): # Coordinate tests for a combination of 'time' and 'time_period'. # Not strictly necessary, as handling is independent, but a handy check # on typical usage. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def test_time_and_period(self): # Test case with both 'time' and 'period', with separate dims. 
@@ -461,7 +437,3 @@ def test_time_dim_period_aux(self): ), ) self.check_result(result, time_is="dim", period_is="aux") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index 71280e5f60..a0cdf99eb1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -4,14 +4,14 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module.""" -from unittest import mock +import pytest from iris.fileformats._nc_load_rules.engine import Engine, FactEntity -import iris.tests as tests -class Test_Engine(tests.IrisTest): - def setUp(self): +class Test_Engine: + @pytest.fixture(autouse=True) + def _setup(self): self.empty_engine = Engine() engine = Engine() engine.add_fact("this", ("that", "other")) @@ -20,74 +20,69 @@ def setUp(self): def test__init(self): # Check that init creates an empty Engine. engine = Engine() - self.assertIsInstance(engine, Engine) - self.assertIsInstance(engine.facts, FactEntity) - self.assertEqual(list(engine.facts.entity_lists.keys()), []) + assert isinstance(engine, Engine) + assert isinstance(engine.facts, FactEntity) + assert list(engine.facts.entity_lists.keys()) == [] def test_reset(self): # Check that calling reset() causes a non-empty engine to be emptied. 
engine = self.nonempty_engine fact_names = list(engine.facts.entity_lists.keys()) - self.assertNotEqual(len(fact_names), 0) + assert len(fact_names) != 0 engine.reset() fact_names = list(engine.facts.entity_lists.keys()) - self.assertEqual(len(fact_names), 0) + assert len(fact_names) == 0 - def test_activate(self): + def test_activate(self, mocker): # Check that calling engine.activate() --> actions.run_actions(engine) engine = self.empty_engine target = "iris.fileformats._nc_load_rules.engine.run_actions" - run_call = self.patch(target) + run_call = mocker.patch(target) engine.activate() - self.assertEqual(run_call.call_args_list, [mock.call(engine)]) + assert run_call.call_args_list == [mocker.call(engine)] def test_add_case_specific_fact__newname(self): # Adding a new fact to a new fact-name records as expected. engine = self.nonempty_engine engine.add_case_specific_fact("new_fact", ("a1", "a2")) - self.assertEqual(engine.fact_list("new_fact"), [("a1", "a2")]) + assert engine.fact_list("new_fact") == [("a1", "a2")] def test_add_case_specific_fact__existingname(self): # Adding a new fact to an existing fact-name records as expected. engine = self.nonempty_engine name = "this" - self.assertEqual(engine.fact_list(name), [("that", "other")]) + assert engine.fact_list(name) == [("that", "other")] engine.add_case_specific_fact(name, ("yetanother",)) - self.assertEqual(engine.fact_list(name), [("that", "other"), ("yetanother",)]) + assert engine.fact_list(name) == [("that", "other"), ("yetanother",)] def test_add_case_specific_fact__emptyargs(self): # Check that empty args work ok, and will create a new fact. 
engine = self.empty_engine engine.add_case_specific_fact("new_fact", ()) - self.assertIn("new_fact", engine.facts.entity_lists) - self.assertEqual(engine.fact_list("new_fact"), [()]) + assert "new_fact" in engine.facts.entity_lists + assert engine.fact_list("new_fact") == [()] - def test_add_fact(self): + def test_add_fact(self, mocker): # Check that 'add_fact' is equivalent to (short for) a call to # 'add_case_specific_fact'. engine = self.empty_engine target = "iris.fileformats._nc_load_rules.engine.Engine.add_case_specific_fact" - acsf_call = self.patch(target) + acsf_call = mocker.patch(target) engine.add_fact("extra", ()) - self.assertEqual(acsf_call.call_count, 1) - self.assertEqual( - acsf_call.call_args_list, - [mock.call(fact_name="extra", fact_arglist=())], - ) + assert acsf_call.call_count == 1 + assert acsf_call.call_args_list == [ + mocker.call(fact_name="extra", fact_arglist=()) + ] def test_get_kb(self): # Check that this stub just returns the facts database. engine = self.nonempty_engine kb = engine.get_kb() - self.assertIsInstance(kb, FactEntity) - self.assertIs(kb, engine.facts) + assert isinstance(kb, FactEntity) + assert kb is engine.facts def test_fact_list__existing(self): - self.assertEqual(self.nonempty_engine.fact_list("this"), [("that", "other")]) + assert self.nonempty_engine.fact_list("this") == [("that", "other")] def test_fact_list__nonexisting(self): - self.assertEqual(self.empty_engine.fact_list("odd-unknown"), []) - - -if __name__ == "__main__": - tests.main() + assert self.empty_engine.fact_list("odd-unknown") == [] diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py index e151d92aa8..36ea8c9953 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py @@ -6,3 +6,14 @@ :mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . 
""" + +import pytest +from pytest_mock import MockerFixture + + +class MockerMixin: + mocker: MockerFixture + + @pytest.fixture(autouse=True) + def _mocker_mixin_setup(self, mocker): + self.mocker = mocker diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py index eecc23b653..cadd2efa62 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py @@ -4,9 +4,8 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers._add_or_capture`.""" -from unittest.mock import MagicMock - import pytest +from pytest_mock import MockType from iris.cube import Cube from iris.fileformats._nc_load_rules import helpers @@ -15,9 +14,9 @@ class Mixin: - build_func: MagicMock - add_method: MagicMock - cf_var: MagicMock + build_func: MockType + add_method: MockType + cf_var: MockType filename: str = "test__add_or_capture.nc" attr_key: str = "attr_key" @@ -63,7 +62,7 @@ def _setup(self, make_args): @pytest.fixture def patch_build_raw_cube(self, mocker): patch = mocker.patch.object(helpers, "build_raw_cube", return_value="RAW_CUBE") - yield patch + return patch @pytest.fixture def cause_build_raw_cube_error(self, patch_build_raw_cube): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py index 337279426e..4d85ccacd6 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py @@ -4,99 +4,99 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers._normalise_bounds_units`.""" -# import iris tests first so that some things can be initialised before -# importing anything else from typing import Optional -from unittest import mock import numpy as np import pytest +from pytest_mock import MockType from iris.fileformats._nc_load_rules.helpers import ( _normalise_bounds_units, _WarnComboIgnoringCfLoad, ) +from iris.tests import _shared_utils +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin from iris.warnings import IrisCfLoadWarning -BOUNDS = mock.sentinel.bounds CF_NAME = "dummy_bnds" -def _make_cf_bounds_var( - units: Optional[str] = None, - unitless: bool = False, -) -> mock.MagicMock: - """Construct a mock CF bounds variable.""" - if units is None: +class Test(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self): + self.bounds = self.mocker.sentinel.bounds + + def _make_cf_bounds_var( + self, + units: Optional[str] = None, + unitless: bool = False, + ) -> MockType: + """Construct a mock CF bounds variable.""" + if units is None: + units = "days since 1970-01-01" + + cf_data = self.mocker.Mock(spec=[]) + # we want to mock the absence of flag attributes to helpers.get_attr_units + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + del cf_data.flag_values + del cf_data.flag_masks + del cf_data.flag_meanings + + cf_var = self.mocker.MagicMock( + cf_name=CF_NAME, + cf_data=cf_data, + units=units, + calendar=None, + dtype=float, + ) + + if unitless: + del cf_var.units + + return cf_var + + def test_unitless(self) -> None: + """Test bounds variable with no units.""" + cf_bounds_var = self._make_cf_bounds_var(unitless=True) + result = _normalise_bounds_units(None, cf_bounds_var, self.bounds) + assert result == self.bounds + + def test_invalid_units__pass_through(self) -> None: + """Test bounds variable with invalid units.""" + units = "invalid" + cf_bounds_var = 
self._make_cf_bounds_var(units=units) + wmsg = f"Ignoring invalid units {units!r} on netCDF variable {CF_NAME!r}" + with pytest.warns(_WarnComboIgnoringCfLoad, match=wmsg): + result = _normalise_bounds_units(None, cf_bounds_var, self.bounds) + assert result == self.bounds + + @pytest.mark.parametrize("units", ["unknown", "no_unit", "1", "kelvin"]) + def test_ignore_bounds(self, units) -> None: + """Test bounds variable with incompatible units compared to points.""" + points_units = "km" + cf_bounds_var = self._make_cf_bounds_var(units=units) + wmsg = ( + f"Ignoring bounds on NetCDF variable {CF_NAME!r}. " + f"Expected units compatible with {points_units!r}" + ) + with pytest.warns(IrisCfLoadWarning, match=wmsg): + result = _normalise_bounds_units(points_units, cf_bounds_var, self.bounds) + assert result is None + + def test_compatible(self) -> None: + """Test bounds variable with compatible units requiring conversion.""" + points_units, bounds_units = "days since 1970-01-01", "hours since 1970-01-01" + cf_bounds_var = self._make_cf_bounds_var(units=bounds_units) + bounds = np.arange(10, dtype=float) * 24 + result = _normalise_bounds_units(points_units, cf_bounds_var, bounds) + expected = bounds / 24 + _shared_utils.assert_array_equal(result, expected) + + def test_same_units(self) -> None: + """Test bounds variable with same units as points.""" units = "days since 1970-01-01" - - cf_data = mock.Mock(spec=[]) - # we want to mock the absence of flag attributes to helpers.get_attr_units - # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes - del cf_data.flag_values - del cf_data.flag_masks - del cf_data.flag_meanings - - cf_var = mock.MagicMock( - cf_name=CF_NAME, - cf_data=cf_data, - units=units, - calendar=None, - dtype=float, - ) - - if unitless: - del cf_var.units - - return cf_var - - -def test_unitless() -> None: - """Test bounds variable with no units.""" - cf_bounds_var = _make_cf_bounds_var(unitless=True) - result = 
_normalise_bounds_units(None, cf_bounds_var, BOUNDS) - assert result == BOUNDS - - -def test_invalid_units__pass_through() -> None: - """Test bounds variable with invalid units.""" - units = "invalid" - cf_bounds_var = _make_cf_bounds_var(units=units) - wmsg = f"Ignoring invalid units {units!r} on netCDF variable {CF_NAME!r}" - with pytest.warns(_WarnComboIgnoringCfLoad, match=wmsg): - result = _normalise_bounds_units(None, cf_bounds_var, BOUNDS) - assert result == BOUNDS - - -@pytest.mark.parametrize("units", ["unknown", "no_unit", "1", "kelvin"]) -def test_ignore_bounds(units) -> None: - """Test bounds variable with incompatible units compared to points.""" - points_units = "km" - cf_bounds_var = _make_cf_bounds_var(units=units) - wmsg = ( - f"Ignoring bounds on NetCDF variable {CF_NAME!r}. " - f"Expected units compatible with {points_units!r}" - ) - with pytest.warns(IrisCfLoadWarning, match=wmsg): - result = _normalise_bounds_units(points_units, cf_bounds_var, BOUNDS) - assert result is None - - -def test_compatible() -> None: - """Test bounds variable with compatible units requiring conversion.""" - points_units, bounds_units = "days since 1970-01-01", "hours since 1970-01-01" - cf_bounds_var = _make_cf_bounds_var(units=bounds_units) - bounds = np.arange(10, dtype=float) * 24 - result = _normalise_bounds_units(points_units, cf_bounds_var, bounds) - expected = bounds / 24 - np.testing.assert_array_equal(result, expected) - - -def test_same_units() -> None: - """Test bounds variable with same units as points.""" - units = "days since 1970-01-01" - cf_bounds_var = _make_cf_bounds_var(units=units) - bounds = np.arange(10, dtype=float) - result = _normalise_bounds_units(units, cf_bounds_var, bounds) - np.testing.assert_array_equal(result, bounds) - assert result is bounds + cf_bounds_var = self._make_cf_bounds_var(units=units) + bounds = np.arange(10, dtype=float) + result = _normalise_bounds_units(units, cf_bounds_var, bounds) + 
_shared_utils.assert_array_equal(result, bounds) + assert result is bounds diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index 7d5aa24219..d9a33dd948 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import AlbersEqualArea from iris.fileformats._nc_load_rules.helpers import ( build_albers_equal_area_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildAlbersEqualAreaCoordinateSystem(tests.IrisTest): +class TestBuildAlbersEqualAreaCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_optionals=False): if no_optionals: # Most properties are optional for this system. @@ -57,7 +52,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_albers_equal_area_coordinate_system(None, cf_grid_var) @@ -70,7 +65,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -82,7 +77,3 @@ def test_inverse_flattening(self): def test_no_optionals(self): # Check defaults, when all optional attributes are absent. 
self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py index dc1dfcc052..ce2fd7bf8b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_ancil_var`.""" -from unittest import mock - import numpy as np import pytest @@ -18,25 +16,25 @@ @pytest.fixture -def mock_engine(): - return mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), +def mock_engine(mocker): + return mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), filename="DUMMY", cube_parts=dict(ancillary_variables=[]), ) @pytest.fixture -def mock_cf_av_var(monkeypatch, mock_engine): +def mock_cf_av_var(mocker, monkeypatch, mock_engine): data = np.arange(6) - output = mock.Mock( + output = mocker.Mock( spec=CFAncillaryDataVariable, dimensions=("foo",), scale_factor=1, add_offset=0, cf_name="wibble", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_data=mocker.MagicMock(chunking=mocker.Mock(return_value=None), spec=[]), filename=mock_engine.filename, standard_name=None, long_name="wibble", diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py index a44986ec98..a6d613eb9a 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py +++ 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py @@ -7,12 +7,7 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import contextlib -from unittest import mock import numpy as np import pytest @@ -24,9 +19,10 @@ from iris.fileformats._nc_load_rules.helpers import build_and_add_auxiliary_coordinate from iris.fileformats.cf import CFVariable from iris.loading import LOAD_PROBLEMS +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBoundsVertexDim(tests.IrisTest): +class TestBoundsVertexDim(MockerMixin): # Lookup for various tests (which change the dimension order). dim_names_lens = { "foo": 2, @@ -37,19 +33,20 @@ class TestBoundsVertexDim(tests.IrisTest): "y": 3, } - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create coordinate cf variables and pyke engine. dimension_names = ("foo", "bar") - points, cf_data = self._make_array_and_cf_data(dimension_names) + points, cf_data = self._make_array_and_cf_data(mocker, dimension_names) - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar"), cf_data=cf_data), + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar"), cf_data=cf_data), filename="DUMMY", cube_parts=dict(coordinates=[]), ) - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( spec=CFVariable, dimensions=dimension_names, cf_name="wibble", @@ -65,7 +62,7 @@ def setUp(self): ) expected_bounds, _ = self._make_array_and_cf_data( - dimension_names=("foo", "bar", "nv") + mocker, dimension_names=("foo", "bar", "nv") ) self.expected_coord = AuxCoord( self.cf_coord_var[:], @@ -83,7 +80,7 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch( + mocker.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", 
new=patched__getitem__, ) @@ -95,16 +92,16 @@ def _get_per_test_bounds_var(_coord_unused): # Return the 'cf_bounds_var' created by the current test. return (self.cf_bounds_var, False) - self.patch( + mocker.patch( "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", new=_get_per_test_bounds_var, ) @classmethod - def _make_array_and_cf_data(cls, dimension_names, rollaxis=False): + def _make_array_and_cf_data(cls, mocker, dimension_names, rollaxis=False): shape = tuple(cls.dim_names_lens[name] for name in dimension_names) - cf_data = mock.MagicMock(_FillValue=None, spec=[]) - cf_data.chunking = mock.MagicMock(return_value=shape) + cf_data = mocker.MagicMock(_FillValue=None, spec=[]) + cf_data.chunking = mocker.MagicMock(return_value=shape) data = np.arange(np.prod(shape), dtype=float) if rollaxis: shape = shape[1:] + (shape[0],) @@ -114,13 +111,13 @@ def _make_array_and_cf_data(cls, dimension_names, rollaxis=False): data = data.reshape(shape) return data, cf_data - def _make_cf_bounds_var(self, dimension_names, rollaxis=False): + def _make_cf_bounds_var(self, mocker, dimension_names, rollaxis=False): # Create the bounds cf variable. bounds, cf_data = self._make_array_and_cf_data( - dimension_names, rollaxis=rollaxis + mocker, dimension_names, rollaxis=rollaxis ) bounds *= 1000 # Convert to metres. - cf_bounds_var = mock.Mock( + cf_bounds_var = self.mocker.Mock( spec=CFVariable, dimensions=dimension_names, cf_name="wibble_bnds", @@ -137,7 +134,7 @@ def _make_cf_bounds_var(self, dimension_names, rollaxis=False): def _check_case(self, dimension_names, rollaxis=False): self.cf_bounds_var = self._make_cf_bounds_var( - dimension_names, rollaxis=rollaxis + self.mocker, dimension_names, rollaxis=rollaxis ) # Asserts must lie within context manager because of deferred loading. @@ -148,7 +145,7 @@ def _check_case(self, dimension_names, rollaxis=False): # Test that engine.cube_parts container is correctly populated. 
expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + assert self.engine.cube_parts["coordinates"] == expected_list def test_fastest_varying_vertex_dim__normalise_bounds(self): # The usual order. @@ -165,21 +162,22 @@ def test_fastest_with_different_dim_names__normalise_bounds(self): self._check_case(dimension_names=("x", "y", "nv")) -class TestDtype(tests.IrisTest): - def setUp(self): +class TestDtype(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create coordinate cf variables and pyke engine. points = np.arange(6).reshape(2, 3) - cf_data = mock.MagicMock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=points.shape) + cf_data = mocker.MagicMock(_FillValue=None) + cf_data.chunking = mocker.MagicMock(return_value=points.shape) - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), filename="DUMMY", cube_parts=dict(coordinates=[]), ) - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( spec=CFVariable, dimensions=("foo", "bar"), cf_name="wibble", @@ -202,23 +200,22 @@ def patched__getitem__(proxy_self, keys): raise RuntimeError() # Fix for deferred load, *AND* avoid loading small variable data in real arrays. - with mock.patch( + self.mocker.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, - ): - # While loading, "turn off" loading small variables as real data. - with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0): - yield + ) + # While loading, "turn off" loading small variables as real data. 
+ self.mocker.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0) + yield def test_scale_factor_add_offset_int(self): self.cf_coord_var.scale_factor = 3 self.cf_coord_var.add_offset = 5 - with self.deferred_load_patch(): - build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) + build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "i") + assert coord.dtype.kind == "i" def test_scale_factor_float(self): self.cf_coord_var.scale_factor = 3.0 @@ -227,7 +224,7 @@ def test_scale_factor_float(self): build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "f") + assert coord.dtype.kind == "f" def test_add_offset_float(self): self.cf_coord_var.add_offset = 5.0 @@ -236,28 +233,29 @@ def test_add_offset_float(self): build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "f") + assert coord.dtype.kind == "f" -class TestCoordConstruction(tests.IrisTest): - def setUp(self): +class TestCoordConstruction: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create dummy pyke engine. 
- self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), filename="DUMMY", cube_parts=dict(coordinates=[]), ) points = np.arange(6) units = "days since 1970-01-01" - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( spec=CFVariable, dimensions=("foo",), scale_factor=1, add_offset=0, cf_name="wibble", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_data=mocker.MagicMock(chunking=mocker.Mock(return_value=None), spec=[]), filename=self.engine.filename, standard_name=None, long_name="wibble", @@ -270,13 +268,13 @@ def setUp(self): ) bounds = np.arange(12).reshape(6, 2) - cf_data = mock.MagicMock(chunking=mock.Mock(return_value=None)) + cf_data = mocker.MagicMock(chunking=mocker.Mock(return_value=None)) # we want to mock the absence of flag attributes to helpers.get_attr_units # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes del cf_data.flag_values del cf_data.flag_masks del cf_data.flag_meanings - self.cf_bounds_var = mock.Mock( + self.cf_bounds_var = mocker.Mock( spec=CFVariable, dimensions=("x", "nv"), scale_factor=1, @@ -300,7 +298,7 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch( + mocker.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, ) @@ -312,7 +310,7 @@ def patched__getitem__(proxy_self, keys): def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds - self.patch( + mocker.patch( "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", new=get_cf_bounds_var, ) @@ -368,7 +366,7 @@ def test_with_coord_system(self): # Test that expected coord is built and added to cube. 
self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) - def test_bad_coord_system(self): + def test_bad_coord_system(self, mocker): coord_system = RotatedGeogCS( grid_north_pole_latitude=45.0, grid_north_pole_longitude=45.0 ) @@ -382,19 +380,16 @@ def mock_setter(self, value): else: self._metadata_manager.coord_system = value - with mock.patch.object( + mocker.patch.object( AuxCoord, "coord_system", new=property(AuxCoord.coord_system.fget, mock_setter), - ): - build_and_add_auxiliary_coordinate( - self.engine, self.cf_coord_var, coord_system=coord_system - ) - load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "test_bad_coord_system", - "".join(load_problem.stack_trace.format()), - ) + ) + build_and_add_auxiliary_coordinate( + self.engine, self.cf_coord_var, coord_system=coord_system + ) + load_problem = LOAD_PROBLEMS.problems[-1] + assert "test_bad_coord_system" in "".join(load_problem.stack_trace.format()) def test_not_added(self): # Confirm that the coord will be skipped if a CannotAddError is raised @@ -418,7 +413,7 @@ def test_unhandlable_error(self): m.setattr(self.engine, "cube", "foo") n_problems = len(LOAD_PROBLEMS.problems) build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) - self.assertTrue(len(LOAD_PROBLEMS.problems) > n_problems) + assert len(LOAD_PROBLEMS.problems) > n_problems assert self.engine.cube_parts["coordinates"] == [] @@ -433,7 +428,3 @@ def test_problem_destination(self): assert destination.identifier == self.engine.cf_var.cf_name assert self.engine.cube_parts["coordinates"] == [] - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py index 7e55366a5a..aea5061f1e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py +++ 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_cell_measure`.""" -from unittest import mock - import numpy as np import pytest @@ -18,25 +16,25 @@ @pytest.fixture -def mock_engine(): - return mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), +def mock_engine(mocker): + return mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), filename="DUMMY", cube_parts=dict(cell_measures=[]), ) @pytest.fixture -def mock_cf_cm_var(monkeypatch, mock_engine): +def mock_cf_cm_var(monkeypatch, mock_engine, mocker): data = np.arange(6) - output = mock.Mock( + output = mocker.Mock( spec=CFMeasureVariable, dimensions=("foo",), scale_factor=1, add_offset=0, cf_name="wibble", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_data=mocker.MagicMock(chunking=mocker.Mock(return_value=None), spec=[]), filename=mock_engine.filename, standard_name=None, long_name="wibble", diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py index 19782a0f8c..31c063a90d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_cell_methods`.""" -from unittest import mock - import pytest from iris.coords import CellMethod @@ -16,8 +14,8 @@ @pytest.fixture -def mock_cf_data_var(): - yield mock.Mock( +def mock_cf_data_var(mocker): + return mocker.Mock( spec=CFDataVariable, cell_methods="time: mean", cf_name="wibble", @@ -26,9 +24,9 @@ def mock_cf_data_var(): @pytest.fixture -def mock_engine(mock_cf_data_var): - yield mock.Mock( - cube=mock.Mock(), +def mock_engine(mock_cf_data_var, mocker): + return mocker.Mock( + cube=mocker.Mock(), cf_var=mock_cf_data_var, filename=mock_cf_data_var.filename, ) @@ -55,10 +53,10 @@ def mock_parse_cell_methods(nc_cell_methods, cf_name=None): assert mock_engine.cube.cell_methods == cm_original -def test_not_added(monkeypatch, mock_engine, mock_cf_data_var): +def test_not_added(monkeypatch, mock_engine, mock_cf_data_var, mocker): cm_original = mock_engine.cube.cell_methods - class NoCellMethods(mock.Mock): + class NoCellMethods(mocker.Mock): def __setattr__(self, key, value): if key == "cell_methods": raise RuntimeError("Not added") diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py index a871c967ab..9cf983d0a3 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py @@ -4,11 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_dimension_coordinate`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock import warnings import numpy as np @@ -20,37 +15,16 @@ from iris.exceptions import CannotAddError from iris.fileformats._nc_load_rules.helpers import build_and_add_dimension_coordinate from iris.loading import LOAD_PROBLEMS +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -def _make_bounds_var(bounds, dimensions, units): - bounds = np.array(bounds) - cf_data = mock.Mock(spec=[]) - # we want to mock the absence of flag attributes to helpers.get_attr_units - # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes - del cf_data.flag_values - del cf_data.flag_masks - del cf_data.flag_meanings - result = mock.Mock( - dimensions=dimensions, - cf_name="wibble_bnds", - cf_data=cf_data, - units=units, - calendar=None, - shape=bounds.shape, - size=np.prod(bounds.shape), - dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key], - ) - delattr(result, "_data_array") - return result - - -class RulesTestMixin: - def setUp(self): +class RulesTestMixin(MockerMixin): + @pytest.fixture(autouse=True) + def _mixin_setup(self, mocker): # Create dummy pyke engine. 
- self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), filename="DUMMY", cube_parts=dict(coordinates=[]), ) @@ -64,7 +38,7 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.deferred_load_patch = mock.patch( + self.deferred_load_patch = mocker.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, ) @@ -76,21 +50,41 @@ def patched__getitem__(proxy_self, keys): def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds - self.get_cf_bounds_var_patch = mock.patch( + self.get_cf_bounds_var_patch = mocker.patch( "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", new=get_cf_bounds_var, ) + def _make_bounds_var(self, bounds, dimensions, units): + bounds = np.array(bounds) + cf_data = self.mocker.Mock(spec=[]) + # we want to mock the absence of flag attributes to helpers.get_attr_units + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + del cf_data.flag_values + del cf_data.flag_masks + del cf_data.flag_meanings + result = self.mocker.Mock( + dimensions=dimensions, + cf_name="wibble_bnds", + cf_data=cf_data, + units=units, + calendar=None, + shape=bounds.shape, + size=np.prod(bounds.shape), + dtype=bounds.dtype, + __getitem__=lambda self, key: bounds[key], + ) + delattr(result, "_data_array") + return result -class TestCoordConstruction(tests.IrisTest, RulesTestMixin): - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. 
- RulesTestMixin.setUp(self) +class TestCoordConstruction(RulesTestMixin, MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self): bounds = np.arange(12).reshape(6, 2) dimensions = ("x", "nv") units = "days since 1970-01-01" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) self.bounds = bounds # test_dimcoord_not_added() and test_auxcoord_not_added have been @@ -100,10 +94,10 @@ def setUp(self): self.monkeypatch = pytest.MonkeyPatch() def _set_cf_coord_var(self, points): - self.cf_coord_var = mock.Mock( + self.cf_coord_var = self.mocker.Mock( dimensions=("foo",), cf_name="wibble", - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), standard_name=None, long_name="wibble", units="days since 1970-01-01", @@ -131,12 +125,10 @@ def check_case_dim_coord_construction(self, climatology=False): climatological=climatology, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) def test_dim_coord_construction(self): self.check_case_dim_coord_construction(climatology=False) @@ -162,13 +154,10 @@ def test_dim_coord_construction_masked_array(self): ) with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. 
- with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) # Assert warning is raised assert len(w) == 1 @@ -191,13 +180,10 @@ def test_dim_coord_construction_masked_array_mask_does_nothing(self): ) with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) # Assert no warning is raised assert len(w) == 0 @@ -215,13 +201,10 @@ def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): ) with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. 
+ self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) # Assert no warning is raised assert len(w) == 0 @@ -241,16 +224,14 @@ def test_with_coord_system(self): coord_system=coord_system, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_system=coord_system - ) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_system=coord_system + ) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - def test_bad_coord_system(self): + def test_bad_coord_system(self, mocker): self._set_cf_coord_var(np.arange(6)) coord_system = RotatedGeogCS( grid_north_pole_latitude=45.0, grid_north_pole_longitude=45.0 @@ -265,20 +246,16 @@ def mock_setter(self, value): else: self._metadata_manager.coord_system = value - with mock.patch.object( + _ = mocker.patch.object( DimCoord, "coord_system", new=property(DimCoord.coord_system.fget, mock_setter), - ): - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_system=coord_system - ) - load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "test_bad_coord_system", - "".join(load_problem.stack_trace.format()), - ) + ) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_system=coord_system + ) + load_problem = LOAD_PROBLEMS.problems[-1] + assert "test_bad_coord_system" in "".join(load_problem.stack_trace.format()) def test_aux_coord_construction(self): # Use non monotonically increasing coordinates to force aux coord @@ -293,18 +270,15 @@ def test_aux_coord_construction(self): bounds=self.bounds, ) - # Asserts must lie within 
context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) - load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "creating 'wibble' auxiliary coordinate instead", - "".join(load_problem.stack_trace.format()), - ) - self.assertTrue(load_problem.handled) + # Test that expected coord is built and added to cube. + self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) + load_problem = LOAD_PROBLEMS.problems[-1] + assert "creating 'wibble' auxiliary coordinate instead" in "".join( + load_problem.stack_trace.format() + ) + assert load_problem.handled def test_dimcoord_not_added(self): # Confirm that the coord will be skipped if a CannotAddError is raised @@ -317,8 +291,7 @@ def mock_add_dim_coord(_, __): self._set_cf_coord_var(np.arange(6)) - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) load_problem = LOAD_PROBLEMS.problems[-1] assert load_problem.stack_trace.exc_type is CannotAddError @@ -336,8 +309,7 @@ def mock_add_aux_coord(_, __): self._set_cf_coord_var(np.array([1, 3, 2, 4, 6, 5])) - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) load_problem = LOAD_PROBLEMS.problems[-1] assert load_problem.stack_trace.exc_type is CannotAddError @@ -351,7 +323,7 @@ def test_unhandlable_error(self): n_problems = len(LOAD_PROBLEMS.problems) self._set_cf_coord_var(np.array([1, 3, 2, 4, 6, 5])) build_and_add_dimension_coordinate(self.engine, 
self.cf_coord_var) - self.assertTrue(len(LOAD_PROBLEMS.problems) > n_problems) + assert len(LOAD_PROBLEMS.problems) > n_problems assert self.engine.cube_parts["coordinates"] == [] @@ -369,18 +341,17 @@ def test_problem_destination(self): assert self.engine.cube_parts["coordinates"] == [] -class TestBoundsVertexDim(tests.IrisTest, RulesTestMixin): - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. - RulesTestMixin.setUp(self) +class TestBoundsVertexDim(RulesTestMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create test coordinate cf variable. points = np.arange(6) - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( dimensions=("foo",), cf_name="wibble", standard_name=None, long_name="wibble", - cf_data=mock.Mock(spec=[]), + cf_data=mocker.Mock(spec=[]), units="km", shape=points.shape, dtype=points.dtype, @@ -392,7 +363,7 @@ def test_slowest_varying_vertex_dim__normalise_bounds(self): bounds = np.arange(12).reshape(2, 6) * 1000 dimensions = ("nv", "foo") units = "m" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) # Expected bounds on the resulting coordinate should be rolled so that # the vertex dimension is at the end. @@ -405,22 +376,20 @@ def test_slowest_varying_vertex_dim__normalise_bounds(self): bounds=expected_bounds, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. 
+ self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + # Test that engine.cube_parts container is correctly populated. + expected_list = [(expected_coord, self.cf_coord_var.cf_name)] + assert self.engine.cube_parts["coordinates"] == expected_list def test_fastest_varying_vertex_dim__normalise_bounds(self): bounds = np.arange(12).reshape(6, 2) * 1000 dimensions = ("foo", "nv") units = "m" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) expected_coord = DimCoord( self.cf_coord_var[:], @@ -430,16 +399,14 @@ def test_fastest_varying_vertex_dim__normalise_bounds(self): bounds=bounds / 1000, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + # Test that engine.cube_parts container is correctly populated. 
+ expected_list = [(expected_coord, self.cf_coord_var.cf_name)] + assert self.engine.cube_parts["coordinates"] == expected_list def test_fastest_with_different_dim_names__normalise_bounds(self): # Despite the dimension names 'x' differing from the coord's @@ -448,7 +415,7 @@ def test_fastest_with_different_dim_names__normalise_bounds(self): bounds = np.arange(12).reshape(6, 2) * 1000 dimensions = ("x", "nv") units = "m" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) expected_coord = DimCoord( self.cf_coord_var[:], @@ -457,34 +424,30 @@ def test_fastest_with_different_dim_names__normalise_bounds(self): units=self.cf_coord_var.units, bounds=bounds / 1000, ) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + # Test that engine.cube_parts container is correctly populated. + expected_list = [(expected_coord, self.cf_coord_var.cf_name)] + assert self.engine.cube_parts["coordinates"] == expected_list -class TestCircular(tests.IrisTest, RulesTestMixin): +class TestCircular(RulesTestMixin): # Test the rules logic for marking a coordinate "circular". - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. 
- RulesTestMixin.setUp(self) + @pytest.fixture(autouse=True) + def _setup(self): self.cf_bounds_var = None def _make_vars(self, points, bounds=None, units="degrees"): points = np.array(points) - self.cf_coord_var = mock.MagicMock( + self.cf_coord_var = self.mocker.MagicMock( dimensions=("foo",), cf_name="wibble", standard_name=None, long_name="wibble", - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), units=units, shape=points.shape, dtype=points.dtype, @@ -493,7 +456,7 @@ def _make_vars(self, points, bounds=None, units="degrees"): if bounds: bounds = np.array(bounds).reshape(self.cf_coord_var.shape + (2,)) dimensions = ("x", "nv") - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) def _check_circular(self, circular, *args, **kwargs): if "coord_name" in kwargs: @@ -501,13 +464,12 @@ def _check_circular(self, circular, *args, **kwargs): else: coord_name = "longitude" self._make_vars(*args, **kwargs) - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_name=coord_name - ) - self.assertEqual(self.engine.cube.add_dim_coord.call_count, 1) - coord, dims = self.engine.cube.add_dim_coord.call_args[0] - self.assertEqual(coord.circular, circular) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name=coord_name + ) + assert self.engine.cube.add_dim_coord.call_count == 1 + coord, dims = self.engine.cube.add_dim_coord.call_args[0] + assert coord.circular == circular def check_circular(self, *args, **kwargs): self._check_circular(True, *args, **kwargs) @@ -566,23 +528,20 @@ def test_multiple_bounded_noncircular(self): ) -class TestCircularScalar(tests.IrisTest, RulesTestMixin): - def setUp(self): - RulesTestMixin.setUp(self) - +class TestCircularScalar(RulesTestMixin): def _make_vars(self, bounds): # Create cf vars for the coordinate and its bounds. 
# Note that for a scalar the shape of the array from # the cf var is (), rather than (1,). points = np.array([0.0]) units = "degrees" - self.cf_coord_var = mock.Mock( + self.cf_coord_var = self.mocker.Mock( dimensions=(), cf_name="wibble", standard_name=None, long_name="wibble", units=units, - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), shape=(), dtype=points.dtype, __getitem__=lambda self, key: points[key], @@ -590,16 +549,15 @@ def _make_vars(self, bounds): bounds = np.array(bounds) dimensions = ("bnds",) - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) def _assert_circular(self, value): - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_name="longitude" - ) - self.assertEqual(self.engine.cube.add_aux_coord.call_count, 1) - coord, dims = self.engine.cube.add_aux_coord.call_args[0] - self.assertEqual(coord.circular, value) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name="longitude" + ) + assert self.engine.cube.add_aux_coord.call_count == 1 + coord, dims = self.engine.cube.add_aux_coord.call_args[0] + assert coord.circular == value def test_two_bounds_noncircular(self): self._make_vars([0.0, 180.0]) @@ -624,7 +582,3 @@ def test_two_bounds_circular_alt_decreasing(self): def test_four_bounds(self): self._make_vars([0.0, 10.0, 20.0, 30.0]) self._assert_circular(False) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py index 1b4d48f6bf..b906f076c7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py +++ 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_global_attributes`.""" -from unittest import mock - import numpy as np import pytest @@ -15,13 +13,13 @@ @pytest.fixture -def mock_engine(): +def mock_engine(mocker): global_attributes = { "Conventions": "CF-1.5", "comment": "Mocked test object", } - cf_group = mock.Mock(global_attributes=global_attributes) - cf_var = mock.MagicMock( + cf_group = mocker.Mock(global_attributes=global_attributes) + cf_var = mocker.MagicMock( cf_name="wibble", standard_name=None, long_name=None, @@ -30,8 +28,8 @@ def mock_engine(): cell_methods=None, cf_group=cf_group, ) - engine = mock.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") - yield engine + engine = mocker.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") + return engine def test_construction(mock_engine): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py index 357a199546..ba6a289a08 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_names`.""" -from unittest import mock - import numpy as np import pytest @@ -15,13 +13,13 @@ @pytest.fixture -def mock_engine(): +def mock_engine(mocker): global_attributes = { "Conventions": "CF-1.5", "comment": "Mocked test object", } - cf_group = mock.Mock(global_attributes=global_attributes) - cf_var = mock.MagicMock( + cf_group = mocker.Mock(global_attributes=global_attributes) + cf_var = mocker.MagicMock( cf_name="wibble", standard_name=None, long_name=None, @@ -30,8 +28,8 @@ def mock_engine(): cell_methods=None, cf_group=cf_group, ) - engine = mock.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") - yield engine + engine = mocker.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") + return engine class TestCubeName: diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py index f0e7d16113..ef2931963f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_units`.""" -from unittest import mock - from cf_units import Unit import pytest @@ -16,21 +14,21 @@ @pytest.fixture -def mock_cf_data_var(): - yield mock.Mock( +def mock_cf_data_var(mocker): + return mocker.Mock( spec=CFDataVariable, units="kelvin", cf_name="wibble", filename="DUMMY", dtype=float, - cf_data=mock.Mock(spec=[]), + cf_data=mocker.Mock(spec=[]), ) @pytest.fixture -def mock_engine(mock_cf_data_var): - yield mock.Mock( - cube=mock.Mock(attributes={}), +def mock_engine(mock_cf_data_var, mocker): + return mocker.Mock( + cube=mocker.Mock(attributes={}), cf_var=mock_cf_data_var, filename=mock_cf_data_var.filename, ) @@ -65,10 +63,10 @@ def mock_get_attr_units(cf_var, attributes, capture_invalid=False): assert mock_engine.cube.units == units_original -def test_not_added(monkeypatch, mock_engine, mock_cf_data_var): +def test_not_added(monkeypatch, mock_engine, mock_cf_data_var, mocker): units_original = mock_engine.cube.units - class NoUnits(mock.Mock): + class NoUnits(mocker.Mock): def __setattr__(self, key, value): if key == "units": raise RuntimeError("Not added") diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index 41be1ea932..d810152196 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import Geostationary from iris.fileformats._nc_load_rules.helpers import ( build_geostationary_coordinate_system, ) +from 
iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildGeostationaryCoordinateSystem(tests.IrisTest): +class TestBuildGeostationaryCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, replace_props=None, remove_props=None): """Generic test that can check vertical perspective validity with or without inverse flattening. @@ -54,11 +49,11 @@ def _test(self, inverse_flattening=False, replace_props=None, remove_props=None) cf_grid_var_kwargs = non_ellipsoid_kwargs.copy() cf_grid_var_kwargs.update(ellipsoid_kwargs) - cf_grid_var = mock.Mock(spec=[], **cf_grid_var_kwargs) + cf_grid_var = self.mocker.Mock(spec=[], **cf_grid_var_kwargs) cs = build_geostationary_coordinate_system(None, cf_grid_var) ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs) expected = Geostationary(ellipsoid=ellipsoid, **non_ellipsoid_kwargs) - self.assertEqual(cs, expected) + assert cs == expected def test_valid(self): self._test(inverse_flattening=False) @@ -71,7 +66,3 @@ def test_false_offsets_missing(self): def test_false_offsets_none(self): self._test(replace_props={"false_easting": None, "false_northing": None}) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index 45241fbced..0d4af82889 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import LambertAzimuthalEqualArea from 
iris.fileformats._nc_load_rules.helpers import ( build_lambert_azimuthal_equal_area_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildLambertAzimuthalEqualAreaCoordinateSystem(tests.IrisTest): +class TestBuildLambertAzimuthalEqualAreaCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_optionals=False): if no_optionals: # Most properties are optional for this system. @@ -54,7 +49,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_lambert_azimuthal_equal_area_coordinate_system(None, cf_grid_var) @@ -66,7 +61,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -78,7 +73,3 @@ def test_inverse_flattening(self): def test_no_optionals(self): # Check defaults, when all optional attributes are absent. 
self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index fc45a6eab8..9d91c2a3f7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import LambertConformal from iris.fileformats._nc_load_rules.helpers import ( build_lambert_conformal_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildLambertConformalCoordinateSystem(tests.IrisTest): +class TestBuildLambertConformalCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_optionals=False): if no_optionals: # Most properties are optional in this case. @@ -57,7 +52,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_lambert_conformal_coordinate_system(None, cf_grid_var) @@ -70,7 +65,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -82,7 +77,3 @@ def test_inverse_flattening(self): def test_no_optionals(self): # Check defaults, when all optional attributes are absent. 
self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index dc2188b65e..3752337ea9 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -7,20 +7,14 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import Mercator from iris.fileformats._nc_load_rules.helpers import build_mercator_coordinate_system -class TestBuildMercatorCoordinateSystem(tests.IrisTest): - def test_valid(self): - cf_grid_var = mock.Mock( +class TestBuildMercatorCoordinateSystem: + def test_valid(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -37,10 +31,10 @@ def test_valid(self): ), standard_parallel=(cf_grid_var.standard_parallel), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_inverse_flattening(self): - cf_grid_var = mock.Mock( + def test_inverse_flattening(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -58,10 +52,10 @@ def test_inverse_flattening(self): ), standard_parallel=(cf_grid_var.standard_parallel), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_longitude_missing(self): - cf_grid_var = mock.Mock( + def test_longitude_missing(self, mocker): + cf_grid_var = mocker.Mock( spec=[], semi_major_axis=6377563.396, inverse_flattening=299.3249646, @@ -77,10 +71,10 @@ def test_longitude_missing(self): ), standard_parallel=(cf_grid_var.standard_parallel), ) - 
self.assertEqual(cs, expected) + assert cs == expected - def test_standard_parallel_missing(self): - cf_grid_var = mock.Mock( + def test_standard_parallel_missing(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -95,10 +89,10 @@ def test_standard_parallel_missing(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_scale_factor_at_projection_origin(self): - cf_grid_var = mock.Mock( + def test_scale_factor_at_projection_origin(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -117,8 +111,4 @@ def test_scale_factor_at_projection_origin(self): cf_grid_var.scale_factor_at_projection_origin ), ) - self.assertEqual(cs, expected) - - -if __name__ == "__main__": - tests.main() + assert cs == expected diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py index 50b171655e..ed4395fffa 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py @@ -5,7 +5,6 @@ """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`.""" from typing import List, NamedTuple, Type -from unittest import mock import pytest @@ -148,22 +147,22 @@ def make_variant_inputs(self, request) -> None: self.coord_system_args_expected = list(coord_system_kwargs_expected.values()) - def test_attributes(self): - cf_var_mock = mock.Mock(spec=[], **self.nc_attributes) - coord_system_mock = mock.Mock(spec=self.expected_class) + def test_attributes(self, mocker): + cf_var_mock = mocker.Mock(spec=[], 
**self.nc_attributes) + coord_system_mock = mocker.Mock(spec=self.expected_class) setattr(coord_systems, self.expected_class.__name__, coord_system_mock) _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) coord_system_mock.assert_called_with(*self.coord_system_args_expected) -def test_deprecation(): +def test_deprecation(mocker): nc_attributes = dict( grid_mapping_name="rotated_mercator", latitude_of_projection_origin=0.0, longitude_of_projection_origin=0.0, scale_factor_at_projection_origin=1.0, ) - cf_var_mock = mock.Mock(spec=[], **nc_attributes) + cf_var_mock = mocker.Mock(spec=[], **nc_attributes) with pytest.warns(IrisDeprecation, match="azimuth_of_central_line = 90"): _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py index 4661ea5449..3e9396cca4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py @@ -7,12 +7,6 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import PolarStereographic from iris.fileformats._nc_load_rules.helpers import ( @@ -20,9 +14,9 @@ ) -class TestBuildPolarStereographicCoordinateSystem(tests.IrisTest): - def test_valid_north(self): - cf_grid_var = mock.Mock( +class TestBuildPolarStereographicCoordinateSystem: + def test_valid_north(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -43,10 +37,10 @@ def test_valid_north(self): cf_grid_var.semi_major_axis, 
cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_south(self): - cf_grid_var = mock.Mock( + def test_valid_south(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=-90, @@ -67,10 +61,10 @@ def test_valid_south(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_with_standard_parallel(self): - cf_grid_var = mock.Mock( + def test_valid_with_standard_parallel(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -89,10 +83,10 @@ def test_valid_with_standard_parallel(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_with_false_easting_northing(self): - cf_grid_var = mock.Mock( + def test_valid_with_false_easting_northing(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -117,10 +111,10 @@ def test_valid_with_false_easting_northing(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_nonzero_veritcal_lon(self): - cf_grid_var = mock.Mock( + def test_valid_nonzero_veritcal_lon(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=30, latitude_of_projection_origin=90, @@ -141,8 +135,4 @@ def test_valid_nonzero_veritcal_lon(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) - - -if __name__ == "__main__": - tests.main() + assert cs == expected diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index 4928631336..481d4441f8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import Stereographic from iris.fileformats._nc_load_rules.helpers import ( build_stereographic_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildStereographicCoordinateSystem(tests.IrisTest): +class TestBuildStereographicCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_offsets=False): test_easting = -100 test_northing = 200 @@ -49,7 +44,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): test_easting = 0 test_northing = 0 - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_stereographic_coordinate_system(None, cf_grid_var) @@ -62,7 +57,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -74,7 +69,3 @@ def test_inverse_flattening(self): def test_no_offsets(self): # Check when false_easting/northing attributes are absent. 
self._test(no_offsets=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index ad61c485e0..f63402dcc2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import TransverseMercator from iris.fileformats._nc_load_rules.helpers import ( build_transverse_mercator_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildTransverseMercatorCoordinateSystem(tests.IrisTest): +class TestBuildTransverseMercatorCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_options=False): test_easting = -100 test_northing = 200 @@ -51,7 +46,7 @@ def _test(self, inverse_flattening=False, no_options=False): test_northing = 0 test_scale_factor = 1.0 - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_transverse_mercator_coordinate_system(None, cf_grid_var) @@ -64,7 +59,7 @@ def _test(self, inverse_flattening=False, no_options=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -74,7 +69,3 @@ def test_inverse_flattening(self): def test_missing_optionals(self): self._test(no_options=True) - - -if __name__ == "__main__": - tests.main() diff --git 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py index 2c65e09c3f..932e1d085d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import VerticalPerspective from iris.fileformats._nc_load_rules.helpers import ( build_vertical_perspective_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildVerticalPerspectiveCoordinateSystem(tests.IrisTest): +class TestBuildVerticalPerspectiveCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_offsets=False): """Generic test that can check vertical perspective validity with or without inverse flattening, and false_east/northing-s. @@ -50,7 +45,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): test_easting = 0 test_northing = 0 - cf_grid_var = mock.Mock(**cf_grid_var_kwargs) + cf_grid_var = self.mocker.Mock(**cf_grid_var_kwargs) ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs) cs = build_vertical_perspective_coordinate_system(None, cf_grid_var) @@ -63,7 +58,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): ellipsoid=ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_valid(self): self._test(inverse_flattening=False) @@ -75,7 +70,3 @@ def test_inverse_flattening(self): def test_no_offsets(self): # Check when false_easting/northing attributes are absent. 
self._test(no_offsets=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py index e7e49879cd..50698e72f8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py @@ -7,32 +7,28 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.fileformats._nc_load_rules.helpers import get_attr_units from iris.fileformats.cf import CFDataVariable from iris.loading import LOAD_PROBLEMS +from iris.tests import _shared_utils +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin from iris.warnings import IrisCfLoadWarning -class TestGetAttrUnits(tests.IrisTest): - @staticmethod - def _make_cf_var(global_attributes=None): +class TestGetAttrUnits(MockerMixin): + def _make_cf_var(self, global_attributes=None): if global_attributes is None: global_attributes = {} - cf_group = mock.Mock(global_attributes=global_attributes) + cf_group = self.mocker.Mock(global_attributes=global_attributes) - cf_var = mock.MagicMock( + cf_var = self.mocker.MagicMock( spec=CFDataVariable, cf_name="sound_frequency", - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), filename="DUMMY", standard_name=None, long_name=None, @@ -48,30 +44,26 @@ def test_unicode_character(self): expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() attr_units = get_attr_units(cf_var, attributes) - self.assertEqual(attr_units, "?") - self.assertEqual(attributes, expected_attributes) + assert attr_units == "?" 
+ assert attributes == expected_attributes def test_warn(self): attributes = {} expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() - with self.assertWarns(IrisCfLoadWarning, msg="Ignoring invalid units"): + with pytest.warns(IrisCfLoadWarning, match="Ignoring invalid units"): attr_units = get_attr_units(cf_var, attributes) - self.assertEqual(attr_units, "?") - self.assertEqual(attributes, expected_attributes) + assert attr_units == "?" + assert attributes == expected_attributes def test_capture(self): attributes = {} expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() - with self.assertNoWarningsRegexp("Ignoring invalid units"): + with _shared_utils.assert_no_warnings_regexp("Ignoring invalid units"): attr_units = get_attr_units(cf_var, attributes, capture_invalid=True) - self.assertEqual(attr_units, "?") - self.assertEqual(attributes, expected_attributes) + assert attr_units == "?" + assert attributes == expected_attributes load_problem = LOAD_PROBLEMS.problems[-1] - self.assertEqual(load_problem.loaded, {"units": "\u266b"}) - - -if __name__ == "__main__": - tests.main() + assert load_problem.loaded == {"units": "\u266b"} diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py index 43a07fe17b..39a2e6caa7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py @@ -7,26 +7,21 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats._nc_load_rules.helpers import ( CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY, get_cf_bounds_var, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class 
TestGetCFBoundsVar(tests.IrisTest): +class TestGetCFBoundsVar(MockerMixin): # Tests to check that get_cf_bounds_var will return the bounds_var and # the correct climatological flag. def _generic_test(self, test_climatological_bounds=False): - cf_coord_var = mock.MagicMock() + cf_coord_var = self.mocker.MagicMock() - cf_group_dict = {"TEST": mock.sentinel.bounds_var} + cf_group_dict = {"TEST": self.mocker.sentinel.bounds_var} if test_climatological_bounds: cf_coord_var.cf_group.climatology = cf_group_dict test_attr = CF_ATTR_CLIMATOLOGY @@ -39,15 +34,11 @@ def _generic_test(self, test_climatological_bounds=False): setattr(cf_coord_var, attr, attr_val) bounds_var, climatological = get_cf_bounds_var(cf_coord_var) - self.assertIs(bounds_var, mock.sentinel.bounds_var) - self.assertEqual(climatological, test_climatological_bounds) + assert bounds_var is self.mocker.sentinel.bounds_var + assert climatological == test_climatological_bounds def test_bounds_normal(self): self._generic_test(test_climatological_bounds=False) def test_bounds_climatological(self): self._generic_test(test_climatological_bounds=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py index 5817a4228d..d54aec0692 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py @@ -7,18 +7,13 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np from iris.fileformats._nc_load_rules.helpers import get_names +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestGetNames(tests.IrisTest): +class TestGetNames(MockerMixin): """The tests included in this class cover all the variations 
of possible combinations of the following inputs: * standard_name = [None, 'projection_y_coordinate', 'latitude_coordinate'] @@ -32,16 +27,15 @@ class TestGetNames(tests.IrisTest): """ - @staticmethod - def _make_cf_var(standard_name, long_name, cf_name): - cf_var = mock.Mock( + def _make_cf_var(self, standard_name, long_name, cf_name): + cf_var = self.mocker.Mock( cf_name=cf_name, standard_name=standard_name, long_name=long_name, units="degrees", dtype=np.float64, cell_methods=None, - cf_group=mock.Mock(global_attributes={}), + cf_group=self.mocker.Mock(global_attributes={}), ) return cf_var @@ -61,10 +55,10 @@ def check_names(self, inputs, expected): ) # Check the names and attributes are as expected. - self.assertEqual(res_standard_name, exp_std_name) - self.assertEqual(res_long_name, exp_long_name) - self.assertEqual(res_var_name, exp_var_name) - self.assertEqual(attributes, exp_attributes) + assert res_standard_name == exp_std_name + assert res_long_name == exp_long_name + assert res_var_name == exp_var_name + assert attributes == exp_attributes def test_var_name_valid(self): # Only var_name is set and it is set to a valid standard name. 
@@ -283,7 +277,3 @@ def test_std_name_invalid_long_name_set_var_name_invalid_coord_name_set( {"invalid_standard_name": "latitude_coord"}, ) self.check_names(inp, exp) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index f0dd80de85..8ac08c330c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -7,26 +7,24 @@ """ -from unittest import mock +import re import warnings from iris.fileformats._nc_load_rules.helpers import has_supported_mercator_parameters +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip +class _EngineMixin(MockerMixin): + def engine(self, cf_grid_var, cf_name): + cf_group = {cf_name: cf_grid_var} + cf_var = self.mocker.Mock(cf_group=cf_group) + return self.mocker.Mock(cf_var=cf_var) -def _engine(cf_grid_var, cf_name): - cf_group = {cf_name: cf_grid_var} - cf_var = mock.Mock(cf_group=cf_group) - return mock.Mock(cf_var=cf_var) - -class TestHasSupportedMercatorParameters(tests.IrisTest): - def test_valid_base(self): +class TestHasSupportedMercatorParameters(_EngineMixin): + def test_valid_base(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, false_easting=0, @@ -35,15 +33,15 @@ def test_valid_base(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert 
is_valid - def test_valid_false_easting_northing(self): + def test_valid_false_easting_northing(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, false_easting=15, @@ -52,15 +50,15 @@ def test_valid_false_easting_northing(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_standard_parallel(self): + def test_valid_standard_parallel(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, false_easting=0, @@ -69,15 +67,15 @@ def test_valid_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_scale_factor(self): + def test_valid_scale_factor(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=0, false_easting=0, @@ -86,17 +84,17 @@ def test_valid_scale_factor(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_invalid_scale_factor_and_standard_parallel(self): + def test_invalid_scale_factor_and_standard_parallel(self, mocker): # Scale factor and standard parallel cannot both be specified for # Mercator projections cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=0, false_easting=0, 
@@ -106,19 +104,16 @@ def test_invalid_scale_factor_and_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - 'both "scale_factor_at_projection_origin" and "standard_parallel"', - ) - + assert not is_valid + assert len(warns) == 1 -if __name__ == "__main__": - tests.main() + msg = re.escape( + 'both "scale_factor_at_projection_origin" and "standard_parallel"' + ) + assert re.search(msg, str(warns[0])) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py index 8ced149ff1..2bfc801af2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -7,28 +7,26 @@ """ -from unittest import mock +import re import warnings from iris.fileformats._nc_load_rules.helpers import ( has_supported_polar_stereographic_parameters, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip +class _EngineMixin(MockerMixin): + def engine(self, cf_grid_var, cf_name): + cf_group = {cf_name: cf_grid_var} + cf_var = self.mocker.Mock(cf_group=cf_group) + return self.mocker.Mock(cf_var=cf_var) -def _engine(cf_grid_var, cf_name): - cf_group = {cf_name: cf_grid_var} - cf_var = mock.Mock(cf_group=cf_group) - return 
mock.Mock(cf_var=cf_var) - -class TestHasSupportedPolarStereographicParameters(tests.IrisTest): - def test_valid_base_north(self): +class TestHasSupportedPolarStereographicParameters(_EngineMixin): + def test_valid_base_north(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -38,15 +36,15 @@ def test_valid_base_north(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_base_south(self): + def test_valid_base_south(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=-90, @@ -56,15 +54,15 @@ def test_valid_base_south(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_straight_vertical_longitude(self): + def test_valid_straight_vertical_longitude(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=30, latitude_of_projection_origin=90, @@ -74,15 +72,15 @@ def test_valid_straight_vertical_longitude(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_false_easting_northing(self): + def 
test_valid_false_easting_northing(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -92,15 +90,15 @@ def test_valid_false_easting_northing(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_standard_parallel(self): + def test_valid_standard_parallel(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -110,15 +108,15 @@ def test_valid_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_scale_factor(self): + def test_valid_scale_factor(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -128,17 +126,17 @@ def test_valid_scale_factor(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_invalid_scale_factor_and_standard_parallel(self): + def test_invalid_scale_factor_and_standard_parallel(self, mocker): # Scale factor and standard parallel cannot both be specified for # Polar Stereographic projections cf_name = "polar_stereographic" - cf_grid_var = 
mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -149,24 +147,25 @@ def test_invalid_scale_factor_and_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - 'both "scale_factor_at_projection_origin" and "standard_parallel"', + assert not is_valid + assert len(warns) == 1 + + msg = re.escape( + 'both "scale_factor_at_projection_origin" and "standard_parallel"' ) + assert re.search(msg, str(warns[0])) - def test_absent_scale_factor_and_standard_parallel(self): + def test_absent_scale_factor_and_standard_parallel(self, mocker): # Scale factor and standard parallel cannot both be specified for # Polar Stereographic projections cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -175,25 +174,25 @@ def test_absent_scale_factor_and_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - 'One of "scale_factor_at_projection_origin" and ' - '"standard_parallel" is required.', + assert not is_valid + assert len(warns) == 1 + + msg = re.escape( + 'One of "scale_factor_at_projection_origin" and "standard_parallel" is required.' 
) + assert re.search(msg, str(warns[0])) - def test_invalid_latitude_of_projection_origin(self): + def test_invalid_latitude_of_projection_origin(self, mocker): # Scale factor and standard parallel cannot both be specified for # Polar Stereographic projections cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=45, @@ -203,19 +202,14 @@ def test_invalid_latitude_of_projection_origin(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - r'"latitude_of_projection_origin" must be \+90 or -90\.', - ) - + assert not is_valid + assert len(warns) == 1 -if __name__ == "__main__": - tests.main() + msg = r'"latitude_of_projection_origin" must be \+90 or -90\.' + assert re.search(msg, str(warns[0])) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index 528e9d7579..e0da327693 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -4,18 +4,16 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip +import re -from unittest import mock +import pytest from iris.coords import CellMethod from iris.fileformats._nc_load_rules.helpers import parse_cell_methods from iris.warnings import IrisCfLoadWarning -class Test(tests.IrisTest): +class Test: def test_simple(self): cell_method_strings = [ "time: mean", @@ -25,7 +23,7 @@ def test_simple(self): expected = (CellMethod(method="mean", coords="time"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_with_interval(self): cell_method_strings = [ @@ -35,7 +33,7 @@ def test_with_interval(self): expected = (CellMethod(method="variance", coords="time", intervals="1 hr"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_multiple_axes(self): cell_method_strings = [ @@ -47,7 +45,7 @@ def test_multiple_axes(self): expected = (CellMethod(method="standard_deviation", coords=["lat", "lon"]),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_multiple(self): cell_method_strings = [ @@ -62,7 +60,7 @@ def test_multiple(self): ) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_comment(self): cell_method_strings = [ @@ -91,7 +89,7 @@ def test_comment(self): ) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_comment_brackets(self): cell_method_strings = [ @@ -108,7 +106,7 @@ def test_comment_brackets(self): ) 
for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_comment_bracket_mismatch_warning(self): cell_method_strings = [ @@ -116,9 +114,9 @@ def test_comment_bracket_mismatch_warning(self): "time : minimum within days (comment: 18h day-1)-18h)", ] for cell_method_str in cell_method_strings: - with self.assertWarns( + with pytest.warns( IrisCfLoadWarning, - msg="Cell methods may be incorrectly parsed due to mismatched brackets", + match="Cell methods may be incorrectly parsed due to mismatched brackets", ): _ = parse_cell_methods(cell_method_str) @@ -133,9 +131,12 @@ def test_badly_formatted_warning(self): "time: (interval: 1 day comment: second bit)", ] for cell_method_str in cell_method_strings: - with self.assertWarns( + msg = ( + rf"^Failed to .*parse cell method string: {re.escape(cell_method_str)}$" + ) + with pytest.warns( IrisCfLoadWarning, - msg=f"Failed to fully parse cell method string: {cell_method_str}", + match=msg, ): _ = parse_cell_methods(cell_method_str) @@ -147,7 +148,7 @@ def test_portions_of_cells(self): expected = (CellMethod(method="mean where sea_ice over sea", coords="area"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_climatology(self): cell_method_strings = [ @@ -162,9 +163,9 @@ def test_climatology(self): ) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected - def test_climatology_with_unknown_method(self): + def test_climatology_with_unknown_method(self, mocker): cell_method_strings = [ "time: min within days time: mean over days", "time : min within days time: mean over days", @@ -176,14 +177,10 @@ def test_climatology_with_unknown_method(self): CellMethod(method="mean over days", coords="time"), ) for cell_method_str in cell_method_strings: 
- with mock.patch("warnings.warn") as warn: - res = parse_cell_methods(cell_method_str) - self.assertIn( - "NetCDF variable contains unknown cell method 'min'", - warn.call_args[0][0], + warn = mocker.patch("warnings.warn") + res = parse_cell_methods(cell_method_str) + assert ( + "NetCDF variable contains unknown cell method 'min'" + in warn.call_args[0][0] ) - self.assertEqual(res, expected) - - -if __name__ == "__main__": - tests.main() + assert res == expected diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py index 66620166c5..e98de2cbb3 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py @@ -7,48 +7,40 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.fileformats._nc_load_rules.helpers import reorder_bounds_data +from iris.tests import _shared_utils -class Test(tests.IrisTest): - def test_fastest_varying(self): +class Test: + def test_fastest_varying(self, mocker): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock( + cf_bounds_var = mocker.Mock( dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" ) - cf_coord_var = mock.Mock(dimensions=("foo", "bar")) + cf_coord_var = mocker.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Vertex dimension (nv) is already at the end. 
- self.assertArrayEqual(res, bounds_data) + _shared_utils.assert_array_equal(res, bounds_data) - def test_slowest_varying(self): + def test_slowest_varying(self, mocker): bounds_data = np.arange(24).reshape(4, 2, 3) - cf_bounds_var = mock.Mock(dimensions=("nv", "foo", "bar")) - cf_coord_var = mock.Mock(dimensions=("foo", "bar")) + cf_bounds_var = mocker.Mock(dimensions=("nv", "foo", "bar")) + cf_coord_var = mocker.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Move zeroth dimension (nv) to the end. expected = np.rollaxis(bounds_data, 0, bounds_data.ndim) - self.assertArrayEqual(res, expected) + _shared_utils.assert_array_equal(res, expected) - def test_different_dim_names(self): + def test_different_dim_names(self, mocker): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock( + cf_bounds_var = mocker.Mock( dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" ) - cf_coord_var = mock.Mock(dimensions=("x", "y"), cf_name="wibble") - with self.assertRaisesRegex(ValueError, "dimension names"): + cf_coord_var = mocker.Mock(dimensions=("x", "y"), cf_name="wibble") + with pytest.raises(ValueError, match="dimension names"): reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) - - -if __name__ == "__main__": - tests.main() From fdb80afe7632fa8ab27dcce72f9e0c9c4a48e3aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 6 Feb 2026 09:35:39 +0000 Subject: [PATCH 10/77] Bump the gha group across 1 directory with 3 updates (#6928) Bumps the gha group with 3 updates in the / directory: [scitools/workflows/.github/workflows/ci-manifest.yml](https://github.com/scitools/workflows), [scitools/workflows/.github/workflows/ci-template-check.yml](https://github.com/scitools/workflows) and [scitools/workflows/.github/workflows/refresh-lockfiles.yml](https://github.com/scitools/workflows). 
Updates `scitools/workflows/.github/workflows/ci-manifest.yml` from 2026.01.1 to 2026.02.0 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.01.1...2026.02.0) Updates `scitools/workflows/.github/workflows/ci-template-check.yml` from 2026.01.1 to 2026.02.0 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.01.1...2026.02.0) Updates `scitools/workflows/.github/workflows/refresh-lockfiles.yml` from 2026.01.1 to 2026.02.0 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.01.1...2026.02.0) --- updated-dependencies: - dependency-name: scitools/workflows/.github/workflows/ci-manifest.yml dependency-version: 2026.02.0 dependency-type: direct:production dependency-group: gha - dependency-name: scitools/workflows/.github/workflows/ci-template-check.yml dependency-version: 2026.02.0 dependency-type: direct:production dependency-group: gha - dependency-name: scitools/workflows/.github/workflows/refresh-lockfiles.yml dependency-version: 2026.02.0 dependency-type: direct:production dependency-group: gha ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/ci-template-check.yml | 2 +- .github/workflows/refresh-lockfiles.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 9596af3b63..4406e1c01d 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.01.1 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.02.0 diff --git a/.github/workflows/ci-template-check.yml b/.github/workflows/ci-template-check.yml index 6f97bdd1d2..32f74f9043 100644 --- a/.github/workflows/ci-template-check.yml +++ b/.github/workflows/ci-template-check.yml @@ -10,7 +10,7 @@ on: jobs: prompt-share: - uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.01.1 + uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.02.0 secrets: inherit with: pr_number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index e4b500fd26..b78a6523c3 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.01.1 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.02.0 secrets: inherit From a4a9e544568cea64b62e01589b1feac16e3f2067 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Fri, 6 Feb 2026 09:36:56 +0000 Subject: [PATCH 11/77] Convert fileformats/ff to pytest (#6899) * actioned test_ArakawaC * actioned test_ENDGame * converted test_FF2PP * converted test_FFHeader * converted test_Grid 
* converted test_NewDynamics * removed redundant import --- lib/iris/tests/unit/fileformats/__init__.py | 11 + .../unit/fileformats/ff/test_ArakawaC.py | 27 +-- .../tests/unit/fileformats/ff/test_ENDGame.py | 19 +- .../tests/unit/fileformats/ff/test_FF2PP.py | 195 ++++++++---------- .../unit/fileformats/ff/test_FFHeader.py | 72 +++---- .../tests/unit/fileformats/ff/test_Grid.py | 74 +++---- .../unit/fileformats/ff/test_NewDynamics.py | 19 +- 7 files changed, 192 insertions(+), 225 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py index c5982fc475..bebe6301eb 100644 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ b/lib/iris/tests/unit/fileformats/__init__.py @@ -3,3 +3,14 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats` package.""" + +import pytest +from pytest_mock import MockerFixture + + +class MockerMixin: + mocker: MockerFixture + + @pytest.fixture(autouse=True) + def _mocker_mixin_setup(self, mocker): + self.mocker = mocker diff --git a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py index 5e731632c6..42451e2af9 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py +++ b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py @@ -4,22 +4,19 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.ArakawaC`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats._ff import ArakawaC +from iris.tests import _shared_utils -class Test__x_vectors(tests.IrisTest): +class Test__x_vectors: def _test(self, column, horiz_grid_type, xp, xu): reals = np.arange(6) + 100 grid = ArakawaC(column, None, reals, horiz_grid_type) result_xp, result_xu = grid._x_vectors() - self.assertArrayEqual(result_xp, xp) - self.assertArrayEqual(result_xu, xu) + _shared_utils.assert_array_equal(result_xp, xp) + _shared_utils.assert_array_equal(result_xu, xu) def test_none(self): self._test(column=None, horiz_grid_type=None, xp=None, xu=None) @@ -49,12 +46,12 @@ def test_2d_with_wrap(self): ) -class Test_regular_x(tests.IrisTest): +class Test_regular_x: def _test(self, subgrid, bzx, bdx): grid = ArakawaC(None, None, [4.0, None, None, -5.0, None, None], None) result_bzx, result_bdx = grid.regular_x(subgrid) - self.assertEqual(result_bzx, bzx) - self.assertEqual(result_bdx, bdx) + assert result_bzx == bzx + assert result_bdx == bdx def test_theta_subgrid(self): self._test(1, -9.0, 4.0) @@ -63,13 +60,13 @@ def test_u_subgrid(self): self._test(11, -7.0, 4.0) -class Test_regular_y(tests.IrisTest): +class Test_regular_y: def _test(self, v_offset, subgrid, bzy, bdy): grid = ArakawaC(None, None, [None, 4.0, 45.0, None, None, None], None) grid._v_offset = v_offset result_bzy, result_bdy = grid.regular_y(subgrid) - self.assertEqual(result_bzy, bzy) - self.assertEqual(result_bdy, bdy) + assert result_bzy == bzy + assert result_bdy == bdy def test_theta_subgrid_NewDynamics(self): self._test(0.5, 1, 41.0, 4.0) @@ -82,7 +79,3 @@ def test_theta_subgrid_ENDGame(self): def test_v_subgrid_ENDGame(self): self._test(-0.5, 11, 39.0, 4.0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py index 2a09a60275..865cdf7691 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py +++ 
b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py @@ -4,29 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.ENDGame`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats._ff import ENDGame +from iris.tests import _shared_utils -class Test(tests.IrisTest): +class Test: def test_class_attributes(self): reals = np.arange(6) + 100 grid = ENDGame(None, None, reals, None) - self.assertEqual(grid._v_offset, -0.5) + assert grid._v_offset == -0.5 -class Test__y_vectors(tests.IrisTest): +class Test__y_vectors: def _test(self, row, yp, yv): reals = np.arange(6) + 100 grid = ENDGame(None, row, reals, None) result_yp, result_yv = grid._y_vectors() - self.assertArrayEqual(result_yp, yp) - self.assertArrayEqual(result_yv, yv) + _shared_utils.assert_array_equal(result_yp, yp) + _shared_utils.assert_array_equal(result_yv, yv) def test_none(self): self._test(row=None, yp=None, yv=None) @@ -40,7 +37,3 @@ def test_2d(self): yp=np.array([0, 1, 2]), yv=np.array([0, 10, 20, 30]), ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index c21fc39821..e1cd1f5912 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -4,20 +4,18 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.fileformat.ff.FF2PP` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import collections import contextlib -from unittest import mock import numpy as np +import pytest from iris.exceptions import NotYetImplementedError import iris.fileformats._ff as ff from iris.fileformats._ff import FF2PP import iris.fileformats.pp as pp +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin from iris.warnings import IrisLoadWarning # PP-field: LBPACK N1 values. @@ -42,32 +40,31 @@ ) -class Test____iter__(tests.IrisTest): - @mock.patch("iris.fileformats._ff.FFHeader") - def test_call_structure(self, _FFHeader): +class Test____iter__(MockerMixin): + def test_call_structure(self, mocker): # Check that the iter method calls the two necessary utility # functions - extract_result = mock.Mock() - interpret_patch = mock.patch( + _FFHeader = mocker.patch("iris.fileformats._ff.FFHeader") + extract_result = mocker.Mock() + interpret_patch = mocker.patch( "iris.fileformats.pp._interpret_fields", autospec=True, return_value=iter([]), ) - extract_patch = mock.patch( + extract_patch = mocker.patch( "iris.fileformats._ff.FF2PP._extract_field", autospec=True, return_value=extract_result, ) FF2PP_instance = ff.FF2PP("mock") - with interpret_patch as interpret, extract_patch as extract: - list(iter(FF2PP_instance)) + list(iter(FF2PP_instance)) - interpret.assert_called_once_with(extract_result) - extract.assert_called_once_with(FF2PP_instance) + interpret_patch.assert_called_once_with(extract_result) + extract_patch.assert_called_once_with(FF2PP_instance) -class Test__extract_field__LBC_format(tests.IrisTest): +class Test__extract_field__LBC_format(MockerMixin): @contextlib.contextmanager def mock_for_extract_field(self, fields, x=None, y=None): """A context manager to ensure FF2PP._extract_field gets a field @@ -75,22 +72,26 @@ def mock_for_extract_field(self, fields, x=None, y=None): the "make_pp_field" call. 
""" - with mock.patch("iris.fileformats._ff.FFHeader"): + with self.mocker.patch("iris.fileformats._ff.FFHeader"): ff2pp = ff.FF2PP("mock") ff2pp._ff_header.lookup_table = [0, 0, len(fields)] # Fake level constants, with shape specifying just one model-level. ff2pp._ff_header.level_dependent_constants = np.zeros(1) - grid = mock.Mock() - grid.vectors = mock.Mock(return_value=(x, y)) - ff2pp._ff_header.grid = mock.Mock(return_value=grid) + grid = self.mocker.Mock() + grid.vectors = self.mocker.Mock(return_value=(x, y)) + ff2pp._ff_header.grid = self.mocker.Mock(return_value=grid) open_func = "builtins.open" with ( - mock.patch("iris.fileformats._ff._parse_binary_stream", return_value=[0]), - mock.patch(open_func), - mock.patch("struct.unpack_from", return_value=[4]), - mock.patch("iris.fileformats.pp.make_pp_field", side_effect=fields), - mock.patch("iris.fileformats._ff.FF2PP._payload", return_value=(0, 0)), + self.mocker.patch( + "iris.fileformats._ff._parse_binary_stream", return_value=[0] + ), + self.mocker.patch(open_func), + self.mocker.patch("struct.unpack_from", return_value=[4]), + self.mocker.patch("iris.fileformats.pp.make_pp_field", side_effect=fields), + self.mocker.patch( + "iris.fileformats._ff.FF2PP._payload", return_value=(0, 0) + ), ): yield ff2pp @@ -101,7 +102,7 @@ def _mock_lbc(self, **kwargs): # Apply provided args (replacing any defaults if specified). field_kwargs.update(kwargs) # Return a mock with just those properties pre-defined. 
- return mock.Mock(**field_kwargs) + return self.mocker.Mock(**field_kwargs) def test_LBC_header(self): bzx, bzy = -10, 15 @@ -121,18 +122,18 @@ def test_LBC_header(self): ff2pp._ff_header.dataset_type = 5 result = list(ff2pp._extract_field()) - self.assertEqual([field], result) - self.assertEqual(field.lbrow, 10 + 14 * 2) - self.assertEqual(field.lbnpt, 12 + 16 * 2) + assert [field] == result + assert 10 + 14 * 2 == field.lbrow + assert 12 + 16 * 2 == field.lbnpt name_mapping_dict = dict( rim_width=slice(4, 6), y_halo=slice(2, 4), x_halo=slice(0, 2) ) boundary_packing = pp.SplittableInt(121416, name_mapping_dict) - self.assertEqual(field.boundary_packing, boundary_packing) - self.assertEqual(field.bzy, bzy - boundary_packing.y_halo * field.bdy) - self.assertEqual(field.bzx, bzx - boundary_packing.x_halo * field.bdx) + assert field.boundary_packing == boundary_packing + assert field.bzy == bzy - boundary_packing.y_halo * field.bdy + assert field.bzx == bzx - boundary_packing.x_halo * field.bdx def check_non_trivial_coordinate_warning(self, field): field.lbegin = 0 @@ -146,25 +147,17 @@ def check_non_trivial_coordinate_warning(self, field): y = np.array([1, 2, 6]) with self.mock_for_extract_field([field], x, y) as ff2pp: ff2pp._ff_header.dataset_type = 5 - with mock.patch("warnings.warn") as warn: + msg = ( + "The x or y coordinates of your boundary condition field may " + "be incorrect, not having taken into account the boundary " + "size." + ) + with pytest.warns(IrisLoadWarning, match=msg): list(ff2pp._extract_field()) # Check the values are unchanged. - self.assertEqual(field.bdy, orig_bdy) - self.assertEqual(field.bdx, orig_bdx) - - # Check a warning was raised with a suitable message. - warn_error_tmplt = "Unexpected warning message: {}" - non_trivial_coord_warn_msg = warn.call_args[0][0] - msg = ( - "The x or y coordinates of your boundary condition field may " - "be incorrect, not having taken into account the boundary " - "size." 
- ) - self.assertTrue( - non_trivial_coord_warn_msg.startswith(msg), - warn_error_tmplt.format(non_trivial_coord_warn_msg), - ) + assert field.bdy == orig_bdy + assert field.bdx == orig_bdx def test_LBC_header_non_trivial_coords_both(self): # Check a warning is raised when both bdx and bdy are bad. @@ -205,19 +198,16 @@ def test_negative_bdy(self): ) with self.mock_for_extract_field([field]) as ff2pp: ff2pp._ff_header.dataset_type = 5 - with mock.patch("warnings.warn") as warn: + msg = "The LBC has a bdy less than 0." + with pytest.warns(IrisLoadWarning, match=msg): list(ff2pp._extract_field()) - msg = "The LBC has a bdy less than 0." - self.assertTrue( - warn.call_args[0][0].startswith(msg), - "Northwards bdy warning not correctly raised.", - ) -class Test__payload(tests.IrisTest): - def setUp(self): +class Test__payload(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create a mock LBC type PPField. - self.mock_field = mock.Mock() + self.mock_field = mocker.Mock() field = self.mock_field field.raw_lbpack = _UNPACKED field.lbuser = [_REAL] @@ -227,14 +217,14 @@ def setUp(self): field.boundary_packing = None def _test(self, mock_field, expected_depth, expected_dtype, word_depth=None): - with mock.patch("iris.fileformats._ff.FFHeader", return_value=None): + with self.mocker.patch("iris.fileformats._ff.FFHeader", return_value=None): kwargs = {} if word_depth is not None: kwargs["word_depth"] = word_depth ff2pp = FF2PP("dummy_filename", **kwargs) data_depth, data_dtype = ff2pp._payload(mock_field) - self.assertEqual(data_depth, expected_depth) - self.assertEqual(data_dtype, expected_dtype) + assert data_depth == expected_depth + assert data_dtype == expected_dtype def test_unpacked_real(self): mock_field = _DummyField( @@ -388,9 +378,9 @@ def test_lbpack_unsupported(self): lbuser=[_INTEGER], boundary_packing=None, ) - with self.assertRaisesRegex( + with pytest.raises( NotYetImplementedError, - "PP fields with LBPACK of 1239 are not 
supported.", + match="PP fields with LBPACK of 1239 are not supported.", ): self._test(mock_field, None, None) @@ -418,7 +408,7 @@ def test_lbc_wgdos_unsupported(self): # Anything not None will do here. boundary_packing=0, ) - with self.assertRaisesRegex(ValueError, "packed LBC data is not supported"): + with pytest.raises(ValueError, match="packed LBC data is not supported"): self._test(mock_field, None, None) def test_lbc_cray(self): @@ -436,13 +426,13 @@ def test_lbc_cray(self): self._test(mock_field, ((47 * 34) - (19 * 14)) * 4, ">f4") -class Test__det_border(tests.IrisTest): - def setUp(self): - _FFH_patch = mock.patch("iris.fileformats._ff.FFHeader") +class Test__det_border: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + _FFH_patch = mocker.patch("iris.fileformats._ff.FFHeader") _FFH_patch.start() - self.addCleanup(_FFH_patch.stop) - def test_unequal_spacing_eitherside(self): + def test_unequal_spacing_eitherside(self, mocker): # Ensure that we do not interpret the case where there is not the same # spacing on the lower edge as the upper edge. ff2pp = FF2PP("dummy") @@ -454,10 +444,9 @@ def test_unequal_spacing_eitherside(self): "size." ) - with mock.patch("warnings.warn") as warn: + with pytest.warns(IrisLoadWarning, match=msg): result = ff2pp._det_border(field_x, None) - warn.assert_called_with(msg, category=IrisLoadWarning) - self.assertIs(result, field_x) + assert result is field_x def test_increasing_field_values(self): # Field where its values a increasing. @@ -465,7 +454,7 @@ def test_increasing_field_values(self): field_x = np.array([1, 2, 3]) com = np.array([0, 1, 2, 3, 4]) result = ff2pp._det_border(field_x, 1) - self.assertArrayEqual(result, com) + _shared_utils.assert_array_equal(result, com) def test_decreasing_field_values(self): # Field where its values a decreasing. 
@@ -473,20 +462,21 @@ def test_decreasing_field_values(self): field_x = np.array([3, 2, 1]) com = np.array([4, 3, 2, 1, 0]) result = ff2pp._det_border(field_x, 1) - self.assertArrayEqual(result, com) + _shared_utils.assert_array_equal(result, com) -class Test__adjust_field_for_lbc(tests.IrisTest): - def setUp(self): +class Test__adjust_field_for_lbc: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Patch FFHeader to produce a mock header instead of opening a file. - self.mock_ff_header = mock.Mock() + self.mock_ff_header = mocker.Mock() self.mock_ff_header.dataset_type = 5 - self.mock_ff = self.patch( + self.mock_ff = mocker.patch( "iris.fileformats._ff.FFHeader", return_value=self.mock_ff_header ) # Create a mock LBC type PPField. - self.mock_field = mock.Mock() + self.mock_field = mocker.Mock() field = self.mock_field field.lbtim = 0 field.lblev = 7777 @@ -505,54 +495,55 @@ def test__basic(self): ff2pp = FF2PP("dummy_filename") field = self.mock_field ff2pp._adjust_field_for_lbc(field) - self.assertEqual(field.lbtim, 11) - self.assertEqual(field.lbvc, 65) - self.assertEqual(field.boundary_packing.rim_width, 8) - self.assertEqual(field.boundary_packing.y_halo, 5) - self.assertEqual(field.boundary_packing.x_halo, 4) - self.assertEqual(field.lbnpt, 1009) - self.assertEqual(field.lbrow, 2011) + assert field.lbtim == 11 + assert field.lbvc == 65 + assert field.boundary_packing.rim_width == 8 + assert field.boundary_packing.y_halo == 5 + assert field.boundary_packing.x_halo == 4 + assert field.lbnpt == 1009 + assert field.lbrow == 2011 def test__bad_lbtim(self): self.mock_field.lbtim = 717 ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex(ValueError, "LBTIM of 717, expected only 0 or 11"): + with pytest.raises(ValueError, match="LBTIM of 717, expected only 0 or 11"): ff2pp._adjust_field_for_lbc(self.mock_field) def test__bad_lbvc(self): self.mock_field.lbvc = 312 ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex(ValueError, "LBVC of 
312, expected only 0 or 65"): + with pytest.raises(ValueError, match="LBVC of 312, expected only 0 or 65"): ff2pp._adjust_field_for_lbc(self.mock_field) -class Test__fields_over_all_levels(tests.IrisTest): - def setUp(self): +class Test__fields_over_all_levels: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Patch FFHeader to produce a mock header instead of opening a file. - self.mock_ff_header = mock.Mock() + self.mock_ff_header = mocker.Mock() self.mock_ff_header.dataset_type = 5 # Fake the level constants to look like 3 model levels. self.n_all_levels = 3 self.mock_ff_header.level_dependent_constants = np.zeros((self.n_all_levels)) - self.mock_ff = self.patch( + self.mock_ff = mocker.patch( "iris.fileformats._ff.FFHeader", return_value=self.mock_ff_header ) # Create a simple mock for a test field. - self.mock_field = mock.Mock() + self.mock_field = mocker.Mock() field = self.mock_field field.lbhem = 103 - self.original_lblev = mock.sentinel.untouched_lbev + self.original_lblev = mocker.sentinel.untouched_lbev field.lblev = self.original_lblev def _check_expected_levels(self, results, n_levels): if n_levels == 0: - self.assertEqual(len(results), 1) - self.assertEqual(results[0].lblev, self.original_lblev) + assert len(results) == 1 + assert results[0].lblev == self.original_lblev else: - self.assertEqual(len(results), n_levels) - self.assertEqual([fld.lblev for fld in results], list(range(n_levels))) + assert len(results) == n_levels + assert [fld.lblev for fld in results] == list(range(n_levels)) def test__is_lbc(self): ff2pp = FF2PP("dummy_filename") @@ -564,18 +555,14 @@ def test__lbhem_too_small(self): ff2pp = FF2PP("dummy_filename") field = self.mock_field field.lbhem = 100 - with self.assertRaisesRegex(ValueError, "hence >= 101"): + with pytest.raises(ValueError, match="hence >= 101"): _ = list(ff2pp._fields_over_all_levels(field)) def test__lbhem_too_large(self): ff2pp = FF2PP("dummy_filename") field = self.mock_field field.lbhem = 105 - 
with self.assertRaisesRegex( - ValueError, "more than the total number of levels in the file = 3" + with pytest.raises( + ValueError, match="more than the total number of levels in the file = 3" ): _ = list(ff2pp._fields_over_all_levels(field)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py index 1c20acd39d..d855f194a2 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py @@ -4,40 +4,42 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.FFHeader`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import collections -from unittest import mock import numpy as np +import pytest from iris.fileformats._ff import FFHeader, _WarnComboLoadingDefaulting +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin MyGrid = collections.namedtuple("MyGrid", "column row real horiz_grid_type") -class Test_grid(tests.IrisTest): +class Test_grid(MockerMixin): def _header(self, grid_staggering): - with mock.patch.object(FFHeader, "__init__", mock.Mock(return_value=None)): - header = FFHeader() + _ = self.mocker.patch.object( + FFHeader, "__init__", self.mocker.Mock(return_value=None) + ) + header = FFHeader() header.grid_staggering = grid_staggering - header.column_dependent_constants = mock.sentinel.column - header.row_dependent_constants = mock.sentinel.row - header.real_constants = mock.sentinel.real - header.horiz_grid_type = mock.sentinel.horiz_grid_type + header.column_dependent_constants = self.mocker.sentinel.column + header.row_dependent_constants = self.mocker.sentinel.row + header.real_constants = self.mocker.sentinel.real + header.horiz_grid_type = self.mocker.sentinel.horiz_grid_type return 
header def _test_grid_staggering(self, grid_staggering): header = self._header(grid_staggering) - with mock.patch.dict(FFHeader.GRID_STAGGERING_CLASS, {grid_staggering: MyGrid}): - grid = header.grid() - self.assertIsInstance(grid, MyGrid) - self.assertIs(grid.column, mock.sentinel.column) - self.assertIs(grid.row, mock.sentinel.row) - self.assertIs(grid.real, mock.sentinel.real) - self.assertIs(grid.horiz_grid_type, mock.sentinel.horiz_grid_type) + _ = self.mocker.patch.dict( + FFHeader.GRID_STAGGERING_CLASS, {grid_staggering: MyGrid} + ) + grid = header.grid() + assert isinstance(grid, MyGrid) + assert grid.column is self.mocker.sentinel.column + assert grid.row is self.mocker.sentinel.row + assert grid.real is self.mocker.sentinel.real + assert grid.horiz_grid_type is self.mocker.sentinel.horiz_grid_type def test_new_dynamics(self): self._test_grid_staggering(3) @@ -45,29 +47,23 @@ def test_new_dynamics(self): def test_end_game(self): self._test_grid_staggering(6) - def test_unknown(self): + def test_unknown(self, mocker): header = self._header(0) - with mock.patch( + _ = mocker.patch( "iris.fileformats._ff.NewDynamics", - mock.Mock(return_value=mock.sentinel.grid), - ): - with mock.patch("warnings.warn") as warn: - grid = header.grid() - warn.assert_called_with( - "Staggered grid type: 0 not currently" - " interpreted, assuming standard C-grid", - category=_WarnComboLoadingDefaulting, + mocker.Mock(return_value=mocker.sentinel.grid), ) - self.assertIs(grid, mock.sentinel.grid) + msg = ( + "Staggered grid type: 0 not currently interpreted, assuming standard C-grid" + ) + with pytest.warns(_WarnComboLoadingDefaulting, match=msg): + grid = header.grid() + assert grid is mocker.sentinel.grid -@tests.skip_data -class Test_integer_constants(tests.IrisTest): +@_shared_utils.skip_data +class Test_integer_constants: def test_read_ints(self): - test_file_path = tests.get_data_path(("FF", "structured", "small")) + test_file_path = _shared_utils.get_data_path(("FF", 
"structured", "small")) ff_header = FFHeader(test_file_path) - self.assertEqual(ff_header.integer_constants.dtype, np.dtype(">i8")) - - -if __name__ == "__main__": - tests.main() + assert ff_header.integer_constants.dtype == np.dtype(">i8") diff --git a/lib/iris/tests/unit/fileformats/ff/test_Grid.py b/lib/iris/tests/unit/fileformats/ff/test_Grid.py index 1bb9688c1a..d3ece16916 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_Grid.py +++ b/lib/iris/tests/unit/fileformats/ff/test_Grid.py @@ -4,57 +4,55 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.Grid`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock +import pytest from iris.fileformats._ff import Grid +from iris.tests.unit.fileformats import MockerMixin -class Test___init__(tests.IrisTest): - def test_attributes(self): +class Test___init__: + def test_attributes(self, mocker): # Ensure the constructor initialises all the grid's attributes # correctly, including unpacking values from the REAL constants. 
reals = ( - mock.sentinel.ew, - mock.sentinel.ns, - mock.sentinel.first_lat, - mock.sentinel.first_lon, - mock.sentinel.pole_lat, - mock.sentinel.pole_lon, + mocker.sentinel.ew, + mocker.sentinel.ns, + mocker.sentinel.first_lat, + mocker.sentinel.first_lon, + mocker.sentinel.pole_lat, + mocker.sentinel.pole_lon, ) grid = Grid( - mock.sentinel.column, - mock.sentinel.row, + mocker.sentinel.column, + mocker.sentinel.row, reals, - mock.sentinel.horiz_grid_type, + mocker.sentinel.horiz_grid_type, ) - self.assertIs(grid.column_dependent_constants, mock.sentinel.column) - self.assertIs(grid.row_dependent_constants, mock.sentinel.row) - self.assertIs(grid.ew_spacing, mock.sentinel.ew) - self.assertIs(grid.ns_spacing, mock.sentinel.ns) - self.assertIs(grid.first_lat, mock.sentinel.first_lat) - self.assertIs(grid.first_lon, mock.sentinel.first_lon) - self.assertIs(grid.pole_lat, mock.sentinel.pole_lat) - self.assertIs(grid.pole_lon, mock.sentinel.pole_lon) - self.assertIs(grid.horiz_grid_type, mock.sentinel.horiz_grid_type) - - -class Test_vectors(tests.IrisTest): - def setUp(self): - self.xp = mock.sentinel.xp - self.xu = mock.sentinel.xu - self.yp = mock.sentinel.yp - self.yv = mock.sentinel.yv + assert grid.column_dependent_constants is mocker.sentinel.column + assert grid.row_dependent_constants is mocker.sentinel.row + assert grid.ew_spacing is mocker.sentinel.ew + assert grid.ns_spacing is mocker.sentinel.ns + assert grid.first_lat is mocker.sentinel.first_lat + assert grid.first_lon is mocker.sentinel.first_lon + assert grid.pole_lat is mocker.sentinel.pole_lat + assert grid.pole_lon is mocker.sentinel.pole_lon + assert grid.horiz_grid_type is mocker.sentinel.horiz_grid_type + + +class Test_vectors(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.xp = mocker.sentinel.xp + self.xu = mocker.sentinel.xu + self.yp = mocker.sentinel.yp + self.yv = mocker.sentinel.yv def _test_subgrid_vectors(self, subgrid, expected): grid = Grid(None, None, 
(None,) * 6, None) - grid._x_vectors = mock.Mock(return_value=(self.xp, self.xu)) - grid._y_vectors = mock.Mock(return_value=(self.yp, self.yv)) + grid._x_vectors = self.mocker.Mock(return_value=(self.xp, self.xu)) + grid._y_vectors = self.mocker.Mock(return_value=(self.yp, self.yv)) result = grid.vectors(subgrid) - self.assertEqual(result, expected) + assert result == expected def test_1(self): # Data on atmospheric theta points. @@ -103,7 +101,3 @@ def test_28(self): def test_29(self): # Orography field for atmospheric LBCs. self._test_subgrid_vectors(29, (self.xp, self.yp)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py index f3cc41aa82..e623afdeb6 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py +++ b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py @@ -4,29 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.NewDynamics`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats._ff import NewDynamics +from iris.tests import _shared_utils -class Test(tests.IrisTest): +class Test: def test_class_attributes(self): reals = np.arange(6) + 100 grid = NewDynamics(None, None, reals, None) - self.assertEqual(grid._v_offset, 0.5) + assert grid._v_offset == 0.5 -class Test__y_vectors(tests.IrisTest): +class Test__y_vectors: def _test(self, row, yp, yv): reals = np.arange(6) + 100 grid = NewDynamics(None, row, reals, None) result_yp, result_yv = grid._y_vectors() - self.assertArrayEqual(result_yp, yp) - self.assertArrayEqual(result_yv, yv) + _shared_utils.assert_array_equal(result_yp, yp) + _shared_utils.assert_array_equal(result_yv, yv) def test_none(self): self._test(row=None, yp=None, yv=None) @@ -44,7 +41,3 @@ def test_2d(self): yp=np.array([0, 1, 2, 3]), yv=np.array([0, 10, 20]), ) - - -if __name__ == "__main__": - tests.main() From 5bc44521ba03b3573b46f5ccc37788c9c091b528 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Fri, 6 Feb 2026 09:46:10 +0000 Subject: [PATCH 12/77] Converted fileformats/netcdf to pytest (#6906) * loader chunk control * loader get cf var data * loader load aux factory * loader load cube * loader translate constraints to var callback * loader load cubes * all saver, minus cdl * review comments * setup_method * some funky integration issues * some funky integration issues --- .../integration/netcdf/test_coord_systems.py | 15 +- .../basic_mesh.cdl | 0 .../_thread_safe_nc/test_NetCDFWriteProxy.py | 6 +- .../test_GribParamHandler.py | 2 +- .../test_UkmoProcessFlagsHandler.py | 2 +- .../netcdf/loader/test__chunk_control.py | 37 +- .../netcdf/loader/test__get_cf_var_data.py | 90 ++- .../netcdf/loader/test__load_aux_factory.py | 132 ++--- .../netcdf/loader/test__load_cube.py | 118 ++-- ...__translate_constraints_to_var_callback.py | 91 ++- .../netcdf/loader/test_load_cubes.py | 84 ++- 
.../fileformats/netcdf/saver/test_Saver.py | 557 +++++++++--------- .../netcdf/saver/test_Saver__lazy.py | 69 ++- .../saver/test_Saver__lazy_stream_data.py | 29 +- .../netcdf/saver/test_Saver__ugrid.py | 529 ++++++++--------- .../fileformats/netcdf/saver/test_save.py | 274 +++++---- 16 files changed, 980 insertions(+), 1055 deletions(-) rename lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/{TestSaveUgrid__cube => SaveUgrid__cube}/basic_mesh.cdl (100%) diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index aa7b715912..db3e21d22f 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -8,6 +8,7 @@ import numpy as np import pytest +from pytest import MonkeyPatch import iris from iris.coords import DimCoord @@ -168,14 +169,16 @@ def multi_cs_osgb_wkt(): """ -@_shared_utils.skip_data -class TestCoordSystem: - @pytest.fixture(autouse=True) - def _setup(self): - tlc.setUpModule() +@pytest.fixture(autouse=True, scope="module") +def _setup(tmp_path_factory): + if not hasattr(tlc, "TMP_DIR"): + tlc.TMP_DIR = tmp_path_factory.mktemp("temp") yield - tlc.tearDownModule() + delattr(tlc, "TMP_DIR") + +@_shared_utils.skip_data +class TestCoordSystem: def test_load_laea_grid(self, request): cube = iris.load_cube( _shared_utils.get_data_path( diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/SaveUgrid__cube/basic_mesh.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/SaveUgrid__cube/basic_mesh.cdl diff --git a/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py 
b/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py index b33cb515a2..9f6d26f975 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py @@ -64,11 +64,15 @@ def __call__(self, *args, **kwargs) -> nc.Dataset: def test_handle_hdf_locking_error(dataset_path, monkeypatch, write_proxy): """Test that NetCDFWriteProxy can handle non-deterministic HDF locking errors.""" monkeypatch.setattr(nc, "Dataset", UnreliableDatasetMaker()) - with pytest.raises(OSError, match="Simulated non-deterministic HDF locking error"): + + def _file_lock_failure(): dataset = nc.Dataset(write_proxy.path, "r+") var = dataset.variables[write_proxy.varname] var[0] = 1.0 + with pytest.raises(OSError, match="Simulated non-deterministic HDF locking error"): + _file_lock_failure() + # Reset. monkeypatch.setattr(nc, "Dataset", UnreliableDatasetMaker()) try: diff --git a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py index d0189b474f..9e9ad017e2 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py +++ b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py @@ -104,5 +104,5 @@ def test_odd_array_case(self): ) def test_badvalue__fail(self, badval): # It can convert random values to strings, but they mostly won't satisfy. 
- with pytest.raises(ValueError): + with pytest.raises(ValueError, match=r"Invalid.*"): GP_HANDLER.decode_attribute(badval) diff --git a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py index 7f6eda4397..585d3d966f 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py +++ b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py @@ -107,7 +107,7 @@ def test_junk_string(self): result = UPF_HANDLER.decode_attribute(test_string) assert result == ("xxx",) - @pytest.mark.parametrize("badtype", ("int", "intarray", "floatarray")) + @pytest.mark.parametrize("badtype", ["int", "intarray", "floatarray"]) def test_numeric_values(self, badtype): """Even array attributes get converted to a string + split.""" if badtype == "int": diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py index 3051754423..fb53fa01f0 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__chunk_control.py @@ -4,10 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformats.netcdf.loader.ChunkControl`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip -from unittest.mock import ANY, patch +import re import dask import numpy as np @@ -20,7 +17,7 @@ import iris.tests.stock as istk -@pytest.fixture() +@pytest.fixture def save_cubelist_with_sigma(tmp_filepath): cube = istk.simple_4d_with_hybrid_height() cube_varname = "my_var" @@ -129,13 +126,15 @@ def test_control_cube_var(tmp_filepath, save_cubelist_with_sigma): def test_invalid_chunksize(tmp_filepath, save_cubelist_with_sigma): - with pytest.raises(ValueError): + msg = "'dimension_chunksizes' kwargs should be a dict of `str: int` pairs, not {'model_level_numer': '2'}." + with pytest.raises(ValueError, match=msg): with CHUNK_CONTROL.set(model_level_numer="2"): CubeList(loader.load_cubes(tmp_filepath)) def test_invalid_var_name(tmp_filepath, save_cubelist_with_sigma): - with pytest.raises(ValueError): + msg = re.escape("'var_names' should be an iterable of strings, not [1, 2].") + with pytest.raises(ValueError, match=msg): with CHUNK_CONTROL.set([1, 2], model_level_numer="2"): CubeList(loader.load_cubes(tmp_filepath)) @@ -187,7 +186,7 @@ def test_no_chunks_from_file(tmp_filepath, save_cubelist_with_sigma): CubeList(loader.load_cubes(tmp_filepath)) -def test_as_dask(tmp_filepath, save_cubelist_with_sigma): +def test_as_dask(tmp_filepath, save_cubelist_with_sigma, mocker): """Test as dask. No return values, as we can't be sure @@ -195,15 +194,15 @@ def test_as_dask(tmp_filepath, save_cubelist_with_sigma): from our own chunking behaviour. 
""" message = "Mock called, rest of test unneeded" - with patch("iris.fileformats.netcdf.loader.as_lazy_data") as as_lazy_data: - as_lazy_data.side_effect = RuntimeError(message) - with CHUNK_CONTROL.as_dask(): - try: - CubeList(loader.load_cubes(tmp_filepath)) - except RuntimeError as e: - if str(e) != message: - raise e - as_lazy_data.assert_called_with(ANY, meta=ANY, chunks="auto") + as_lazy_data = mocker.patch("iris.fileformats.netcdf.loader.as_lazy_data") + as_lazy_data.side_effect = RuntimeError(message) + with CHUNK_CONTROL.as_dask(): + try: + CubeList(loader.load_cubes(tmp_filepath)) + except RuntimeError as e: + if str(e) != message: + raise e + as_lazy_data.assert_called_with(mocker.ANY, meta=mocker.ANY, chunks="auto") def test_pinned_optimisation(tmp_filepath, save_cubelist_with_sigma): @@ -221,7 +220,3 @@ def test_pinned_optimisation(tmp_filepath, save_cubelist_with_sigma): assert sigma.shape == (4,) assert sigma.lazy_points().chunksize == (2,) assert sigma.lazy_bounds().chunksize == (2, 2) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py index e29f0de012..f92e8288b7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py @@ -4,39 +4,37 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.netcdf._get_cf_var_data` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import dask.array as da import numpy as np +import pytest from iris._lazy_data import _optimum_chunksize import iris.fileformats.cf from iris.fileformats.netcdf._thread_safe_nc import VLType from iris.fileformats.netcdf.loader import CHUNK_CONTROL, _get_cf_var_data +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin -class Test__get_cf_var_data(tests.IrisTest): - def setUp(self): +class Test__get_cf_var_data(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self): self.filename = "DUMMY" self.shape = (300000, 240, 200) self.expected_chunks = _optimum_chunksize(self.shape, self.shape) def _make(self, chunksizes=None, shape=None, dtype="i4", **extra_properties): - cf_data = mock.MagicMock( + cf_data = self.mocker.MagicMock( _FillValue=None, __getitem__="", dimensions=["dim_" + str(x) for x in range(len(shape or "1"))], ) - cf_data.chunking = mock.MagicMock(return_value=chunksizes) + cf_data.chunking = self.mocker.MagicMock(return_value=chunksizes) if shape is None: shape = self.shape if dtype is not str: # for testing VLen str arrays (dtype=`class `) dtype = np.dtype(dtype) - cf_var = mock.MagicMock( + cf_var = self.mocker.MagicMock( spec=iris.fileformats.cf.CFVariable, dtype=dtype, cf_data=cf_data, @@ -46,15 +44,15 @@ def _make(self, chunksizes=None, shape=None, dtype="i4", **extra_properties): size=np.prod(shape), **extra_properties, ) - cf_var.__getitem__.return_value = mock.sentinel.real_data_accessed + cf_var.__getitem__.return_value = self.mocker.sentinel.real_data_accessed return cf_var def test_cf_data_type(self): chunks = [1, 12, 100] cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var) - self.assertIsInstance(lazy_data, da.Array) - self.assertIsInstance(da.utils.meta_from_array(lazy_data), np.ma.MaskedArray) + assert isinstance(lazy_data, da.Array) + assert isinstance(da.utils.meta_from_array(lazy_data), np.ma.MaskedArray) def 
test_cf_data_chunks(self): chunks = [2500, 240, 200] @@ -62,7 +60,7 @@ def test_cf_data_chunks(self): lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] expected_chunks = _optimum_chunksize(chunks, self.shape) - self.assertArrayEqual(lazy_data_chunks, expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, expected_chunks) def test_cf_data_chunk_control(self): # more thorough testing can be found at `test__chunk_control` @@ -72,7 +70,7 @@ def test_cf_data_chunk_control(self): lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] expected_chunks = (25, 24, 20) - self.assertArrayEqual(lazy_data_chunks, expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, expected_chunks) def test_cf_data_no_chunks(self): # No chunks means chunks are calculated from the array's shape by @@ -81,7 +79,7 @@ def test_cf_data_no_chunks(self): cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] - self.assertArrayEqual(lazy_data_chunks, self.expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, self.expected_chunks) def test_cf_data_contiguous(self): # Chunks 'contiguous' is equivalent to no chunks. 
@@ -89,77 +87,73 @@ def test_cf_data_contiguous(self): cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] - self.assertArrayEqual(lazy_data_chunks, self.expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, self.expected_chunks) def test_type__1kf8_is_lazy(self): cf_var = self._make(shape=(1000,), dtype="f8") var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert isinstance(var_data, da.Array) - def test_arraytype__1ki2_is_real(self): + def test_arraytype__1ki2_is_real(self, mocker): cf_var = self._make(shape=(1000,), dtype="i2") var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_arraytype__100f8_is_real(self): + def test_arraytype__100f8_is_real(self, mocker): cf_var = self._make(shape=(100,), dtype="f8") var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_vltype__1000str_is_lazy(self): + def test_vltype__1000str_is_lazy(self, mocker): # Variable length string type - mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(1000,), dtype=str, datatype=mock_vltype) var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert isinstance(var_data, da.Array) - def test_vltype__1000str_is_real_with_hint(self): + def test_vltype__1000str_is_real_with_hint(self, mocker): # Variable length string type with a hint on the array variable length size - mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(100,), dtype=str, datatype=mock_vltype) with 
CHUNK_CONTROL.set("DUMMY_VAR", _vl_hint=1): var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_vltype__100str_is_real(self): + def test_vltype__100str_is_real(self, mocker): # Variable length string type - mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(100,), dtype=str, datatype=mock_vltype) var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_vltype__100str_is_lazy_with_hint(self): + def test_vltype__100str_is_lazy_with_hint(self, mocker): # Variable length string type with a hint on the array variable length size - mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(100,), dtype=str, datatype=mock_vltype) with CHUNK_CONTROL.set("DUMMY_VAR", _vl_hint=50): var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert isinstance(var_data, da.Array) - def test_vltype__100f8_is_lazy(self): + def test_vltype__100f8_is_lazy(self, mocker): # Variable length float64 type - mock_vltype = mock.Mock(spec=VLType, dtype="f8", name="varlen float64 type") + mock_vltype = mocker.Mock(spec=VLType, dtype="f8", name="varlen float64 type") cf_var = self._make(shape=(1000,), dtype="f8", datatype=mock_vltype) var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert isinstance(var_data, da.Array) - def test_vltype__100f8_is_real_with_hint(self): + def test_vltype__100f8_is_real_with_hint(self, mocker): # Variable length float64 type with a hint on the array variable length size - mock_vltype = mock.Mock(spec=VLType, dtype="f8", name="varlen float64 
type") + mock_vltype = mocker.Mock(spec=VLType, dtype="f8", name="varlen float64 type") cf_var = self._make(shape=(100,), dtype="f8", datatype=mock_vltype) with CHUNK_CONTROL.set("DUMMY_VAR", _vl_hint=2): var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_cf_data_emulation(self): + def test_cf_data_emulation(self, mocker): # Check that a variable emulation object passes its real data directly. - emulated_data = mock.Mock() + emulated_data = mocker.Mock() # Make a cf_var with a special extra '_data_array' property. cf_var = self._make(chunksizes=None, _data_array=emulated_data) result = _get_cf_var_data(cf_var) # This should get directly returned. - self.assertIs(emulated_data, result) - - -if __name__ == "__main__": - tests.main() + assert emulated_data is result diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py index 5aafeaf0fc..da3ceaf77a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py @@ -4,53 +4,50 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.netcdf._load_aux_factory` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock +import re import warnings import numpy as np +import pytest from iris.coords import DimCoord from iris.cube import Cube from iris.fileformats.netcdf.loader import _load_aux_factory +from iris.tests.unit.fileformats import MockerMixin from iris.warnings import IrisFactoryCoordNotFoundWarning -class TestAtmosphereHybridSigmaPressureCoordinate(tests.IrisTest): - def setUp(self): +class TestAtmosphereHybridSigmaPressureCoordinate(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" self.requires = dict(formula_type=standard_name) - self.ap = mock.MagicMock(units="units") - self.ps = mock.MagicMock(units="units") - coordinates = [(mock.sentinel.b, "b"), (self.ps, "ps")] + self.ap = mocker.MagicMock(units="units") + self.ps = mocker.MagicMock(units="units") + coordinates = [(mocker.sentinel.b, "b"), (self.ps, "ps")] self.cube_parts = dict(coordinates=coordinates) - self.engine = mock.Mock(requires=self.requires, cube_parts=self.cube_parts) - self.cube = mock.create_autospec(Cube, spec_set=True, instance=True) + self.engine = mocker.Mock(requires=self.requires, cube_parts=self.cube_parts) + self.cube = mocker.create_autospec(Cube, spec_set=True, instance=True) # Patch out the check_dependencies functionality. func = "iris.aux_factory.HybridPressureFactory._check_dependencies" - patcher = mock.patch(func) - patcher.start() - self.addCleanup(patcher.stop) + _ = mocker.patch(func) - def test_formula_terms_ap(self): + def test_formula_terms_ap(self, mocker): self.cube_parts["coordinates"].append((self.ap, "ap")) self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 0) + assert self.cube.add_aux_coord.call_count == 0 # Check cube.add_aux_factory method. 
- self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] - self.assertEqual(factory.delta, self.ap) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == self.ap + assert factory.sigma == mocker.sentinel.b + assert factory.surface_air_pressure == self.ps - def test_formula_terms_a_p0(self): + def test_formula_terms_a_p0(self, mocker): coord_a = DimCoord(np.arange(5), units="1") coord_p0 = DimCoord(10, units="Pa") coord_expected = DimCoord( @@ -63,28 +60,26 @@ def test_formula_terms_a_p0(self): self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") _load_aux_factory(self.engine, self.cube) # Check cube.coord_dims method. - self.assertEqual(self.cube.coord_dims.call_count, 1) + assert self.cube.coord_dims.call_count == 1 args, _ = self.cube.coord_dims.call_args - self.assertEqual(len(args), 1) - self.assertIs(args[0], coord_a) + assert len(args) == 1 + assert args[0] is coord_a # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 1) + assert self.cube.add_aux_coord.call_count == 1 args, _ = self.cube.add_aux_coord.call_args - self.assertEqual(len(args), 2) - self.assertEqual(args[0], coord_expected) - self.assertIsInstance(args[1], mock.Mock) + assert len(args) == 2 + assert args[0] == coord_expected + assert isinstance(args[1], mocker.Mock) # Check cube.add_aux_factory method. 
- self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] - self.assertEqual(factory.delta, coord_expected) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == coord_expected + assert factory.sigma == mocker.sentinel.b + assert factory.surface_air_pressure == self.ps - def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless( - self, - ): + def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless(self, mocker): coord_a = DimCoord(np.arange(5), units="unknown") coord_p0 = DimCoord(10, units="Pa") coord_expected = DimCoord( @@ -97,31 +92,34 @@ def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless( self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") _load_aux_factory(self.engine, self.cube) # Check cube.coord_dims method. - self.assertEqual(self.cube.coord_dims.call_count, 1) + assert self.cube.coord_dims.call_count == 1 args, _ = self.cube.coord_dims.call_args - self.assertEqual(len(args), 1) - self.assertIs(args[0], coord_a) - self.assertEqual("1", args[0].units) + assert len(args) == 1 + assert args[0] is coord_a + assert "1" == args[0].units # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 1) + assert self.cube.add_aux_coord.call_count == 1 args, _ = self.cube.add_aux_coord.call_args - self.assertEqual(len(args), 2) - self.assertEqual(args[0], coord_expected) - self.assertIsInstance(args[1], mock.Mock) + assert len(args) == 2 + assert args[0] == coord_expected + assert isinstance(args[1], mocker.Mock) # Check cube.add_aux_factory method. 
- self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] - self.assertEqual(factory.delta, coord_expected) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == coord_expected + assert factory.sigma == mocker.sentinel.b + assert factory.surface_air_pressure == self.ps def test_formula_terms_p0_non_scalar(self): coord_p0 = DimCoord(np.arange(5)) + msg = re.escape( + "Expecting None to be a scalar reference pressure coordinate, got shape (5,)" + ) self.cube_parts["coordinates"].append((coord_p0, "p0")) self.requires["formula_terms"] = dict(p0="p0") - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match=msg): _load_aux_factory(self.engine, self.cube) def test_formula_terms_p0_bounded(self): @@ -132,34 +130,34 @@ def test_formula_terms_p0_bounded(self): with warnings.catch_warnings(record=True) as warn: warnings.simplefilter("always") _load_aux_factory(self.engine, self.cube) - self.assertEqual(len(warn), 1) + assert len(warn) == 1 msg = ( "Ignoring atmosphere hybrid sigma pressure scalar " "coordinate {!r} bounds.".format(coord_p0.name()) ) - self.assertEqual(msg, str(warn[0].message)) + assert msg == str(warn[0].message) def _check_no_delta(self): # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 0) + assert self.cube.add_aux_coord.call_count == 0 # Check cube.add_aux_factory method. 
- self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] # Check that the factory has no delta term - self.assertEqual(factory.delta, None) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == None + assert factory.sigma == self.mocker.sentinel.b + assert factory.surface_air_pressure == self.ps def test_formula_terms_ap_missing_coords(self): self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") - with mock.patch("warnings.warn") as warn: + with pytest.warns( + IrisFactoryCoordNotFoundWarning, + match="Unable to find coordinate for variable 'ap'", + ) as warn: _load_aux_factory(self.engine, self.cube) - warn.assert_called_once_with( - "Unable to find coordinate for variable 'ap'", - category=IrisFactoryCoordNotFoundWarning, - ) + assert len(warn) == 1 self._check_no_delta() def test_formula_terms_no_delta_terms(self): @@ -180,7 +178,3 @@ def test_formula_terms_no_a_term(self): self.requires["formula_terms"] = dict(a="p0", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) self._check_no_delta() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py index ad6017c4be..cc6e03a2cc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py @@ -4,18 +4,14 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.coords import DimCoord import iris.fileformats.cf from iris.fileformats.netcdf.loader import _load_cube from iris.loading import LOAD_PROBLEMS +from iris.tests.unit.fileformats import MockerMixin class NoStr: @@ -23,7 +19,7 @@ def __str__(self): raise RuntimeError("No string representation") -class TestCoordAttributes(tests.IrisTest): +class TestCoordAttributes(MockerMixin): @staticmethod def _patcher(engine, cf, cf_group): coordinates = [] @@ -32,19 +28,18 @@ def _patcher(engine, cf, cf_group): coordinates.append((coord, coord.name())) engine.cube_parts["coordinates"] = coordinates - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self, mocker): this = "iris.fileformats.netcdf.loader._assert_case_specific_facts" - patch = mock.patch(this, side_effect=self._patcher) - patch.start() - self.addCleanup(patch.stop) - self.engine = mock.Mock() + _ = mocker.patch(this, side_effect=self._patcher) + self.engine = mocker.Mock() self.filename = "DUMMY" - self.flag_masks = mock.sentinel.flag_masks - self.flag_meanings = mock.sentinel.flag_meanings - self.flag_values = mock.sentinel.flag_values - self.valid_range = mock.sentinel.valid_range - self.valid_min = mock.sentinel.valid_min - self.valid_max = mock.sentinel.valid_max + self.flag_masks = mocker.sentinel.flag_masks + self.flag_meanings = mocker.sentinel.flag_meanings + self.flag_values = mocker.sentinel.flag_values + self.valid_range = mocker.sentinel.valid_range + self.valid_min = mocker.sentinel.valid_min + self.valid_max = mocker.sentinel.valid_max def _make(self, names, attrs): coords = [DimCoord(i, long_name=name) for i, name in enumerate(names)] @@ -52,13 +47,13 @@ def _make(self, names, attrs): cf_group = {} for name, cf_attrs in zip(names, attrs): - cf_attrs_unused = mock.Mock(return_value=cf_attrs) - cf_group[name] = mock.Mock(cf_attrs_unused=cf_attrs_unused) - cf = 
mock.Mock(cf_group=cf_group) + cf_attrs_unused = self.mocker.Mock(return_value=cf_attrs) + cf_group[name] = self.mocker.Mock(cf_attrs_unused=cf_attrs_unused) + cf = self.mocker.Mock(cf_group=cf_group) - cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=shape) - cf_var = mock.MagicMock( + cf_data = self.mocker.Mock(_FillValue=None) + cf_data.chunking = self.mocker.MagicMock(return_value=shape) + cf_var = self.mocker.MagicMock( spec=iris.fileformats.cf.CFVariable, dtype=np.dtype("i4"), cf_data=cf_data, @@ -80,11 +75,11 @@ def test_flag_pass_thru(self): attrs = [[(attr, value)]] cf, cf_var = self._make(names, attrs) cube = _load_cube(self.engine, cf, cf_var, self.filename) - self.assertEqual(len(cube.coords(name)), 1) + assert len(cube.coords(name)) == 1 coord = cube.coord(name) - self.assertEqual(len(coord.attributes), 1) - self.assertEqual(list(coord.attributes.keys()), [attr]) - self.assertEqual(list(coord.attributes.values()), [value]) + assert len(coord.attributes) == 1 + assert list(coord.attributes.keys()) == [attr] + assert list(coord.attributes.values()) == [value] def test_flag_pass_thru_multi(self): names = ["masks", "meanings", "values"] @@ -101,8 +96,8 @@ def test_flag_pass_thru_multi(self): ] cf, cf_var = self._make(names, attrs) cube = _load_cube(self.engine, cf, cf_var, self.filename) - self.assertEqual(len(cube.coords()), 3) - self.assertEqual(set([c.name() for c in cube.coords()]), set(names)) + assert len(cube.coords()) == 3 + assert set([c.name() for c in cube.coords()]) == set(names) expected = [ attrs[0], [attrs[1][0]], @@ -113,7 +108,7 @@ def test_flag_pass_thru_multi(self): ] for name, expect in zip(names, expected): attributes = cube.coord(name).attributes - self.assertEqual(set(attributes.items()), set(expect)) + assert set(attributes.items()) == set(expect) def test_load_problems(self): key_and_val = (NoStr(), "wibble") @@ -121,43 +116,40 @@ def test_load_problems(self): cf, cf_var = self._make(["foo"], 
[[key_and_val]]) _ = _load_cube(self.engine, cf, cf_var, self.filename) load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "No string representation", "".join(load_problem.stack_trace.format()) - ) + assert "No string representation" in "".join(load_problem.stack_trace.format()) destination = load_problem.destination - self.assertIs(destination.iris_class, DimCoord) + assert destination.iris_class is DimCoord # Note: cannot test destination.identifier without large increase in # complexity. Rely on TestCubeAttributes.test_load_problems for this. -class TestCubeAttributes(tests.IrisTest): - def setUp(self): +class TestCubeAttributes(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): this = "iris.fileformats.netcdf.loader._assert_case_specific_facts" - patch = mock.patch(this) - patch.start() - self.addCleanup(patch.stop) - self.engine = mock.Mock() + _ = mocker.patch(this) + self.engine = mocker.Mock() self.cf = None self.filename = "DUMMY" - self.flag_masks = mock.sentinel.flag_masks - self.flag_meanings = mock.sentinel.flag_meanings - self.flag_values = mock.sentinel.flag_values - self.valid_range = mock.sentinel.valid_range - self.valid_min = mock.sentinel.valid_min - self.valid_max = mock.sentinel.valid_max + self.flag_masks = mocker.sentinel.flag_masks + self.flag_meanings = mocker.sentinel.flag_meanings + self.flag_values = mocker.sentinel.flag_values + self.valid_range = mocker.sentinel.valid_range + self.valid_min = mocker.sentinel.valid_min + self.valid_max = mocker.sentinel.valid_max def _make(self, attrs): shape = (1,) - cf_attrs_unused = mock.Mock(return_value=attrs) - cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=shape) - cf_var = mock.MagicMock( + cf_attrs_unused = self.mocker.Mock(return_value=attrs) + cf_data = self.mocker.Mock(_FillValue=None) + cf_data.chunking = self.mocker.MagicMock(return_value=shape) + cf_var = self.mocker.MagicMock( spec=iris.fileformats.cf.CFVariable, 
dtype=np.dtype("i4"), cf_data=cf_data, cf_name="DUMMY_VAR", filename="DUMMY", - cf_group=mock.Mock(), + cf_group=self.mocker.Mock(), cf_attrs_unused=cf_attrs_unused, shape=shape, size=np.prod(shape), @@ -173,9 +165,9 @@ def test_flag_pass_thru(self): for key, value in attrs: cf_var = self._make([(key, value)]) cube = _load_cube(self.engine, self.cf, cf_var, self.filename) - self.assertEqual(len(cube.attributes), 1) - self.assertEqual(list(cube.attributes.keys()), [key]) - self.assertEqual(list(cube.attributes.values()), [value]) + assert len(cube.attributes) == 1 + assert list(cube.attributes.keys()) == [key] + assert list(cube.attributes.values()) == [value] def test_flag_pass_thru_multi(self): attrs = [ @@ -195,8 +187,8 @@ def test_flag_pass_thru_multi(self): expected = set([attrs[ind] for ind in [0, 1, 2, 4, 6, 7, 8]]) cf_var = self._make(attrs) cube = _load_cube(self.engine, self.cf, cf_var, self.filename) - self.assertEqual(len(cube.attributes), len(expected)) - self.assertEqual(set(cube.attributes.items()), expected) + assert len(cube.attributes) == len(expected) + assert set(cube.attributes.items()) == expected def test_load_problems(self): key_and_val = (NoStr(), "wibble") @@ -204,13 +196,7 @@ def test_load_problems(self): cf_var = self._make([key_and_val]) _ = _load_cube(self.engine, self.cf, cf_var, self.filename) load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "No string representation", "".join(load_problem.stack_trace.format()) - ) + assert "No string representation" in "".join(load_problem.stack_trace.format()) destination = load_problem.destination - self.assertIs(destination.iris_class, self.engine.cube.__class__) - self.assertEqual(destination.identifier, cf_var.cf_name) - - -if __name__ == "__main__": - tests.main() + assert destination.iris_class is self.engine.cube.__class__ + assert destination.identifier == cf_var.cf_name diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py 
b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py index b95bbd0552..3f386238ea 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py @@ -7,28 +7,29 @@ """ -from unittest.mock import MagicMock +import pytest import iris from iris.fileformats.cf import CFDataVariable from iris.fileformats.netcdf.loader import _translate_constraints_to_var_callback - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests - - -class Test(tests.IrisTest): - data_variables = [ - CFDataVariable("var1", MagicMock(standard_name="x_wind")), - CFDataVariable("var2", MagicMock(standard_name="y_wind")), - CFDataVariable("var1", MagicMock(long_name="x component of wind")), - CFDataVariable( - "var1", - MagicMock(standard_name="x_wind", long_name="x component of wind"), - ), - CFDataVariable("var1", MagicMock()), - ] +from iris.tests import _shared_utils + + +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.data_variables = [ + CFDataVariable("var1", mocker.MagicMock(standard_name="x_wind")), + CFDataVariable("var2", mocker.MagicMock(standard_name="y_wind")), + CFDataVariable("var1", mocker.MagicMock(long_name="x component of wind")), + CFDataVariable( + "var1", + mocker.MagicMock( + standard_name="x_wind", long_name="x component of wind" + ), + ), + CFDataVariable("var1", mocker.MagicMock()), + ] def test_multiple_constraints(self): constrs = [ @@ -37,7 +38,7 @@ def test_multiple_constraints(self): ] callback = _translate_constraints_to_var_callback(constrs) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, True, False, True, False]) + _shared_utils.assert_array_equal(result, [True, True, False, True, False]) def test_multiple_constraints_invalid(self): 
constrs = [ @@ -45,9 +46,9 @@ def test_multiple_constraints_invalid(self): iris.NameConstraint(var_name="var1", STASH="m01s00i024"), ] result = _translate_constraints_to_var_callback(constrs) - self.assertIsNone(result) + assert result is None - def test_multiple_constraints__multiname(self): + def test_multiple_constraints__multiname(self, mocker): # Modify the first constraint to require BOTH var-name and std-name match constrs = [ iris.NameConstraint(standard_name="x_wind", var_name="var1"), @@ -56,51 +57,53 @@ def test_multiple_constraints__multiname(self): callback = _translate_constraints_to_var_callback(constrs) # Add 2 extra vars: one passes both name checks, and the other does not vars = self.data_variables + [ - CFDataVariable("var1", MagicMock(standard_name="x_wind")), - CFDataVariable("var1", MagicMock(standard_name="air_pressure")), + CFDataVariable("var1", mocker.MagicMock(standard_name="x_wind")), + CFDataVariable("var1", mocker.MagicMock(standard_name="air_pressure")), ] result = [callback(var) for var in vars] - self.assertArrayEqual(result, [True, True, False, True, False, True, False]) + _shared_utils.assert_array_equal( + result, [True, True, False, True, False, True, False] + ) - def test_non_NameConstraint(self): + def test_non_name_constraint(self): constr = iris.AttributeConstraint(STASH="m01s00i002") result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) + assert result is None def test_str_constraint(self): result = _translate_constraints_to_var_callback("x_wind") - self.assertIsNone(result) + assert result is None - def test_Constaint_with_name(self): + def test_constaint_with_name(self): constr = iris.Constraint(name="x_wind") result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) + assert result is None - def test_NameConstraint_standard_name(self): + def test_name_constraint_standard_name(self): constr = iris.NameConstraint(standard_name="x_wind") callback = 
_translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, False, True, False]) + _shared_utils.assert_array_equal(result, [True, False, False, True, False]) - def test_NameConstraint_long_name(self): + def test_name_constraint_long_name(self): constr = iris.NameConstraint(long_name="x component of wind") callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [False, False, True, True, False]) + _shared_utils.assert_array_equal(result, [False, False, True, True, False]) - def test_NameConstraint_var_name(self): + def test_name_constraint_var_name(self): constr = iris.NameConstraint(var_name="var1") callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, True, True, True]) + _shared_utils.assert_array_equal(result, [True, False, True, True, True]) - def test_NameConstraint_standard_name_var_name(self): + def test_name_constraint_standard_name_var_name(self): constr = iris.NameConstraint(standard_name="x_wind", var_name="var1") callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, False, True, False]) + _shared_utils.assert_array_equal(result, [True, False, False, True, False]) - def test_NameConstraint_standard_name_long_name_var_name(self): + def test_name_constraint_standard_name_long_name_var_name(self): constr = iris.NameConstraint( standard_name="x_wind", long_name="x component of wind", @@ -108,18 +111,14 @@ def test_NameConstraint_standard_name_long_name_var_name(self): ) callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [False, False, False, True, False]) + 
_shared_utils.assert_array_equal(result, [False, False, False, True, False]) - def test_NameConstraint_with_STASH(self): + def test_name_constraint_with_stash(self): constr = iris.NameConstraint(standard_name="x_wind", STASH="m01s00i024") result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) + assert result is None def test_no_constraints(self): constrs = [] result = _translate_constraints_to_var_callback(constrs) - self.assertIsNone(result) - - -if __name__ == "__main__": - tests.main() + assert result is None diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 5fa37b18ef..5ebbbcc96c 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -9,14 +9,6 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from pathlib import Path -from shutil import rmtree -import tempfile - from cf_units import as_unit import numpy as np import pytest @@ -27,17 +19,14 @@ from iris.fileformats.netcdf.loader import load_cubes from iris.loading import LOAD_PROBLEMS from iris.mesh import MeshCoord +from iris.tests import _shared_utils from iris.tests.stock.netcdf import ncgen_from_cdl -def setUpModule(): +@pytest.fixture(autouse=True, scope="module") +def _setup(tmp_path_factory): global TMP_DIR - TMP_DIR = Path(tempfile.mkdtemp()) - - -def tearDownModule(): - if TMP_DIR is not None: - rmtree(TMP_DIR) + TMP_DIR = tmp_path_factory.mktemp("temp") def cdl_to_nc(cdl): @@ -47,7 +36,7 @@ def cdl_to_nc(cdl): return str(nc_path) -class Tests(tests.IrisTest): +class Tests: def test_ancillary_variables(self): # Note: using a CDL string as a test data reference, rather than a # binary file. 
@@ -76,9 +65,9 @@ def test_ancillary_variables(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) + assert len(cubes) == 1 avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) + assert len(avs) == 1 expected = AncillaryVariable( np.ma.array([11.0, 12.0, 13.0]), long_name="refs", @@ -86,7 +75,7 @@ def test_ancillary_variables(self): units="1", attributes={"custom": "extra-attribute"}, ) - self.assertEqual(avs[0], expected) + assert avs[0] == expected def test_status_flags(self): # Note: using a CDL string as a test data reference, rather than a binary file. @@ -115,9 +104,9 @@ def test_status_flags(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) + assert len(cubes) == 1 avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) + assert len(avs) == 1 expected = AncillaryVariable( np.ma.array([1, 1, 2], dtype=np.int8), long_name="qq status_flag", @@ -128,7 +117,7 @@ def test_status_flags(self): "flag_meanings": "a b", }, ) - self.assertEqual(avs[0], expected) + assert avs[0] == expected def test_cell_measures(self): # Note: using a CDL string as a test data reference, rather than a binary file. @@ -162,9 +151,9 @@ def test_cell_measures(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) + assert len(cubes) == 1 cms = cubes[0].cell_measures() - self.assertEqual(len(cms), 1) + assert len(cms) == 1 expected = CellMeasure( np.ma.array([[110.0, 120.0, 130.0], [221.0, 231.0, 241.0]]), measure="area", @@ -173,7 +162,7 @@ def test_cell_measures(self): units="m2", attributes={"custom": "extra-attribute"}, ) - self.assertEqual(cms[0], expected) + assert cms[0] == expected def test_default_units(self): # Note: using a CDL string as a test data reference, rather than a binary file. @@ -206,17 +195,17 @@ def test_default_units(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("y").units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("x").units, as_unit(1)) - self.assertEqual(cubes[0].ancillary_variable("refs").units, as_unit("unknown")) - self.assertEqual(cubes[0].cell_measure("areas").units, as_unit("unknown")) + assert len(cubes) == 1 + assert cubes[0].units == as_unit("unknown") + assert cubes[0].coord("y").units == as_unit("unknown") + assert cubes[0].coord("x").units == as_unit(1) + assert cubes[0].ancillary_variable("refs").units == as_unit("unknown") + assert cubes[0].cell_measure("areas").units == as_unit("unknown") -class TestsMesh(tests.IrisTest): +class TestsMesh: @classmethod - def setUpClass(cls): + def setup_class(cls): cls.ref_cdl = """ netcdf mesh_test { dimensions: @@ -266,14 +255,15 @@ def setUpClass(cls): cls.nc_path = cdl_to_nc(cls.ref_cdl) cls.mesh_cubes = list(load_cubes(cls.nc_path)) - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): # Interim measure to allow pytest-style patching in the absence of # full-scale pytest conversion. 
self.monkeypatch = pytest.MonkeyPatch() def test_standard_dims(self): for cube in self.mesh_cubes: - self.assertIsNotNone(cube.coords("levels")) + assert cube.coords("levels") is not None def test_mesh_coord(self): cube = [cube for cube in self.mesh_cubes if cube.var_name == "face_data"][0] @@ -281,21 +271,21 @@ def test_mesh_coord(self): face_y = cube.coord("latitude") for coord in (face_x, face_y): - self.assertIsInstance(coord, MeshCoord) - self.assertEqual("face", coord.location) - self.assertArrayEqual(np.ma.array([0.5]), coord.points) + assert isinstance(coord, MeshCoord) + assert "face" == coord.location + _shared_utils.assert_array_equal(np.ma.array([0.5]), coord.points) - self.assertEqual("x", face_x.axis) - self.assertEqual("y", face_y.axis) - self.assertEqual(face_x.mesh, face_y.mesh) - self.assertArrayEqual(np.ma.array([[0.0, 2.0, 1.0]]), face_x.bounds) - self.assertArrayEqual(np.ma.array([[0.0, 0.0, 1.0]]), face_y.bounds) + assert "x" == face_x.axis + assert "y" == face_y.axis + assert face_x.mesh == face_y.mesh + _shared_utils.assert_array_equal(np.ma.array([[0.0, 2.0, 1.0]]), face_x.bounds) + _shared_utils.assert_array_equal(np.ma.array([[0.0, 0.0, 1.0]]), face_y.bounds) def test_shared_mesh(self): cube_meshes = [cube.coord("latitude").mesh for cube in self.mesh_cubes] - self.assertEqual(cube_meshes[0], cube_meshes[1]) + assert cube_meshes[0] == cube_meshes[1] - def test_missing_mesh(self): + def test_missing_mesh(self, caplog): ref_cdl = self.ref_cdl.replace( 'face_data:mesh = "mesh"', 'face_data:mesh = "mesh2"' ) @@ -305,7 +295,9 @@ def test_missing_mesh(self): _ = list(load_cubes(nc_path)) log_regex = r".*could not be found in file." 
- with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): + with _shared_utils.assert_logs( + caplog, logger=logger, level="DEBUG", msg_regex=log_regex + ): _ = list(load_cubes(nc_path)) def test_mesh_coord_not_built(self): diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 0905c3d2a9..a2d2ff71ee 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -4,15 +4,9 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.fileformats.netcdf.Saver` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -from types import ModuleType - -import iris.tests as tests # isort:skip - import collections from contextlib import contextmanager -from unittest import mock +from types import ModuleType import numpy as np from numpy import ma @@ -36,11 +30,13 @@ from iris.coords import AncillaryVariable, AuxCoord, DimCoord from iris.cube import Cube from iris.fileformats.netcdf import Saver, _thread_safe_nc +from iris.tests import _shared_utils from iris.tests._shared_utils import assert_CDL import iris.tests.stock as stock +from iris.tests.unit.fileformats import MockerMixin -class Test_write(tests.IrisTest): +class Test_write: # ------------------------------------------------------------------------- # It is not considered necessary to have integration tests for saving # EVERY coordinate system. A subset are tested below. @@ -119,64 +115,64 @@ def _stereo_cube(self, ellipsoid=None, scale_factor=None): cube.add_dim_coord(coord, 1) return cube - def test_transverse_mercator(self): + def test_transverse_mercator(self, request, tmp_path): # Create a Cube with a transverse Mercator coordinate system. 
ellipsoid = GeogCS(6377563.396, 6356256.909) cube = self._transverse_mercator_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_transverse_mercator_no_ellipsoid(self): + def test_transverse_mercator_no_ellipsoid(self, request, tmp_path): # Create a Cube with a transverse Mercator coordinate system. cube = self._transverse_mercator_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_mercator(self): + def test_mercator(self, request, tmp_path): # Create a Cube with a Mercator coordinate system. ellipsoid = GeogCS(6377563.396, 6356256.909) cube = self._mercator_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_stereographic(self): + def test_stereographic(self, request, tmp_path): # Create a Cube with a stereographic coordinate system. ellipsoid = GeogCS(6377563.396, 6356256.909) cube = self._stereo_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_mercator_no_ellipsoid(self): + def test_mercator_no_ellipsoid(self, request, tmp_path): # Create a Cube with a Mercator coordinate system. 
cube = self._mercator_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_stereographic_no_ellipsoid(self): + def test_stereographic_no_ellipsoid(self, request, tmp_path): # Create a Cube with a stereographic coordinate system. cube = self._stereo_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_stereographic_scale_factor(self): + def test_stereographic_scale_factor(self, request, tmp_path): # Create a Cube with a stereographic coordinate system. cube = self._stereo_cube(scale_factor=1.3) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) @staticmethod def _filter_compression_calls(patch, compression_kwargs, mismatch=False): @@ -199,27 +195,27 @@ def _simple_cube(self, dtype): cube.add_dim_coord(coord, 0) return cube - def test_little_endian(self): + def test_little_endian(self, request, tmp_path): # Create a Cube with little-endian data. 
cube = self._simple_cube("f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - result_path = self.result_path("endian", "cdl") - self.assertCDL(nc_path, result_path, flags="") + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + result_path = _shared_utils.result_path(request, "endian", "cdl") + _shared_utils.assert_CDL(request, nc_path, result_path, flags="") - def test_zlib(self): + def test_zlib(self, mocker): cube = self._simple_cube(">f4") - api = self.patch("iris.fileformats.netcdf.saver._thread_safe_nc") + api = mocker.patch("iris.fileformats.netcdf.saver._thread_safe_nc") # Define mocked default fill values to prevent deprecation warning (#4374). api.default_fillvals = collections.defaultdict(lambda: -99.0) # Mock the apparent dtype of mocked variables, to avoid an error. @@ -231,7 +227,7 @@ def test_zlib(self): with Saver("/dummy/path", "NETCDF4", compute=False) as saver: saver.write(cube, zlib=True) dataset = api.DatasetWrapper.return_value - create_var_call = mock.call( + create_var_call = mocker.call( "air_pressure_anomaly", np.dtype("float32"), ["dim0", "dim1"], @@ -245,9 +241,9 @@ def test_zlib(self): complevel=4, chunksizes=None, ) - self.assertIn(create_var_call, dataset.createVariable.call_args_list) + assert create_var_call in dataset.createVariable.call_args_list - def test_compression(self): + def test_compression(self, mocker, tmp_path): cube = self._simple_cube(">f4") data_dims, shape = range(cube.ndim), cube.shape @@ -261,7 +257,7 @@ def test_compression(self): ) cube.add_ancillary_variable(anc_coord, data_dims=data_dims) - patch = self.patch( + patch = mocker.patch( "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" ) compression_kwargs = { @@ -271,16 +267,16 @@ def test_compression(self): "zlib": True, } - with self.temp_filename(suffix=".nc") as nc_path: - with Saver(nc_path, "NETCDF4", compute=False) as 
saver: - saver.write(cube, **compression_kwargs) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4", compute=False) as saver: + saver.write(cube, **compression_kwargs) - self.assertEqual(5, patch.call_count) + assert 5 == patch.call_count result = self._filter_compression_calls(patch, compression_kwargs) - self.assertEqual(3, len(result)) - self.assertEqual({cube.name(), aux_coord.name(), anc_coord.name()}, set(result)) + assert 3 == len(result) + assert {cube.name(), aux_coord.name(), anc_coord.name()} == set(result) - def test_non_compression__shape(self): + def test_non_compression__shape(self, mocker, tmp_path): cube = self._simple_cube(">f4") data_dims, shape = (0, 1), cube.shape @@ -294,7 +290,7 @@ def test_non_compression__shape(self): ) cube.add_ancillary_variable(anc_coord, data_dims=data_dims[1]) - patch = self.patch( + patch = mocker.patch( "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" ) compression_kwargs = { @@ -304,21 +300,21 @@ def test_non_compression__shape(self): "zlib": True, } - with self.temp_filename(suffix=".nc") as nc_path: - with Saver(nc_path, "NETCDF4", compute=False) as saver: - saver.write(cube, **compression_kwargs) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4", compute=False) as saver: + saver.write(cube, **compression_kwargs) - self.assertEqual(5, patch.call_count) + assert 5 == patch.call_count result = self._filter_compression_calls( patch, compression_kwargs, mismatch=True ) - self.assertEqual(4, len(result)) + assert 4 == len(result) # the aux coord and ancil variable are not compressed due to shape, and # the dim coord and its associated bounds are also not compressed expected = {aux_coord.name(), anc_coord.name(), "dim0", "dim0_bnds"} - self.assertEqual(expected, set(result)) + assert expected == set(result) - def test_non_compression__dtype(self): + def test_non_compression__dtype(self, mocker, tmp_path): cube = self._simple_cube(">f4") data_dims, shape = (0, 1), 
cube.shape @@ -327,10 +323,10 @@ def test_non_compression__dtype(self): aux_coord = AuxCoord(data, var_name="non_compress_aux", units="1") cube.add_aux_coord(aux_coord, data_dims=data_dims) - patch = self.patch( + patch = mocker.patch( "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" ) - patch.return_value = mock.MagicMock(dtype=np.dtype("S1")) + patch.return_value = mocker.MagicMock(dtype=np.dtype("S1")) compression_kwargs = { "complevel": 9, "fletcher32": True, @@ -338,21 +334,21 @@ def test_non_compression__dtype(self): "zlib": True, } - with self.temp_filename(suffix=".nc") as nc_path: - with Saver(nc_path, "NETCDF4", compute=False) as saver: - saver.write(cube, **compression_kwargs) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4", compute=False) as saver: + saver.write(cube, **compression_kwargs) - self.assertEqual(4, patch.call_count) + assert 4 == patch.call_count result = self._filter_compression_calls( patch, compression_kwargs, mismatch=True ) - self.assertEqual(3, len(result)) + assert 3 == len(result) # the aux coord is not compressed due to its string dtype, and # the dim coord and its associated bounds are also not compressed expected = {aux_coord.name(), "dim0", "dim0_bnds"} - self.assertEqual(expected, set(result)) + assert expected == set(result) - def test_least_significant_digit(self): + def test_least_significant_digit(self, tmp_path): cube = Cube( self.array_lib.array([1.23, 4.56, 7.89]), standard_name="surface_temperature", @@ -360,102 +356,102 @@ def test_least_significant_digit(self): var_name="temp", units="K", ) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, least_significant_digit=1) - cube_saved = iris.load_cube(nc_path) - self.assertEqual(cube_saved.attributes["least_significant_digit"], 1) - self.assertFalse(np.all(cube.data == cube_saved.data)) - self.assertArrayAllClose(cube.data, cube_saved.data, 0.1) + nc_path = tmp_path / 
"temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, least_significant_digit=1) + cube_saved = iris.load_cube(nc_path) + assert cube_saved.attributes["least_significant_digit"] == 1 + assert not np.all(cube.data == cube_saved.data) + _shared_utils.assert_array_all_close(cube.data, cube_saved.data, 0.1) - def test_default_unlimited_dimensions(self): + def test_default_unlimited_dimensions(self, tmp_path): # Default is no unlimited dimensions. cube = self._simple_cube(">f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertFalse(ds.dimensions["dim0"].isunlimited()) - self.assertFalse(ds.dimensions["dim1"].isunlimited()) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + assert not ds.dimensions["dim0"].isunlimited() + assert not ds.dimensions["dim1"].isunlimited() + ds.close() - def test_no_unlimited_dimensions(self): + def test_no_unlimited_dimensions(self, tmp_path): cube = self._simple_cube(">f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=None) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in ds.dimensions.values(): - self.assertFalse(dim.isunlimited()) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=None) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + for dim in ds.dimensions.values(): + assert not dim.isunlimited() + ds.close() - def test_invalid_unlimited_dimensions(self): + def test_invalid_unlimited_dimensions(self, tmp_path): cube = self._simple_cube(">f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - # should not raise an exception - saver.write(cube, unlimited_dimensions=["not_found"]) + 
nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + # should not raise an exception + saver.write(cube, unlimited_dimensions=["not_found"]) - def test_custom_unlimited_dimensions(self): + def test_custom_unlimited_dimensions(self, tmp_path): cube = self._transverse_mercator_cube() unlimited_dimensions = [ "projection_y_coordinate", "projection_x_coordinate", ] # test coordinates by name - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=unlimited_dimensions) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in unlimited_dimensions: - self.assertTrue(ds.dimensions[dim].isunlimited()) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=unlimited_dimensions) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + for dim in unlimited_dimensions: + assert ds.dimensions[dim].isunlimited() + ds.close() # test coordinate arguments - with self.temp_filename(".nc") as nc_path: - coords = [cube.coord(dim) for dim in unlimited_dimensions] - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=coords) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in unlimited_dimensions: - self.assertTrue(ds.dimensions[dim].isunlimited()) - ds.close() + nc_path = tmp_path / "temp2.nc" + coords = [cube.coord(dim) for dim in unlimited_dimensions] + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=coords) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + for dim in unlimited_dimensions: + assert ds.dimensions[dim].isunlimited() + ds.close() - def test_reserved_attributes(self): + def test_reserved_attributes(self, tmp_path): cube = self._simple_cube(">f4") cube.attributes["dimensions"] = "something something_else" - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = 
_thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("dimensions") - ds.close() - self.assertEqual(res, "something something_else") + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + res = ds.getncattr("dimensions") + ds.close() + assert res == "something something_else" - def test_with_climatology(self): + def test_with_climatology(self, request, tmp_path): cube = stock.climatology_3d() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_dimensional_to_scalar(self): + def test_dimensional_to_scalar(self, tmp_path): # Bounds for 1 point are still in a 2D array. scalar_bounds = self.array_lib.arange(2).reshape(1, 2) scalar_point = scalar_bounds.mean() scalar_data = self.array_lib.zeros(1) scalar_coord = AuxCoord(points=scalar_point, bounds=scalar_bounds) cube = Cube(scalar_data, aux_coords_and_dims=[(scalar_coord, 0)])[0] - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - # Confirm that the only dimension is the one denoting the number - # of bounds - have successfully saved the 2D bounds array into 1D. - self.assertEqual(["bnds"], list(ds.dimensions.keys())) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + # Confirm that the only dimension is the one denoting the number + # of bounds - have successfully saved the 2D bounds array into 1D. 
+ assert ["bnds"] == list(ds.dimensions.keys()) + ds.close() -class Test__create_cf_bounds(tests.IrisTest): +class Test__create_cf_bounds(MockerMixin): # Method is substituted in test_Saver__lazy. @staticmethod def climatology_3d(): @@ -478,34 +474,34 @@ def _check_bounds_setting(self, climatological=False): boundsvar_name = "time_" + varname_extra # Set up arguments for testing _create_cf_bounds. - saver = mock.MagicMock(spec=Saver) + saver = self.mocker.MagicMock(spec=Saver) # NOTE: 'saver' must have spec=Saver to fake isinstance(save, Saver), # so it can pass as 'self' in the call to _create_cf_cbounds. # Mock a '_dataset' property; not automatic because 'spec=Saver'. - saver._dataset = mock.MagicMock() + saver._dataset = self.mocker.MagicMock() # Mock the '_ensure_valid_dtype' method to return an object with a # suitable 'shape' and 'dtype'. - saver._ensure_valid_dtype.return_value = mock.Mock( + saver._ensure_valid_dtype.return_value = self.mocker.Mock( shape=coord.bounds.shape, dtype=coord.bounds.dtype ) - var = mock.MagicMock(spec=_thread_safe_nc.VariableWrapper) + var = self.mocker.MagicMock(spec=_thread_safe_nc.VariableWrapper) # Make the main call. Saver._create_cf_bounds(saver, coord, var, "time") # Test the call of _setncattr in _create_cf_bounds. - setncattr_call = mock.call( + setncattr_call = self.mocker.call( property_name, boundsvar_name.encode(encoding="ascii") ) - self.assertEqual(setncattr_call, var.setncattr.call_args) + assert setncattr_call == var.setncattr.call_args # Test the call of createVariable in _create_cf_bounds. 
dataset = saver._dataset expected_dimensions = var.dimensions + ("bnds",) - create_var_call = mock.call( + create_var_call = self.mocker.call( boundsvar_name, coord.bounds.dtype, expected_dimensions ) - self.assertEqual(create_var_call, dataset.createVariable.call_args) + assert create_var_call == dataset.createVariable.call_args def test_set_bounds_default(self): self._check_bounds_setting(climatological=False) @@ -514,95 +510,95 @@ def test_set_bounds_climatology(self): self._check_bounds_setting(climatological=True) -class Test_write__valid_x_cube_attributes(tests.IrisTest): +class Test_write__valid_x_cube_attributes: """Testing valid_range, valid_min and valid_max attributes.""" # Attribute is substituted in test_Saver__lazy. array_lib: ModuleType = np - def test_valid_range_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_range_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") vrange = self.array_lib.array([1, 2], dtype="int32") cube.attributes["valid_range"] = vrange - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.valid_range, vrange) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + _shared_utils.assert_array_equal(ds.valid_range, vrange) + ds.close() - def test_valid_min_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_min_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.attributes["valid_min"] = 1 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.valid_min, 1) - ds.close() + nc_path = tmp_path / 
"temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + _shared_utils.assert_array_equal(ds.valid_min, 1) + ds.close() - def test_valid_max_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_max_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.attributes["valid_max"] = 2 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.valid_max, 2) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + _shared_utils.assert_array_equal(ds.valid_max, 2) + ds.close() -class Test_write__valid_x_coord_attributes(tests.IrisTest): +class Test_write__valid_x_coord_attributes: """Testing valid_range, valid_min and valid_max attributes.""" # Attribute is substituted in test_Saver__lazy. 
array_lib: ModuleType = np - def test_valid_range_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_range_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") vrange = self.array_lib.array([1, 2], dtype="int32") cube.coord(axis="x").attributes["valid_range"] = vrange - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_range, vrange) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_range, vrange) + ds.close() - def test_valid_min_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_min_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.coord(axis="x").attributes["valid_min"] = 1 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_min, 1) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_min, 1) + ds.close() - def test_valid_max_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_max_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.coord(axis="x").attributes["valid_max"] = 2 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = 
_thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_max, 2) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_max, 2) + ds.close() -class Test_write_fill_value(tests.IrisTest): +class Test_write_fill_value: # Attribute is substituted in test_Saver__lazy. array_lib: ModuleType = np @@ -622,11 +618,13 @@ def _make_cube(self, dtype, masked_value=None, masked_index=None): dim_coords_and_dims=[(lat, 0), (lon, 1)], ) - @contextmanager - def _netCDF_var(self, cube, **kwargs): - # Get the netCDF4 Variable for a cube from a temp file - standard_name = cube.standard_name - with self.temp_filename(".nc") as nc_path: + @pytest.fixture + def _netCDF_var(self, tmp_path): + @contextmanager + def netCDF_var(cube, **kwargs): + # Get the netCDF4 Variable for a cube from a temp file + standard_name = cube.standard_name + nc_path = tmp_path / "temp.nc" with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, **kwargs) ds = _thread_safe_nc.DatasetWrapper(nc_path) @@ -637,88 +635,87 @@ def _netCDF_var(self, cube, **kwargs): ] yield var - def test_fill_value(self): + return netCDF_var + + def test_fill_value(self, _netCDF_var): # Test that a passed fill value is saved as a _FillValue attribute. cube = self._make_cube(">f4") fill_value = 12345.0 - with self._netCDF_var(cube, fill_value=fill_value) as var: - self.assertEqual(fill_value, var._FillValue) + with _netCDF_var(cube, fill_value=fill_value) as var: + assert fill_value == var._FillValue - def test_default_fill_value(self): + def test_default_fill_value(self, _netCDF_var): # Test that if no fill value is passed then there is no _FillValue. # attribute. 
cube = self._make_cube(">f4") - with self._netCDF_var(cube) as var: - self.assertNotIn("_FillValue", var.ncattrs()) + with _netCDF_var(cube) as var: + assert "_FillValue" not in var.ncattrs() - def test_mask_fill_value(self): + def test_mask_fill_value(self, _netCDF_var): # Test that masked data saves correctly when given a fill value. index = (1, 1) fill_value = 12345.0 cube = self._make_cube(">f4", masked_index=index) - with self._netCDF_var(cube, fill_value=fill_value) as var: - self.assertEqual(fill_value, var._FillValue) - self.assertTrue(var[index].mask) + with _netCDF_var(cube, fill_value=fill_value) as var: + assert fill_value == var._FillValue + assert var[index].mask - def test_mask_default_fill_value(self): + def test_mask_default_fill_value(self, _netCDF_var): # Test that masked data saves correctly using the default fill value. index = (1, 1) cube = self._make_cube(">f4", masked_index=index) - with self._netCDF_var(cube) as var: - self.assertNotIn("_FillValue", var.ncattrs()) - self.assertTrue(var[index].mask) + with _netCDF_var(cube) as var: + assert "_FillValue" not in var.ncattrs() + assert var[index].mask -class Test_cf_valid_var_name(tests.IrisTest): +class Test_cf_valid_var_name: def test_no_replacement(self): - self.assertEqual(Saver.cf_valid_var_name("valid_Nam3"), "valid_Nam3") + assert Saver.cf_valid_var_name("valid_Nam3") == "valid_Nam3" def test_special_chars(self): - self.assertEqual(Saver.cf_valid_var_name("inv?alid"), "inv_alid") + assert Saver.cf_valid_var_name("inv?alid") == "inv_alid" def test_leading_underscore(self): - self.assertEqual(Saver.cf_valid_var_name("_invalid"), "var__invalid") + assert Saver.cf_valid_var_name("_invalid") == "var__invalid" def test_leading_number(self): - self.assertEqual(Saver.cf_valid_var_name("2invalid"), "var_2invalid") + assert Saver.cf_valid_var_name("2invalid") == "var_2invalid" def test_leading_invalid(self): - self.assertEqual(Saver.cf_valid_var_name("?invalid"), "var__invalid") + assert 
Saver.cf_valid_var_name("?invalid") == "var__invalid" def test_no_hyphen(self): # CF explicitly prohibits hyphen, even though it is fine in NetCDF. - self.assertEqual(Saver.cf_valid_var_name("valid-netcdf"), "valid_netcdf") + assert Saver.cf_valid_var_name("valid-netcdf") == "valid_netcdf" class _Common__check_attribute_compliance: # Attribute is substituted in test_Saver__lazy. array_lib: ModuleType = np - def setUp(self): - self.container = mock.Mock(name="container", attributes={}) + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.container = mocker.Mock(name="container", attributes={}) self.data_dtype = np.dtype("int32") # We need to create mock datasets which look like they are closed. - dataset_class = mock.Mock( - return_value=mock.Mock( + dataset_class = mocker.Mock( + return_value=mocker.Mock( # Mock dataset : the isopen() call should return 0. - isopen=mock.Mock(return_value=0) + isopen=mocker.Mock(return_value=0) ) ) - patch = mock.patch( + _ = mocker.patch( "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", dataset_class, ) - _ = patch.start() - self.addCleanup(patch.stop) def set_attribute(self, value): self.container.attributes[self.attribute] = value - def assertAttribute(self, value): - self.assertEqual( - np.asarray(self.container.attributes[self.attribute]).dtype, value - ) + def assert_attribute(self, value): + assert np.asarray(self.container.attributes[self.attribute]).dtype == value def check_attribute_compliance_call(self, value, file_type="NETCDF4"): self.set_attribute(value) @@ -728,9 +725,7 @@ def check_attribute_compliance_call(self, value, file_type="NETCDF4"): saver.check_attribute_compliance(self.container, self.data_dtype) -class Test_check_attribute_compliance__valid_range( - _Common__check_attribute_compliance, tests.IrisTest -): +class Test_check_attribute_compliance__valid_range(_Common__check_attribute_compliance): @property def attribute(self): return "valid_range" @@ -738,18 +733,18 @@ def 
attribute(self): def test_valid_range_type_coerce(self): value = self.array_lib.array([1, 2], dtype="float") self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) + self.assert_attribute(self.data_dtype) def test_valid_range_unsigned_int8_data_signed_range(self): self.data_dtype = np.dtype("uint8") value = self.array_lib.array([1, 2], dtype="int8") self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) + self.assert_attribute(value.dtype) def test_valid_range_cannot_coerce(self): value = self.array_lib.array([1.5, 2.5], dtype="float64") msg = '"valid_range" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): self.check_attribute_compliance_call(value) def test_valid_range_not_numpy_array(self): @@ -757,18 +752,16 @@ def test_valid_range_not_numpy_array(self): self.data_dtype = np.dtype("int8") value = [1, 2] self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) + self.assert_attribute(np.int64) def test_uncastable_dtype(self): self.data_dtype = np.dtype("int64") value = [0, np.iinfo(self.data_dtype).max] - with self.assertRaisesRegex(ValueError, "cannot be safely cast"): + with pytest.raises(ValueError, match="cannot be safely cast"): self.check_attribute_compliance_call(value, file_type="NETCDF4_CLASSIC") -class Test_check_attribute_compliance__valid_min( - _Common__check_attribute_compliance, tests.IrisTest -): +class Test_check_attribute_compliance__valid_min(_Common__check_attribute_compliance): @property def attribute(self): return "valid_min" @@ -776,18 +769,18 @@ def attribute(self): def test_valid_range_type_coerce(self): value = self.array_lib.array(1, dtype="float") self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) + self.assert_attribute(self.data_dtype) def test_valid_range_unsigned_int8_data_signed_range(self): self.data_dtype = np.dtype("uint8") value = 
self.array_lib.array(1, dtype="int8") self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) + self.assert_attribute(value.dtype) def test_valid_range_cannot_coerce(self): value = self.array_lib.array(1.5, dtype="float64") msg = '"valid_min" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): self.check_attribute_compliance_call(value) def test_valid_range_not_numpy_array(self): @@ -795,18 +788,16 @@ def test_valid_range_not_numpy_array(self): self.data_dtype = np.dtype("int8") value = 1 self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) + self.assert_attribute(np.int64) def test_uncastable_dtype(self): self.data_dtype = np.dtype("int64") value = np.iinfo(self.data_dtype).min - with self.assertRaisesRegex(ValueError, "cannot be safely cast"): + with pytest.raises(ValueError, match="cannot be safely cast"): self.check_attribute_compliance_call(value, file_type="NETCDF4_CLASSIC") -class Test_check_attribute_compliance__valid_max( - _Common__check_attribute_compliance, tests.IrisTest -): +class Test_check_attribute_compliance__valid_max(_Common__check_attribute_compliance): @property def attribute(self): return "valid_max" @@ -814,18 +805,18 @@ def attribute(self): def test_valid_range_type_coerce(self): value = self.array_lib.array(2, dtype="float") self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) + self.assert_attribute(self.data_dtype) def test_valid_range_unsigned_int8_data_signed_range(self): self.data_dtype = np.dtype("uint8") value = self.array_lib.array(2, dtype="int8") self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) + self.assert_attribute(value.dtype) def test_valid_range_cannot_coerce(self): value = self.array_lib.array(2.5, dtype="float64") msg = '"valid_max" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, 
match=msg): self.check_attribute_compliance_call(value) def test_valid_range_not_numpy_array(self): @@ -833,17 +824,17 @@ def test_valid_range_not_numpy_array(self): self.data_dtype = np.dtype("int8") value = 2 self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) + self.assert_attribute(np.int64) def test_uncastable_dtype(self): self.data_dtype = np.dtype("int64") value = np.iinfo(self.data_dtype).max - with self.assertRaisesRegex(ValueError, "cannot be safely cast"): + with pytest.raises(ValueError, match="cannot be safely cast"): self.check_attribute_compliance_call(value, file_type="NETCDF4_CLASSIC") class Test_check_attribute_compliance__exception_handling( - _Common__check_attribute_compliance, tests.IrisTest + _Common__check_attribute_compliance ): def test_valid_range_and_valid_min_valid_max_provided(self): # Conflicting attributes should raise a suitable exception. @@ -852,17 +843,17 @@ def test_valid_range_and_valid_min_valid_max_provided(self): self.container.attributes["valid_min"] = [1] msg = 'Both "valid_range" and "valid_min"' with Saver("nonexistent test file", "NETCDF4") as saver: - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): saver.check_attribute_compliance(self.container, self.data_dtype) -class Test__cf_coord_identity(tests.IrisTest): +class Test__cf_coord_identity: def check_call(self, coord_name, coord_system, units, expected_units): coord = iris.coords.DimCoord( [30, 45], coord_name, units=units, coord_system=coord_system ) result = Saver._cf_coord_standardised_units(coord) - self.assertEqual(result, expected_units) + assert result == expected_units def test_geogcs_latitude(self): crs = iris.coord_systems.GeogCS(60, 30) @@ -1006,7 +997,7 @@ def test_no_cs(self, transverse_mercator_cube_multi_cs, tmp_path, request): assert_CDL(request, nc_path) -class Test_create_cf_grid_mapping: +class Test_create_cf_grid_mapping(MockerMixin): """Tests correct generation of CF grid_mapping 
variable attributes. Note: The first 3 tests are run with the "extended grid" mapping @@ -1033,15 +1024,15 @@ def _grid_mapping_variable(self, coord_system): """ cube = self._cube_with_cs(coord_system) - class NCMock(mock.Mock): + class NCMock(self.mocker.Mock): def setncattr(self, name, attr): setattr(self, name, attr) # Calls the actual NetCDF saver with appropriate mocking, returning # the grid variable that gets created. grid_variable = NCMock(name="NetCDFVariable") - create_var_fn = mock.Mock(side_effect=[grid_variable]) - dataset = mock.Mock(variables=[], createVariable=create_var_fn) + create_var_fn = self.mocker.Mock(side_effect=[grid_variable]) + dataset = self.mocker.Mock(variables=[], createVariable=create_var_fn) variable = NCMock() saver = Saver(dataset, "NETCDF4", compute=False) @@ -1443,7 +1434,3 @@ def test_oblique_cs(self): def extended_grid_mapping(request): """Fixture for enabling/disabling extended grid mapping.""" return request.param - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py index 5b04b3b042..a8175da116 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py @@ -4,27 +4,33 @@ # See LICENSE in the root of the repository for full licensing details. """Mirror of :mod:`iris.tests.unit.fileformats.netcdf.test_Saver`, but with lazy arrays.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
from types import ModuleType -import iris.tests as tests # isort:skip - from dask import array as da +import pytest from iris.coords import AuxCoord from iris.fileformats.netcdf import Saver -from iris.tests import stock +from iris.tests import _shared_utils, stock from iris.tests.unit.fileformats.netcdf.saver import test_Saver -class LazyMixin(tests.IrisTest): +class LazyMixin: array_lib: ModuleType = da - def result_path(self, basename=None, ext=""): - # Precisely mirroring the tests in test_Saver, so use those CDL's. - original = super().result_path(basename, ext) - return original.replace("Saver__lazy", "Saver") + @pytest.fixture(autouse=True) + def _setup_lazy_mixin(self, monkeypatch): + rp = _shared_utils.result_path + + def _result_path(request, basename=None, ext=""): + # Precisely mirroring the tests in test_Saver, so use those CDL's. + original = rp(request, basename, ext) + return original.replace("Saver__lazy", "Saver") + + monkeypatch.setattr( + "iris.tests._shared_utils.result_path", # IMPORTANT: patch where it is USED + _result_path, + ) class Test_write(LazyMixin, test_Saver.Test_write): @@ -81,39 +87,40 @@ class Test_check_attribute_compliance__exception_handling( pass -class TestStreamed(tests.IrisTest): - def setUp(self): +class TestStreamed: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.cube = stock.simple_2d() - self.store_watch = self.patch("dask.array.store") + self.store_watch = mocker.patch("dask.array.store") - def save_common(self, cube_to_save): - with self.temp_filename(".nc") as nc_path: + @pytest.fixture + def save_common(self, tmp_path): + def _save_common(cube_to_save): + nc_path = tmp_path / "temp.nc" with Saver(nc_path, "NETCDF4") as saver: saver.write(cube_to_save) - def test_realised_not_streamed(self): - self.save_common(self.cube) - self.assertFalse(self.store_watch.called) + return _save_common - def test_lazy_streamed_data(self): + def test_realised_not_streamed(self, save_common): + save_common(self.cube) 
+ assert not self.store_watch.called + + def test_lazy_streamed_data(self, save_common): self.cube.data = self.cube.lazy_data() - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) + save_common(self.cube) + assert self.store_watch.called - def test_lazy_streamed_coord(self): + def test_lazy_streamed_coord(self, save_common): aux_coord = AuxCoord.from_coord(self.cube.coords()[0]) lazy_coord = aux_coord.copy(aux_coord.lazy_points(), aux_coord.lazy_bounds()) self.cube.replace_coord(lazy_coord) - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) + save_common(self.cube) + assert self.store_watch.called - def test_lazy_streamed_bounds(self): + def test_lazy_streamed_bounds(self, save_common): aux_coord = AuxCoord.from_coord(self.cube.coords()[0]) lazy_coord = aux_coord.copy(aux_coord.points, aux_coord.lazy_bounds()) self.cube.replace_coord(lazy_coord) - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) - - -if __name__ == "__main__": - tests.main() + save_common(self.cube) + assert self.store_watch.called diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index 7c884e4c22..0f3a91fec2 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -11,7 +11,6 @@ """ from collections.abc import Iterator -from unittest import mock import dask.array as da import numpy as np @@ -24,19 +23,19 @@ class Test__lazy_stream_data: @staticmethod @pytest.fixture(autouse=True) - def saver_patch(): + def saver_patch(mocker): # Install patches, so we can create a Saver without opening a real output file. # Mock just enough of Dataset behaviour to allow a 'Saver.complete()' call. 
- mock_dataset = mock.MagicMock() - mock_dataset_class = mock.Mock(return_value=mock_dataset) + mock_dataset = mocker.MagicMock() + mock_dataset_class = mocker.Mock(return_value=mock_dataset) # Mock the wrapper within the netcdf saver target1 = "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper" # Mock the real netCDF4.Dataset within the threadsafe-nc module, as this is # used by NetCDFDataProxy and NetCDFWriteProxy. target2 = "iris.fileformats.netcdf._thread_safe_nc.netCDF4.Dataset" - with mock.patch(target1, mock_dataset_class): - with mock.patch(target2, mock_dataset_class): - yield + mocker.patch(target1, mock_dataset_class) + mocker.patch(target2, mock_dataset_class) + return # A fixture to parametrise tests over delayed and non-delayed Saver type. # NOTE: this only affects the saver context-exit, which we do not test here, so @@ -44,13 +43,13 @@ def saver_patch(): @staticmethod @pytest.fixture(params=[False, True], ids=["nocompute", "compute"]) def compute(request) -> Iterator[bool]: - yield request.param + return request.param # A fixture to parametrise tests over real and lazy-type data. @staticmethod @pytest.fixture(params=["realdata", "lazydata", "emulateddata"]) def data_form(request) -> Iterator[bool]: - yield request.param + return request.param @staticmethod def saver(compute) -> Saver: @@ -58,14 +57,14 @@ def saver(compute) -> Saver: return Saver(filename="", netcdf_format="NETCDF4", compute=compute) @staticmethod - def mock_var(shape, with_data_array): + def mock_var(shape, with_data_array, mocker): # Create a test cf_var object. # N.B. using 'spec=' so we can control whether it has a '_data_array' property. 
if with_data_array: - extra_properties = {"_data_array": mock.sentinel.initial_data_array} + extra_properties = {"_data_array": mocker.sentinel.initial_data_array} else: extra_properties = {} - mock_cfvar = mock.MagicMock( + mock_cfvar = mocker.MagicMock( spec=threadsafe_nc.VariableWrapper, shape=tuple(shape), dtype=np.dtype(np.float32), @@ -77,7 +76,7 @@ def mock_var(shape, with_data_array): mock_cfvar.name = "" return mock_cfvar - def test_data_save(self, compute, data_form): + def test_data_save(self, compute, data_form, mocker): """Real data is transferred immediately, lazy data creates a delayed write.""" saver = self.saver(compute=compute) @@ -86,7 +85,7 @@ def test_data_save(self, compute, data_form): data = da.from_array(data) cf_var = self.mock_var( - data.shape, with_data_array=(data_form == "emulateddata") + data.shape, with_data_array=(data_form == "emulateddata"), mocker=mocker ) saver._lazy_stream_data(data=data, cf_var=cf_var) if data_form == "lazydata": @@ -111,4 +110,4 @@ def test_data_save(self, compute, data_form): cf_var.__setitem__.assert_called_once_with(slice(None), data) else: assert data_form == "emulateddata" - cf_var._data_array == mock.sentinel.exact_data_array + cf_var._data_array == mocker.sentinel.exact_data_array diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 9494eabebf..e5783925b0 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -9,21 +9,17 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - from pathlib import Path -import shutil -import tempfile import numpy as np +import pytest from iris import save from iris.coords import AuxCoord from iris.cube import Cube, CubeList from iris.fileformats.netcdf import _thread_safe_nc from iris.mesh import Connectivity, MeshXY, save_mesh +from iris.tests import _shared_utils from iris.tests.stock import realistic_4d XY_LOCS = ("x", "y") @@ -358,38 +354,39 @@ def filter_compression_calls(patch, compression_kwargs, mismatch=False): return result -class TestSaveUgrid__cube(tests.IrisTest): +class TestSaveUgrid__cube: """Test for saving cubes which have meshes.""" - @classmethod - def setUpClass(cls): - cls.temp_dir = Path(tempfile.mkdtemp()) + @pytest.fixture(autouse=True, scope="class") + @staticmethod + def _setup(request, tmp_path_factory): + request.cls.temp_dir = tmp_path_factory.mktemp("test") - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temp_dir) + @pytest.fixture + def check_save_cubes(self, request): + def _check_save_cubes(cube_or_cubes, compression_kwargs=None): + """Write cubes to a new file in the common temporary directory. - def check_save_cubes(self, cube_or_cubes, compression_kwargs=None): - """Write cubes to a new file in the common temporary directory. + Use a name unique to this testcase, to avoid any clashes. - Use a name unique to this testcase, to avoid any clashes. + """ + # use 'result_path' to name the file after the test function + tempfile_path = _shared_utils.result_path(request, ext=".nc") + # Create a file of that name, but discard the result path and put it + # in the common temporary directory. + tempfile_path = self.temp_dir / Path(tempfile_path).name - """ - # use 'result_path' to name the file after the test function - tempfile_path = self.result_path(ext=".nc") - # Create a file of that name, but discard the result path and put it - # in the common temporary directory. 
- tempfile_path = self.temp_dir / Path(tempfile_path).name + if compression_kwargs is None: + compression_kwargs = {} - if compression_kwargs is None: - compression_kwargs = {} + # Save data to the file. + save(cube_or_cubes, tempfile_path, **compression_kwargs) - # Save data to the file. - save(cube_or_cubes, tempfile_path, **compression_kwargs) + return tempfile_path - return tempfile_path + return _check_save_cubes - def test_compression(self): + def test_compression(self, check_save_cubes, mocker): """Test NetCDF serialization of a cube with attached mesh using compression. NetCDF data compression keyword arguments include "complevel", @@ -400,12 +397,12 @@ def test_compression(self): # Note that the patch location is "_thread_safe_nc" when it is imported # into the iris.fileformats.netcdf.saver. Also we want to check that the # compression kwargs are passed into the NetCDF4 createVariable method - patch = self.patch( + patch = mocker.patch( "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", ) # No need to patch this NetCDF4 variable to compensate for the previous patch # on createVariable, which doesn't actually create the variable. - self.patch( + mocker.patch( "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.variables" ) cube = make_cube(var_name=(var_name := "a")) @@ -416,7 +413,7 @@ def test_compression(self): "zlib": True, } - _ = self.check_save_cubes(cube, compression_kwargs=compression_kwargs) + _ = check_save_cubes(cube, compression_kwargs=compression_kwargs) # The following mesh components and cube should be compressed on serialization. result = filter_compression_calls(patch, compression_kwargs) @@ -427,12 +424,12 @@ def test_compression(self): expected = {"Mesh2d"} assert result == expected - def test_basic_mesh(self): + def test_basic_mesh(self, request, check_save_cubes): # Save a small mesh example and check aspects of the resulting file. cube = make_cube() # A simple face-mapped data example. 
# Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # There is exactly 1 mesh var. @@ -444,68 +441,64 @@ def test_basic_mesh(self): mesh_props = vars[mesh_name] # The mesh var links to the mesh, with location 'faces' - self.assertEqual(a_name, "unknown") - self.assertEqual(a_props["mesh"], mesh_name) - self.assertEqual(a_props["location"], "face") + assert a_name == "unknown" + assert a_props["mesh"] == mesh_name + assert a_props["location"] == "face" # There are 2 face coords == those listed in the mesh face_coords = mesh_props["face_coordinates"].split(" ") - self.assertEqual(len(face_coords), 2) + assert len(face_coords) == 2 # The face coords should both map that single dim. face_dim = vars_meshdim(vars, "face") - self.assertTrue(all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords)) + assert all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords) # The face coordinates should be referenced by the data variable. for coord in face_coords: - self.assertIn(coord, a_props["coordinates"]) + assert coord in a_props["coordinates"] # The dims of the datavar also == [] - self.assertEqual(a_props[_VAR_DIMS], [face_dim]) + assert a_props[_VAR_DIMS] == [face_dim] # There are 2 node coordinates == those listed in the mesh. node_coords = mesh_props["node_coordinates"].split(" ") - self.assertEqual(len(node_coords), 2) + assert len(node_coords) == 2 # These are the *only* ones using the 'nodes' dimension. node_dim = vars_meshdim(vars, "node") - self.assertEqual( - sorted(node_coords), sorted(vars_w_dims(vars, [node_dim]).keys()) - ) + assert sorted(node_coords) == sorted(vars_w_dims(vars, [node_dim]).keys()) # There are no edges. 
- self.assertNotIn("edge_node_connectivity", mesh_props) - self.assertEqual(len(vars_w_props(vars, cf_role="edge_node_connectivity")), 0) + assert "edge_node_connectivity" not in mesh_props + assert len(vars_w_props(vars, cf_role="edge_node_connectivity")) == 0 # The dims are precisely (nodes, faces, nodes-per-face), in that order. - self.assertEqual( - list(dims.keys()), - ["Mesh2d_nodes", "Mesh2d_faces", "Mesh2d_face_N_nodes"], - ) + assert list(dims.keys()) == [ + "Mesh2d_nodes", + "Mesh2d_faces", + "Mesh2d_face_N_nodes", + ] # The variables are exactly (mesh, 2*node-coords, 2*face-coords, # face-nodes, data) -- in that order - self.assertEqual( - list(vars.keys()), - [ - "Mesh2d", - "node_x", - "node_y", - "face_x", - "face_y", - "mesh2d_faces", - "unknown", - ], - ) + assert list(vars.keys()) == [ + "Mesh2d", + "node_x", + "node_y", + "face_x", + "face_y", + "mesh2d_faces", + "unknown", + ] # For completeness, also check against a full CDL snapshot - self.assertCDL(tempfile_path) + _shared_utils.assert_CDL(request, tempfile_path) - def test_multi_cubes_common_mesh(self): + def test_multi_cubes_common_mesh(self, check_save_cubes): cube1 = make_cube(var_name="a") cube2 = make_cube(var_name="b") # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there is exactly 1 mesh in the file @@ -513,19 +506,19 @@ def test_multi_cubes_common_mesh(self): # both the main variables reference the same mesh, and 'face' location v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_a["coordinates"], "face_x face_y") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "face") - self.assertEqual(v_b["coordinates"], "face_x face_y") - - def test_multi_cubes_different_locations(self): + assert v_a["mesh"] == mesh_name + assert v_a["location"] == 
"face" + assert v_a["coordinates"] == "face_x face_y" + assert v_b["mesh"] == mesh_name + assert v_b["location"] == "face" + assert v_b["coordinates"] == "face_x face_y" + + def test_multi_cubes_different_locations(self, check_save_cubes): cube1 = make_cube(var_name="a", location="face") cube2 = make_cube(var_name="b", location="node") # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there is exactly 1 mesh in the file @@ -533,20 +526,20 @@ def test_multi_cubes_different_locations(self): # the main variables reference the same mesh at different locations v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_a["coordinates"], "face_x face_y") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "node") - self.assertEqual(v_b["coordinates"], "node_x node_y") + assert v_a["mesh"] == mesh_name + assert v_a["location"] == "face" + assert v_a["coordinates"] == "face_x face_y" + assert v_b["mesh"] == mesh_name + assert v_b["location"] == "node" + assert v_b["coordinates"] == "node_x node_y" # the main variables map the face and node dimensions face_dim = vars_meshdim(vars, "face") node_dim = vars_meshdim(vars, "node") - self.assertEqual(v_a[_VAR_DIMS], [face_dim]) - self.assertEqual(v_b[_VAR_DIMS], [node_dim]) + assert v_a[_VAR_DIMS] == [face_dim] + assert v_b[_VAR_DIMS] == [node_dim] - def test_multi_cubes_equal_meshes(self): + def test_multi_cubes_equal_meshes(self, check_save_cubes): # Make 2 identical meshes # NOTE: *can't* name these explicitly, as it stops them being identical. 
mesh1 = make_mesh() @@ -555,49 +548,49 @@ def test_multi_cubes_equal_meshes(self): cube2 = make_cube(var_name="b", mesh=mesh2) # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there is exactly 1 mesh in the file mesh_names = vars_meshnames(vars) - self.assertEqual(sorted(mesh_names), ["Mesh2d"]) + assert sorted(mesh_names) == ["Mesh2d"] # same dimensions - self.assertEqual(vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes") - self.assertEqual(vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces") + assert vars_meshdim(vars, "node", mesh_name="Mesh2d") == "Mesh2d_nodes" + assert vars_meshdim(vars, "face", mesh_name="Mesh2d") == "Mesh2d_faces" # there are exactly two data-variables with a 'mesh' property mesh_datavars = vars_w_props(vars, mesh="*") - self.assertEqual(["a", "b"], list(mesh_datavars)) + assert ["a", "b"] == list(mesh_datavars) # the data variables reference the same mesh a_props, b_props = vars["a"], vars["b"] for props in a_props, b_props: - self.assertEqual(props["mesh"], "Mesh2d") - self.assertEqual(props["location"], "face") - self.assertEqual(props["coordinates"], "face_x face_y") + assert props["mesh"] == "Mesh2d" + assert props["location"] == "face" + assert props["coordinates"] == "face_x face_y" # the data variables map the appropriate node dimension - self.assertEqual(a_props[_VAR_DIMS], ["Mesh2d_faces"]) - self.assertEqual(b_props[_VAR_DIMS], ["Mesh2d_faces"]) + assert a_props[_VAR_DIMS] == ["Mesh2d_faces"] + assert b_props[_VAR_DIMS] == ["Mesh2d_faces"] - def test_multi_cubes_different_mesh(self): + def test_multi_cubes_different_mesh(self, check_save_cubes): # Check that we can correctly distinguish 2 different meshes. 
cube1 = make_cube(var_name="a") cube2 = make_cube(var_name="b", mesh=make_mesh(n_faces=4)) # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there are 2 meshes in the file mesh_names = vars_meshnames(vars) - self.assertEqual(len(mesh_names), 2) + assert len(mesh_names) == 2 # there are two (data)variables with a 'mesh' property mesh_datavars = vars_w_props(vars, mesh="*") - self.assertEqual(2, len(mesh_datavars)) - self.assertEqual(["a", "b"], sorted(mesh_datavars.keys())) + assert 2 == len(mesh_datavars) + assert ["a", "b"] == sorted(mesh_datavars.keys()) def get_props_attrs(props: dict): return props["mesh"], props["location"], props["coordinates"] @@ -606,18 +599,18 @@ def get_props_attrs(props: dict): a_props, b_props = vars["a"], vars["b"] mesh_a, loc_a, coords_a = get_props_attrs(a_props) mesh_b, loc_b, coords_b = get_props_attrs(b_props) - self.assertNotEqual(mesh_a, mesh_b) - self.assertNotEqual(coords_a, coords_b) - self.assertEqual(loc_a, "face") - self.assertEqual(loc_b, "face") + assert mesh_a != mesh_b + assert coords_a != coords_b + assert loc_a == "face" + assert loc_b == "face" - def test_nonmesh_dim(self): + def test_nonmesh_dim(self, check_save_cubes): # Check where the data variable has a 'normal' dim and a mesh dim. cube = make_cube() cube = add_height_dim(cube) # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # have just 1 mesh, including a face and node coordinates. 
@@ -631,12 +624,12 @@ def test_nonmesh_dim(self): ((data_name, data_props),) = vars_w_props(vars, mesh="*").items() # data maps to the height + mesh dims - self.assertEqual(data_props[_VAR_DIMS], ["height", face_dim]) - self.assertEqual(data_props["mesh"], mesh_name) - self.assertEqual(data_props["location"], "face") + assert data_props[_VAR_DIMS] == ["height", face_dim] + assert data_props["mesh"] == mesh_name + assert data_props["location"] == "face" - @tests.skip_data - def test_nonmesh_hybrid_dim(self): + @_shared_utils.skip_data + def test_nonmesh_hybrid_dim(self, check_save_cubes): # Check a case with a hybrid non-mesh dimension cube = realistic_4d() # Strip off the time and longitude dims, to make it simpler. @@ -663,7 +656,7 @@ def test_nonmesh_hybrid_dim(self): cube.add_aux_coord(coord, (i_horizontal_dim,)) # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # have just 1 mesh, including face and node coordinates. @@ -672,29 +665,26 @@ def test_nonmesh_hybrid_dim(self): _ = vars_meshdim(vars, "node", mesh_name) # have hybrid vertical dimension, with all the usual term variables. 
- self.assertIn("model_level_number", dims) + assert "model_level_number" in dims vert_vars = list(vars_w_dims(vars, ["model_level_number"]).keys()) # The list of file variables mapping the vertical dimension: # = the data-var, plus all the height terms - self.assertEqual( - vert_vars, - [ - "air_potential_temperature", - "model_level_number", - "level_height", - "level_height_bnds", - "sigma", - "sigma_bnds", - ], - ) + assert vert_vars == [ + "air_potential_temperature", + "model_level_number", + "level_height", + "level_height_bnds", + "sigma", + "sigma_bnds", + ] # have just 1 data-variable, which maps to hybrid-height and mesh dims ((data_name, data_props),) = vars_w_props(vars, mesh="*").items() - self.assertEqual(data_props[_VAR_DIMS], ["model_level_number", face_dim]) - self.assertEqual(data_props["mesh"], mesh_name) - self.assertEqual(data_props["location"], "face") + assert data_props[_VAR_DIMS] == ["model_level_number", face_dim] + assert data_props["mesh"] == mesh_name + assert data_props["location"] == "face" - def test_alternate_cube_dim_order(self): + def test_alternate_cube_dim_order(self, check_save_cubes): # A cube transposed from the 'usual' order # Should work much the same as the "basic" case. 
cube_1 = make_cube(var_name="a") @@ -705,7 +695,7 @@ def test_alternate_cube_dim_order(self): cube_2.transpose() # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube_1, cube_2]) + tempfile_path = check_save_cubes([cube_1, cube_2]) dims, vars = scan_dataset(tempfile_path) # There is only 1 mesh @@ -713,16 +703,16 @@ def test_alternate_cube_dim_order(self): # both variables reference the same mesh v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "face") + assert v_a["mesh"] == mesh_name + assert v_a["location"] == "face" + assert v_b["mesh"] == mesh_name + assert v_b["location"] == "face" # Check the var dimensions - self.assertEqual(v_a[_VAR_DIMS], ["height", "Mesh2d_faces"]) - self.assertEqual(v_b[_VAR_DIMS], ["Mesh2d_faces", "height"]) + assert v_a[_VAR_DIMS] == ["height", "Mesh2d_faces"] + assert v_b[_VAR_DIMS] == ["Mesh2d_faces", "height"] - def test_mixed_aux_coords(self): + def test_mixed_aux_coords(self, check_save_cubes): """``coordinates`` attribute should include mesh location coords and 'normal' coords.""" cube = make_cube() mesh_dim = cube.mesh_dim() @@ -731,7 +721,7 @@ def test_mixed_aux_coords(self): cube.add_aux_coord(coord, mesh_dim) # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # There is exactly 1 mesh-linked (data)var @@ -742,41 +732,42 @@ def test_mixed_aux_coords(self): expected_coords.append(coord) expected_coord_names = [c.var_name for c in expected_coords] expected_coord_attr = " ".join(sorted(expected_coord_names)) - self.assertEqual(a_props["coordinates"], expected_coord_attr) + assert a_props["coordinates"] == expected_coord_attr -class TestSaveUgrid__mesh(tests.IrisTest): +class TestSaveUgrid__mesh: """Tests for saving meshes to a file.""" - @classmethod - 
def setUpClass(cls): - cls.temp_dir = Path(tempfile.mkdtemp()) + @pytest.fixture(autouse=True, scope="class") + @staticmethod + def _setup(request, tmp_path_factory): + request.cls.temp_dir = tmp_path_factory.mktemp("test") - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temp_dir) + @pytest.fixture + def check_save_mesh(self, request): + def _check_save_mesh(mesh, compression_kwargs=None): + """Write a mesh to a new file in the common temporary directory. - def check_save_mesh(self, mesh, compression_kwargs=None): - """Write a mesh to a new file in the common temporary directory. + Use a name unique to this testcase, to avoid any clashes. - Use a name unique to this testcase, to avoid any clashes. + """ + # use 'result_path' to name the file after the test function + tempfile_path = _shared_utils.result_path(request, ext=".nc") + # Create a file of that name, but discard the result path and put it + # in the common temporary directory. + tempfile_path = self.temp_dir / Path(tempfile_path).name - """ - # use 'result_path' to name the file after the test function - tempfile_path = self.result_path(ext=".nc") - # Create a file of that name, but discard the result path and put it - # in the common temporary directory. - tempfile_path = self.temp_dir / Path(tempfile_path).name + if compression_kwargs is None: + compression_kwargs = {} - if compression_kwargs is None: - compression_kwargs = {} + # Save data to the file. + save_mesh(mesh, tempfile_path, **compression_kwargs) - # Save data to the file. - save_mesh(mesh, tempfile_path, **compression_kwargs) + return tempfile_path - return tempfile_path + return _check_save_mesh - def test_compression(self): + def test_compression(self, check_save_mesh, mocker): """Test NetCDF serialization of a mesh using compression. NetCDF data compression keyword arguments include "complevel", @@ -784,10 +775,10 @@ def test_compression(self): are only applicable when "zlib=True". 
""" - patch = self.patch( + patch = mocker.patch( "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", ) - self.patch( + mocker.patch( "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.variables" ) mesh = make_mesh() @@ -798,7 +789,7 @@ def test_compression(self): "zlib": True, } - _ = self.check_save_mesh(mesh, compression_kwargs=compression_kwargs) + _ = check_save_mesh(mesh, compression_kwargs=compression_kwargs) # The following mesh components should be compressed on serialization. result = filter_compression_calls(patch, compression_kwargs) @@ -809,7 +800,7 @@ def test_compression(self): expected = {"Mesh2d"} assert result == expected - def test_connectivity_dim_order(self): + def test_connectivity_dim_order(self, check_save_mesh): """Test a mesh with some connectivities in the 'other' order. This should also create a property with the dimension name. @@ -832,7 +823,7 @@ def test_connectivity_dim_order(self): ) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh2) + tempfile_path = check_save_mesh(mesh2) dims, vars = scan_dataset(tempfile_path) # Check shape and dimensions of the associated connectivity variables. 
@@ -842,24 +833,20 @@ def test_connectivity_dim_order(self): edgeconn_name = mesh_props["edge_node_connectivity"] faceconn_props = vars[faceconn_name] edgeconn_props = vars[edgeconn_name] - self.assertEqual( - faceconn_props[_VAR_DIMS], ["Mesh_2d_face_N_nodes", "Mesh2d_face"] - ) - self.assertEqual( - edgeconn_props[_VAR_DIMS], ["Mesh_2d_edge_N_nodes", "Mesh2d_edge"] - ) + assert faceconn_props[_VAR_DIMS] == ["Mesh_2d_face_N_nodes", "Mesh2d_face"] + assert edgeconn_props[_VAR_DIMS] == ["Mesh_2d_edge_N_nodes", "Mesh2d_edge"] # Check the dimension lengths are also as expected - self.assertEqual(dims["Mesh2d_face"], 2) - self.assertEqual(dims["Mesh_2d_face_N_nodes"], 4) - self.assertEqual(dims["Mesh2d_edge"], 7) - self.assertEqual(dims["Mesh_2d_edge_N_nodes"], 2) + assert dims["Mesh2d_face"] == 2 + assert dims["Mesh_2d_face_N_nodes"] == 4 + assert dims["Mesh2d_edge"] == 7 + assert dims["Mesh_2d_edge_N_nodes"] == 2 # the mesh has extra location-dimension properties - self.assertEqual(mesh_props["face_dimension"], "Mesh2d_face") - self.assertEqual(mesh_props["edge_dimension"], "Mesh2d_edge") + assert mesh_props["face_dimension"] == "Mesh2d_face" + assert mesh_props["edge_dimension"] == "Mesh2d_edge" - def test_connectivity_start_index(self): + def test_connectivity_start_index(self, check_save_mesh): """Test a mesh where some connectivities have start_index = 1.""" # Make a mesh with both faces *and* some edges mesh = make_mesh(n_edges=7) @@ -881,7 +868,7 @@ def test_connectivity_start_index(self): ) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh2) + tempfile_path = check_save_mesh(mesh2) dims, vars = scan_dataset(tempfile_path) # Check shape and dimensions of the associated connectivity variables. 
@@ -891,10 +878,10 @@ def test_connectivity_start_index(self): edgeconn_name = mesh_props["edge_node_connectivity"] faceconn_props = vars[faceconn_name] edgeconn_props = vars[edgeconn_name] - self.assertEqual(faceconn_props["start_index"], 0) - self.assertEqual(edgeconn_props["start_index"], 1) + assert faceconn_props["start_index"] == 0 + assert edgeconn_props["start_index"] == 1 - def test_nonuniform_connectivity(self): + def test_nonuniform_connectivity(self, check_save_mesh): # Check handling of connectivities with missing points. n_faces = 7 mesh = make_mesh(n_faces=n_faces) @@ -915,65 +902,66 @@ def test_nonuniform_connectivity(self): mesh.add_connectivities(conn) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) + tempfile_path = check_save_mesh(mesh) dims, vars = scan_dataset(tempfile_path) # Check that the mesh saved with the additional connectivity (mesh_name,) = vars_meshnames(vars) mesh_props = vars[mesh_name] - self.assertIn("face_face_connectivity", mesh_props) + assert "face_face_connectivity" in mesh_props ff_conn_name = mesh_props["face_face_connectivity"] # check that the connectivity has the corrects dims and fill-property ff_props = vars[ff_conn_name] - self.assertEqual(ff_props[_VAR_DIMS], ["Mesh2d_faces", "Mesh2d_face_N_faces"]) - self.assertIn("_FillValue", ff_props) - self.assertEqual(ff_props["_FillValue"], -1) + assert ff_props[_VAR_DIMS] == ["Mesh2d_faces", "Mesh2d_face_N_faces"] + assert "_FillValue" in ff_props + assert ff_props["_FillValue"] == -1 # Check that a 'normal' connectivity does *not* have a _FillValue fn_conn_name = mesh_props["face_node_connectivity"] fn_props = vars[fn_conn_name] - self.assertNotIn("_FillValue", fn_props) + assert "_FillValue" not in fn_props # For what it's worth, *also* check the actual data array in the file ds = _thread_safe_nc.DatasetWrapper(tempfile_path) conn_var = ds.variables[ff_conn_name] data = conn_var[:] ds.close() - self.assertIsInstance(data, np.ma.MaskedArray) - 
self.assertEqual(data.fill_value, -1) + assert isinstance(data, np.ma.MaskedArray) + assert data.fill_value == -1 # Compare raw values stored to indices, but with -1 at missing points raw_data = data.data filled_indices = indices.filled(-1) - self.assertArrayEqual(raw_data, filled_indices) + _shared_utils.assert_array_equal(raw_data, filled_indices) - def test_one_dimensional(self): + def test_one_dimensional(self, check_save_mesh): # Test a mesh with edges only. mesh = make_mesh(n_edges=5, n_faces=0, mesh_kwargs={"var_name": "Mesh1d"}) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) + tempfile_path = check_save_mesh(mesh) dims, vars = scan_dataset(tempfile_path) # there is a single mesh-var (mesh_name,) = vars_meshnames(vars) # the dims include edges but not faces - self.assertEqual( - list(dims.keys()), - ["Mesh1d_node", "Mesh1d_edge", "Mesh1d_edge_N_nodes"], - ) - self.assertEqual(vars_meshdim(vars, "node"), "Mesh1d_node") - self.assertEqual(vars_meshdim(vars, "edge"), "Mesh1d_edge") + assert list(dims.keys()) == [ + "Mesh1d_node", + "Mesh1d_edge", + "Mesh1d_edge_N_nodes", + ] + assert vars_meshdim(vars, "node") == "Mesh1d_node" + assert vars_meshdim(vars, "edge") == "Mesh1d_edge" # check suitable mesh properties - self.assertEqual(mesh_name, "Mesh1d") + assert mesh_name == "Mesh1d" mesh_props = vars[mesh_name] - self.assertEqual(mesh_props["topology_dimension"], 1) - self.assertIn("edge_node_connectivity", mesh_props) - self.assertNotIn("face_node_connectivity", mesh_props) + assert mesh_props["topology_dimension"] == 1 + assert "edge_node_connectivity" in mesh_props + assert "face_node_connectivity" not in mesh_props - def test_location_coord_units(self): + def test_location_coord_units(self, check_save_mesh): # Check that units on mesh locations are handled correctly. 
# NOTE: at present, the MeshXY class cannot handle coordinates that are # not recognised by 'guess_coord_axis' == suitable standard names @@ -1007,7 +995,7 @@ def test_location_coord_units(self): ) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) + tempfile_path = check_save_mesh(mesh) dims, vars = scan_dataset(tempfile_path) # there is a single mesh-var @@ -1024,17 +1012,17 @@ def test_location_coord_units(self): # 2. non- (plain) lonlat are NOT converted # 3. other names remain as whatever was given # 4. no units on input --> none on output - self.assertEqual(node_x["units"], "degrees") - self.assertEqual(node_y["units"], "ms-1") - self.assertNotIn("units", face_x) - self.assertEqual(face_y["units"], "degrees_north") + assert node_x["units"] == "degrees" + assert node_y["units"] == "ms-1" + assert "units" not in face_x + assert face_y["units"] == "degrees_north" # Check also that we did not add 'axis' properties. # We should *only* do that for dim-coords. - self.assertNotIn("axis", node_x) - self.assertNotIn("axis", node_y) - self.assertNotIn("axis", face_x) - self.assertNotIn("axis", face_y) + assert "axis" not in node_x + assert "axis" not in node_y + assert "axis" not in face_x + assert "axis" not in face_y @staticmethod def _namestext(names): @@ -1044,7 +1032,7 @@ def _namestext(names): ] return f"({' '.join(name_texts)})" - def test_mesh_names(self): + def test_mesh_names(self, check_save_mesh): # Check the selection of mesh-variables names. # N.B. this is basically centralised in Saver._get_mesh_variable_name, # but we test in an implementation-neutral way (as it's fairly easy). 
@@ -1100,7 +1088,7 @@ def test_mesh_names(self): # Make a mesh, with the mesh names set for the testcase mesh = make_mesh(mesh_kwargs=mesh_name_kwargs) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1114,9 +1102,9 @@ def test_mesh_names(self): f"Unexpected resulting names {self._namestext(result_names)} " f"when saving mesh with {self._namestext(given_names)}" ) - self.assertEqual(expected_names, result_names, fail_msg) + assert expected_names == result_names, fail_msg - def test_location_coord_names(self): + def test_location_coord_names(self, check_save_mesh): # Check the selection of mesh-element coordinate names. # Check the selection of mesh-variables names. # N.B. this is basically centralised in Saver._get_mesh_variable_name, @@ -1165,7 +1153,7 @@ def test_location_coord_names(self): ): setattr(coord, key, name) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1181,9 +1169,9 @@ def test_location_coord_names(self): "when saving mesh coordinate " f"with {self._namestext(given_names)}" ) - self.assertEqual(expected_names, result_names, fail_msg) + assert expected_names == result_names, fail_msg - def test_mesh_dim_names(self): + def test_mesh_dim_names(self, check_save_mesh): # Check the selection of dimension names from the mesh. dim_names_tests = [ @@ -1194,7 +1182,7 @@ def test_mesh_dim_names(self): for given_name, expected_name in dim_names_tests: mesh = make_mesh(mesh_kwargs={"face_dimension": given_name}) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1204,9 +1192,9 @@ def test_mesh_dim_names(self): f'Unexpected resulting dimension name "{face_dim}" ' f'when saving mesh with dimension name of "{given_name}".' 
) - self.assertEqual(expected_name, face_dim, fail_msg) + assert expected_name == face_dim, fail_msg - def test_connectivity_names(self): + def test_connectivity_names(self, check_save_mesh): # Check the selection of connectivity names. conn_names_tests = [ # var_name only @@ -1258,7 +1246,7 @@ def test_connectivity_names(self): ): setattr(conn, key, name) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1275,111 +1263,98 @@ def test_connectivity_names(self): "when saving connectivity " f"with {self._namestext(given_names)}" ) - self.assertEqual(expected_names, result_names, fail_msg) + assert expected_names == result_names, fail_msg - def test_multiple_equal_mesh(self): + def test_multiple_equal_mesh(self, check_save_mesh): mesh1 = make_mesh() mesh2 = make_mesh() # Save and snapshot the result - tempfile_path = self.check_save_mesh([mesh1, mesh2]) + tempfile_path = check_save_mesh([mesh1, mesh2]) dims, vars = scan_dataset(tempfile_path) # In this case there should be only *one* mesh. mesh_names = vars_meshnames(vars) - self.assertEqual(1, len(mesh_names)) + assert 1 == len(mesh_names) # Check it has the correct number of coords + conns (no duplicates) # Should have 2 each X and Y coords (face+node): _no_ edge coords. coord_vars_x = vars_w_props(vars, standard_name="longitude") coord_vars_y = vars_w_props(vars, standard_name="latitude") - self.assertEqual(2, len(coord_vars_x)) - self.assertEqual(2, len(coord_vars_y)) + assert 2 == len(coord_vars_x) + assert 2 == len(coord_vars_y) # Check the connectivities are all present: _only_ 1 var of each type. 
for conn in mesh1.all_connectivities: if conn is not None: conn_vars = vars_w_props(vars, cf_role=conn.cf_role) - self.assertEqual(1, len(conn_vars)) + assert 1 == len(conn_vars) - def test_multiple_different_meshes(self): + def test_multiple_different_meshes(self, check_save_mesh): # Create 2 meshes with different faces, but same edges. # N.B. they should then share an edge dimension. mesh1 = make_mesh(n_faces=3, n_edges=2) mesh2 = make_mesh(n_faces=4, n_edges=2) # Save and snapshot the result - tempfile_path = self.check_save_mesh([mesh1, mesh2]) + tempfile_path = check_save_mesh([mesh1, mesh2]) dims, vars = scan_dataset(tempfile_path) # Check the dims are as expected - self.assertEqual(dims["Mesh2d_faces"], 3) - self.assertEqual(dims["Mesh2d_faces_0"], 4) + assert dims["Mesh2d_faces"] == 3 + assert dims["Mesh2d_faces_0"] == 4 # There are no 'second' edge and node dims - self.assertEqual(dims["Mesh2d_nodes"], 5) - self.assertEqual(dims["Mesh2d_edge"], 2) + assert dims["Mesh2d_nodes"] == 5 + assert dims["Mesh2d_edge"] == 2 # Check there are two independent meshes in the file... 
# there are exactly 2 meshes in the file mesh_names = vars_meshnames(vars) - self.assertEqual(sorted(mesh_names), ["Mesh2d", "Mesh2d_0"]) + assert sorted(mesh_names) == ["Mesh2d", "Mesh2d_0"] # they use different dimensions # mesh1 - self.assertEqual(vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes") - self.assertEqual(vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces") + assert vars_meshdim(vars, "node", mesh_name="Mesh2d") == "Mesh2d_nodes" + assert vars_meshdim(vars, "face", mesh_name="Mesh2d") == "Mesh2d_faces" if "edge_coordinates" in vars["Mesh2d"]: - self.assertEqual( - vars_meshdim(vars, "edge", mesh_name="Mesh2d"), "Mesh2d_edge" - ) + assert vars_meshdim(vars, "edge", mesh_name="Mesh2d") == "Mesh2d_edge" # mesh2 - self.assertEqual( - vars_meshdim(vars, "node", mesh_name="Mesh2d_0"), "Mesh2d_nodes" - ) - self.assertEqual( - vars_meshdim(vars, "face", mesh_name="Mesh2d_0"), "Mesh2d_faces_0" - ) + assert vars_meshdim(vars, "node", mesh_name="Mesh2d_0") == "Mesh2d_nodes" + assert vars_meshdim(vars, "face", mesh_name="Mesh2d_0") == "Mesh2d_faces_0" if "edge_coordinates" in vars["Mesh2d_0"]: - self.assertEqual( - vars_meshdim(vars, "edge", mesh_name="Mesh2d_0"), - "Mesh2d_edge", - ) + assert vars_meshdim(vars, "edge", mesh_name="Mesh2d_0") == "Mesh2d_edge" # the relevant coords + connectivities are also distinct # mesh1 - self.assertEqual(vars["node_x"][_VAR_DIMS], ["Mesh2d_nodes"]) - self.assertEqual(vars["face_x"][_VAR_DIMS], ["Mesh2d_faces"]) - self.assertEqual( - vars["mesh2d_faces"][_VAR_DIMS], - ["Mesh2d_faces", "Mesh2d_face_N_nodes"], - ) + assert vars["node_x"][_VAR_DIMS] == ["Mesh2d_nodes"] + assert vars["face_x"][_VAR_DIMS] == ["Mesh2d_faces"] + assert vars["mesh2d_faces"][_VAR_DIMS] == [ + "Mesh2d_faces", + "Mesh2d_face_N_nodes", + ] if "edge_coordinates" in vars["Mesh2d"]: - self.assertEqual(vars["longitude"][_VAR_DIMS], ["Mesh2d_edge"]) - self.assertEqual( - vars["mesh2d_edge"][_VAR_DIMS], - ["Mesh2d_edge", 
"Mesh2d_edge_N_nodes"], - ) + assert vars["longitude"][_VAR_DIMS] == ["Mesh2d_edge"] + assert vars["mesh2d_edge"][_VAR_DIMS] == [ + "Mesh2d_edge", + "Mesh2d_edge_N_nodes", + ] # mesh2 - self.assertEqual(vars["node_x_0"][_VAR_DIMS], ["Mesh2d_nodes"]) - self.assertEqual(vars["face_x_0"][_VAR_DIMS], ["Mesh2d_faces_0"]) - self.assertEqual( - vars["mesh2d_faces_0"][_VAR_DIMS], - ["Mesh2d_faces_0", "Mesh2d_0_face_N_nodes"], - ) + assert vars["node_x_0"][_VAR_DIMS] == ["Mesh2d_nodes"] + assert vars["face_x_0"][_VAR_DIMS] == ["Mesh2d_faces_0"] + assert vars["mesh2d_faces_0"][_VAR_DIMS] == [ + "Mesh2d_faces_0", + "Mesh2d_0_face_N_nodes", + ] if "edge_coordinates" in vars["Mesh2d_0"]: - self.assertEqual(vars["longitude_0"][_VAR_DIMS], ["Mesh2d_edge"]) - self.assertEqual( - vars["mesh2d_edge_0"][_VAR_DIMS], - ["Mesh2d_edge", "Mesh2d_0_edge_N_nodes"], - ) + assert vars["longitude_0"][_VAR_DIMS] == ["Mesh2d_edge"] + assert vars["mesh2d_edge_0"][_VAR_DIMS] == [ + "Mesh2d_edge", + "Mesh2d_0_edge_N_nodes", + ] # WHEN MODIFYING THIS MODULE, CHECK IF ANY CORRESPONDING CHANGES ARE NEEDED IN # :mod:`iris.tests.unit.fileformats.netcdf.test_Saver__lazy.` - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index b4b06c8c33..b4ae37f29b 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -4,15 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.fileformats.netcdf.save` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from pathlib import Path -from shutil import rmtree -from tempfile import mkdtemp -from unittest import mock - import numpy as np import pytest @@ -20,81 +11,83 @@ from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION, Saver, _thread_safe_nc, save +from iris.tests import _shared_utils from iris.tests.stock import lat_lon_cube from iris.tests.stock.mesh import sample_mesh_cube -class Test_conventions(tests.IrisTest): - def setUp(self): +class Test_conventions: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = Cube([0]) self.custom_conventions = "convention1 convention2" self.cube.attributes["Conventions"] = self.custom_conventions self.options = iris.config.netcdf - def test_custom_conventions__ignored(self): + def test_custom_conventions__ignored(self, tmp_path): # Ensure that we drop existing conventions attributes and replace with # CF convention. - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, CF_CONVENTIONS_VERSION) - - def test_custom_conventions__allowed(self): + nc_path = tmp_path / "dummy.nc" + save(self.cube, nc_path, "NETCDF4") + ds = _thread_safe_nc.DatasetWrapper(nc_path) + res = ds.getncattr("Conventions") + ds.close() + assert res == CF_CONVENTIONS_VERSION + + def test_custom_conventions__allowed(self, mocker, tmp_path): # Ensure that existing conventions attributes are passed through if the # relevant Iris option is set. 
- with mock.patch.object(self.options, "conventions_override", True): - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, self.custom_conventions) - - def test_custom_conventions__allowed__missing(self): + nc_path = tmp_path / "dummy.nc" + mocker.patch.object(self.options, "conventions_override", True) + save(self.cube, nc_path, "NETCDF4") + ds = _thread_safe_nc.DatasetWrapper(nc_path) + res = ds.getncattr("Conventions") + ds.close() + assert res == self.custom_conventions + + def test_custom_conventions__allowed__missing(self, mocker, tmp_path): # Ensure the default conventions attribute is set if the relevant Iris # option is set but there is no custom conventions attribute. del self.cube.attributes["Conventions"] - with mock.patch.object(self.options, "conventions_override", True): - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, CF_CONVENTIONS_VERSION) + mocker.patch.object(self.options, "conventions_override", True) + nc_path = tmp_path / "dummy.nc" + save(self.cube, nc_path, "NETCDF4") + ds = _thread_safe_nc.DatasetWrapper(nc_path) + res = ds.getncattr("Conventions") + ds.close() + assert res == CF_CONVENTIONS_VERSION -class Test_attributes(tests.IrisTest): - def test_attributes_arrays(self): +class Test_attributes: + def test_attributes_arrays(self, tmp_path): # Ensure that attributes containing NumPy arrays can be equality # checked and their cubes saved as appropriate. 
c1 = Cube([1], attributes={"bar": np.arange(2)}) c2 = Cube([2], attributes={"bar": np.arange(2)}) - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - res = ds.getncattr("bar") - ds.close() - self.assertArrayEqual(res, np.arange(2)) + nc_out = tmp_path / "foo.nc" + save([c1, c2], nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + res = ds.getncattr("bar") + ds.close() + _shared_utils.assert_array_equal(res, np.arange(2)) - def test_attributes_arrays_incompatible_shapes(self): + def test_attributes_arrays_incompatible_shapes(self, tmp_path): # Ensure successful comparison without raising a broadcast error. c1 = Cube([1], attributes={"bar": np.arange(2)}) c2 = Cube([2], attributes={"bar": np.arange(3)}) - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - with pytest.raises(AttributeError): - _ = ds.getncattr("bar") - for var in ds.variables.values(): - res = var.getncattr("bar") - self.assertIsInstance(res, np.ndarray) - ds.close() - - def test_no_special_attribute_clash(self): + nc_out = tmp_path / "foo.nc" + save([c1, c2], nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + with pytest.raises(AttributeError): + _ = ds.getncattr("bar") + for var in ds.variables.values(): + res = var.getncattr("bar") + assert isinstance(res, np.ndarray) + ds.close() + + def test_no_special_attribute_clash(self, tmp_path): # Ensure that saving multiple cubes with netCDF4 protected attributes # works as expected. 
# Note that here we are testing variable attribute clashes only - by @@ -103,35 +96,36 @@ def test_no_special_attribute_clash(self): c1 = Cube([0], var_name="test", attributes={"name": "bar"}) c2 = Cube([0], var_name="test_1", attributes={"name": "bar_1"}) - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - res = ds.variables["test"].getncattr("name") - res_1 = ds.variables["test_1"].getncattr("name") - ds.close() - self.assertEqual(res, "bar") - self.assertEqual(res_1, "bar_1") + nc_out = tmp_path / "foo.nc" + save([c1, c2], nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + res = ds.variables["test"].getncattr("name") + res_1 = ds.variables["test_1"].getncattr("name") + ds.close() + assert res == "bar" + assert res_1 == "bar_1" -class Test_unlimited_dims(tests.IrisTest): - def test_no_unlimited_dims(self): +class Test_unlimited_dims: + def test_no_unlimited_dims(self, tmp_path): cube = lat_lon_cube() - with self.temp_filename("foo.nc") as nc_out: - save(cube, nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - self.assertFalse(ds.dimensions["latitude"].isunlimited()) + nc_out = tmp_path / "foo.nc" + save(cube, nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + assert not ds.dimensions["latitude"].isunlimited() - def test_unlimited_dim_latitude(self): + def test_unlimited_dim_latitude(self, tmp_path): cube = lat_lon_cube() unlim_dim_name = "latitude" - with self.temp_filename("foo.nc") as nc_out: - save(cube, nc_out, unlimited_dimensions=[unlim_dim_name]) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - self.assertTrue(ds.dimensions[unlim_dim_name].isunlimited()) + nc_out = tmp_path / "foo.nc" + save(cube, nc_out, unlimited_dimensions=[unlim_dim_name]) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + assert ds.dimensions[unlim_dim_name].isunlimited() -class Test_fill_value(tests.IrisTest): - def setUp(self): +class Test_fill_value: + @pytest.fixture(autouse=True) + def _setup(self): 
self.standard_names = [ "air_temperature", "air_potential_temperature", @@ -152,84 +146,85 @@ def _make_cubes(self): for name in self.standard_names ) - def test_None(self): + def test_none(self, mocker): # Test that when no fill_value argument is passed, the fill_value # argument to Saver.write is None or not present. cubes = self._make_cubes() - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cubes, "dummy.nc") + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + save(cubes, "dummy.nc") # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(3, write.call_count) + assert 3 == write.call_count for call in write.mock_calls: _, _, kwargs = call if "fill_value" in kwargs: - self.assertIs(None, kwargs["fill_value"]) + assert None is kwargs["fill_value"] - def test_single(self): + def test_single(self, mocker): # Test that when a single value is passed as the fill_value argument, # that value is passed to each call to Saver.write cubes = self._make_cubes() fill_value = 12345.0 - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cubes, "dummy.nc", fill_value=fill_value) + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + save(cubes, "dummy.nc", fill_value=fill_value) # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(3, write.call_count) + assert 3 == write.call_count for call in write.mock_calls: _, _, kwargs = call - self.assertEqual(fill_value, kwargs["fill_value"]) + assert fill_value == kwargs["fill_value"] - def test_multiple(self): + def test_multiple(self, mocker): # Test that when a list is passed as the fill_value argument, # each element is passed to separate calls to Saver.write cubes = self._make_cubes() fill_values = [123.0, 456.0, 789.0] - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cubes, "dummy.nc", fill_value=fill_values) + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + 
save(cubes, "dummy.nc", fill_value=fill_values) # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(3, write.call_count) + assert 3 == write.call_count for call, fill_value in zip(write.mock_calls, fill_values): _, _, kwargs = call - self.assertEqual(fill_value, kwargs["fill_value"]) + assert fill_value == kwargs["fill_value"] - def test_single_string(self): + def test_single_string(self, mocker): # Test that when a string is passed as the fill_value argument, # that value is passed to calls to Saver.write cube = Cube(["abc", "def", "hij"]) fill_value = "xyz" - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cube, "dummy.nc", fill_value=fill_value) + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + save(cube, "dummy.nc", fill_value=fill_value) # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(1, write.call_count) + assert 1 == write.call_count _, _, kwargs = write.mock_calls[0] - self.assertEqual(fill_value, kwargs["fill_value"]) + assert fill_value == kwargs["fill_value"] - def test_multi_wrong_length(self): + def test_multi_wrong_length(self, mocker): # Test that when a list of a different length to the number of cubes # is passed as the fill_value argument, an error is raised cubes = self._make_cubes() fill_values = [1.0, 2.0, 3.0, 4.0] - with mock.patch("iris.fileformats.netcdf.saver.Saver"): - with self.assertRaises(ValueError): + msg = "If fill_value is a list, it must have the same number of elements as the cube argument." + with mocker.patch("iris.fileformats.netcdf.saver.Saver"): + with pytest.raises(ValueError, match=msg): save(cubes, "dummy.nc", fill_value=fill_values) -class Test_HdfSaveBug(tests.IrisTest): +class Test_HdfSaveBug: """Check for a known problem with netcdf4. 
If you create dimension with the same name as an existing variable, there @@ -252,10 +247,10 @@ class Test_HdfSaveBug(tests.IrisTest): """ - def _check_save_and_reload(self, cubes): - tempdir = Path(mkdtemp()) - filepath = tempdir / "tmp.nc" - try: + @pytest.fixture + def _check_save_and_reload(self, tmp_path): + def check_save_and_reload(cubes): + filepath = tmp_path / "temp.nc" # Save the given cubes. save(cubes, filepath) @@ -263,7 +258,7 @@ def _check_save_and_reload(self, cubes): new_cubes = iris.load(str(filepath)) # There should definitely still be the same number of cubes. - self.assertEqual(len(new_cubes), len(cubes)) + assert len(new_cubes) == len(cubes) # Get results in the input order, matching by var_names. result = [new_cubes.extract_cube(cube.var_name) for cube in cubes] @@ -272,15 +267,14 @@ def _check_save_and_reload(self, cubes): # NB in this codeblock, before we destroy the temporary file. for cube_in, cube_out in zip(cubes, result): # Using special tolerant equivalence-check. - self.assertSameCubes(cube_in, cube_out) + self.assert_same_cubes(cube_in, cube_out) - finally: - rmtree(tempdir) + # Return result cubes for any additional checks. + return result - # Return result cubes for any additional checks. - return result + return check_save_and_reload - def assertSameCubes(self, cube1, cube2): + def assert_same_cubes(self, cube1, cube2): """A special tolerant cube compare. Ignore any 'Conventions' attributes. @@ -304,20 +298,20 @@ def clean_cube(cube): return cube - self.assertEqual(clean_cube(cube1), clean_cube(cube2)) + assert clean_cube(cube1) == clean_cube(cube2) - def test_dimcoord_varname_collision(self): + def test_dimcoord_varname_collision(self, _check_save_and_reload): cube_2 = Cube([0, 1], var_name="cube_2") x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") cube_2.add_dim_coord(x_dim, 0) # First cube has a varname which collides with the dimcoord. 
cube_1 = Cube([0, 1], long_name="cube_1", var_name="dimco_name") # Test save + loadback - reload_1, reload_2 = self._check_save_and_reload([cube_1, cube_2]) + reload_1, reload_2 = _check_save_and_reload([cube_1, cube_2]) # As re-loaded, the coord will have a different varname. - self.assertEqual(reload_2.coord("dim_x").var_name, "dimco_name_0") + assert reload_2.coord("dim_x").var_name == "dimco_name_0" - def test_anonymous_dim_varname_collision(self): + def test_anonymous_dim_varname_collision(self, _check_save_and_reload): # Second cube is going to name an anonymous dim. cube_2 = Cube([0, 1], var_name="cube_2") # First cube has a varname which collides with the dim-name. @@ -326,9 +320,9 @@ def test_anonymous_dim_varname_collision(self): x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") cube_1.add_dim_coord(x_dim, 0) # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_bounds_dim_varname_collision(self): + def test_bounds_dim_varname_collision(self, _check_save_and_reload): cube_2 = Cube([0, 1], var_name="cube_2") x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") x_dim.guess_bounds() @@ -336,9 +330,9 @@ def test_bounds_dim_varname_collision(self): # First cube has a varname which collides with the bounds dimension. cube_1 = Cube([0], long_name="cube_1", var_name="bnds") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_string_dim_varname_collision(self): + def test_string_dim_varname_collision(self, _check_save_and_reload): cube_2 = Cube([0, 1], var_name="cube_2") # NOTE: it *should* be possible for a cube with string data to cause # this collision, but cubes with string data are currently not working. 
@@ -347,21 +341,21 @@ def test_string_dim_varname_collision(self): cube_2.add_aux_coord(x_dim, 0) cube_1 = Cube([0], long_name="cube_1", var_name="string4") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_mesh_location_dim_varname_collision(self): + def test_mesh_location_dim_varname_collision(self, _check_save_and_reload): cube_2 = sample_mesh_cube() cube_2.var_name = "cube_2" # Make it identifiable cube_1 = Cube([0], long_name="cube_1", var_name="Mesh2d_node") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_connectivity_dim_varname_collision(self): + def test_connectivity_dim_varname_collision(self, _check_save_and_reload): cube_2 = sample_mesh_cube() cube_2.var_name = "cube_2" # Make it identifiable cube_1 = Cube([0], long_name="cube_1", var_name="Mesh_2d_face_N_nodes") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) class Test_compute_usage: @@ -374,13 +368,13 @@ class Test_compute_usage: # A fixture to mock out Saver object creation in a 'save' call. @staticmethod @pytest.fixture - def mock_saver_creation(): + def mock_saver_creation(mocker): # A mock for a Saver object. - mock_saver = mock.MagicMock(spec=Saver) + mock_saver = mocker.MagicMock(spec=Saver) # make an __enter__ call return the object itself (as the real Saver does). - mock_saver.__enter__ = mock.Mock(return_value=mock_saver) + mock_saver.__enter__ = mocker.Mock(return_value=mock_saver) # A mock for the Saver() constructor call. - mock_new_saver_call = mock.Mock(return_value=mock_saver) + mock_new_saver_call = mocker.Mock(return_value=mock_saver) # Replace the whole Saver class with a simple function, which thereby emulates # the constructor call. 
This avoids complications due to the fact that Mock @@ -389,23 +383,23 @@ def mock_saver_class_create(*args, **kwargs): return mock_new_saver_call(*args, **kwargs) # Patch the Saver() creation to return our mock Saver object. - with mock.patch("iris.fileformats.netcdf.saver.Saver", mock_saver_class_create): - # Return mocks for both constructor call, and Saver object. - yield mock_new_saver_call, mock_saver + mocker.patch("iris.fileformats.netcdf.saver.Saver", mock_saver_class_create) + # Return mocks for both constructor call, and Saver object. + return mock_new_saver_call, mock_saver # A fixture to provide some mock args for 'Saver' creation. @staticmethod @pytest.fixture - def mock_saver_args(): + def mock_saver_args(mocker): from collections import namedtuple # A special object for the cube, since cube.attributes must be indexable - mock_cube = mock.MagicMock() + mock_cube = mocker.MagicMock() args = namedtuple("saver_args", ["cube", "filename", "format", "compute"])( cube=mock_cube, - filename=mock.sentinel.filepath, - format=mock.sentinel.netcdf4, - compute=mock.sentinel.compute, + filename=mocker.sentinel.filepath, + format=mocker.sentinel.netcdf4, + compute=mocker.sentinel.compute, ) return args @@ -454,7 +448,3 @@ def test_compute_false_result_delayed(self, mock_saver_creation, mock_saver_args assert mock_saver.delayed_completion.call_count == 1 # .. and should return the result of that. assert result is mock_saver.delayed_completion.return_value - - -if __name__ == "__main__": - tests.main() From dc4cd76f9e9a6d7d7136bfd5422792f702d31f61 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Fri, 6 Feb 2026 10:12:29 +0000 Subject: [PATCH 13/77] Converted fileformats/pp to pytest (#6904) * converted __convert_constraints * converted __create_field_data * converted __data_bytes... 
* test__field_gen * test__interpret * test_as_fields * test_load * test_PPData * test_PPField * test_save * test_save_fields * test_save_pairs * removed redundant import * PT fixes * fixed mocker patch cm --- .../unit/fileformats/pp/test_PPDataProxy.py | 34 ++-- .../tests/unit/fileformats/pp/test_PPField.py | 192 +++++++++--------- .../pp/test__convert_constraints.py | 72 +++---- .../fileformats/pp/test__create_field_data.py | 62 +++--- .../pp/test__data_bytes_to_shaped_array.py | 53 +++-- .../unit/fileformats/pp/test__field_gen.py | 114 +++++------ .../fileformats/pp/test__interpret_field.py | 56 +++-- .../unit/fileformats/pp/test_as_fields.py | 17 +- .../tests/unit/fileformats/pp/test_load.py | 27 +-- .../tests/unit/fileformats/pp/test_save.py | 16 +- .../unit/fileformats/pp/test_save_fields.py | 42 ++-- .../pp/test_save_pairs_from_cube.py | 25 +-- 12 files changed, 314 insertions(+), 396 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py index 4d963e7f08..cddc6ccad4 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py @@ -4,33 +4,23 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.PPDataProxy` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.pp import PPDataProxy, SplittableInt -class Test_lbpack(tests.IrisTest): - def test_lbpack_SplittableInt(self): - lbpack = mock.Mock(spec_set=SplittableInt) +class Test_lbpack: + def test_lbpack_splittable_int(self, mocker): + lbpack = mocker.Mock(spec_set=SplittableInt) proxy = PPDataProxy(None, None, None, None, None, lbpack, None, None) - self.assertEqual(proxy.lbpack, lbpack) - self.assertIs(proxy.lbpack, lbpack) + assert proxy.lbpack == lbpack + assert proxy.lbpack is lbpack def test_lbpack_raw(self): lbpack = 4321 proxy = PPDataProxy(None, None, None, None, None, lbpack, None, None) - self.assertEqual(proxy.lbpack, lbpack) - self.assertIsNot(proxy.lbpack, lbpack) - self.assertIsInstance(proxy.lbpack, SplittableInt) - self.assertEqual(proxy.lbpack.n1, lbpack % 10) - self.assertEqual(proxy.lbpack.n2, lbpack // 10 % 10) - self.assertEqual(proxy.lbpack.n3, lbpack // 100 % 10) - self.assertEqual(proxy.lbpack.n4, lbpack // 1000 % 10) - - -if __name__ == "__main__": - tests.main() + assert proxy.lbpack == lbpack + assert proxy.lbpack is not lbpack + assert isinstance(proxy.lbpack, SplittableInt) + assert proxy.lbpack.n1 == lbpack % 10 + assert proxy.lbpack.n2 == lbpack // 10 % 10 + assert proxy.lbpack.n3 == lbpack // 100 % 10 + assert proxy.lbpack.n4 == lbpack // 1000 % 10 diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index e3d782b156..5a63f6a8cd 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -4,16 +4,13 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.PPField` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest import iris.fileformats.pp as pp from iris.fileformats.pp import PPField, SplittableInt +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin from iris.warnings import IrisDefaultingWarning, IrisMaskValueMatchWarning # The PPField class is abstract, so to test we define a minimal, @@ -75,51 +72,51 @@ def t2(self): return None -class Test_save(tests.IrisTest): - def test_float64(self): +class Test_save: + def test_float64(self, tmp_path): # Tests down-casting of >f8 data to >f4. def field_checksum(data): field = DummyPPField()._ready_for_save() field.data = data - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) - checksum = self.file_checksum(temp_filename) + temp_filename = tmp_path / "temp.pp" + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) + checksum = _shared_utils.file_checksum(temp_filename) return checksum data_64 = np.linspace(0, 1, num=10, endpoint=False).reshape(2, 5) checksum_32 = field_checksum(data_64.astype(">f4")) msg = "Downcasting array precision from float64 to float32 for save." - with self.assertWarnsRegex(IrisDefaultingWarning, msg): + with pytest.warns(IrisDefaultingWarning, match=msg): checksum_64 = field_checksum(data_64.astype(">f8")) - self.assertEqual(checksum_32, checksum_64) + assert checksum_32 == checksum_64 - def test_masked_mdi_value_warning(self): + def test_masked_mdi_value_warning(self, tmp_path): # Check that an unmasked MDI value raises a warning. field = DummyPPField()._ready_for_save() # Make float32 data, as float64 default produces an extra warning. 
field.bmdi = np.float32(-123.4) field.data = np.ma.masked_array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) + temp_filename = tmp_path / "temp.pp" + with pytest.warns(IrisMaskValueMatchWarning, match=msg): + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) - def test_unmasked_mdi_value_warning(self): + def test_unmasked_mdi_value_warning(self, tmp_path): # Check that MDI in *unmasked* data raises a warning. field = DummyPPField()._ready_for_save() field.bmdi = -123.4 # Make float32 data, as float64 default produces an extra warning. field.data = np.array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) + temp_filename = tmp_path / "temp.pp" + with pytest.warns(IrisMaskValueMatchWarning, match=msg): + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) - def test_mdi_masked_value_nowarning(self): + def test_mdi_masked_value_nowarning(self, tmp_path): # Check that a *masked* MDI value does not raise a warning. field = DummyPPField()._ready_for_save() field.bmdi = -123.4 @@ -129,49 +126,48 @@ def test_mdi_masked_value_nowarning(self): ) # Set underlying data value at masked point to BMDI value. 
field.data.data[1] = field.bmdi - self.assertArrayAllClose(field.data.data[1], field.bmdi) - with self.assertNoWarningsRegexp(r"\(mask\|fill\)"): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) + _shared_utils.assert_array_all_close(field.data.data[1], field.bmdi) + with _shared_utils.assert_no_warnings_regexp(r"\(mask\|fill\)"): + temp_filename = tmp_path / "temp.pp" + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) -class Test_calendar(tests.IrisTest): +class Test_calendar: def test_greg(self): field = DummyPPField() field.lbtim = SplittableInt(1, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "standard") + assert field.calendar == "standard" def test_360(self): field = DummyPPField() field.lbtim = SplittableInt(2, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "360_day") + assert field.calendar == "360_day" def test_365(self): field = DummyPPField() field.lbtim = SplittableInt(4, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "365_day") + assert field.calendar == "365_day" -class Test_coord_system(tests.IrisTest): +class Test_coord_system(MockerMixin): def _check_cs(self, bplat, bplon, rotated): field = DummyPPField() field.bplat = bplat field.bplon = bplon - with mock.patch("iris.fileformats.pp.iris.coord_systems") as mock_cs_mod: - result = field.coord_system() + mock_cs_mod = self.mocker.patch("iris.fileformats.pp.iris.coord_systems") + result = field.coord_system() if not rotated: # It should return a standard unrotated CS. - self.assertTrue(mock_cs_mod.GeogCS.call_count == 1) - self.assertEqual(result, mock_cs_mod.GeogCS()) + assert mock_cs_mod.GeogCS.call_count == 1 + assert result == mock_cs_mod.GeogCS() else: # It should return a rotated CS with the correct makeup. 
- self.assertTrue(mock_cs_mod.GeogCS.call_count == 1) - self.assertTrue(mock_cs_mod.RotatedGeogCS.call_count == 1) - self.assertEqual(result, mock_cs_mod.RotatedGeogCS()) - self.assertEqual( - mock_cs_mod.RotatedGeogCS.call_args_list[0], - mock.call(bplat, bplon, ellipsoid=mock_cs_mod.GeogCS()), + assert mock_cs_mod.GeogCS.call_count == 1 + assert mock_cs_mod.RotatedGeogCS.call_count == 1 + assert result == mock_cs_mod.RotatedGeogCS() + assert mock_cs_mod.RotatedGeogCS.call_args_list[0] == self.mocker.call( + bplat, bplon, ellipsoid=mock_cs_mod.GeogCS() ) def test_normal_unrotated(self): @@ -191,41 +187,43 @@ def test_odd_bplon_rotated(self): self._check_cs(bplat=90, bplon=123.45, rotated=True) -class Test__init__(tests.IrisTest): - def setUp(self): +class Test__init__: + @pytest.fixture(autouse=True) + def _setup(self): header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) self.header = list(header_longs) + list(header_floats) def test_no_headers(self): field = DummyPPField() - self.assertIsNone(field._raw_header) - self.assertIsNone(field.raw_lbtim) - self.assertIsNone(field.raw_lbpack) + assert field._raw_header is None + assert field.raw_lbtim is None + assert field.raw_lbpack is None def test_lbtim_lookup(self): - self.assertEqual(DummyPPField.HEADER_DICT["lbtim"], (12,)) + assert DummyPPField.HEADER_DICT["lbtim"] == (12,) def test_lbpack_lookup(self): - self.assertEqual(DummyPPField.HEADER_DICT["lbpack"], (20,)) + assert DummyPPField.HEADER_DICT["lbpack"] == (20,) def test_raw_lbtim(self): raw_lbtim = 4321 (loc,) = DummyPPField.HEADER_DICT["lbtim"] self.header[loc] = raw_lbtim field = DummyPPField(header=self.header) - self.assertEqual(field.raw_lbtim, raw_lbtim) + assert field.raw_lbtim == raw_lbtim def test_raw_lbpack(self): raw_lbpack = 4321 (loc,) = DummyPPField.HEADER_DICT["lbpack"] self.header[loc] = raw_lbpack field = DummyPPField(header=self.header) - 
self.assertEqual(field.raw_lbpack, raw_lbpack) + assert field.raw_lbpack == raw_lbpack -class Test__getattr__(tests.IrisTest): - def setUp(self): +class Test__getattr__: + @pytest.fixture(autouse=True) + def _setup(self): header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) self.header = list(header_longs) + list(header_floats) @@ -235,21 +233,21 @@ def test_attr_singular_long(self): (loc,) = DummyPPField.HEADER_DICT["lbrow"] self.header[loc] = lbrow field = DummyPPField(header=self.header) - self.assertEqual(field.lbrow, lbrow) + assert field.lbrow == lbrow def test_attr_multi_long(self): lbuser = (100, 101, 102, 103, 104, 105, 106) loc = DummyPPField.HEADER_DICT["lbuser"] self.header[loc[0] : loc[-1] + 1] = lbuser field = DummyPPField(header=self.header) - self.assertEqual(field.lbuser, lbuser) + assert field.lbuser == lbuser def test_attr_singular_float(self): bdatum = 1234 (loc,) = DummyPPField.HEADER_DICT["bdatum"] self.header[loc] = bdatum field = DummyPPField(header=self.header) - self.assertEqual(field.bdatum, bdatum) + assert field.bdatum == bdatum def test_attr_multi_float(self): brsvd = (100, 101, 102, 103) @@ -258,7 +256,7 @@ def test_attr_multi_float(self): stop = loc[-1] + 1 self.header[start:stop] = brsvd field = DummyPPField(header=self.header) - self.assertEqual(field.brsvd, brsvd) + assert field.brsvd == brsvd def test_attr_lbtim(self): raw_lbtim = 4321 @@ -266,11 +264,11 @@ def test_attr_lbtim(self): self.header[loc] = raw_lbtim field = DummyPPField(header=self.header) result = field.lbtim - self.assertEqual(result, raw_lbtim) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbtim + assert isinstance(result, SplittableInt) result = field._lbtim - self.assertEqual(result, raw_lbtim) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbtim + assert isinstance(result, SplittableInt) def test_attr_lbpack(self): raw_lbpack = 4321 @@ -278,55 
+276,57 @@ def test_attr_lbpack(self): self.header[loc] = raw_lbpack field = DummyPPField(header=self.header) result = field.lbpack - self.assertEqual(result, raw_lbpack) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbpack + assert isinstance(result, SplittableInt) result = field._lbpack - self.assertEqual(result, raw_lbpack) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbpack + assert isinstance(result, SplittableInt) def test_attr_raw_lbtim_assign(self): field = DummyPPField(header=self.header) - self.assertEqual(field.raw_lbpack, 0) - self.assertEqual(field.lbtim, 0) + assert field.raw_lbpack == 0 + assert field.lbtim == 0 raw_lbtim = 4321 field.lbtim = raw_lbtim - self.assertEqual(field.raw_lbtim, raw_lbtim) - self.assertNotIsInstance(field.raw_lbtim, SplittableInt) + assert field.raw_lbtim == raw_lbtim + assert not isinstance(field.raw_lbtim, SplittableInt) def test_attr_raw_lbpack_assign(self): field = DummyPPField(header=self.header) - self.assertEqual(field.raw_lbpack, 0) - self.assertEqual(field.lbpack, 0) + assert field.raw_lbpack == 0 + assert field.lbpack == 0 raw_lbpack = 4321 field.lbpack = raw_lbpack - self.assertEqual(field.raw_lbpack, raw_lbpack) - self.assertNotIsInstance(field.raw_lbpack, SplittableInt) + assert field.raw_lbpack == raw_lbpack + assert not isinstance(field.raw_lbpack, SplittableInt) def test_attr_unknown(self): - with self.assertRaises(AttributeError): + with pytest.raises( + AttributeError, match="'DummyPPField' object has no attribute 'x'" + ): DummyPPField().x -class Test_lbtim(tests.IrisTest): +class Test_lbtim: def test_get_splittable(self): headers = [0] * 64 headers[12] = 12345 field = DummyPPField(headers) - self.assertIsInstance(field.lbtim, SplittableInt) - self.assertEqual(field.lbtim.ia, 123) - self.assertEqual(field.lbtim.ib, 4) - self.assertEqual(field.lbtim.ic, 5) + assert isinstance(field.lbtim, SplittableInt) + assert field.lbtim.ia == 123 + assert field.lbtim.ib 
== 4 + assert field.lbtim.ic == 5 def test_set_int(self): headers = [0] * 64 headers[12] = 12345 field = DummyPPField(headers) field.lbtim = 34567 - self.assertIsInstance(field.lbtim, SplittableInt) - self.assertEqual(field.lbtim.ia, 345) - self.assertEqual(field.lbtim.ib, 6) - self.assertEqual(field.lbtim.ic, 7) - self.assertEqual(field.raw_lbtim, 34567) + assert isinstance(field.lbtim, SplittableInt) + assert field.lbtim.ia == 345 + assert field.lbtim.ib == 6 + assert field.lbtim.ic == 7 + assert field.raw_lbtim == 34567 def test_set_splittable(self): # Check that assigning a SplittableInt to lbtim uses the integer @@ -337,14 +337,12 @@ def test_set_splittable(self): field = DummyPPField(headers) si = SplittableInt(34567, {"foo": 0}) field.lbtim = si - self.assertIsInstance(field.lbtim, SplittableInt) - with self.assertRaises(AttributeError): + assert isinstance(field.lbtim, SplittableInt) + with pytest.raises( + AttributeError, match="'SplittableInt' object has no attribute 'foo'" + ): field.lbtim.foo - self.assertEqual(field.lbtim.ia, 345) - self.assertEqual(field.lbtim.ib, 6) - self.assertEqual(field.lbtim.ic, 7) - self.assertEqual(field.raw_lbtim, 34567) - - -if __name__ == "__main__": - tests.main() + assert field.lbtim.ia == 345 + assert field.lbtim.ib == 6 + assert field.lbtim.ic == 7 + assert field.raw_lbtim == 34567 diff --git a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py index 51b5c5732f..2e88a8378d 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py +++ b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py @@ -4,68 +4,62 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.load` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.fileformats.pp import STASH, _convert_constraints -class Test_convert_constraints(tests.IrisTest): +class Test_convert_constraints: def _single_stash(self): constraint = iris.AttributeConstraint(STASH="m01s03i236") return _convert_constraints(constraint) - def test_single_stash(self): + def test_single_stash(self, mocker): pp_filter = self._single_stash() - stcube = mock.Mock(stash=STASH.from_msi("m01s03i236")) - self.assertTrue(pp_filter(stcube)) + stcube = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + assert pp_filter(stcube) - def test_stash_object(self): + def test_stash_object(self, mocker): constraint = iris.AttributeConstraint(STASH=STASH.from_msi("m01s03i236")) pp_filter = _convert_constraints(constraint) - stcube = mock.Mock(stash=STASH.from_msi("m01s03i236")) - self.assertTrue(pp_filter(stcube)) + stcube = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + assert pp_filter(stcube) - def test_surface_altitude(self): + def test_surface_altitude(self, mocker): # Ensure that surface altitude fields are not filtered. pp_filter = self._single_stash() - orography_cube = mock.Mock(stash=STASH.from_msi("m01s00i033")) - self.assertTrue(pp_filter(orography_cube)) + orography_cube = mocker.Mock(stash=STASH.from_msi("m01s00i033")) + assert pp_filter(orography_cube) - def test_surface_pressure(self): + def test_surface_pressure(self, mocker): # Ensure that surface pressure fields are not filtered. 
pp_filter = self._single_stash() - pressure_cube = mock.Mock(stash=STASH.from_msi("m01s00i001")) - self.assertTrue(pp_filter(pressure_cube)) + pressure_cube = mocker.Mock(stash=STASH.from_msi("m01s00i001")) + assert pp_filter(pressure_cube) - def test_double_stash(self): - stcube236 = mock.Mock(stash=STASH.from_msi("m01s03i236")) - stcube4 = mock.Mock(stash=STASH.from_msi("m01s00i004")) - stcube7 = mock.Mock(stash=STASH.from_msi("m01s00i007")) + def test_double_stash(self, mocker): + stcube236 = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + stcube4 = mocker.Mock(stash=STASH.from_msi("m01s00i004")) + stcube7 = mocker.Mock(stash=STASH.from_msi("m01s00i007")) constraints = [ iris.AttributeConstraint(STASH="m01s03i236"), iris.AttributeConstraint(STASH="m01s00i004"), ] pp_filter = _convert_constraints(constraints) - self.assertTrue(pp_filter(stcube236)) - self.assertTrue(pp_filter(stcube4)) - self.assertFalse(pp_filter(stcube7)) - - def test_callable_stash(self): - stcube236 = mock.Mock(stash=STASH.from_msi("m01s03i236")) - stcube4 = mock.Mock(stash=STASH.from_msi("m01s00i004")) - stcube7 = mock.Mock(stash=STASH.from_msi("m01s00i007")) + assert pp_filter(stcube236) + assert pp_filter(stcube4) + assert not pp_filter(stcube7) + + def test_callable_stash(self, mocker): + stcube236 = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + stcube4 = mocker.Mock(stash=STASH.from_msi("m01s00i004")) + stcube7 = mocker.Mock(stash=STASH.from_msi("m01s00i007")) con1 = iris.AttributeConstraint(STASH=lambda s: s.endswith("004")) con2 = iris.AttributeConstraint(STASH=lambda s: s == "m01s00i007") constraints = [con1, con2] pp_filter = _convert_constraints(constraints) - self.assertFalse(pp_filter(stcube236)) - self.assertTrue(pp_filter(stcube4)) - self.assertTrue(pp_filter(stcube7)) + assert not pp_filter(stcube236) + assert pp_filter(stcube4) + assert pp_filter(stcube7) def test_multiple_with_stash(self): constraints = [ @@ -73,7 +67,7 @@ def test_multiple_with_stash(self): 
iris.AttributeConstraint(STASH="m01s00i004"), ] pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) + assert pp_filter is None def test_no_stash(self): constraints = [ @@ -81,13 +75,9 @@ def test_no_stash(self): iris.AttributeConstraint(source="asource"), ] pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) + assert pp_filter is None def test_no_constraint(self): constraints = [] pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) - - -if __name__ == "__main__": - tests.main() + assert pp_filter is None diff --git a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py index 266502253a..a0a8fa3a5b 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py +++ b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py @@ -4,33 +4,25 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._create_field_data` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np import iris.fileformats.pp as pp -class Test__create_field_data(tests.IrisTest): - def test_loaded_bytes(self): +class Test__create_field_data: + def test_loaded_bytes(self, mocker): # Check that a field with LoadedArrayBytes in core_data gets the # result of a suitable call to _data_bytes_to_shaped_array(). 
- mock_loaded_bytes = mock.Mock(spec=pp.LoadedArrayBytes) - core_data = mock.MagicMock(return_value=mock_loaded_bytes) - field = mock.Mock(core_data=core_data) - data_shape = mock.Mock() - land_mask = mock.Mock() - with mock.patch( - "iris.fileformats.pp._data_bytes_to_shaped_array" - ) as convert_bytes: - convert_bytes.return_value = mock.sentinel.array - pp._create_field_data(field, data_shape, land_mask) + mock_loaded_bytes = mocker.Mock(spec=pp.LoadedArrayBytes) + core_data = mocker.MagicMock(return_value=mock_loaded_bytes) + field = mocker.Mock(core_data=core_data) + data_shape = mocker.Mock() + land_mask = mocker.Mock() + convert_bytes = mocker.patch("iris.fileformats.pp._data_bytes_to_shaped_array") + convert_bytes.return_value = mocker.sentinel.array + pp._create_field_data(field, data_shape, land_mask) - self.assertIs(field.data, mock.sentinel.array) + assert field.data is mocker.sentinel.array convert_bytes.assert_called_once_with( mock_loaded_bytes.bytes, field.lbpack, @@ -41,19 +33,19 @@ def test_loaded_bytes(self): land_mask, ) - def test_deferred_bytes(self): + def test_deferred_bytes(self, mocker): # Check that a field with deferred array bytes in core_data gets a # dask array. 
- fname = mock.sentinel.fname - position = mock.sentinel.position - n_bytes = mock.sentinel.n_bytes - newbyteorder = mock.Mock(return_value=mock.sentinel.dtype) - dtype = mock.Mock(newbyteorder=newbyteorder) + fname = mocker.sentinel.fname + position = mocker.sentinel.position + n_bytes = mocker.sentinel.n_bytes + newbyteorder = mocker.Mock(return_value=mocker.sentinel.dtype) + dtype = mocker.Mock(newbyteorder=newbyteorder) deferred_bytes = (fname, position, n_bytes, dtype) - core_data = mock.MagicMock(return_value=deferred_bytes) - field = mock.Mock(core_data=core_data) + core_data = mocker.MagicMock(return_value=deferred_bytes) + field = mocker.Mock(core_data=core_data) data_shape = (100, 120) - proxy = mock.Mock( + proxy = mocker.Mock( dtype=np.dtype("f4"), dask_meta=np.empty((0,) * len(data_shape), dtype=np.dtype("f4")), shape=data_shape, @@ -63,13 +55,13 @@ def test_deferred_bytes(self): # We can't directly inspect the concrete data source underlying # the dask array, so instead we patch the proxy creation and check it's # being created and invoked correctly. - with mock.patch("iris.fileformats.pp.PPDataProxy") as PPDataProxy: - PPDataProxy.return_value = proxy - pp._create_field_data(field, data_shape, land_mask_field=None) + PPDataProxy = mocker.patch("iris.fileformats.pp.PPDataProxy") + PPDataProxy.return_value = proxy + pp._create_field_data(field, data_shape, land_mask_field=None) # The data should be assigned via field.data. As this is a mock object # we can check the attribute directly. - self.assertEqual(field.data.shape, data_shape) - self.assertEqual(field.data.dtype, np.dtype("f4")) + assert field.data.shape == data_shape + assert field.data.dtype == np.dtype("f4") # Is it making use of a correctly configured proxy? # NB. We know it's *using* the result of this call because # that's where the dtype came from above. 
@@ -83,7 +75,3 @@ def test_deferred_bytes(self): field.boundary_packing, field.bmdi, ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index 3bd8fcb8d7..205c910dbc 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -4,18 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import io -from unittest import mock import numpy as np import numpy.ma as ma import pytest import iris.fileformats.pp as pp +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin @pytest.mark.parametrize("data_shape", [(2, 3)]) @@ -54,8 +51,9 @@ def test_data_padding__no_compression(data_shape, expected_shape, data_type): _ = pp._data_bytes_to_shaped_array(*args) -class Test__data_bytes_to_shaped_array__lateral_boundary_compression(tests.IrisTest): - def setUp(self): +class Test__data_bytes_to_shaped_array__lateral_boundary_compression: + @pytest.fixture(autouse=True) + def _setup(self): self.data_shape = 30, 40 y_halo, x_halo, rim = 2, 3, 4 @@ -84,9 +82,9 @@ def setUp(self): buf.seek(0) self.data_payload_bytes = buf.read() - def test_boundary_decompression(self): - boundary_packing = mock.Mock(rim_width=4, x_halo=3, y_halo=2) - lbpack = mock.Mock(n1=0) + def test_boundary_decompression(self, mocker): + boundary_packing = mocker.Mock(rim_width=4, x_halo=3, y_halo=2) + lbpack = mocker.Mock(n1=0) r = pp._data_bytes_to_shaped_array( self.data_payload_bytes, lbpack, @@ -96,11 +94,12 @@ def test_boundary_decompression(self): -9223372036854775808, ) 
r = ma.masked_array(r, np.isnan(r), fill_value=-9223372036854775808) - self.assertMaskedArrayEqual(r, self.decompressed) + _shared_utils.assert_masked_array_equal(r, self.decompressed) -class Test__data_bytes_to_shaped_array__land_packed(tests.IrisTest): - def setUp(self): +class Test__data_bytes_to_shaped_array__land_packed(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Sets up some useful arrays for use with the land/sea mask # decompression. self.land = np.array( @@ -123,7 +122,7 @@ def setUp(self): dtype=np.float64, ) - self.land_mask = mock.Mock( + self.land_mask = mocker.Mock( data=self.land, lbrow=self.land.shape[0], lbnpt=self.land.shape[1] ) @@ -131,11 +130,11 @@ def create_lbpack(self, value): name_mapping = dict(n5=slice(4, None), n4=3, n3=2, n2=1, n1=0) return pp.SplittableInt(value, name_mapping) - def test_no_land_mask(self): + def test_no_land_mask(self, mocker): # Check that without a mask, it returns the raw (compressed) data. - with mock.patch("numpy.frombuffer", return_value=np.arange(3)): + with mocker.patch("numpy.frombuffer", return_value=np.arange(3)): result = pp._data_bytes_to_shaped_array( - mock.Mock(), + mocker.Mock(), self.create_lbpack(120), None, (3, 4), @@ -143,44 +142,44 @@ def test_no_land_mask(self): -999, mask=None, ) - self.assertArrayAllClose(result, np.arange(3)) + _shared_utils.assert_array_all_close(result, np.arange(3)) def test_land_mask(self): # Check basic land unpacking. field_data = self.land_masked_data result = self.check_read_data(field_data, 120, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_land_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_land_data) def test_land_masked_data_too_long(self): # Check land unpacking with field data that is larger than the mask. 
field_data = np.tile(self.land_masked_data, 2) result = self.check_read_data(field_data, 120, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_land_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_land_data) def test_sea_mask(self): # Check basic land unpacking. field_data = self.sea_masked_data result = self.check_read_data(field_data, 220, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_sea_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_sea_data) def test_sea_masked_data_too_long(self): # Check sea unpacking with field data that is larger than the mask. field_data = np.tile(self.sea_masked_data, 2) result = self.check_read_data(field_data, 220, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_sea_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_sea_data) def test_bad_lbpack(self): # Check basic land unpacking. field_data = self.sea_masked_data - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Unsupported mask compression."): self.check_read_data(field_data, 320, self.land_mask) def check_read_data(self, field_data, lbpack, mask): # Calls pp._data_bytes_to_shaped_array with the necessary mocked # items, an lbpack instance, the correct data shape and mask instance. 
- with mock.patch("numpy.frombuffer", return_value=field_data): + with self.mocker.patch("numpy.frombuffer", return_value=field_data): data = pp._data_bytes_to_shaped_array( - mock.Mock(), + self.mocker.Mock(), self.create_lbpack(lbpack), None, mask.shape, @@ -189,7 +188,3 @@ def check_read_data(self, field_data, lbpack, mask): mask=mask, ) return ma.masked_array(data, np.isnan(data), fill_value=-999) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py index f1018d8df4..6618c79a38 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py +++ b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py @@ -4,75 +4,70 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._field_gen` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import contextlib import io -from unittest import mock -import warnings import numpy as np +import pytest import iris.fileformats.pp as pp -class Test(tests.IrisTest): - @contextlib.contextmanager - def mock_for_field_gen(self, fields): - side_effect_fields = list(fields)[:] +class Test: + @pytest.fixture + def mock_for_field_gen(self, mocker): + @contextlib.contextmanager + def _mock_for_field_gen(fields): + side_effect_fields = list(fields)[:] - def make_pp_field_override(*args): - # Iterates over the fields passed to this context manager, - # until there are no more, upon which the np.fromfile - # returns an empty list and the while loop in load() is - # broken. 
- result = side_effect_fields.pop(0) - if not side_effect_fields: - np.fromfile.return_value = [] - return result + def make_pp_field_override(*args): + # Iterates over the fields passed to this context manager, + # until there are no more, upon which the np.fromfile + # returns an empty list and the while loop in load() is + # broken. + result = side_effect_fields.pop(0) + if not side_effect_fields: + np.fromfile.return_value = [] + return result - open_func = "builtins.open" - with ( - mock.patch("numpy.fromfile", return_value=[0]), - mock.patch(open_func), - mock.patch("struct.unpack_from", return_value=[4]), - mock.patch( + open_func = "builtins.open" + mocker.patch("numpy.fromfile", return_value=[0]) + mocker.patch(open_func) + mocker.patch("struct.unpack_from", return_value=[4]) + mocker.patch( "iris.fileformats.pp.make_pp_field", side_effect=make_pp_field_override, - ), - ): + ) yield - def gen_fields(self, fields): - with self.mock_for_field_gen(fields): + return _mock_for_field_gen + + def gen_fields(self, fields, mock_for_field_gen): + with mock_for_field_gen(fields): return list(pp._field_gen("mocked", "mocked")) - def test_lblrec_invalid(self): - pp_field = mock.Mock(lblrec=2, lbext=0) - with warnings.catch_warnings(record=True) as warn: - warnings.simplefilter("always") - self.gen_fields([pp_field]) - self.assertEqual(len(warn), 1) + def test_lblrec_invalid(self, mocker, mock_for_field_gen): + pp_field = mocker.Mock(lblrec=2, lbext=0) wmsg = ( "LBLREC has a different value to the .* the header in the " r"file \(8 and 4\)\. Skipping .*" ) - self.assertRegex(str(warn[0].message), wmsg) + with pytest.warns(UserWarning, match=wmsg) as warn: + self.gen_fields([pp_field], mock_for_field_gen) + assert len(warn) == 1 - def test_read_headers_call(self): + def test_read_headers_call(self, mocker, mock_for_field_gen): # Checks that the two calls to np.fromfile are called in the # expected way. 
- pp_field = mock.Mock(lblrec=1, lbext=0, lbuser=[0]) - with self.mock_for_field_gen([pp_field]): - open_fh = mock.MagicMock(spec=io.RawIOBase) + pp_field = mocker.Mock(lblrec=1, lbext=0, lbuser=[0]) + with mock_for_field_gen([pp_field]): + open_fh = mocker.MagicMock(spec=io.RawIOBase) open.return_value = open_fh next(pp._field_gen("mocked", read_data_bytes=False)) with open_fh as open_fh_ctx: calls = [ - mock.call(open_fh_ctx, count=45, dtype=">i4"), - mock.call(open_fh_ctx, count=19, dtype=">f4"), + mocker.call(open_fh_ctx, count=45, dtype=">i4"), + mocker.call(open_fh_ctx, count=19, dtype=">f4"), ] np.fromfile.assert_has_calls(calls) with open_fh as open_fh_ctx: @@ -82,33 +77,30 @@ def test_read_headers_call(self): 4, np.dtype(">f4"), ) - self.assertEqual(pp_field.data, expected_deferred_bytes) + assert pp_field.data == expected_deferred_bytes - def test_read_data_call(self): + def test_read_data_call(self, mocker, mock_for_field_gen): # Checks that data is read if read_data is True. - pp_field = mock.Mock(lblrec=1, lbext=0, lbuser=[0]) - with self.mock_for_field_gen([pp_field]): - open_fh = mock.MagicMock(spec=io.RawIOBase) + pp_field = mocker.Mock(lblrec=1, lbext=0, lbuser=[0]) + with mock_for_field_gen([pp_field]): + open_fh = mocker.MagicMock(spec=io.RawIOBase) open.return_value = open_fh next(pp._field_gen("mocked", read_data_bytes=True)) with open_fh as open_fh_ctx: expected_loaded_bytes = pp.LoadedArrayBytes( open_fh_ctx.read(), np.dtype(">f4") ) - self.assertEqual(pp_field.data, expected_loaded_bytes) + assert pp_field.data == expected_loaded_bytes - def test_invalid_header_release(self): + def test_invalid_header_release(self, tmp_path): # Check that an unknown LBREL value just results in a warning # and the end of the file iteration instead of raising an error. 
- with self.temp_filename() as temp_path: - np.zeros(65, dtype="i4").tofile(temp_path) - generator = pp._field_gen(temp_path, False) - with mock.patch("warnings.warn") as warn: - with self.assertRaises(StopIteration): - next(generator) - self.assertEqual(warn.call_count, 1) - self.assertIn("header release number", warn.call_args[0][0]) - - -if __name__ == "__main__": - tests.main() + temp_path = tmp_path / "temp" + np.zeros(65, dtype="i4").tofile(temp_path) + generator = pp._field_gen(temp_path, False) + with pytest.warns( + pp._WarnComboIgnoringLoad, match="header release number" + ) as warn: + with pytest.raises(StopIteration): + next(generator) + assert len(warn) == 1 diff --git a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py index 68520300b6..f0042593c3 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py +++ b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py @@ -4,36 +4,34 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._interpret_field` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from copy import deepcopy -from unittest import mock import numpy as np +import pytest import iris.fileformats.pp as pp +from iris.warnings import IrisLoadWarning -class Test__interpret_fields__land_packed_fields(tests.IrisTest): - def setUp(self): +class Test__interpret_fields__land_packed_fields: + @pytest.fixture(autouse=True) + def _setup(self, mocker): return_value = ("dummy", 0, 0, np.dtype("f4")) - core_data = mock.MagicMock(return_value=return_value) + core_data = mocker.MagicMock(return_value=return_value) # A field packed using a land/sea mask. 
- self.pp_field = mock.Mock( + self.pp_field = mocker.Mock( lblrec=1, lbext=0, lbuser=[0] * 7, lbrow=0, lbnpt=0, raw_lbpack=21, - lbpack=mock.Mock(n1=0, n2=2, n3=1), + lbpack=mocker.Mock(n1=0, n2=2, n3=1), core_data=core_data, ) # The field specifying the land/seamask. lbuser = [None, None, None, 30, None, None, 1] # m01s00i030 - self.land_mask_field = mock.Mock( + self.land_mask_field = mocker.Mock( lblrec=1, lbext=0, lbuser=lbuser, @@ -47,33 +45,29 @@ def test_non_deferred_fix_lbrow_lbnpt(self): # Checks the fix_lbrow_lbnpt is applied to fields which are not # deferred. f1, mask = self.pp_field, self.land_mask_field - self.assertEqual(f1.lbrow, 0) - self.assertEqual(f1.lbnpt, 0) + assert f1.lbrow == 0 + assert f1.lbnpt == 0 list(pp._interpret_fields([mask, f1])) - self.assertEqual(f1.lbrow, 3) - self.assertEqual(f1.lbnpt, 4) + assert f1.lbrow == 3 + assert f1.lbnpt == 4 # Check the data's shape has been updated too. - self.assertEqual(f1.data.shape, (3, 4)) + assert f1.data.shape == (3, 4) def test_fix_lbrow_lbnpt_no_mask_available(self): # Check a warning is issued when loading a land masked field # without a land mask. - with mock.patch("warnings.warn") as warn: + with pytest.warns( + IrisLoadWarning, + match="Landmask compressed fields existed without a landmask", + ) as warn: list(pp._interpret_fields([self.pp_field])) - self.assertEqual(warn.call_count, 1) - warn_msg = warn.call_args[0][0] - self.assertTrue( - warn_msg.startswith( - "Landmask compressed fields existed without a landmask" - ), - "Unexpected warning message: {!r}".format(warn_msg), - ) + assert len(warn) == 1 def test_deferred_mask_field(self): # Check that the order of the load is yielded last if the mask # hasn't yet been seen. 
result = list(pp._interpret_fields([self.pp_field, self.land_mask_field])) - self.assertEqual(result, [self.land_mask_field, self.pp_field]) + assert result == [self.land_mask_field, self.pp_field] def test_not_deferred_mask_field(self): # Check that the order of the load is unchanged if a land mask @@ -81,15 +75,11 @@ def test_not_deferred_mask_field(self): f1, mask = self.pp_field, self.land_mask_field mask2 = deepcopy(mask) result = list(pp._interpret_fields([mask, f1, mask2])) - self.assertEqual(result, [mask, f1, mask2]) + assert result == [mask, f1, mask2] def test_deferred_fix_lbrow_lbnpt(self): # Check the fix is also applied to fields which are deferred. f1, mask = self.pp_field, self.land_mask_field list(pp._interpret_fields([f1, mask])) - self.assertEqual(f1.lbrow, 3) - self.assertEqual(f1.lbnpt, 4) - - -if __name__ == "__main__": - tests.main() + assert f1.lbrow == 3 + assert f1.lbnpt == 4 diff --git a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py index 213eb6c9c4..091ca11b7a 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py @@ -4,30 +4,25 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.as_fields` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip +import pytest import iris.fileformats.pp as pp import iris.tests.stock as stock -class TestAsFields(tests.IrisTest): - def setUp(self): +class TestAsFields: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.realistic_3d() def test_cube_only(self): fields = pp.as_fields(self.cube) for field in fields: - self.assertEqual(field.lbcode, 101) + assert field.lbcode == 101 def test_field_coords(self): fields = pp.as_fields( self.cube, field_coords=["grid_longitude", "grid_latitude"] ) for field in fields: - self.assertEqual(field.lbcode, 101) - - -if __name__ == "__main__": - tests.main() + assert field.lbcode == 101 diff --git a/lib/iris/tests/unit/fileformats/pp/test_load.py b/lib/iris/tests/unit/fileformats/pp/test_load.py index e802b36c0e..19e21fe077 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_load.py +++ b/lib/iris/tests/unit/fileformats/pp/test_load.py @@ -4,38 +4,27 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.load` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import iris.fileformats.pp as pp -class Test_load(tests.IrisTest): - def test_call_structure(self): +class Test_load: + def test_call_structure(self, mocker): # Check that the load function calls the two necessary utility # functions. 
- extract_result = mock.Mock() - interpret_patch = mock.patch( + extract_result = mocker.Mock() + interpret_patch = mocker.patch( "iris.fileformats.pp._interpret_fields", autospec=True, return_value=iter([]), ) - field_gen_patch = mock.patch( + field_gen_patch = mocker.patch( "iris.fileformats.pp._field_gen", autospec=True, return_value=extract_result, ) - with interpret_patch as interpret, field_gen_patch as field_gen: - pp.load("mock", read_data=True) + pp.load("mock", read_data=True) - interpret.assert_called_once_with(extract_result) - field_gen.assert_called_once_with( + interpret_patch.assert_called_once_with(extract_result) + field_gen_patch.assert_called_once_with( "mock", read_data_bytes=True, little_ended=False ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index bd48ade809..32c7b36e40 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -21,7 +21,7 @@ @pytest.mark.parametrize( - "unit,modulus", + ("unit", "modulus"), [ (cf_units.Unit("radians"), 2 * np.pi), (cf_units.Unit("degrees"), 360.0), @@ -75,6 +75,7 @@ def test_bad_stash_string(mocker): def _pp_save_ppfield_values(cube): """Emulate saving a cube as PP, and capture the resulting PP field values.""" # Create a test object to stand in for a real PPField. + # todo: Still uses unittest.mock, it causes a lot of issues making this a fixture pp_field = mock.MagicMock(spec=pp.PPField3) # Add minimal content required by the pp.save operation. 
pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN @@ -88,7 +89,8 @@ def _pp_save_ppfield_values(cube): class TestVertical: - def setup_method(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.lat_lon_cube() def test_pseudo_level(self): @@ -124,7 +126,8 @@ def test_soil_depth(self): class TestLbfcProduction: - def setup_method(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.lat_lon_cube() def check_cube_stash_yields_lbfc(self, stash, lbfc_expected): @@ -168,7 +171,8 @@ def test_bad_name_units_to_lbfc_0(self): class TestLbsrceProduction: - def setup_method(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.lat_lon_cube() def check_cube_um_source_yields_lbsrce( @@ -212,7 +216,7 @@ class Test_Save__LbprocProduction: @pytest.fixture(autouse=True) def _setup(self, mocker): self.cube = stock.realistic_3d() - self.pp_field = mock.MagicMock(spec=pp.PPField3) + self.pp_field = mocker.MagicMock(spec=pp.PPField3) self.pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN mocker.patch("iris.fileformats.pp.PPField3", return_value=self.pp_field) @@ -427,7 +431,7 @@ def single_mean_time_cube(single_time_cube): return single_time_cube -@pytest.fixture() +@pytest.fixture def global_cube(): x_coord = DimCoord( np.arange(0, 360, 10), standard_name="longitude", units="degrees", circular=True diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py index 2eaebc0059..0deb8f6f0c 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py @@ -4,13 +4,8 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.save_fields` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest import iris.fileformats.pp as pp @@ -19,31 +14,28 @@ def asave(afilehandle): afilehandle.write("saved") -class TestSaveFields(tests.IrisTest): - def setUp(self): +class TestSaveFields: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create a test object to stand in for a real PPField. - self.pp_field = mock.MagicMock(spec=pp.PPField3) + self.pp_field = mocker.MagicMock(spec=pp.PPField3) # Add minimal content required by the pp.save operation. self.pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN self.pp_field.data = np.zeros((1, 1)) self.pp_field.save = asave - def test_save(self): + def test_save(self, mocker): open_func = "builtins.open" - m = mock.mock_open() - with mock.patch(open_func, m, create=True): - pp.save_fields([self.pp_field], "foo.pp") - self.assertTrue(mock.call("foo.pp", "wb") in m.mock_calls) - self.assertTrue(mock.call().write("saved") in m.mock_calls) + m = mocker.mock_open() + mocker.patch(open_func, m, create=True) + pp.save_fields([self.pp_field], "foo.pp") + assert mocker.call("foo.pp", "wb") in m.mock_calls + assert mocker.call().write("saved") in m.mock_calls - def test_save_append(self): + def test_save_append(self, mocker): open_func = "builtins.open" - m = mock.mock_open() - with mock.patch(open_func, m, create=True): - pp.save_fields([self.pp_field], "foo.pp", append=True) - self.assertTrue(mock.call("foo.pp", "ab") in m.mock_calls) - self.assertTrue(mock.call().write("saved") in m.mock_calls) - - -if __name__ == "__main__": - tests.main() + m = mocker.mock_open() + mocker.patch(open_func, m, create=True) + pp.save_fields([self.pp_field], "foo.pp", append=True) + assert mocker.call("foo.pp", "ab") in m.mock_calls + assert mocker.call().write("saved") in m.mock_calls diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py index 
5ab3f7c480..7d697781aa 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py @@ -4,31 +4,30 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.save_pairs_from_cube` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest from iris.fileformats.pp import save_pairs_from_cube import iris.tests.stock as stock -class TestSaveFields(tests.IrisTest): - def setUp(self): +class TestSaveFields: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.realistic_3d() def test_cube_only(self): slices_and_fields = save_pairs_from_cube(self.cube) for aslice, field in slices_and_fields: - self.assertEqual(aslice.shape, (9, 11)) - self.assertEqual(field.lbcode, 101) + assert aslice.shape == (9, 11) + assert field.lbcode == 101 def test_field_coords(self): slices_and_fields = save_pairs_from_cube( self.cube, field_coords=["grid_longitude", "grid_latitude"] ) for aslice, field in slices_and_fields: - self.assertEqual(aslice.shape, (11, 9)) - self.assertEqual(field.lbcode, 101) + assert aslice.shape == (11, 9) + assert field.lbcode == 101 def test_lazy_data(self): cube = self.cube.copy() @@ -37,13 +36,9 @@ def test_lazy_data(self): # Check that lazy data is preserved in save-pairs generation. 
slices_and_fields = save_pairs_from_cube(cube) for aslice, _ in slices_and_fields: - self.assertTrue(aslice.has_lazy_data()) + assert aslice.has_lazy_data() def test_default_bmdi(self): slices_and_fields = save_pairs_from_cube(self.cube) _, field = next(slices_and_fields) - self.assertEqual(field.bmdi, -1e30) - - -if __name__ == "__main__": - tests.main() + assert field.bmdi == -1e30 From af2f29db3b94267fc59c6eb87f0c2bf6d3073d78 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 9 Feb 2026 08:55:07 +0000 Subject: [PATCH 14/77] Updated environment lockfiles (#6931) Co-authored-by: Lockfile bot --- requirements/locks/py312-linux-64.lock | 51 ++++++++++++------------- requirements/locks/py313-linux-64.lock | 53 +++++++++++++------------- requirements/locks/py314-linux-64.lock | 45 +++++++++++----------- 3 files changed, 73 insertions(+), 76 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 9438e5dee7..c82a93cc44 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -15,12 +15,12 @@ https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#998 https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda#26c46f90d0e727e95c6c9498a33a09f3 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_17.conda#3c281169ea25b987311400d7a7e28445 https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc @@ -32,8 +32,8 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_17.conda#1478bfa85224a65ab096d69ffd2af1e5 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_17.conda#202fdf8cad9eea704c2b0d823d1732bf https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 
https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 @@ -41,7 +41,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d8 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda#68f68355000ec3f1d6f26ea13e8f525f +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_17.conda#24c2fe35fa45cd71214beba6f337c071 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 @@ -71,12 +71,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.co https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 
https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda#b4ecbefe517ed0157c37f8182768271c https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 @@ -95,14 +95,14 @@ https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda# https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda#6a0eb48e58684cca4d7acc8b7a0fd3c7 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 -https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.2-hceb46e0_1.conda#40feea2979654ed579f1cda7c63ccb94 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.3-hceb46e0_1.conda#2aadb0d17215603a82a2a6b0afd9a4cb https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13eeac0b5c8e5b8ab496e6c4ddd829 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d 
https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 @@ -129,7 +129,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8c https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe 
https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 @@ -169,8 +169,8 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda#63e20cf7b7460019b423fc06abb96c60 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_105.conda#d58cd79121dd51128f2a5dab44edf1ea +https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 @@ -210,7 +210,7 @@ https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#4 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py312h0d868a3_1.conda#1cfb9b04c827219597def32c22fb9ca2 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py312h8a5da7c_0.conda#fba10c2007c8b06f77c5a23ce3a635ad 
+https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py312h8a5da7c_1.conda#15878599a87992e44c059731771591cb https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 @@ -225,7 +225,7 @@ https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py312h4c3975b_0.co https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.0-py312h4c3975b_1.conda#a0b8efbe73c90f810a171a6c746be087 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda#4d1fc190b99912ed557a8236e958c559 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 @@ -236,12 +236,12 @@ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.4.0-pyhd8ed1ab_0.conda#421a865222cd0c9d83ff08bc78bf3a61 
-https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a01c169f0ab0f91b26e77a3301fbfe4 +https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py312h460c074_1.conda#648ee28dcd4e07a1940a17da62eccd40 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.2-py312h8a5da7c_0.conda#3935daadad011d007deb379b8188588d +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.3-py312h8a5da7c_0.conda#f3c0bc6e23bd3653d74390aa644b0a95 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_1.conda#693cda60b9223f55d0836c885621611b https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -250,16 +250,16 @@ https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea9 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_2.conda#3c71daed530c0c26671a1b1b7010e746 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_2.conda#0ad9019bb10eda915fb0ce5f78fef13b 
+https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f -https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda#372a62464d47d9e966b630ffae3abe73 +https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.1-py312h33ff503_0.conda#ba7e6cb06c372eae6f164623e6e06db8 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py312h33ff503_1.conda#3569a8fca2dd3202e4ab08f42499f6d3 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py312h9b6a7d9_2.conda#573b9a879a3a42990f9c51d7376dce6b https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 @@ -277,15 +277,15 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb 
https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py312h4f23490_1.conda#84bf349fad55056ed326fc550671b65c https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312h0a2e395_4.conda#43c2bc96af3ae5ed9e8a10ded942aa50 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.1-pyhcf101f3_1.conda#91e3b2a0d014ac032c066a2e18051686 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.conda#6ce4ad29c3ae0b74df813409433457ff +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_1.conda#7a8b949fb98c73b802b5e66a67dac140 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h4f23490_2.conda#cec5bc5f7d374f8f8095f8e28e31f6cb https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py312hf79963d_1.conda#e597b3e812d9613f659b7d87ad252d18 -https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh8b19718_0.conda#c55515ca43c6444d2572e0f0d93cb6b9 +https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda#67bdec43082fd8a9cffb9484420b39a2 https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.conda#1af24b0eb99b1158c0d8c64a4d6c5d79 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 
https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py312h4f23490_2.conda#ab856c36638ab1acf90e70349c525cf9 @@ -298,7 +298,7 @@ https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9 https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py312h4f23490_0.conda#6aef45ba3c0123547eb7b0f15852cac9 -https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.1-pyhcf101f3_1.conda#c15e359a982395be86a7576a91f9c5f5 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_0.conda#1eac93a6257796dd348d366a85f7f283 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a @@ -320,7 +320,7 @@ https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#d https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_3.conda#d2bbbd293097e664ffb01fc4cdaf5729 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 
https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 @@ -341,4 +341,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 - diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index f8ac96ac02..0b48f09a09 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -15,12 +15,12 @@ https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#998 https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda#26c46f90d0e727e95c6c9498a33a09f3 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 
https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_17.conda#3c281169ea25b987311400d7a7e28445 https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc @@ -32,8 +32,8 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_17.conda#1478bfa85224a65ab096d69ffd2af1e5 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_17.conda#202fdf8cad9eea704c2b0d823d1732bf https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 @@ -42,7 +42,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d8 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda#68f68355000ec3f1d6f26ea13e8f525f +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_17.conda#24c2fe35fa45cd71214beba6f337c071 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 @@ -72,12 +72,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.co https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 
https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda#b4ecbefe517ed0157c37f8182768271c https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 @@ -96,14 +96,14 @@ https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda# https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda#6a0eb48e58684cca4d7acc8b7a0fd3c7 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 -https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.2-hceb46e0_1.conda#40feea2979654ed579f1cda7c63ccb94 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.3-hceb46e0_1.conda#2aadb0d17215603a82a2a6b0afd9a4cb https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13eeac0b5c8e5b8ab496e6c4ddd829 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 
https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 @@ -130,7 +130,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8c https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 @@ -142,7 +142,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 
https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.13.11-hc97d973_101_cp313.conda#aa23b675b860f2566af2dfb3ffdf3b8c +https://conda.anaconda.org/conda-forge/linux-64/python-3.13.12-hc97d973_100_cp313.conda#4c875ed0e78c2d407ec55eadffb8cf3d https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 @@ -170,8 +170,8 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py313h6b9daa2_0.conda#3a0be7abedcbc2aee92ea228efea8eba -https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_105.conda#d58cd79121dd51128f2a5dab44edf1ea +https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e 
https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 @@ -198,7 +198,7 @@ https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#3 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py313h80991f8_0.conda#183fe6b9e99e5c2b464c1573ec78eac8 -https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh145f28c_0.conda#bf47878473e5ab9fdb4115735230e191 +https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 @@ -212,7 +212,7 @@ https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#4 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py313heab5758_1.conda#82df5d372f2796c389fcbe5104664f5a https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py313h3dea7bd_0.conda#4794ea0adaebd9f844414e594b142cb2 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py313h3dea7bd_1.conda#f256753e840c3cd3766488c9437a8f8b 
https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 @@ -226,7 +226,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py313h07c4f96_0.conda#82da2dcf1ea3e298f2557b50459809e0 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda#4d1fc190b99912ed557a8236e958c559 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 @@ -237,12 +237,12 @@ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.4.0-pyhd8ed1ab_0.conda#421a865222cd0c9d83ff08bc78bf3a61 -https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a01c169f0ab0f91b26e77a3301fbfe4 
+https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py313hf46b229_1.conda#d0616e7935acab407d1543b28c446f6f https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.2-py313h3dea7bd_0.conda#df05169cc886aaf53dc560db634519f8 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.3-py313h3dea7bd_0.conda#fa90494c873b21b3bdb21c3b588ff043 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_1.conda#bcca9afd203fe05d9582249ac12762da https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -251,16 +251,16 @@ https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea9 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_2.conda#3c71daed530c0c26671a1b1b7010e746 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_2.conda#0ad9019bb10eda915fb0ce5f78fef13b 
+https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f -https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda#372a62464d47d9e966b630ffae3abe73 +https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.1-py313hf6604e3_0.conda#7d51e3bef1a4b00bde1861d85ba2f874 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py313hf6604e3_1.conda#ca9c6ba4beac38cb3d0a85afde27f94c https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py313h77f6078_2.conda#42d11c7d1ac21ae2085f58353641e71c @@ -278,11 +278,11 @@ https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py313h29aa505_1.conda#c63d5f9d63fe2f48b0ad75005fcae7ba 
https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py313hc8edb43_4.conda#33639459bc29437315d4bff9ed5bc7a7 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.1-pyhcf101f3_1.conda#91e3b2a0d014ac032c066a2e18051686 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.conda#6ce4ad29c3ae0b74df813409433457ff +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_1.conda#7a8b949fb98c73b802b5e66a67dac140 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py313h29aa505_2.conda#ad53894d278895bf15c8fc324727d224 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py313h08cd8bf_2.conda#8a69ea71fdd37bfe42a28f0967dbb75a @@ -299,7 +299,7 @@ https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9 https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py313h29aa505_0.conda#3942b6a86fe92d0888b3373f2c1e1676 -https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.1-pyhcf101f3_1.conda#c15e359a982395be86a7576a91f9c5f5 
+https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_0.conda#1eac93a6257796dd348d366a85f7f283 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a @@ -320,7 +320,7 @@ https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#d https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_3.conda#d2bbbd293097e664ffb01fc4cdaf5729 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec @@ -340,4 +340,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 - diff --git a/requirements/locks/py314-linux-64.lock 
b/requirements/locks/py314-linux-64.lock index c219725369..007d466897 100644 --- a/requirements/locks/py314-linux-64.lock +++ b/requirements/locks/py314-linux-64.lock @@ -13,11 +13,11 @@ https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad65 https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda#26c46f90d0e727e95c6c9498a33a09f3 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_17.conda#3c281169ea25b987311400d7a7e28445 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 @@ -28,15 +28,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 
https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_17.conda#1478bfa85224a65ab096d69ffd2af1e5 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_17.conda#202fdf8cad9eea704c2b0d823d1732bf https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda#2c21e66f50753a083cbe6b80f38268fa https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda#68f68355000ec3f1d6f26ea13e8f525f +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_17.conda#24c2fe35fa45cd71214beba6f337c071 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 @@ -61,11 +61,11 @@ 
https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.co https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba @@ -79,13 +79,13 @@ https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.con https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 
https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 -https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.2-hceb46e0_1.conda#40feea2979654ed579f1cda7c63ccb94 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.3-hceb46e0_1.conda#2aadb0d17215603a82a2a6b0afd9a4cb https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13eeac0b5c8e5b8ab496e6c4ddd829 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 @@ -106,7 +106,7 @@ https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda# https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f 
-https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 @@ -118,7 +118,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8 https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.14.2-h32b2ec7_101_cp314.conda#051f60a9d1e3aae7160d173aeb7029f8 +https://conda.anaconda.org/conda-forge/linux-64/python-3.14.3-h32b2ec7_100_cp314.conda#b40594d5da041824087eebe12228af42 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 @@ -142,8 +142,8 @@ https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 
https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_105.conda#d58cd79121dd51128f2a5dab44edf1ea +https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 @@ -164,7 +164,7 @@ https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py314h9891d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py314h8ec4b1a_0.conda#f9b6a8fbb8dcb840a0c1c052dc5092e4 -https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh145f28c_0.conda#bf47878473e5ab9fdb4115735230e191 +https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 @@ -177,7 +177,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#4 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py314he82b845_1.conda#21dce7c80bbdb9785633011ad348e530 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 -https://conda.anaconda.org/conda-forge/noarch/pyyaml-6.0.3-pyh7db6752_0.conda#b12f41c0d7fb5ab81709fcc86579688f +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py314h67df5f8_1.conda#2035f68f96be30dc60a5dfd7452c7941 https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 @@ -190,7 +190,7 @@ https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py314h5bd0f2a_0.conda#e35f08043f54d26a1be93fdbf90d30c3 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.0-py314h5bd0f2a_1.conda#58e2ee530005067c5db23f33c6ab43d2 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 @@ -200,11 +200,11 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 -https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a01c169f0ab0f91b26e77a3301fbfe4 +https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py314h4a8dc5f_1.conda#cf45f4278afd6f4e6d03eda0f435d527 https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.2-py314h67df5f8_0.conda#ff4ed891a8646b56042ade345ee5c88e +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.3-py314h67df5f8_0.conda#705e973836337db525548d91d44ca3bc https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_1.conda#51b0391b0ce96be49b1174e9a3e4a279 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -216,7 +216,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.con https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 
https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.1-py314h2b28147_0.conda#9536e29f857e5d0565e92fd1b54de16a +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py314h2b28147_1.conda#4ea6b620fdf24a1a0bc4f1c7134dfafb https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py314h24aeaa0_2.conda#b46a7e6a2b8c064488576c3e42d85df0 @@ -230,10 +230,10 @@ https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py314hc02f841_1.conda#552b5d9d8a2a4be882e1c638953e7281 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py314h97ea11e_4.conda#95bede9cdb7a30a4b611223d52a01aa4 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.1-pyhcf101f3_1.conda#91e3b2a0d014ac032c066a2e18051686 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.conda#6ce4ad29c3ae0b74df813409433457ff +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_1.conda#7a8b949fb98c73b802b5e66a67dac140 
https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py314hc02f841_2.conda#55ac6d85f5dd8ec5e9919e7762fcb31a https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py314ha0b5721_2.conda#fe3a5c8be07a7b82058bdeb39d33d93b @@ -248,7 +248,7 @@ https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py314h9891dd4_0.co https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py314hc02f841_0.conda#de50a60eab348de04809a33e180b4b01 -https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.1-pyhcf101f3_1.conda#c15e359a982395be86a7576a91f9c5f5 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_0.conda#1eac93a6257796dd348d366a85f7f283 https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a @@ -281,4 +281,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 - From ed5fda79b2ee20a242d64ad384be2501fcbc1e2d Mon Sep 17 00:00:00 2001 From: Martin Yeo 
<40734014+trexfeathers@users.noreply.github.com> Date: Mon, 9 Feb 2026 11:20:34 +0000 Subject: [PATCH 15/77] Amend mo-jheming link. (#6935) --- docs/src/whatsnew/3.4.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/3.4.rst b/docs/src/whatsnew/3.4.rst index e8d4f0fd2b..cbea3eba8c 100644 --- a/docs/src/whatsnew/3.4.rst +++ b/docs/src/whatsnew/3.4.rst @@ -70,7 +70,7 @@ v3.4.1 (21 Feb 2023) non-existing paths, and added expansion functionality to :func:`~iris.io.save`. (:issue:`4772`, :pull:`4913`) -#. `@trexfeathers`_ and `Julian Heming`_ added new mappings between CF +#. `@trexfeathers`_ and `@mo-jheming`_ added new mappings between CF standard names and UK Met Office LBFC codes. (:pull:`4859`) #. `@pp-mo`_ changed the metadata of a face/edge-type @@ -286,7 +286,7 @@ v3.4.1 (21 Feb 2023) core dev names are automatically included by the common_links.inc: .. _@TTV-Intrepid: https://github.com/TTV-Intrepid -.. _Julian Heming: https://www.metoffice.gov.uk/research/people/julian-heming +.. _@mo-jheming: https://github.com/mo-jheming .. 
_@hsteptoe: https://github.com/hsteptoe From db8208cbb874eee1be8c864ccb18287da6c5ee9f Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Mon, 9 Feb 2026 14:34:59 +0000 Subject: [PATCH 16/77] Converted fileformats/um to pytest (#6930) * converted um_to_pp * converted optimal_array_structure * converted test_BasicFieldCollation * converted test_group_struct * converted test_FieldCollation * converted test__convert_collation --- .../um/fast_load/test_FieldCollation.py | 32 +++--- .../um/fast_load/test__convert_collation.py | 98 +++++++++---------- .../test_BasicFieldCollation.py | 80 ++++++++------- .../test_group_structured_fields.py | 36 +++---- .../test_optimal_array_structure.py | 80 +++++++-------- .../unit/fileformats/um/test_um_to_pp.py | 35 +++---- 6 files changed, 155 insertions(+), 206 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py index 35da9fab47..04efd4accc 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py @@ -11,23 +11,19 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest import iris +from iris.tests import _shared_utils from iris.tests.integration.fast_load.test_fast_load import Mixin_FieldTest -class TestFastCallbackLocationInfo(Mixin_FieldTest, tests.IrisTest): +class TestFastCallbackLocationInfo(Mixin_FieldTest): do_fast_loads = True - def setUp(self): - # Call parent setup. - super().setUp() - + @pytest.fixture(autouse=True) + def _setup(self): # Create a basic load test case. 
self.callback_collations = [] self.callback_filepaths = [] @@ -41,21 +37,17 @@ def fast_load_callback(cube, collation, filename): iris.load(self.test_filepath, callback=fast_load_callback) def test_callback_collations_filepaths(self): - self.assertEqual(len(self.callback_collations), 2) - self.assertEqual(self.callback_collations[0].data_filepath, self.test_filepath) - self.assertEqual(self.callback_collations[1].data_filepath, self.test_filepath) + assert len(self.callback_collations) == 2 + assert self.callback_collations[0].data_filepath == self.test_filepath + assert self.callback_collations[1].data_filepath == self.test_filepath def test_callback_collations_field_indices(self): - self.assertEqual(self.callback_collations[0].data_field_indices.dtype, np.int64) - self.assertArrayEqual( + assert self.callback_collations[0].data_field_indices.dtype == np.int64 + _shared_utils.assert_array_equal( self.callback_collations[0].data_field_indices, [[1, 3], [5, 7]] ) - self.assertEqual(self.callback_collations[1].data_field_indices.dtype, np.int64) - self.assertArrayEqual( + assert self.callback_collations[1].data_field_indices.dtype == np.int64 + _shared_utils.assert_array_equal( self.callback_collations[1].data_field_indices, [[0, 2], [4, 6]] ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py index 6d2c95eaff..52961badea 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py @@ -4,12 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.um._fast_load._convert_collation`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import cf_units import cftime import numpy as np @@ -34,7 +28,7 @@ ) -class Test(tests.IrisTest): +class Test: def _field(self): # Create PP field for X wind on a regular lat-lon grid. header = [0] * 64 @@ -55,28 +49,28 @@ def _field(self): def _check_phenomenon(self, metadata, factory=None): if factory is None: - self.assertEqual(metadata.factories, []) + assert metadata.factories == [] else: - self.assertEqual(metadata.factories, [factory]) - self.assertEqual(metadata.references, []) - self.assertEqual(metadata.standard_name, "x_wind") - self.assertIsNone(metadata.long_name) - self.assertEqual(metadata.units, cf_units.Unit("m s-1")) - self.assertEqual(metadata.attributes, {"STASH": (1, 0, 2)}) - self.assertEqual(metadata.cell_methods, []) + assert metadata.factories == [factory] + assert metadata.references == [] + assert metadata.standard_name == "x_wind" + assert metadata.long_name is None + assert metadata.units == cf_units.Unit("m s-1") + assert metadata.attributes == {"STASH": (1, 0, 2)} + assert metadata.cell_methods == [] - def test_all_scalar(self): + def test_all_scalar(self, mocker): field = self._field() field.lbtim = 11 field.t1 = cftime.datetime(1970, 1, 1, 18) field.t2 = cftime.datetime(1970, 1, 1, 12) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(), element_arrays_and_dims={} ) metadata = convert_collation(collation) self._check_phenomenon(metadata) coords_and_dims = [(LONGITUDE, 1), (LATITUDE, 0)] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord(18, "time", units="hours since epoch"), @@ -90,9 +84,9 @@ def test_all_scalar(self): ), (iris.coords.DimCoord(6, "forecast_period", units="hours"), None), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - 
def test_vector_t1(self): + def test_vector_t1(self, mocker): field = self._field() field.lbtim = 11 field.t2 = cftime.datetime(1970, 1, 1, 12) @@ -104,7 +98,7 @@ def test_vector_t1(self): ], [0], ) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"t1": t1}, @@ -119,7 +113,7 @@ def test_vector_t1(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord( @@ -132,9 +126,9 @@ def test_vector_t1(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vector_t2(self): + def test_vector_t2(self, mocker): field = self._field() field.lbtim = 11 field.t1 = cftime.datetime(1970, 1, 1, 18) @@ -146,7 +140,7 @@ def test_vector_t2(self): ], [0], ) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"t2": t2}, @@ -165,7 +159,7 @@ def test_vector_t2(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord(18, "time", units="hours since epoch"), @@ -176,15 +170,15 @@ def test_vector_t2(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vector_lbft(self): + def test_vector_lbft(self, mocker): field = self._field() field.lbtim = 21 field.t1 = cftime.datetime(1970, 1, 1, 12) field.t2 = cftime.datetime(1970, 1, 1, 18) lbft = ([18, 15, 12], [0]) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"lbft": lbft}, @@ -220,9 +214,9 @@ def test_vector_lbft(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert 
metadata.aux_coords_and_dims == coords_and_dims - def test_vector_t1_and_t2(self): + def test_vector_t1_and_t2(self, mocker): field = self._field() field.lbtim = 11 t1 = ( @@ -237,7 +231,7 @@ def test_vector_t1_and_t2(self): [cftime.datetime(1970, 1, 1, 12), cftime.datetime(1970, 1, 2, 0)], [0], ) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(2, 3), element_arrays_and_dims={"t1": t1, "t2": t2}, @@ -260,7 +254,7 @@ def test_vector_t1_and_t2(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.AuxCoord( @@ -271,14 +265,14 @@ def test_vector_t1_and_t2(self): (0, 1), ) ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vertical_pressure(self): + def test_vertical_pressure(self, mocker): field = self._field() field.lbvc = 8 blev = ([1000, 850, 700], (0,)) lblev = ([1000, 850, 700], (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"blev": blev, "lblev": lblev}, @@ -295,11 +289,11 @@ def test_vertical_pressure(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_soil_level(self): + def test_soil_level(self, mocker): field = self._field() field.lbvc = 6 points = [10, 20, 30] @@ -308,7 +302,7 @@ def test_soil_level(self): lblev = (points, (0,)) brsvd1 = (lower, (0,)) brlev = (upper, (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={ @@ -326,11 +320,11 @@ def test_soil_level(self): units="1", ) coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1), 
(level, (0,))] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_soil_depth(self): + def test_soil_depth(self, mocker): field = self._field() field.lbvc = 6 points = [10, 20, 30] @@ -339,7 +333,7 @@ def test_soil_depth(self): blev = (points, (0,)) brsvd1 = (lower, (0,)) brlev = (upper, (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={ @@ -358,11 +352,11 @@ def test_soil_depth(self): attributes={"positive": "down"}, ) coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1), (depth, (0,))] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vertical_hybrid_height(self): + def test_vertical_hybrid_height(self, mocker): field = self._field() field.lbvc = 65 blev = ([5, 18, 38], (0,)) @@ -373,7 +367,7 @@ def test_vertical_hybrid_height(self): bhrlev = ([1, 0.9989, 0.9970], (0,)) lblev = ([1, 2, 3], (0,)) bhlev = ([0.9994, 0.9979, 0.9957], (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={ @@ -410,7 +404,7 @@ def test_vertical_hybrid_height(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord( @@ -432,8 +426,4 @@ def test_vertical_hybrid_height(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - -if __name__ == "__main__": - tests.main() + assert metadata.aux_coords_and_dims == coords_and_dims diff --git 
a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py index a07672e43a..0b497d39d7 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py @@ -7,29 +7,27 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - from cftime import datetime import numpy as np +import pytest from iris._lazy_data import as_lazy_data import iris.fileformats.pp from iris.fileformats.um._fast_load_structured_fields import BasicFieldCollation +from iris.tests import _shared_utils -class Test___init__(tests.IrisTest): +class Test___init__: def test_no_fields(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): BasicFieldCollation([]) -class Test_fields(tests.IrisTest): +class Test_fields: def test_preserve_members(self): fields = ("foo", "bar", "wibble") collation = BasicFieldCollation(fields) - self.assertEqual(collation.fields, fields) + assert collation.fields == fields def _make_field(lbyr=None, lbyrd=None, lbft=None, blev=None, bhlev=None, data=None): @@ -60,7 +58,7 @@ def _make_data(fill_value): return as_lazy_data(np.ones(shape) * fill_value) -class Test_data(tests.IrisTest): +class Test_data: # Test order of the data attribute when fastest-varying element is changed. 
def test_t1_varies_faster(self): collation = BasicFieldCollation( @@ -75,7 +73,7 @@ def test_t1_varies_faster(self): ) result = collation.data[:, :, 0, 0] expected = [[0, 1, 2], [3, 4, 5]] - self.assertArrayEqual(result, expected) + _shared_utils.assert_array_equal(result, expected) def test_t2_varies_faster(self): collation = BasicFieldCollation( @@ -90,24 +88,26 @@ def test_t2_varies_faster(self): ) result = collation.data[:, :, 0, 0] expected = [[0, 1, 2], [3, 4, 5]] - self.assertArrayEqual(result, expected) + _shared_utils.assert_array_equal(result, expected) -class Test_element_arrays_and_dims(tests.IrisTest): +class Test_element_arrays_and_dims: def test_single_field(self): field = _make_field(2013) collation = BasicFieldCollation([field]) - self.assertEqual(collation.element_arrays_and_dims, {}) + assert collation.element_arrays_and_dims == {} def test_t1(self): collation = BasicFieldCollation( [_make_field(lbyr=2013), _make_field(lbyr=2014)] ) result = collation.element_arrays_and_dims - self.assertEqual(list(result.keys()), ["t1"]) + assert list(result.keys()) == ["t1"] values, dims = result["t1"] - self.assertArrayEqual(values, [datetime(2013, 1, 1), datetime(2014, 1, 1)]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal( + values, [datetime(2013, 1, 1), datetime(2014, 1, 1)] + ) + assert dims == (0,) def test_t1_and_t2(self): collation = BasicFieldCollation( @@ -118,19 +118,19 @@ def test_t1_and_t2(self): ] ) result = collation.element_arrays_and_dims - self.assertEqual(set(result.keys()), set(["t1", "t2"])) + assert set(result.keys()) == set(["t1", "t2"]) values, dims = result["t1"] - self.assertArrayEqual( + _shared_utils.assert_array_equal( values, [datetime(2013, 1, 1), datetime(2014, 1, 1), datetime(2015, 1, 1)], ) - self.assertEqual(dims, (0,)) + assert dims == (0,) values, dims = result["t2"] - self.assertArrayEqual( + _shared_utils.assert_array_equal( values, [datetime(2000, 1, 1), datetime(2001, 1, 1), datetime(2002, 1, 1)], ) - 
self.assertEqual(dims, (0,)) + assert dims == (0,) def test_t1_and_t2_and_lbft(self): collation = BasicFieldCollation( @@ -142,31 +142,33 @@ def test_t1_and_t2_and_lbft(self): ] ) result = collation.element_arrays_and_dims - self.assertEqual(set(result.keys()), set(["t1", "t2", "lbft"])) + assert set(result.keys()) == set(["t1", "t2", "lbft"]) values, dims = result["t1"] - self.assertArrayEqual(values, [datetime(1, 1, 1), datetime(11, 1, 1)]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal( + values, [datetime(1, 1, 1), datetime(11, 1, 1)] + ) + assert dims == (0,) values, dims = result["t2"] - self.assertArrayEqual( + _shared_utils.assert_array_equal( values, [ [datetime(15, 1, 1), datetime(16, 1, 1)], [datetime(25, 1, 1), datetime(26, 1, 1)], ], ) - self.assertEqual(dims, (0, 1)) + assert dims == (0, 1) values, dims = result["lbft"] - self.assertArrayEqual(values, [6, 9]) - self.assertEqual(dims, (1,)) + _shared_utils.assert_array_equal(values, [6, 9]) + assert dims == (1,) def test_blev(self): collation = BasicFieldCollation([_make_field(blev=1), _make_field(blev=2)]) result = collation.element_arrays_and_dims keys = set(["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"]) - self.assertEqual(set(result.keys()), keys) + assert set(result.keys()) == keys values, dims = result["blev"] - self.assertArrayEqual(values, [1, 2]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal(values, [1, 2]) + assert dims == (0,) def test_bhlev(self): collation = BasicFieldCollation( @@ -174,13 +176,13 @@ def test_bhlev(self): ) result = collation.element_arrays_and_dims keys = set(["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"]) - self.assertEqual(set(result.keys()), keys) + assert set(result.keys()) == keys values, dims = result["bhlev"] - self.assertArrayEqual(values, [1, 2]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal(values, [1, 2]) + assert dims == (0,) -class 
Test__time_comparable_int(tests.IrisTest): +class Test__time_comparable_int: def test(self): # Define a list of date-time tuples, which should remain both all # distinct and in ascending order when converted... @@ -208,10 +210,6 @@ def test(self): for test_tuple in test_date_tuples ] # Check all values are distinct. - self.assertEqual(len(test_date_ints), len(set(test_date_ints))) + assert len(test_date_ints) == len(set(test_date_ints)) # Check all values are in order. - self.assertEqual(test_date_ints, sorted(test_date_ints)) - - -if __name__ == "__main__": - tests.main() + assert test_date_ints == sorted(test_date_ints) diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py index 7c1a9113b4..ac9d4495aa 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py @@ -7,13 +7,8 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.um._fast_load_structured_fields import group_structured_fields +from iris.tests.unit.fileformats import MockerMixin def _convert_to_vector(value, length, default): @@ -33,7 +28,7 @@ def _convert_to_vector(value, length, default): return value -class Test__grouping(tests.IrisTest): +class Test__grouping(MockerMixin): def _dummy_fields_iter(self, stashes=None, models=None, lbprocs=None): # Make a group of test fields, and return an iterator over it. 
a_vec = [vec for vec in (stashes, models, lbprocs) if vec is not None] @@ -42,7 +37,7 @@ def _dummy_fields_iter(self, stashes=None, models=None, lbprocs=None): models = _convert_to_vector(models, number, default=71) lbprocs = _convert_to_vector(lbprocs, number, default=91) self.test_fields = [ - mock.MagicMock( + self.mocker.MagicMock( lbuser=[0, 0, 0, x_stash, 0, 0, x_model], lbproc=x_lbproc, i_field=ind + 1001, @@ -69,53 +64,46 @@ def _test_fields(self, item): def test_none(self): null_iter = (x for x in []) result = self._group_result(null_iter) - self.assertEqual(result, []) + assert result == [] def test_one(self): fields_iter = self._dummy_fields_iter(stashes=[1]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001,)])) + assert result == self._test_fields([(1001,)]) def test_allsame(self): fields_iter = self._dummy_fields_iter(stashes=[1, 1, 1]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1002, 1003)])) + assert result == self._test_fields([(1001, 1002, 1003)]) def test_stashes_different(self): fields_iter = self._dummy_fields_iter(stashes=[1, 1, 22, 1, 22, 333]) result = self._group_result(fields_iter) - self.assertEqual( - result, - self._test_fields([(1001, 1002, 1004), (1003, 1005), (1006,)]), - ) + assert result == self._test_fields([(1001, 1002, 1004), (1003, 1005), (1006,)]) def test_models_different(self): fields_iter = self._dummy_fields_iter(models=[10, 21, 10]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1003), (1002,)])) + assert result == self._test_fields([(1001, 1003), (1002,)]) def test_lbprocs_different(self): fields_iter = self._dummy_fields_iter(lbprocs=[991, 995, 991]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1003), (1002,)])) + assert result == self._test_fields([(1001, 1003), (1002,)]) def test_2d_combines(self): fields_iter = 
self._dummy_fields_iter( stashes=[11, 11, 15, 11], lbprocs=[31, 42, 31, 42] ) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001,), (1002, 1004), (1003,)])) + assert result == self._test_fields([(1001,), (1002, 1004), (1003,)]) def test_sortorder(self): fields_iter = self._dummy_fields_iter(stashes=[11, 7, 12]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1002,), (1001,), (1003,)])) + assert result == self._test_fields([(1002,), (1001,), (1003,)]) def test_sortorder_2d(self): fields_iter = self._dummy_fields_iter(stashes=[11, 11, 12], lbprocs=[31, 9, 1]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1002,), (1001,), (1003,)])) - - -if __name__ == "__main__": - tests.main() + assert result == self._test_fields([(1002,), (1001,), (1003,)]) diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index e6e9359c26..d4d98c59cd 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -7,33 +7,29 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.fileformats.um._optimal_array_structuring import optimal_array_structure +from iris.tests import _shared_utils class Test__optimal_dimensioning_structure: pass -class Test_optimal_array_structure(tests.IrisTest): +class Test_optimal_array_structure: def _check_arrays_and_dims(self, result, spec): - self.assertEqual(set(result.keys()), set(spec.keys())) + assert set(result.keys()) == set(spec.keys()) for keyname in spec.keys(): result_array, result_dims = result[keyname] spec_array, spec_dims = spec[keyname] - self.assertEqual( - result_dims, - spec_dims, + assert result_dims == spec_dims, ( 'element dims differ for "{}": result={!r}, expected {!r}'.format( keyname, result_dims, spec_dims - ), + ) ) - self.assertArrayEqual( + _shared_utils.assert_array_equal( result_array, spec_array, 'element arrays differ for "{}": result={!r}, expected {!r}'.format( @@ -42,22 +38,22 @@ def _check_arrays_and_dims(self, result, spec): ) def test_none(self): - with self.assertRaises(IndexError): + with pytest.raises(IndexError, match="index 0 is out of bounds"): _ = optimal_array_structure([], []) def test_one(self): # A single value does not make a dimension (no length-1 dims). 
elements = [("a", np.array([1]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, ()) - self.assertEqual(primaries, set()) - self.assertEqual(elems_and_dims, {}) + assert shape == () + assert primaries == set() + assert elems_and_dims == {} def test_1d(self): elements = [("a", np.array([1, 2, 4]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) + assert shape == (3,) + assert primaries == set("a") self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([1, 2, 4]), (0,))}) def test_1d_actuals(self): @@ -67,14 +63,14 @@ def test_1d_actuals(self): shape, primaries, elems_and_dims = optimal_array_structure( elements, actual_values ) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) + assert shape == (3,) + assert primaries == set("a") self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([7, 3, 9]), (0,))}) def test_actuals_mismatch_fail(self): elements = [("a", np.array([1, 2, 4]))] actual_values = [("b", np.array([7, 3, 9]))] - with self.assertRaisesRegex(ValueError, "Names.* do not match.*"): + with pytest.raises(ValueError, match="Names.* do not match.*"): shape, primaries, elems_and_dims = optimal_array_structure( elements, actual_values ) @@ -85,8 +81,8 @@ def test_2d(self): ("b", np.array([7, 8, 9, 7, 8, 9])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) + assert shape == (2, 3) + assert primaries == set(["a", "b"]) self._check_arrays_and_dims( elems_and_dims, {"a": (np.array([2, 3]), (0,)), "b": (np.array([7, 8, 9]), (1,))}, @@ -105,8 +101,8 @@ def test_2d_with_element_values(self): shape, primaries, elems_and_dims = optimal_array_structure( elements, elements_values ) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) + assert shape == (2, 3) + assert 
primaries == set(["a", "b"]) self._check_arrays_and_dims( elems_and_dims, {"a": (np.array([6, 8]), (0,)), "b": (np.array([3, 4, 5]), (1,))}, @@ -119,8 +115,8 @@ def test_non_2d(self): ("b", np.array([7, 8, 9, 7, 8])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (5,)) - self.assertEqual(primaries, set()) + assert shape == (5,) + assert primaries == set() self._check_arrays_and_dims( elems_and_dims, { @@ -133,16 +129,16 @@ def test_degenerate(self): # A all-same vector does not appear in the output. elements = [("a", np.array([1, 2, 3])), ("b", np.array([4, 4, 4]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set(["a"])) + assert shape == (3,) + assert primaries == set(["a"]) self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([1, 2, 3]), (0,))}) def test_1d_duplicates(self): # When two have the same structure, the first is 'the dimension'. elements = [("a", np.array([1, 3, 4])), ("b", np.array([6, 7, 9]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) + assert shape == (3,) + assert primaries == set("a") self._check_arrays_and_dims( elems_and_dims, { @@ -155,9 +151,9 @@ def test_1d_duplicates_order(self): # Same as previous but reverse passed order of elements 'a' and 'b'. 
elements = [("b", np.array([6, 7, 9])), ("a", np.array([1, 3, 4]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) + assert shape == (3,) # The only difference is the one chosen as 'principal' - self.assertEqual(primaries, set("b")) + assert primaries == set("b") self._check_arrays_and_dims( elems_and_dims, { @@ -173,8 +169,8 @@ def test_3_way(self): ("period", np.array([9, 8, 7])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set(["t1"])) + assert shape == (3,) + assert primaries == set(["t1"]) self._check_arrays_and_dims( elems_and_dims, { @@ -191,8 +187,8 @@ def test_mixed_dims(self): ("ft", np.array([15, 16, 15, 16])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 2)) - self.assertEqual(primaries, set(["t1", "ft"])) + assert shape == (2, 2) + assert primaries == set(["t1", "ft"]) self._check_arrays_and_dims( elems_and_dims, { @@ -209,10 +205,10 @@ def test_missing_dim(self): ("t2", np.array([15, 16, 25, 26])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (4,)) + assert shape == (4,) # The potential 2d nature can not be recognised. # 't1' is auxiliary, as it has duplicate values over the dimension. 
- self.assertEqual(primaries, set(["t2"])) + assert primaries == set(["t2"]) self._check_arrays_and_dims( elems_and_dims, { @@ -232,8 +228,8 @@ def test_optimal_structure_decision(self): ("d", np.array([10, 10, 10, 10, 10, 10])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) + assert shape == (2, 3) + assert primaries == set(["a", "b"]) self._check_arrays_and_dims( elems_and_dims, { @@ -242,7 +238,3 @@ def test_optimal_structure_decision(self): "b": (np.array([0, 1, 2]), (1,)), }, ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py index 05c2749f40..0581ac5ed4 100644 --- a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py +++ b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py @@ -7,44 +7,33 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.um import um_to_pp -class Test_call(tests.IrisTest): - def test__call(self): +class Test_call: + def test__call(self, mocker): # Check that the function creates an FF2PP and returns the result # of iterating over it. # Make a real (test) iterator object, as otherwise iter() complains... mock_iterator = (1 for x in ()) # Make a mock for the iter() call of an FF2PP object. - mock_iter_call = mock.MagicMock(return_value=mock_iterator) + mock_iter_call = mocker.MagicMock(return_value=mock_iterator) # Make a mock FF2PP object instance. - mock_ff2pp_instance = mock.MagicMock(__iter__=mock_iter_call) + mock_ff2pp_instance = mocker.MagicMock(__iter__=mock_iter_call) # Make the mock FF2PP class. 
- mock_ff2pp_class = mock.MagicMock(return_value=mock_ff2pp_instance) + mock_ff2pp_class = mocker.MagicMock(return_value=mock_ff2pp_instance) # Call um_to_pp while patching the um._ff_replacement.FF2PP class. test_path = "/any/old/file.name" - with mock.patch("iris.fileformats.um._ff_replacement.FF2PP", mock_ff2pp_class): - result = um_to_pp(test_path) + _ = mocker.patch("iris.fileformats.um._ff_replacement.FF2PP", mock_ff2pp_class) + result = um_to_pp(test_path) # Check that it called FF2PP in the expected way. - self.assertEqual( - mock_ff2pp_class.call_args_list, - [mock.call("/any/old/file.name", read_data=False)], - ) - self.assertEqual(mock_ff2pp_instance.__iter__.call_args_list, [mock.call()]) + assert mock_ff2pp_class.call_args_list == [ + mocker.call("/any/old/file.name", read_data=False) + ] + assert mock_ff2pp_instance.__iter__.call_args_list == [mocker.call()] # Check that it returned the expected result. - self.assertIs(result, mock_iterator) - - -if __name__ == "__main__": - tests.main() + assert result is mock_iterator From 10b6b06957cc9df4a1f5d0749b60bcd23af6666e Mon Sep 17 00:00:00 2001 From: Henry <84939917+HGWright@users.noreply.github.com> Date: Mon, 9 Feb 2026 15:39:55 +0000 Subject: [PATCH 17/77] Expand the Nimrod loader to include Tables 1, 3 & 4 (#6763) * add table 1 * add review comments * add table reference * remove duplicate entries * fixing tests * add tables 3 and 4 * add specific tests * Uncomment failing tests * improve tuples * regen cml for failing tests & fix name error * add enums * expand use of enum * Improve enum wording Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --------- Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --- lib/iris/fileformats/nimrod.py | 181 +++++++++++++----- lib/iris/fileformats/nimrod_load_rules.py | 100 +++++++++- .../results/nimrod/probability_fields.cml | 19 +- .../nimrod/u1096_ng_bmr04_precip_2km.cml | 1 + .../u1096_ng_bsr05_precip_accum60_2km.cml | 1 + 
.../nimrod/u1096_ng_ek00_radiation_2km.cml | 6 + .../nimrod/u1096_ng_ek00_radiationuv_2km.cml | 3 + lib/iris/tests/test_nimrod.py | 54 ++++++ 8 files changed, 309 insertions(+), 56 deletions(-) diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index d318c94882..d646d71a30 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -4,6 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. """Provides NIMROD file format capabilities.""" +from enum import Enum import glob import os import struct @@ -14,6 +15,7 @@ import iris from iris.exceptions import TranslationError import iris.fileformats.nimrod_load_rules +from iris.fileformats.nimrod_load_rules import Table # general header (int16) elements 1-31 (Fortran bytes 1-62) general_header_int16s = ( @@ -101,59 +103,97 @@ # data specific header (int16) elements 108-159 (Fortran bytes 411-512) -data_header_int16s = ( - "threshold_type", - "probability_method", - "recursive_filter_iterations", - "member_count", - "probability_period_of_event", +table_1_data_header_int16s = ( + ( + "radar_number", + "radar_sites", + "additional_radar_sites", + "clutter_map_number", + "calibration_type", + "bright_band_height", + "bright_band_intensity", + "bright_band_test_param_1", + "bright_band_test_param_2", + "infill_flag", + "stop_elevation", + "copy_vertical_coord", + "copy_reference_vertical_coord", + "copy_y_origin", + "copy_row_step", + "copy_x_origin", + "copy_column_step", + "copy_float32_mdi", + "copy_MKS_data_scaling", + "copy_data_offset", + "copy_x_offset", + "copy_y_offset", + "copy_true_origin_latitude", + "copy_true_origin_longitude", + "copy_tl_y", + "copy_tl_x", + "copy_tr_y", + "copy_tr_x", + "copy_br_y", + "copy_br_x", + "copy_bl_y", + "copy_bl_x", + "sensor_identifier", + "meteosat_identifier", + "availability_of_synop_meteosat", + "software_identifier", + "software_major_version", + "software_minor_version", + "software_micro_version", + ) 
+ + tuple(f"data_header_int16_{i}" for i in range(48, 59)) + + ("period_seconds",) +) + + +# data specific header (int16) elements 108-159 (Fortran bytes 411-512) +table_2_data_header_int16s = ( + ( + "threshold_type", + "probability_method", + "recursive_filter_iterations", + "member_count", + "probability_period_of_event", + ) + + tuple(f"data_header_int16_{i}" for i in range(5, 59)) + + ("period_seconds",) +) + + +# data specific header (int16) elements 108-159 (Fortran bytes 411-512) +table_3_data_header_int16s = ( + "data_header_int16_00", + "data_header_int16_01", + "data_header_int16_02", + "data_header_int16_03", + "data_header_int16_04", "data_header_int16_05", "soil_type", + "data_header_int16_07", + "data_header_int16_08", + "data_header_int16_09", + "data_header_int16_10", +) + tuple(f"data_header_int16_{i}" for i in range(11, 60)) + + +# data specific header (int16) elements 108-159 (Fortran bytes 411-512) +table_4_data_header_int16s = ( + "data_header_int16_00", + "data_header_int16_01", + "data_header_int16_02", + "data_header_int16_03", + "data_header_int16_04", + "data_header_int16_05", + "data_header_int16_06", "radiation_code", "data_header_int16_08", "data_header_int16_09", "data_header_int16_10", - "data_header_int16_11", - "data_header_int16_12", - "data_header_int16_13", - "data_header_int16_14", - "data_header_int16_15", - "data_header_int16_16", - "data_header_int16_17", - "data_header_int16_18", - "data_header_int16_19", - "data_header_int16_20", - "data_header_int16_21", - "data_header_int16_22", - "data_header_int16_23", - "data_header_int16_24", - "data_header_int16_25", - "data_header_int16_26", - "data_header_int16_27", - "data_header_int16_28", - "data_header_int16_29", - "data_header_int16_30", - "data_header_int16_31", - "data_header_int16_32", - "data_header_int16_33", - "data_header_int16_34", - "data_header_int16_35", - "data_header_int16_36", - "data_header_int16_37", - "data_header_int16_38", - "data_header_int16_39", - 
"data_header_int16_40", - "data_header_int16_41", - "data_header_int16_42", - "data_header_int16_43", - "data_header_int16_44", - "data_header_int16_45", - "data_header_int16_46", - "data_header_int16_47", - "data_header_int16_48", - "data_header_int16_49", - "period_seconds", -) +) + tuple(f"data_header_int16_{i}" for i in range(11, 60)) def _read_chars(infile, num): @@ -226,6 +266,53 @@ def _read_header(self, infile): self.source = _read_chars(infile, 24) self.title = _read_chars(infile, 24) + # determine which of Table 1, 2, 3 or 4 is being used + Table_3_field_codes = [ + 18, + 144, + 190, + 191, + 192, + 193, + 194, + 196, + 197, + 198, + 199, + 201, + 202, + 203, + 204, + 218, + 219, + 301, + 302, + 901, + 8229, + 8230, + ] + Table_4_field_codes = [90, 91, 92, 93, 96, 303] + default_float_threshold = self.float32_mdi + threshold_set = ( + self.threshold_value != default_float_threshold + or self.threshold_value_alt != default_float_threshold + ) + # The `Table` enum is defined in iris.fileformats.nimrod_load_rules + if self.field_code in Table_3_field_codes: + table = Table.table_3 + data_header_int16s = table_3_data_header_int16s + elif self.field_code in Table_4_field_codes: + table = Table.table_4 + data_header_int16s = table_4_data_header_int16s + elif threshold_set: + table = Table.table_2 + data_header_int16s = table_2_data_header_int16s + else: + table = Table.table_1 + data_header_int16s = table_1_data_header_int16s + + self.table = table + # data specific header (int16) elements 108- (bytes 411-512) self._read_header_subset(infile, data_header_int16s, np.int16) # skip unnamed int16s diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 4b3987003a..07b9f2a27e 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -4,6 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Rules for converting NIMROD fields into cubes.""" +from enum import Enum import re import string import warnings @@ -31,6 +32,14 @@ ) +class Table(Enum): + # The NIMROD documentation defines four tables of fields, with different header contents and load rules. + table_1 = "Table_1" + table_2 = "Table_2" + table_3 = "Table_3" + table_4 = "Table_4" + + class TranslationWarning(IrisNimrodTranslationWarning): """Backwards compatible form of :class:`iris.warnings.IrisNimrodTranslationWarning`.""" @@ -636,10 +645,8 @@ def add_attr(item): cube_source = "Nimrod pwind routine" for key in [ "neighbourhood_radius", - "recursive_filter_iterations", "recursive_filter_alpha", "threshold_vicinity_radius", - "probability_period_of_event", ]: add_attr(key) @@ -660,6 +667,60 @@ def add_attr(item): cube.attributes["institution"] = "Met Office" +def table_1_attributes(cube, field): + """Add attributes to the cube.""" + # TODO: This section may need to be changed in the future + # as there may be some of these attributes that can be promoted into coords + # but we in AVD do not have that level of domain knowledge to make those decisions + + def add_attr(item): + """Add an attribute to the cube.""" + if hasattr(field, item): + value = getattr(field, item) + if is_missing(field, value): + return + cube.attributes[item] = value + + for key in [ + "radar_number", + "radar_sites", + "additional_radar_sites", + "clutter_map_number", + "calibration_type", + "bright_band_height", + "bright_band_intensity", + "bright_band_test_param_1", + "bright_band_test_param_2", + "infill_flag", + "stop_elevation", + "sensor_identifier", + "meteosat_identifier", + "software_identifier", + "software_major_version", + "software_minor_version", + "software_micro_version", + ]: + add_attr(key) + + +def table_2_attributes(cube, field): + """Add attributes to the cube.""" + # TODO: This section may need to be changed in the future + # as there may be some of these attributes that can be promoted into coords + 
# but we in AVD do not have that level of domain knowledge to make those decisions + + def add_attr(item): + """Add an attribute to the cube.""" + if hasattr(field, item): + value = getattr(field, item) + if is_missing(field, value): + return + cube.attributes[item] = value + + for key in ["recursive_filter_iterations", "probability_period_of_event"]: + add_attr(key) + + def known_threshold_coord(field): """Supply known threshold coord meta-data for known use cases. @@ -865,6 +926,26 @@ def soil_type_coord(cube, field): ) +def radiation_type_attr(cube, field): + """Decode the Radiation Types codes - similar to time_averaging.""" + radiation_codes = { + 64: 'instantaneous ("corrected")', + 32: "upward_radiation", + 16: "downward_radiation", + 8: "diffuse_radiation", + 4: "direct_radiation", + 2: "clear_sky_radiation", + } + num = field.radiation_code + radiation_types = [] + for key in sorted(radiation_codes.keys(), reverse=True): + if num >= key: + radiation_types.append(radiation_codes[key]) + num = num - key + if radiation_types: + cube.attributes["radiation_type"] = radiation_types + + def time_averaging(cube, field): """Decode the averagingtype code - similar to the PP LBPROC code.""" time_averaging_codes = { @@ -930,9 +1011,18 @@ def run(field, handle_metadata_errors=True): # vertical vertical_coord(cube, field) - # add other stuff, if present - soil_type_coord(cube, field) - probability_coord(cube, field, handle_metadata_errors) + match field.table: + case Table.table_1: + table_1_attributes(cube, field) + case Table.table_2: + probability_coord(cube, field, handle_metadata_errors) + table_2_attributes(cube, field) + case Table.table_3: + soil_type_coord(cube, field) + case Table.table_4: + radiation_type_attr(cube, field) + + # add other generic stuff, if present ensemble_member(cube, field) time_averaging(cube, field) attributes(cube, field) diff --git a/lib/iris/tests/results/nimrod/probability_fields.cml 
b/lib/iris/tests/results/nimrod/probability_fields.cml index 29202b52bc..726698e59b 100644 --- a/lib/iris/tests/results/nimrod/probability_fields.cml +++ b/lib/iris/tests/results/nimrod/probability_fields.cml @@ -2,6 +2,7 @@ + @@ -92,6 +93,7 @@ + @@ -464,6 +466,7 @@ + @@ -540,6 +543,7 @@ + @@ -667,6 +671,7 @@ + @@ -745,6 +750,7 @@ + @@ -970,6 +976,7 @@ + @@ -1043,7 +1050,6 @@ - @@ -1062,9 +1068,6 @@ - - - @@ -1131,6 +1134,7 @@ + @@ -1214,6 +1218,7 @@ + @@ -1304,6 +1309,7 @@ + @@ -1347,6 +1353,7 @@ + @@ -1496,6 +1503,7 @@ + @@ -1586,6 +1594,7 @@ + @@ -1735,6 +1744,7 @@ + @@ -1778,6 +1788,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml index 3d04a5dd47..65698423a1 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml @@ -7,6 +7,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml index cd9bf79f6a..45890bbaaa 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml @@ -7,6 +7,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml index 5defc4b03c..af271ce196 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml @@ -6,6 +6,7 @@ + @@ -45,6 +46,7 @@ + @@ -123,6 +125,7 @@ + @@ -162,6 +165,7 @@ + @@ -201,6 +205,7 @@ + @@ -240,6 +245,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml index 0ae03b18b9..72239f5279 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml +++ 
b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml @@ -6,6 +6,7 @@ + @@ -45,6 +46,7 @@ + @@ -84,6 +86,7 @@ + diff --git a/lib/iris/tests/test_nimrod.py b/lib/iris/tests/test_nimrod.py index 93b028aff2..625404e96c 100644 --- a/lib/iris/tests/test_nimrod.py +++ b/lib/iris/tests/test_nimrod.py @@ -9,6 +9,7 @@ import iris from iris.exceptions import TranslationError import iris.fileformats.nimrod_load_rules as nimrod_load_rules +from iris.fileformats.nimrod_load_rules import radiation_type_attr from iris.tests import _shared_utils @@ -158,3 +159,56 @@ def test_period_of_interest(self, request): nimrod_load_rules.time(cube, field) _shared_utils.assert_CML(request, cube, ("nimrod", "period_of_interest.cml")) + + +class TestNimrodTables: + # Testing that the table-based load rules work as expected + + def test_table_1(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_1" + field.clutter_map_number = 5 + + nimrod_load_rules.table_1_attributes(cube, field) + + assert "clutter_map_number" in cube.attributes + + def test_table_2(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_2" + field.field_code = 45 + field.threshold_type = 2 + field.threshold_value_alt = 2 + field.threshold_fuzziness = 1 + field.probability_method = 1 + field.probability_field_of_event = 3 + + nimrod_load_rules.probability_coord(cube, field, handle_metadata_errors=False) + + assert "Probability methods" in cube.attributes + + def test_table_3(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_3" + field.soil_type = 8 + + nimrod_load_rules.soil_type_coord(cube, field) + + assert cube.coord("soil_type") + + def test_table_4(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_4" + field.radiation_code = 16 + + 
nimrod_load_rules.radiation_type_attr(cube, field) + + assert "radiation_type" in cube.attributes From 82a10338e068fa65b7f905ecd5152f07d2e28478 Mon Sep 17 00:00:00 2001 From: Henry <84939917+HGWright@users.noreply.github.com> Date: Tue, 10 Feb 2026 10:12:44 +0000 Subject: [PATCH 18/77] added whatsnew for pr 6763 (#6938) * added whatsnew for pr 6763 * Update docs/src/whatsnew/latest.rst Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --------- Co-authored-by: Elias <110238618+ESadek-MO@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 730880b368..5ad52f4787 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -48,6 +48,8 @@ This document explains the changes made to Iris for this release :func:`~iris.cube.Cube.rolling_window` and :func:`~iris.cube.Cube.intersection` to work with dataless cubes. (:pull:`6860`, :pull:`6757`) +#. `@HGWright`_ added to the Nimrod loader to expand the types of Nimrod files it can load. This includes selecting which Nimrod table to use the data entry headers from. (:issue:`4505`, :pull:`6763`) + 🐛 Bugs Fixed ============= From a15e5b7e9f972a87cc930cb17a581cc3280a939f Mon Sep 17 00:00:00 2001 From: Bill Little Date: Tue, 10 Feb 2026 11:21:26 +0000 Subject: [PATCH 19/77] docs: add `:user:` extlinks convenience (#6936) --- docs/src/common_links.inc | 1 + docs/src/conf.py | 1 + .../documenting/whats_new_contributions.rst | 13 +++++++++++++ docs/src/whatsnew/latest.rst | 3 +++ 4 files changed, 18 insertions(+) diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index a516332aaf..247372cc10 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -47,6 +47,7 @@ .. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid .. _netCDF4: https://github.com/Unidata/netcdf4-python .. 
_SciTools Contributor's License Agreement (CLA): https://cla-assistant.io/SciTools/ +.. _extlinks: https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html .. comment diff --git a/docs/src/conf.py b/docs/src/conf.py index 4669b423fd..8f45de51c5 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -288,6 +288,7 @@ def _dotv(version): "https://github.com/SciTools/iris/discussions/%s", "Discussion #%s", ), + "user": ("https://github.com/%s", "@%s"), } # -- Doctest ("make doctest")-------------------------------------------------- diff --git a/docs/src/developers_guide/documenting/whats_new_contributions.rst b/docs/src/developers_guide/documenting/whats_new_contributions.rst index 82569e57a0..a6b7d13148 100644 --- a/docs/src/developers_guide/documenting/whats_new_contributions.rst +++ b/docs/src/developers_guide/documenting/whats_new_contributions.rst @@ -82,6 +82,18 @@ The required content, in order, is as follows: .. _@tkknight: https://github.com/tkknight + .. hint:: + + Alternatively adopt the ``:user:`` `extlinks`_ convenience instead. + + For example to reference the ``github`` user ``tkknight`` simply use + :literal:`:user:\`tkknight\``. + + This will be rendered as :user:`tkknight`. + + In addition, there is now no need to add a full reference to the user within + the documentation. + * A succinct summary of the new/changed behaviour. * Context to the change. Possible examples include: what this fixes, why @@ -143,3 +155,4 @@ users. To achieve this several categories may be used. **💼 Internal** Changes to any internal or development related topics, such as testing, environment dependencies etc. + diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 5ad52f4787..7ee91f046e 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -88,6 +88,9 @@ This document explains the changes made to Iris for this release #. `@tkknight`_ added a gallery carousel to the documentation homepage. (:pull:`6884`) +#. 
:user:`bjlittle` added the ``:user:`` `extlinks`_ ``github`` user convenience. + (:pull:`6931`) + 💼 Internal =========== From 4d1d419a412d62b17f1b72c42db2198d7623e3fe Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Wed, 11 Feb 2026 13:52:36 +0000 Subject: [PATCH 20/77] final few (#6937) --- .../test_ArrayStructure.py | 2 +- .../test_GroupStructure.py | 10 +++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index bc461f84ee..d28030f619 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -125,7 +125,7 @@ def test_not_an_array(self): assert ArrayStructure.from_array([1, 2, 3]) == ArrayStructure(1, [1, 2, 3]) def test_multi_dim_array(self): - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="The given array must be 1D."): ArrayStructure.from_array(np.arange(12).reshape(3, 4)) def test_eq_incompatible_shapes(self): diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py index ef2d1d2e75..f4c6fa8a44 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py @@ -7,10 +7,6 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np import pytest @@ -32,7 +28,7 @@ def regular_array_structures(shape, names="abcdefg"): return array_structures -class TestGroupStructure_from_component_arrays(tests.IrisTest): +class TestGroupStructure_from_component_arrays: def test_different_sizes(self): arrays = {"a": np.arange(6), "b": np.arange(5)} msg = "All array elements must have the same size." @@ -51,7 +47,7 @@ def test_structure_creation(self): assert grp._cmpt_structure == expected_structure -class TestGroupStructure_possible_structures(tests.IrisTest): +class TestGroupStructure_possible_structures: def test_simple_3d_structure(self): # Construct a structure representing a (3, 2, 4) group and assert # that the result is of the expected form. @@ -114,7 +110,7 @@ def test_completely_unstructured_element(self): self.assert_potentials(24, array_structures, [["a", "b", "c"]]) -class TestGroupStructure_build_arrays(tests.IrisTest): +class TestGroupStructure_build_arrays: def assert_built_array(self, name, result, expected): ex_arr, ex_dims = expected re_arr, re_dims = result[name] From d0edcb5a2675c5be0f2d0a1eeedd62b250cbab73 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 16 Feb 2026 15:20:08 +0000 Subject: [PATCH 21/77] Updated environment lockfiles (#6944) Co-authored-by: Lockfile bot --- requirements/locks/py312-linux-64.lock | 41 +++++++++++++++----------- requirements/locks/py313-linux-64.lock | 39 +++++++++++++----------- requirements/locks/py314-linux-64.lock | 39 ++++++++++++------------ 3 files changed, 64 insertions(+), 55 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index c82a93cc44..dc4fbb95b8 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -2,7 +2,6 @@ # platform: linux-64 # input_hash: f6f5de785dfa266ec64d091b66f6ab12432b446820ea95baba9f63fee66c3ce4 
@EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb @@ -16,7 +15,7 @@ https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_ https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 @@ -74,7 +73,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172b https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -104,8 +103,8 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_1.conda#5b5846bc2b23e07a1d61b89dcb67fcf0 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 @@ -123,14 +122,14 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_ 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda#66a1db55ecdb7377d2b91f54cd56eafa -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda#db038ce880f100acc74dba10302b5630 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_1.conda#5ebd79c20c7ecf979f20e26fedc0a4fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -164,9 +163,10 @@ https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda# 
https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py312h68e6be4_0.conda#14f638dad5953c83443a2c4f011f1c9e https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 +https://conda.anaconda.org/conda-forge/noarch/docstring_parser-0.17.0-pyhd8ed1ab_0.conda#ce49d3e5a7d20be2ba57a2c670bdd82e https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.21.2-pyhd8ed1ab_0.conda#0b3041d2b101e48bee2df46dffd6f047 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda#63e20cf7b7460019b423fc06abb96c60 https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 @@ -191,15 +191,16 @@ https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1. 
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/noarch/loguru-0.7.3-pyh707e725_0.conda#49647ac1de4d1e4b49124aedf3934e02 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py312h8a5da7c_0.conda#f775a43412f7f3d7ed218113ad233869 +https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py312hd9148b4_1.conda#2e489969e38f0b428c39492619b5e6e5 https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py312h8a5da7c_0.conda#9fe4c848dd01cde9b8d0073744d4eef8 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py312h50c33e8_0.conda#923b06ad75b7acc888fa20a22dc397cd -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 +https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py312h50c33e8_0.conda#c5eff3ada1a829f0bdb780dc4b62bbae +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.7.0-pyhcf101f3_0.conda#2157d0900a4bc2e9a0ba3cccb8497e8c https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py312h178313f_0.conda#0cf580c1b73146bb9ff1bbdb4d4c8cf9 
https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py312h5253ce2_0.conda#dd94c506b119130aef5a9382aed648e7 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef @@ -212,7 +213,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py312h0d868a https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py312h8a5da7c_1.conda#15878599a87992e44c059731771591cb https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c +https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda#1d00d46c634177fc8ede8b99d6089239 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d @@ -223,7 +224,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py312h4c3975b_0.conda#e03a4bf52d2170d64c816b2a52972097 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.0-py312h4c3975b_1.conda#a0b8efbe73c90f810a171a6c746be087 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.1-py312h4c3975b_0.conda#0b6c506ec1f272b685240e70a29261b8 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda#4d1fc190b99912ed557a8236e958c559 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b @@ -241,7 +242,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py312h460c074_1.conda https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.3-py312h8a5da7c_0.conda#f3c0bc6e23bd3653d74390aa644b0a95 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py312h8a5da7c_0.conda#a8df7f0812ac4fa6bbc7135556d3e2c4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_1.conda#693cda60b9223f55d0836c885621611b https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -253,10 +254,11 @@ https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04 https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 
-https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 -https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 +https://conda.anaconda.org/conda-forge/linux-64/libpq-18.2-hb80d175_0.conda#fa63c385ddb50957d93bdb394e355be8 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 +https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda#5b5203189eb668f042ac2b0826244964 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py312h33ff503_1.conda#3569a8fca2dd3202e4ab08f42499f6d3 @@ -281,7 +283,7 @@ https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.co https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_1.conda#7a8b949fb98c73b802b5e66a67dac140 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_0.conda#367b1cd271b8aa39170f68826a94d65b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h4f23490_2.conda#cec5bc5f7d374f8f8095f8e28e31f6cb 
https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py312hf79963d_1.conda#e597b3e812d9613f659b7d87ad252d18 @@ -289,6 +291,7 @@ https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda#67bd https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.conda#1af24b0eb99b1158c0d8c64a4d6c5d79 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py312h4f23490_2.conda#ab856c36638ab1acf90e70349c525cf9 +https://conda.anaconda.org/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda#33950a076fd589a7655c6888cc3d2b34 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py312h54fa4ab_1.conda#828eb07c4c87c38ed8c6560c25893280 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py312h383787d_2.conda#69e400d3deca12ee7afd4b73a5596905 @@ -314,9 +317,11 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py312h4c3975b_3.conda#b0610b4174af97290f5f466a72583071 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py312hcedc861_0.conda#f0d110978a87b200a06412b56b26407c https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 +https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py312hf79963d_1.conda#6c913a686cb4060cbd7639a36fa144f0 
https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e +https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.1-pyhcf101f3_0.conda#657c51cee8d0d5893329ec30a0e1a5a4 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 @@ -328,7 +333,7 @@ https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py312h6fba518_7.conda#2edca3790f2a372db44ff1aa159769fc https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.5-pyhd8ed1ab_0.conda#4454f5c41511ece8a81a177043bc8c3b +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.0-pyhd8ed1ab_0.conda#dd7ebc742e6a766322ed77931123631e https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index 0b48f09a09..03f559315b 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -2,7 +2,6 @@ # platform: linux-64 # input_hash: 9e22298d3c86ab9a2d785adbe961656f88dda327f4b1b70155fd64231d47d1f3 @EXPLICIT 
-https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb @@ -16,7 +15,7 @@ https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_ https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 @@ -75,7 +74,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172b https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -105,8 +104,8 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_1.conda#5b5846bc2b23e07a1d61b89dcb67fcf0 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 @@ -124,14 +123,14 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_ 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda#66a1db55ecdb7377d2b91f54cd56eafa -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda#db038ce880f100acc74dba10302b5630 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_1.conda#5ebd79c20c7ecf979f20e26fedc0a4fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -165,9 +164,10 @@ https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda# 
https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py313hc80a56d_0.conda#4a08e7dd57fdc0a13dc699c4c6d76c3a https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 +https://conda.anaconda.org/conda-forge/noarch/docstring_parser-0.17.0-pyhd8ed1ab_0.conda#ce49d3e5a7d20be2ba57a2c670bdd82e https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.21.2-pyhd8ed1ab_0.conda#0b3041d2b101e48bee2df46dffd6f047 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py313h6b9daa2_0.conda#3a0be7abedcbc2aee92ea228efea8eba https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 @@ -192,16 +192,17 @@ https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1. 
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/noarch/loguru-0.7.3-pyh707e725_0.conda#49647ac1de4d1e4b49124aedf3934e02 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py313h3dea7bd_0.conda#c14389156310b8ed3520d84f854be1ee +https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py313h7037e92_1.conda#cd1cfde0ea3bca6c805c73ffa988b12a https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py313h3dea7bd_0.conda#d182804a222acc8f2c7e215f344d229f https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py313h80991f8_0.conda#183fe6b9e99e5c2b464c1573ec78eac8 +https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py313h80991f8_0.conda#2d5ee4938cdde91a8967f3eea686c546 https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.7.0-pyhcf101f3_0.conda#2157d0900a4bc2e9a0ba3cccb8497e8c https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 
https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py313h8060acc_0.conda#b62867739241368f43f164889b45701b https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py313h54dd161_0.conda#25fe6e02c2083497b3239e21b49d8093 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef @@ -214,7 +215,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py313heab575 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py313h3dea7bd_1.conda#f256753e840c3cd3766488c9437a8f8b https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c +https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda#1d00d46c634177fc8ede8b99d6089239 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d @@ -242,7 +243,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py313hf46b229_1.conda https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.3-py313h3dea7bd_0.conda#fa90494c873b21b3bdb21c3b588ff043 
+https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py313h3dea7bd_0.conda#77e1fc7133e03ccd62070f2405c82ea9 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_1.conda#bcca9afd203fe05d9582249ac12762da https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -254,10 +255,11 @@ https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04 https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 -https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 +https://conda.anaconda.org/conda-forge/linux-64/libpq-18.2-hb80d175_0.conda#fa63c385ddb50957d93bdb394e355be8 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 +https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda#5b5203189eb668f042ac2b0826244964 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c 
https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py313hf6604e3_1.conda#ca9c6ba4beac38cb3d0a85afde27f94c @@ -282,13 +284,14 @@ https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.co https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_1.conda#7a8b949fb98c73b802b5e66a67dac140 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_0.conda#367b1cd271b8aa39170f68826a94d65b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py313h29aa505_2.conda#ad53894d278895bf15c8fc324727d224 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py313h08cd8bf_2.conda#8a69ea71fdd37bfe42a28f0967dbb75a https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py313h29aa505_2.conda#e3a598d20bf2fa7b03a771e9e4471be9 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py313h29aa505_2.conda#60f5d1c039da10fe89a530cc93ea50ac +https://conda.anaconda.org/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda#33950a076fd589a7655c6888cc3d2b34 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py313h4b8bb8b_1.conda#2b18fe5b4b2d1611ddf8c2f080a46563 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 
https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py313had47c43_2.conda#6e550dd748e9ac9b2925411684e076a1 @@ -314,9 +317,11 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py313h07c4f96_3.conda#b7810803a3481e22968022a94107ed93 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py313h2005660_0.conda#d551bd1d2fcfac36674dbe2be4b0a410 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 +https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py313h08cd8bf_1.conda#a0d8dc5c90850d9f1a79f69c98aef0ff https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e +https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.1-pyhcf101f3_0.conda#657c51cee8d0d5893329ec30a0e1a5a4 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 @@ -327,7 +332,7 @@ https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py313hbb97348_7.conda#03c6ddd039b6877278b5c4df20b61f29 https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e 
-https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.5-pyhd8ed1ab_0.conda#4454f5c41511ece8a81a177043bc8c3b +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.0-pyhd8ed1ab_0.conda#dd7ebc742e6a766322ed77931123631e https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock index 007d466897..a0c6d9652c 100644 --- a/requirements/locks/py314-linux-64.lock +++ b/requirements/locks/py314-linux-64.lock @@ -2,7 +2,6 @@ # platform: linux-64 # input_hash: 51877f045987ca3eb18cf2b23a50d599952703dc6a6fe8a5f1fcbcdce93433ab @EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb @@ -14,7 +13,7 @@ https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_ https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d 
+https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_17.conda#3c281169ea25b987311400d7a7e28445 @@ -63,7 +62,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.54-h421ea60_0.conda#d361fa2a59e53b61c2675bfa073e5b7e +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -84,11 +83,11 @@ https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 
-https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-ha09017c_8.conda#6e9bf4ce797d0216bd2a58298b6290b5 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_1.conda#5b5846bc2b23e07a1d61b89dcb67fcf0 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 @@ -101,24 +100,24 @@ https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#35 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda#66a1db55ecdb7377d2b91f54cd56eafa -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda#db038ce880f100acc74dba10302b5630 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a 
https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_1.conda#5ebd79c20c7ecf979f20e26fedc0a4fd https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h7a8fb5f_6.conda#49c553b47ff679a6a1e9fc80b9c5a2d4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-hcf29cc6_1.conda#1707cdd636af2ff697b53186572c9f77 https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.1-ha770c72_0.conda#f4084e4e6577797150f9b04a4560ceb0 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca 
https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.14.3-h32b2ec7_100_cp314.conda#b40594d5da041824087eebe12228af42 +https://conda.anaconda.org/conda-forge/linux-64/python-3.14.3-h32b2ec7_101_cp314.conda#c014ad06e60441661737121d3eae8a60 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 @@ -140,7 +139,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py314h1807b08_0.con https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.3-pyhd8ed1ab_0.conda#2cfaaccf085c133a477f0a7a8657afe9 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.21.2-pyhd8ed1ab_0.conda#0b3041d2b101e48bee2df46dffd6f047 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 @@ -163,11 +162,11 @@ https://conda.anaconda.org/conda-forge/noarch/markupsafe-3.0.3-pyh7db6752_0.cond 
https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py314h9891dd4_1.conda#c6752022dcdbf4b9ef94163de1ab7f03 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 -https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py314h8ec4b1a_0.conda#f9b6a8fbb8dcb840a0c1c052dc5092e4 +https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py314h8ec4b1a_0.conda#79678378ae235e24b3aa83cee1b38207 https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.7.0-pyhcf101f3_0.conda#2157d0900a4bc2e9a0ba3cccb8497e8c https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_2.conda#beb1885cfdb793193bba83c9720d53b1 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py314h0f05182_0.conda#4f225a966cfee267a79c5cb6382bd121 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 @@ -178,7 +177,7 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py314he82b845_1.conda#21dce7c80bbdb9785633011ad348e530 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py314h67df5f8_1.conda#2035f68f96be30dc60a5dfd7452c7941 -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.10.2-pyh332efcf_0.conda#7b446fcbb6779ee479debb4fd7453e6c +https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda#1d00d46c634177fc8ede8b99d6089239 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d @@ -189,7 +188,7 @@ https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py314h5bd0f2a_0.conda#e35f08043f54d26a1be93fdbf90d30c3 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.0-py314h5bd0f2a_1.conda#58e2ee530005067c5db23f33c6ab43d2 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.1-py314h5bd0f2a_0.conda#494fdf358c152f9fdd0673c128c2f3dd https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 @@ 
-204,7 +203,7 @@ https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py314h4a8dc5f_1.conda#cf45f4278afd6f4e6d03eda0f435d527 https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.3-py314h67df5f8_0.conda#705e973836337db525548d91d44ca3bc +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py314h67df5f8_0.conda#6c7efc167cee337d9c41200506d022b8 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_1.conda#51b0391b0ce96be49b1174e9a3e4a279 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -213,7 +212,7 @@ https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc4 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f 
https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py314h2b28147_1.conda#4ea6b620fdf24a1a0bc4f1c7134dfafb @@ -233,7 +232,7 @@ https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py314h97ea11e_4. https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_1.conda#7a8b949fb98c73b802b5e66a67dac140 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_0.conda#367b1cd271b8aa39170f68826a94d65b https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py314hc02f841_2.conda#55ac6d85f5dd8ec5e9919e7762fcb31a https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py314ha0b5721_2.conda#fe3a5c8be07a7b82058bdeb39d33d93b From 70ad614261e8fddf263f63c20f10c45cbb0a72a0 Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Tue, 17 Feb 2026 11:50:35 +0000 Subject: [PATCH 22/77] Fixed all ruff PT failures (#6939) * ruff auto fixes * PT018 * PT013 * PT006 * PT007 * removed PT exclusion * ignored deprecated files * PT031 and PT011 * final misses * doctest thingy --- .ruff.toml | 2 +- docs/gallery_tests/conftest.py | 2 +- lib/iris/common/resolve.py | 3 +- lib/iris/tests/_shared_utils.py | 5 +- .../derived_bounds/test_bounds_files.py | 6 +- .../integration/netcdf/test_coord_systems.py | 1 - .../netcdf/test_load_managed_attributes.py | 2 +- .../netcdf/test_save_managed_attributes.py | 2 +- lib/iris/tests/test_analysis.py | 15 ++-- lib/iris/tests/test_analysis_calculus.py | 9 +- lib/iris/tests/test_pp_module.py | 3 +- 
.../_shapefiles/test_create_shape_mask.py | 4 +- .../_shapefiles/test_get_weighted_mask.py | 2 +- .../_shapefiles/test_is_geometry_valid.py | 36 ++++---- .../_shapefiles/test_transform_geometry.py | 6 +- .../test_AtmosphereSigmaFactory.py | 47 +++++++--- .../unit/aux_factory/test_AuxCoordFactory.py | 2 +- .../aux_factory/test_HybridPressureFactory.py | 37 +++++--- .../unit/aux_factory/test_OceanSFactory.py | 63 +++++++++----- .../unit/aux_factory/test_OceanSg1Factory.py | 61 ++++++++----- .../unit/aux_factory/test_OceanSg2Factory.py | 61 ++++++++----- .../aux_factory/test_OceanSigmaFactory.py | 33 ++++--- .../aux_factory/test_OceanSigmaZFactory.py | 86 ++++++++++++------- .../tests/unit/common/lenient/test_Lenient.py | 2 +- .../unit/common/lenient/test__Lenient.py | 2 +- .../metadata/test_microsecond_future.py | 2 +- .../unit/concatenate/test__CubeSignature.py | 2 +- .../unit/concatenate/test_concatenate.py | 10 +-- .../tests/unit/concatenate/test_hashing.py | 4 +- .../coord_systems/test_ObliqueMercator.py | 6 +- lib/iris/tests/unit/cube/test_Cube.py | 33 +++---- .../tests/unit/cube/test_CubeAttrsDict.py | 2 +- lib/iris/tests/unit/cube/test_CubeList.py | 3 +- .../unit/fileformats/cf/test_CFReader.py | 30 ++++--- .../unit/fileformats/dot/test__dot_path.py | 3 +- .../test_ArrayStructure.py | 3 +- .../lazy_data/test_is_lazy_masked_data.py | 2 +- lib/iris/tests/unit/util/test_array_equal.py | 2 +- .../test_demote_dim_coord_to_aux_coord.py | 6 -- .../unit/util/test_file_is_newer_than.py | 9 +- .../tests/unit/util/test_make_gridcube.py | 2 +- .../unit/util/test_mask_cube_from_shape.py | 2 +- .../util/test_mask_cube_from_shapefile.py | 2 +- lib/iris/tests/unit/util/test_new_axis.py | 2 +- .../test_promote_aux_coord_to_dim_coord.py | 27 ++++-- .../tests/unit/util/test_rolling_window.py | 9 +- pyproject.toml | 5 ++ 47 files changed, 402 insertions(+), 256 deletions(-) diff --git a/.ruff.toml b/.ruff.toml index 5d78ecdb57..37f2cb7498 100644 --- a/.ruff.toml +++ b/.ruff.toml 
@@ -96,7 +96,7 @@ lint.ignore = [ # flake8-pytest-style (PT) # https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt - "PT", + "PT019", # flake8-raise (RSE) # https://docs.astral.sh/ruff/rules/#flake8-raise-rse diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py index 564a2892a2..2f35659b2e 100644 --- a/docs/gallery_tests/conftest.py +++ b/docs/gallery_tests/conftest.py @@ -47,7 +47,7 @@ def no_show(): if example_dir.is_dir(): monkeypatch.syspath_prepend(example_dir) - yield + return @pytest.fixture diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 49df0e66e6..70ec61b957 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -747,7 +747,8 @@ def _create_prepared_item( # Build a prepared-item to make a MeshCoord. # This case does *NOT* use points + bounds, so alternatives to the # coord content should not have been specified by the caller. - assert points is None and bounds is None + assert points is None + assert bounds is None mesh = coord.mesh location = coord.location axis = coord.axis diff --git a/lib/iris/tests/_shared_utils.py b/lib/iris/tests/_shared_utils.py index 7f03ae95f9..3e0ed6ccc4 100644 --- a/lib/iris/tests/_shared_utils.py +++ b/lib/iris/tests/_shared_utils.py @@ -537,9 +537,8 @@ def assert_files_equal(test_filename, reference_filename): reference_path = get_result_path(reference_filename) if _check_reference_file(reference_path): fmt = "test file {!r} does not match reference {!r}." 
- assert filecmp.cmp(test_filename, reference_path) and fmt.format( - test_filename, reference_path - ) + assert filecmp.cmp(test_filename, reference_path) + assert fmt.format(test_filename, reference_path) else: _ensure_folder(reference_path) shutil.copy(test_filename, reference_path) diff --git a/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py b/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py index 4985d819a3..4d4b4e5c79 100644 --- a/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py +++ b/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py @@ -38,7 +38,7 @@ def derived_bounds(request): yield db -@pytest.fixture() +@pytest.fixture def cf_primary_sample_path(tmp_path_factory): cdl = """ netcdf a_new_file { @@ -190,9 +190,9 @@ def test_load_primary_cf_style(derived_bounds, cf_primary_sample_path): assert main_cube.coord_dims(co_P0) == () -@pytest.fixture() +@pytest.fixture def tmp_ncdir(tmp_path_factory): - yield tmp_path_factory.mktemp("_temp_netcdf_dir") + return tmp_path_factory.mktemp("_temp_netcdf_dir") def test_save_primary_cf_style( diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index db3e21d22f..6c742ae848 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -8,7 +8,6 @@ import numpy as np import pytest -from pytest import MonkeyPatch import iris from iris.coords import DimCoord diff --git a/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py b/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py index b49c092a38..e840606136 100644 --- a/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py +++ b/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py @@ -32,7 +32,7 @@ def tmp_filepath(self, tmp_path_factory): tmp_dir = tmp_path_factory.mktemp("tmp_nc") # We can reuse the 
same path all over, as it is recreated for each test. self.tmp_ncpath = tmp_dir / "tmp.nc" - yield + return def _check_load_inner(self, iris_name, nc_name, value): # quickly create a valid netcdf file with a simple cube in it. diff --git a/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py b/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py index 0c6a5b9151..cf3fa2e4a9 100644 --- a/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py +++ b/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py @@ -24,7 +24,7 @@ def tmp_filepath(self, tmp_path_factory): tmp_dir = tmp_path_factory.mktemp("tmp_nc") # We can reuse the same path all over, as it is recreated for each test. self.tmp_ncpath = tmp_dir / "tmp.nc" - yield + return def _check_save_inner(self, iris_name, nc_name, value): cube = Cube([1], var_name="x", attributes={iris_name: value}) diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index bd1aeee40f..061d94e7d5 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -1019,7 +1019,8 @@ def test_max_run_2d(self): self.request, bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False ) - with pytest.raises(ValueError): + msg = "Not possible to calculate runs over more than one dimension" + with pytest.raises(ValueError, match=msg): _ = cube.collapsed( ("foo", "bar"), iris.analysis.MAX_RUN, @@ -1363,12 +1364,14 @@ def test_area_weights_non_contiguous(self): def test_area_weights_no_lon_bounds(self): self.cube.coord("grid_longitude").bounds = None - with pytest.raises(ValueError): + msg = "Coordinates 'grid_latitude' and 'grid_longitude' must have bounds to determine the area weights." 
+ with pytest.raises(ValueError, match=msg): iris.analysis.cartography.area_weights(self.cube) def test_area_weights_no_lat_bounds(self): self.cube.coord("grid_latitude").bounds = None - with pytest.raises(ValueError): + msg = "Coordinates 'grid_latitude' and 'grid_longitude' must have bounds to determine the area weights" + with pytest.raises(ValueError, match=msg): iris.analysis.cartography.area_weights(self.cube) @@ -1510,12 +1513,14 @@ def test_cosine_latitude_weights_2d_latitude_last(self): def test_cosine_latitude_weights_no_latitude(self): # no coordinate identified as latitude self.cube_dim_lat.remove_coord("grid_latitude") - with pytest.raises(ValueError): + msg = "Cannot get latitude coordinate from cube 'precipitation_flux'." + with pytest.raises(ValueError, match=msg): _ = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) def test_cosine_latitude_weights_multiple_latitude(self): # two coordinates identified as latitude - with pytest.raises(ValueError): + msg = "Multiple latitude coords are currently disallowed." + with pytest.raises(ValueError, match=msg): _ = iris.analysis.cartography.cosine_latitude_weights(self.cube) diff --git a/lib/iris/tests/test_analysis_calculus.py b/lib/iris/tests/test_analysis_calculus.py index 8c33cd8fc2..46ad9779f3 100644 --- a/lib/iris/tests/test_analysis_calculus.py +++ b/lib/iris/tests/test_analysis_calculus.py @@ -24,7 +24,8 @@ def test_invalid(self): _ = iris.analysis.calculus.cube_delta(cube, "surface_altitude") with pytest.raises(iris.exceptions.CoordinateMultiDimError): _ = iris.analysis.calculus.cube_delta(cube, "altitude") - with pytest.raises(ValueError): + msg = "Cannot calculate delta over 'forecast_period' as it has length of 1." 
+ with pytest.raises(ValueError, match=msg): _ = iris.analysis.calculus.cube_delta(cube, "forecast_period") def test_delta_coord_lookup(self): @@ -175,7 +176,8 @@ def test_singular_delta(self): # Test single valued coordinate mid-points when not circular lon.circular = False - with pytest.raises(ValueError): + msg = "Cannot take interval differences of a single valued coordinate." + with pytest.raises(ValueError, match=msg): iris.analysis.calculus._construct_delta_coord(lon) def test_singular_midpoint(self): @@ -196,7 +198,8 @@ def test_singular_midpoint(self): # Test single valued coordinate mid-points when not circular lon.circular = False - with pytest.raises(ValueError): + msg = "Cannot take the midpoints of a single valued coordinate." + with pytest.raises(ValueError, match=msg): iris.analysis.calculus._construct_midpoint_coord(lon) diff --git a/lib/iris/tests/test_pp_module.py b/lib/iris/tests/test_pp_module.py index 9b8babd862..2100e0b132 100644 --- a/lib/iris/tests/test_pp_module.py +++ b/lib/iris/tests/test_pp_module.py @@ -124,7 +124,8 @@ def test_set_stash(self): assert self.pp.lbuser[3] == self.pp.stash.lbuser3() assert self.pp.lbuser[6] == self.pp.stash.lbuser6() - with pytest.raises(ValueError): + msg = r"Cannot set stash to \(4, 15, 5\)" + with pytest.raises(ValueError, match=msg): self.pp.stash = (4, 15, 5) def test_lbproc_bad_access(self): diff --git a/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py b/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py index b4d943bbc5..32ca99d48d 100644 --- a/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py +++ b/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py @@ -224,7 +224,7 @@ def test_invalid_cube_crs(self, square_polygon, wgs84_crs): ) @pytest.mark.parametrize( - "minimum_weight, error_type", + ("minimum_weight", "error_type"), [(-1, ValueError), (2, ValueError)], ) def test_invalid_minimum_weight( @@ -242,7 +242,7 @@ def test_invalid_minimum_weight( ) 
@pytest.mark.parametrize( - "minimum_weight, error_type", + ("minimum_weight", "error_type"), [(-1, ValueError), (2, ValueError)], ) def test_invalid_minimum_weight_with_all_touched( diff --git a/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py b/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py index 406a9dbc59..6863fb1847 100644 --- a/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py +++ b/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py @@ -43,7 +43,7 @@ def mock_cube(): @pytest.mark.parametrize( - "minimum_weight, expected_mask", + ("minimum_weight", "expected_mask"), [ ( 0.0, diff --git a/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py b/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py index 46ff058af2..8605d72d8b 100644 --- a/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py +++ b/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py @@ -22,17 +22,17 @@ # Shareable shape fixtures used in: # - util/test_mask_cube_from_shapefile.py # - _shapefiles/test_is_geometry_valid.py -@pytest.fixture() +@pytest.fixture def wgs84_crs(): return CRS.from_epsg(4326) -@pytest.fixture() +@pytest.fixture def osgb_crs(): return CRS.from_epsg(27700) -@pytest.fixture() +@pytest.fixture def basic_polygon_geometry(): # Define the coordinates of a basic rectangle min_lon = -90 @@ -44,7 +44,7 @@ def basic_polygon_geometry(): return box(min_lon, min_lat, max_lon, max_lat) -@pytest.fixture() +@pytest.fixture def basic_wide_polygon_geometry(): # Define the coordinates of a basic rectangle min_lon = -170 @@ -56,7 +56,7 @@ def basic_wide_polygon_geometry(): return box(min_lon, min_lat, max_lon, max_lat) -@pytest.fixture() +@pytest.fixture def basic_multipolygon_geometry(): # Define the coordinates of a basic rectangle min_lon = 0 @@ -73,25 +73,25 @@ def basic_multipolygon_geometry(): ) -@pytest.fixture() +@pytest.fixture def basic_point_geometry(): # Define the coordinates of a basic point (lon, lat) return Point((-3.476204, 
50.727059)) -@pytest.fixture() +@pytest.fixture def basic_line_geometry(): # Define the coordinates of a basic line return LineString([(0, 0), (10, 10)]) -@pytest.fixture() +@pytest.fixture def basic_multiline_geometry(): # Define the coordinates of a basic line return MultiLineString([[(0, 0), (10, 10)], [(20, 20), (30, 30)]]) -@pytest.fixture() +@pytest.fixture def basic_point_collection(): # Define the coordinates of a basic collection of points # as (lon, lat) tuples, assuming a WGS84 projection. @@ -109,37 +109,37 @@ def basic_point_collection(): return points -@pytest.fixture() +@pytest.fixture def canada_geometry(): # Define the coordinates of a rectangle that covers Canada return box(-143.5, 42.6, -37.8, 84.0) -@pytest.fixture() +@pytest.fixture def bering_sea_geometry(): # Define the coordinates of a rectangle that covers the Bering Sea return box(148.42, 49.1, -138.74, 73.12) -@pytest.fixture() +@pytest.fixture def uk_geometry(): # Define the coordinates of a rectangle that covers the UK return box(-10, 49, 2, 61) -@pytest.fixture() +@pytest.fixture def invalid_geometry_poles(): # Define the coordinates of a rectangle that crosses the poles return box(-10, -90, 10, 90) -@pytest.fixture() +@pytest.fixture def invalid_geometry_bounds(): # Define the coordinates of a rectangle that is outside the bounds of the coordinate system return box(-200, -100, 200, 100) -@pytest.fixture() +@pytest.fixture def not_a_valid_geometry(): # Return an invalid geometry type # This is not a valid geometry, e.g., a string @@ -168,7 +168,7 @@ def test_valid_geometry(test_input, request, wgs84_crs): # N.B. 
error message comparison is done with regex so # any parentheses in the error message must be escaped (\) @pytest.mark.parametrize( - "test_input, errortype, err_message", + ("test_input", "errortype", "err_message"), [ ( "invalid_geometry_poles", @@ -195,10 +195,10 @@ def test_invalid_geometry(test_input, errortype, err_message, request, wgs84_crs @pytest.mark.parametrize( "test_input", - ( + [ "basic_wide_polygon_geometry", "bering_sea_geometry", - ), + ], ) def test_warning_geometry(test_input, request, wgs84_crs): # Assert that all invalid geometries raise the expected error diff --git a/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py b/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py index ca5debb9a9..6aff0931c5 100644 --- a/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py +++ b/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py @@ -23,7 +23,7 @@ def wgs84_crs(): @pytest.mark.parametrize( - "input_geometry, wgs84_crs, input_cube_crs, output_expected_geometry", + ("input_geometry", "wgs84_crs", "input_cube_crs", "output_expected_geometry"), [ ( # Basic geometry in WGS84, no transformation needed shapely.geometry.box(-10, 50, 2, 60), @@ -90,7 +90,7 @@ def test_transform_geometry( # Assert that an invalid inputs raise the expected errors @pytest.mark.parametrize( - "input_geometry, input_geometry_crs, input_cube_crs, expected_error", + ("input_geometry", "input_geometry_crs", "input_cube_crs", "expected_error"), [ ( # Basic geometry in WGS84, no transformation needed "bad_input_geometry", @@ -120,7 +120,7 @@ def test_transform_geometry_invalid_input( @pytest.mark.parametrize( - "input_geometry, wgs84_crs, input_cube_crs", + ("input_geometry", "wgs84_crs", "input_cube_crs"), [ ( # Basic geometry in WGS84, transformed to OSGB shapely.geometry.box(np.inf, np.inf, np.inf, np.inf), diff --git a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py index 
7d78217388..12c8e64ef7 100644 --- a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py @@ -7,6 +7,8 @@ """ +import re + from cf_units import Unit import numpy as np import pytest @@ -28,11 +30,13 @@ def _setup(self, mocker): ) def test_insufficient_coordinates_no_args(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory() def test_insufficient_coordinates_no_ptop(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory( pressure_at_top=None, sigma=self.sigma, @@ -40,7 +44,8 @@ def test_insufficient_coordinates_no_ptop(self): ) def test_insufficient_coordinates_no_sigma(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory( pressure_at_top=self.pressure_at_top, sigma=None, @@ -48,7 +53,8 @@ def test_insufficient_coordinates_no_sigma(self): ) def test_insufficient_coordinates_no_ps(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory( pressure_at_top=self.pressure_at_top, sigma=self.sigma, @@ -62,8 +68,11 @@ def test_ptop_shapes(self): def test_ptop_invalid_shapes(self): for shape in [(2,), (1, 1)]: + msg = re.escape( + f"Expected scalar 'pressure_at_top' coordinate, got shape {shape}" + ) self.pressure_at_top.shape = shape - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def 
test_sigma_bounds(self): @@ -73,8 +82,9 @@ def test_sigma_bounds(self): def test_sigma_invalid_bounds(self): for n_bounds in [-1, 1, 3]: + msg = f"Invalid 'sigma' coordinate: must have either 0 or 2 bounds, got {n_bounds}" self.sigma.nbounds = n_bounds - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def test_sigma_units(self): @@ -85,7 +95,8 @@ def test_sigma_units(self): def test_sigma_invalid_units(self): for units in ["Pa", "m"]: self.sigma.units = Unit(units) - with pytest.raises(ValueError): + msg = f"Invalid units: 'sigma' must be dimensionless, got '{units}'" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def test_ptop_ps_units(self): @@ -96,9 +107,10 @@ def test_ptop_ps_units(self): def test_ptop_ps_invalid_units(self): for units in [("Pa", "1"), ("1", "Pa"), ("bar", "Pa"), ("Pa", "hPa")]: + msg = f"Incompatible units: 'pressure_at_top' and 'surface_air_pressure' must have the same units, got '{units[0]}' and '{units[1]}'" self.pressure_at_top.units = Unit(units[0]) self.surface_air_pressure.units = Unit(units[1]) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def test_ptop_units(self): @@ -109,14 +121,15 @@ def test_ptop_units(self): def test_ptop_invalid_units(self): for units in ["1", "m", "kg", None]: + msg = "Invalid units: 'pressure_at_top' and 'surface_air_pressure' must have units of pressure" self.pressure_at_top.units = Unit(units) self.surface_air_pressure.units = Unit(units) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) class Test_dependencies: - @pytest.fixture() + @pytest.fixture def sample_kwargs(self, mocker): pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=()) sigma = mocker.Mock(units=Unit("1"), nbounds=0) @@ -247,7 +260,10 @@ def test_pressure_at_top(self, mocker): def 
test_pressure_at_top_wrong_shape(self, mocker): new_pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=(2,)) - with pytest.raises(ValueError): + msg = re.escape( + "Failed to update dependencies. Expected scalar 'pressure_at_top' coordinate, got shape (2,)" + ) + with pytest.raises(ValueError, match=msg): self.factory.update(self.pressure_at_top, new_pressure_at_top) def test_sigma(self, mocker): @@ -257,12 +273,14 @@ def test_sigma(self, mocker): def test_sigma_too_many_bounds(self, mocker): new_sigma = mocker.Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid 'sigma' coordinate: must have either 0 or 2 bounds, got 4" + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self, mocker): new_sigma = mocker.Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: 'sigma' must be dimensionless, got 'Pa'" + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_surface_air_pressure(self, mocker): @@ -272,5 +290,6 @@ def test_surface_air_pressure(self, mocker): def test_surface_air_pressure_incompatible_units(self, mocker): new_surface_air_pressure = mocker.Mock(units=Unit("mbar"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. 
Incompatible units: 'pressure_at_top' and 'surface_air_pressure' must have the same units, got 'Pa' and 'mbar'" + with pytest.raises(ValueError, match=msg): self.factory.update(self.surface_air_pressure, new_surface_air_pressure) diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index a9d1c6548a..faf57d0e65 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -140,7 +140,7 @@ def test_lazy_complex(self): @skip_data class Test_lazy_aux_coords: - @pytest.fixture() + @pytest.fixture def sample_cube(self, mocker): path = get_data_path(["NetCDF", "testing", "small_theta_colpex.nc"]) # While loading, "turn off" loading small variables as real data. diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index e3caf0c114..5eb0f8f0c9 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -34,13 +34,14 @@ def _setup(self): create_default_sample_parts(self) def test_insufficient_coords(self): - with pytest.raises(ValueError): + msg = "Unable to construct hybrid pressure coordinate factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): HybridPressureFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=None, sigma=self.sigma, surface_air_pressure=None ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=None, sigma=None, @@ -49,7 +50,8 @@ def test_insufficient_coords(self): def test_incompatible_delta_units(self): self.delta.units = cf_units.Unit("m") - with pytest.raises(ValueError): + msg = "Incompatible units: delta and surface_air_pressure must have the same units." 
+ with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -58,7 +60,8 @@ def test_incompatible_delta_units(self): def test_incompatible_sigma_units(self): self.sigma.units = cf_units.Unit("Pa") - with pytest.raises(ValueError): + msg = "Invalid units: sigma must be dimensionless." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -67,7 +70,8 @@ def test_incompatible_sigma_units(self): def test_incompatible_surface_air_pressure_units(self): self.surface_air_pressure.units = cf_units.Unit("unknown") - with pytest.raises(ValueError): + msg = "Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -77,7 +81,8 @@ def test_incompatible_surface_air_pressure_units(self): def test_different_pressure_units(self): self.delta.units = cf_units.Unit("hPa") self.surface_air_pressure.units = cf_units.Unit("Pa") - with pytest.raises(ValueError): + msg = "Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -86,7 +91,8 @@ def test_different_pressure_units(self): def test_too_many_delta_bounds(self): self.delta.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid delta coordinate: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -95,7 +101,8 @@ def test_too_many_delta_bounds(self): def test_too_many_sigma_bounds(self): self.sigma.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate: must have either 0 or 2 bounds." 
+ with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -257,12 +264,14 @@ def test_good_delta(self): def test_bad_delta(self): new_delta_coord = Mock(units=cf_units.Unit("1"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.delta, new_delta_coord) def test_alternative_bad_delta(self): new_delta_coord = Mock(units=cf_units.Unit("Pa"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid delta coordinate: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.delta, new_delta_coord) def test_good_surface_air_pressure(self): @@ -272,7 +281,8 @@ def test_good_surface_air_pressure(self): def test_bad_surface_air_pressure(self): new_surface_p_coord = Mock(units=cf_units.Unit("km"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.surface_air_pressure, new_surface_p_coord) def test_non_dependency(self): @@ -292,5 +302,6 @@ def test_none_sigma(self): def test_insufficient_coords(self): self.factory.update(self.delta, None) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Unable to construct hybrid pressure coordinate factory due to insufficient source coordinates." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.surface_air_pressure, None) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index e702b8d2e2..a1e92c03c5 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -36,9 +36,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct Ocean s-coordinate factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): OceanSFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=None, eta=self.eta, @@ -47,7 +48,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=None, @@ -56,7 +57,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -65,7 +66,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -74,7 +75,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -83,7 +84,7 @@ def test_insufficient_coordinates(self): b=None, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -95,42 +96,50 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid s coordinate .*: 
must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_a_non_scalar(self): self.a.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar a coordinate .*: got shape \(2,\)\." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_b_non_scalar(self): self.b.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar b coordinate .*: got shape \(2,\)\." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar depth_c coordinate .*: got shape \(2,\)\." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units" + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_promote_s_units_unknown_to_dimensionless(self): @@ -254,12 +263,14 @@ def test_s(self): def test_s_too_many_bounds(self): new_s = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid s coordinate .*: must have either 0 or 2 bounds" + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_s_incompatible_units(self): new_s = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_eta(self): @@ -269,7 +280,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -279,7 +291,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_a(self): @@ -289,7 +302,8 @@ def test_a(self): def test_a_non_scalar(self): new_a = Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar a coordinate .*: got shape \(10,\)." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.a, new_a) def test_b(self): @@ -299,7 +313,8 @@ def test_b(self): def test_b_non_scalar(self): new_b = Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar b coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.b, new_b) def test_depth_c(self): @@ -309,10 +324,12 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar depth_c coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 82e7cd2a7b..6e45c49ec2 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -34,9 +34,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct Ocean s-coordinate, generic form 1 factory due to insufficient source coordinates." 
+ with pytest.raises(ValueError, match=msg): OceanSg1Factory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=None, c=self.c, @@ -44,7 +45,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=None, @@ -52,7 +53,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=self.c, @@ -60,7 +61,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=self.c, @@ -68,7 +69,7 @@ def test_insufficient_coordinates(self): depth=None, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=self.c, @@ -79,42 +80,50 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid s coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_c_too_many_bounds(self): self.c.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = "Expected scalar depth coordinate .*: got shape .*." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: s coordinate .*must be dimensionless." 
+ with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_c_incompatible_units(self): self.c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_promote_c_and_s_units_unknown_to_dimensionless(self): @@ -235,22 +244,26 @@ def test_c(self): def test_s_too_many_bounds(self): new_s = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid s coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_too_many_bounds(self): new_c = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_s_incompatible_units(self): new_s = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. 
Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_incompatible_units(self): new_c = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_eta(self): @@ -260,7 +273,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -270,7 +284,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_depth_c(self): @@ -280,10 +295,12 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Expected scalar depth coordinate .*: got shape .*" + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index ecb8593e99..bd80de9dfe 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -34,9 +34,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct Ocean s-coordinate, generic form 2 factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): OceanSg2Factory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=None, c=self.c, @@ -44,7 +45,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=None, @@ -52,7 +53,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=self.c, @@ -60,7 +61,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=self.c, @@ -68,7 +69,7 @@ def test_insufficient_coordinates(self): depth=None, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=self.c, @@ -79,42 +80,50 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid s coordinate .*: must have either 0 or 2 bounds." 
+ with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_c_too_many_bounds(self): self.c.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar depth_c coordinate .*: got shape \(2,\)." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_c_incompatible_units(self): self.c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." 
+ with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_promote_c_and_s_units_unknown_to_dimensionless(self): @@ -235,22 +244,26 @@ def test_c(self): def test_s_too_many_bounds(self): new_s = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid s coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_too_many_bounds(self): new_c = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_s_incompatible_units(self): new_s = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_incompatible_units(self): new_c = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_eta(self): @@ -260,7 +273,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -270,7 +284,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. 
Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_depth_c(self): @@ -280,10 +295,12 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar depth_c coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index 910e897590..f203718c8c 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -26,33 +26,38 @@ def _setup(self): self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct ocean sigma coordinate factory due to insufficient source coordinates." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(sigma=None, eta=self.eta, depth=self.depth) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(sigma=self.sigma, eta=None, depth=self.depth) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(sigma=self.sigma, eta=self.eta, depth=None) def test_sigma_too_many_bounds(self): self.sigma.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_sigma_incompatible_units(self): self.sigma.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_promote_sigma_units_unknown_to_dimensionless(self): @@ -138,12 +143,14 @@ def test_sigma(self): def test_sigma_too_many_bounds(self): new_sigma = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid sigma coordinate .*: must have either 0 or 2 bounds." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self): new_sigma = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_eta(self): @@ -153,7 +160,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -163,5 +171,6 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index e6ec074ba4..3b2f7178ef 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -36,9 +36,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to determine units: no zlev coordinate available." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaZFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=self.eta, @@ -47,7 +48,8 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=None, ) - with pytest.raises(ValueError): + msg = "Unable to construct ocean sigma over z coordinate factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=None, eta=None, @@ -56,7 +58,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=None, @@ -65,7 +67,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=None, @@ -74,7 +76,8 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with pytest.raises(ValueError): + msg = "Missing nsigma coordinate." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=self.eta, @@ -86,52 +89,62 @@ def test_insufficient_coordinates(self): def test_sigma_too_many_bounds(self): self.sigma.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_zlev_too_many_bounds(self): self.zlev.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid zlev coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_sigma_zlev_same_boundedness(self): self.zlev.nbounds = 2 - with pytest.raises(ValueError): + msg = "The sigma coordinate .* and zlev coordinate .* must be equally bounded." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar depth_c coordinate .*: got shape \(2,\)." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_nsigma_non_scalar(self): self.nsigma.shape = (4,) - with pytest.raises(ValueError): + msg = r"Expected scalar nsigma coordinate .*: got shape \(4,\)." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_zlev_incompatible_units(self): self.zlev.units = Unit("Pa") - with pytest.raises(ValueError): + msg = "Invalid units: zlev coordinate .* must have units of distance." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_sigma_incompatible_units(self): self.sigma.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth coordinate .* and zlev coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_promote_sigma_units_unknown_to_dimensionless(self): @@ -356,17 +369,20 @@ def test_sigma(self): def test_sigma_too_many_bounds(self): new_sigma = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_zlev_same_boundedness(self): new_sigma = Mock(units=Unit("1"), nbounds=2) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. The sigma coordinate .* and zlev coordinate .* must be equally bounded." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self): new_sigma = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_eta(self): @@ -376,7 +392,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -386,7 +403,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth coordinate .* and zlev coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_depth_c(self): @@ -396,12 +414,14 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar depth_c coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_nsigma(self): @@ -410,12 +430,14 @@ def test_nsigma(self): assert self.factory.nsigma is new_nsigma def test_nsigma_missing(self): - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Missing nsigma coordinate." + with pytest.raises(ValueError, match=msg): self.factory.update(self.nsigma, None) def test_nsigma_non_scalar(self): new_nsigma = Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar nsigma coordinate .* got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.nsigma, new_nsigma) def test_zlev(self): @@ -424,20 +446,24 @@ def test_zlev(self): assert self.factory.zlev is new_zlev def test_zlev_missing(self): - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Unable to determine units: no zlev coordinate available." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, None) def test_zlev_too_many_bounds(self): new_zlev = Mock(units=Unit("m"), nbounds=4) - with pytest.raises(ValueError): + msg = "Invalid zlev coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, new_zlev) def test_zlev_same_boundedness(self): new_zlev = Mock(units=Unit("m"), nbounds=2) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. The sigma coordinate .*and zlev coordinate .* must be equally bounded." + with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, new_zlev) def test_zlev_incompatible_units(self): - new_zlev = new_zlev = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + new_zlev = Mock(units=Unit("Pa"), nbounds=0) + msg = "Failed to update dependencies. Invalid units: zlev coordinate .* must have units of distance." + with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, new_zlev) diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py index cbc1c8fe1f..7f7ecde5fd 100644 --- a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -9,7 +9,7 @@ from iris.common.lenient import _LENIENT, _LENIENT_PROTECTED, Lenient -@pytest.fixture() +@pytest.fixture def lenient(): # setup state = {key: _LENIENT.__dict__[key] for key in _LENIENT_PROTECTED} diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index bd19c3922e..f932555dce 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -16,7 +16,7 @@ ) -@pytest.fixture() +@pytest.fixture def lenient(): return _Lenient() diff --git a/lib/iris/tests/unit/common/metadata/test_microsecond_future.py 
b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py index b86ebf06d4..994456aea0 100644 --- a/lib/iris/tests/unit/common/metadata/test_microsecond_future.py +++ b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py @@ -61,7 +61,7 @@ def _op(): @pytest.mark.parametrize( "indexing", - (np.s_[0], np.s_[:], np.s_[:, np.newaxis]), + [np.s_[0], np.s_[:], np.s_[:, np.newaxis]], ids=("single", "array", "array_2d"), ) def test_num2date(time_coord, future_date_microseconds, indexing): diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py b/lib/iris/tests/unit/concatenate/test__CubeSignature.py index c6736f73bd..88c5da6f12 100644 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CubeSignature.py @@ -28,7 +28,7 @@ class SampleData: class Test__coordinate_dim_metadata_equality: - @pytest.fixture() + @pytest.fixture def sample_data(self) -> SampleData: # Return a standard set of test items, wrapped in a data object diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index 448ffb5e7b..c08aba238a 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -19,7 +19,7 @@ class TestEpoch: - @pytest.fixture() + @pytest.fixture def simple_1d_time_cubes(self): reftimes = [ "hours since 1970-01-01 00:00:00", @@ -51,12 +51,12 @@ def test_concat_1d_with_same_time_units(self, simple_1d_time_cubes): class _MessagesMixin: - @pytest.fixture() + @pytest.fixture def placeholder(self): # Shim to allow sample_cubes to have identical signature in both parent and subclasses return [] - @pytest.fixture() + @pytest.fixture def sample_cubes(self, placeholder): # Construct and return a pair of identical cubes data = np.arange(24, dtype=np.float32).reshape(2, 3, 4) @@ -238,7 +238,7 @@ def test_dim_coords_overlap_message(self, sample_cubes): class 
TestNonMetadataMessages(_MessagesMixin): parent_cubes = _MessagesMixin.sample_cubes - @pytest.fixture() + @pytest.fixture def sample_cubes(self, parent_cubes): coord = parent_cubes[1].coord("time") parent_cubes[1].replace_coord(coord.copy(points=coord.points + 2)) @@ -390,7 +390,7 @@ def test_desc_bounds_all_singleton(self): class TestConcatenate__dask: - @pytest.fixture() + @pytest.fixture def sample_lazy_cubes(self): # Make a pair of concatenatable cubes, with dim points [1, 2] and [3, 4, 5] def build_lazy_cube(points): diff --git a/lib/iris/tests/unit/concatenate/test_hashing.py b/lib/iris/tests/unit/concatenate/test_hashing.py index 88064e4e46..3fafc408e6 100644 --- a/lib/iris/tests/unit/concatenate/test_hashing.py +++ b/lib/iris/tests/unit/concatenate/test_hashing.py @@ -14,7 +14,7 @@ @pytest.mark.parametrize( - "a,b,eq", + ("a", "b", "eq"), [ (np.arange(2), da.arange(2), True), (np.arange(2), np.arange(2).reshape((1, 2)), False), @@ -78,7 +78,7 @@ def test_compute_hashes(a, b, eq): @pytest.mark.parametrize( - "a,b", + ("a", "b"), [ (a, b) for (a, b, withnans, eq) in TEST_CASES diff --git a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py index 95069ba378..a7855ce21a 100644 --- a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py +++ b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py @@ -125,11 +125,11 @@ def make_variant_inputs(self, request) -> None: def make_instance(self) -> ObliqueMercator: return ObliqueMercator(**self.class_kwargs) - @pytest.fixture() + @pytest.fixture def instance(self): return self.make_instance() - @pytest.fixture() + @pytest.fixture def mock_ccrs(self, mocker): return mocker.patch("cartopy.crs.ObliqueMercator", autospec=True) @@ -144,7 +144,7 @@ def test_cartopy_projection(self, instance, mock_ccrs): instance.as_cartopy_projection() mock_ccrs.assert_called_with(**self.cartopy_kwargs_expected) - @pytest.fixture() + @pytest.fixture def 
label_class(self, instance): """Make the tested coordinate system available, even for subclasses.""" from iris import coord_systems diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index fbcc8f7f0a..d91c7e81c0 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -1466,17 +1466,20 @@ def test_global_wrapped(self, dataless): class Test_intersection__Invalid: def test_reversed_min_max(self, dataless): cube = create_cube(0, 360, dataless=dataless) - with pytest.raises(ValueError): + msg = "minimum greater than maximum" + with pytest.raises(ValueError, match=msg): cube.intersection(longitude=(30, 10)) def test_dest_too_large(self, dataless): cube = create_cube(0, 360, dataless=dataless) - with pytest.raises(ValueError): + msg = "requested range greater than coordinate's unit's modulus" + with pytest.raises(ValueError, match=msg): cube.intersection(longitude=(30, 500)) def test_src_too_large(self, dataless): cube = create_cube(0, 400, dataless=dataless) - with pytest.raises(ValueError): + msg = "coordinate's range greater than coordinate's unit's modulus" + with pytest.raises(ValueError, match=msg): cube.intersection(longitude=(10, 30)) def test_missing_coord(self, dataless): @@ -2886,7 +2889,7 @@ def test_lazy_data_masked__mask_set(self): class TestSubset: @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_scalar_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="apricot", units="1") @@ -2895,8 +2898,8 @@ def test_scalar_coordinate(self, data, shape): assert cube == result @pytest.mark.parametrize( - ["data", "shape"], - [[np.zeros(4), None], [None, (4,)]], + ("data", "shape"), + [(np.zeros(4), None), (None, (4,))], ids=["with_data", "dataless"], ) def test_dimensional_coordinate(self, data, shape): @@ -2909,7 +2912,7 @@ 
def test_dimensional_coordinate(self, data, shape): assert cube == result @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_missing_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="raspberry", units="1") @@ -2918,7 +2921,7 @@ def test_missing_coordinate(self, data, shape): pytest.raises(CoordinateNotFoundError, cube.subset, bad_coord) @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_different_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="raspberry", units="1") @@ -2928,7 +2931,7 @@ def test_different_coordinate(self, data, shape): assert result is None @pytest.mark.parametrize( - ["data", "shape"], [[[0, 1], None], [None, (2,)]], ids=["with_data", "dataless"] + ("data", "shape"), [([0, 1], None), (None, (2,))], ids=["with_data", "dataless"] ) def test_different_coordinate_vector(self, data, shape): cube = Cube(data=data, shape=shape, long_name="raspberry", units="1") @@ -2938,7 +2941,7 @@ def test_different_coordinate_vector(self, data, shape): assert result is None @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_not_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="peach", units="1") @@ -3712,7 +3715,7 @@ def test_cell_method_correct_order(self): assert cube1 == cube2 -@pytest.fixture() +@pytest.fixture def simplecube(): return stock.simple_2d_w_cell_measure_ancil_var() @@ -3790,14 +3793,14 @@ class TestReprs: """ # Note: logically this could be a staticmethod, but that seems to upset Pytest - @pytest.fixture() + @pytest.fixture def patched_cubeprinter(self, 
mocker): target = "iris._representation.cube_printout.CubePrinter" instance_mock = mock.MagicMock( to_string=mock.MagicMock(return_value="") # NB this must return a string ) class_mock = mocker.patch(target, return_value=instance_mock) - yield class_mock, instance_mock + return class_mock, instance_mock @staticmethod def _check_expected_effects(simplecube, patched_cubeprinter, oneline, padding): @@ -3847,14 +3850,14 @@ class TestHtmlRepr: """ # Note: logically this could be a staticmethod, but that seems to upset Pytest - @pytest.fixture() + @pytest.fixture def patched_cubehtml(self, mocker): target = "iris.experimental.representation.CubeRepresentation" instance_mock = mock.MagicMock( repr_html=mock.MagicMock(return_value="") # NB this must return a string ) class_mock = mocker.patch(target, return_value=instance_mock) - yield class_mock, instance_mock + return class_mock, instance_mock @staticmethod def test__repr_html__effects(simplecube, patched_cubehtml): diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py index 943a3268fa..6864780dee 100644 --- a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py +++ b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py @@ -14,7 +14,7 @@ from iris.fileformats.netcdf.saver import _CF_GLOBAL_ATTRS -@pytest.fixture() +@pytest.fixture def sample_attrs() -> CubeAttrsDict: return CubeAttrsDict(locals={"a": 1, "z": "this"}, globals={"b": 2, "z": "that"}) diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 440e0950b8..26497048b0 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -256,7 +256,8 @@ def test_fail(self): CubeList([self.cube1, cube2]).merge_cube() def test_empty(self): - with pytest.raises(ValueError): + msg = "can't merge an empty CubeList" + with pytest.raises(ValueError, match=msg): CubeList([]).merge_cube() def test_single_cube(self): diff --git 
a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 7f37eb9f24..d0dd0175a2 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -301,32 +301,34 @@ def test_formula_terms_ignore(self): self.orography.dimensions = ["lat", "wibble"] with pytest.warns(match="Ignoring formula terms variable"): cf_group = CFReader("dummy").cf_group - group = cf_group.promoted - assert list(group.keys()) == ["orography"] - assert group["orography"].cf_data == self.orography + group = cf_group.promoted + assert list(group.keys()) == ["orography"] + assert group["orography"].cf_data == self.orography def test_auxiliary_ignore(self): self.x.dimensions = ["lat", "wibble"] with pytest.warns(match=r"Ignoring variable x"): cf_group = CFReader("dummy").cf_group - promoted = ["x", "orography"] - group = cf_group.promoted - assert set(group.keys()) == set(promoted) - for name in promoted: - assert group[name].cf_data == getattr(self, name) + promoted = ["x", "orography"] + group = cf_group.promoted + assert set(group.keys()) == set(promoted) + for name in promoted: + assert group[name].cf_data == getattr(self, name) def test_promoted_auxiliary_ignore(self): self.wibble = netcdf_variable("wibble", "lat wibble", np.float64) self.variables["wibble"] = self.wibble self.orography.coordinates = "wibble" + with pytest.warns(match="Ignoring variable wibble") as warns: cf_group = CFReader("dummy").cf_group.promoted - promoted = ["wibble", "orography"] - assert set(cf_group.keys()) == set(promoted) - for name in promoted: - assert cf_group[name].cf_data == getattr(self, name) - # we should have got 2 warnings - assert len(warns.list) == 2 + + promoted = ["wibble", "orography"] + assert set(cf_group.keys()) == set(promoted) + for name in promoted: + assert cf_group[name].cf_data == getattr(self, name) + # we should have got 2 warnings + assert len(warns.list) == 2 class 
Test_build_cf_groups__ugrid: diff --git a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py index 679f74c51d..c01e1516b0 100644 --- a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py +++ b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py @@ -27,7 +27,8 @@ def _setup(self, mocker): def test_valid_absolute_path(self, mocker): # Override the configuration value for System.dot_path real_path = os.path.abspath(__file__) - assert os.path.exists(real_path) and os.path.isabs(real_path) + assert os.path.exists(real_path) + assert os.path.isabs(real_path) mocker.patch("iris.config.get_option", return_value=real_path) result = _dot_path() assert result == real_path diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index d28030f619..dcc5d59a00 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -125,7 +125,8 @@ def test_not_an_array(self): assert ArrayStructure.from_array([1, 2, 3]) == ArrayStructure(1, [1, 2, 3]) def test_multi_dim_array(self): - with pytest.raises(ValueError, match="The given array must be 1D."): + msg = "The given array must be 1D." 
+ with pytest.raises(ValueError, match=msg): ArrayStructure.from_array(np.arange(12).reshape(3, 4)) def test_eq_incompatible_shapes(self): diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py index 6466ab0ea2..3106d11182 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py @@ -19,7 +19,7 @@ @pytest.mark.parametrize( - "arr, expected", zip(real_arrays + lazy_arrays, [False] * 4 + [True] * 2) + ("arr", "expected"), zip(real_arrays + lazy_arrays, [False] * 4 + [True] * 2) ) def test_is_lazy_masked_data(arr, expected): result = is_lazy_masked_data(arr) diff --git a/lib/iris/tests/unit/util/test_array_equal.py b/lib/iris/tests/unit/util/test_array_equal.py index eafe123aed..82f1473f03 100644 --- a/lib/iris/tests/unit/util/test_array_equal.py +++ b/lib/iris/tests/unit/util/test_array_equal.py @@ -180,7 +180,7 @@ @pytest.mark.parametrize("lazy", [False, True]) -@pytest.mark.parametrize("array_a,array_b,withnans,eq", TEST_CASES) +@pytest.mark.parametrize(("array_a", "array_b", "withnans", "eq"), TEST_CASES) def test_array_equal(array_a, array_b, withnans, eq, lazy): if lazy: identical = array_a is array_b diff --git a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py index 2e06a75fc7..91305dda87 100644 --- a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py +++ b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.util.demote_dim_coord_to_aux_coord`.""" -import unittest - import pytest import iris @@ -65,7 +63,3 @@ def test_trying_to_demote_a_scalar_coord(self): cube_b = cube_a.copy() demote_dim_coord_to_aux_coord(cube_b, "an_other") assert cube_a == cube_b - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index bba3f1fe37..fa63c2aaaa 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -89,21 +89,24 @@ def test_wild_fail(self): self._test(False, "example_result", ["older_sour*", "newer_sour*"]) def test_error_missing_result(self): - with pytest.raises(OSError) as error_trap: + msg = r"\[Errno 2\] No such file or directory:.*" + with pytest.raises(OSError, match=msg) as error_trap: self._test(False, "non_exist", ["older_sour*"]) error = error_trap.value assert error.strerror == "No such file or directory" assert error.filename == self._name2path("non_exist") def test_error_missing_source(self): - with pytest.raises(IOError) as error_trap: + msg = "One or more of the files specified did not exist:.*" + with pytest.raises(IOError, match=msg) as error_trap: self._test(False, "example_result", ["older_sour*", "non_exist"]) assert ( "One or more of the files specified did not exist" in error_trap.exconly() ) def test_error_missing_wild(self): - with pytest.raises(IOError) as error_trap: + msg = "One or more of the files specified did not exist:.*" + with pytest.raises(IOError, match=msg) as error_trap: self._test(False, "example_result", ["older_sour*", "unknown_*"]) assert ( "One or more of the files specified did not exist" in error_trap.exconly() diff --git a/lib/iris/tests/unit/util/test_make_gridcube.py b/lib/iris/tests/unit/util/test_make_gridcube.py index 24bd1ad72a..8a876211ec 100644 --- a/lib/iris/tests/unit/util/test_make_gridcube.py +++ 
b/lib/iris/tests/unit/util/test_make_gridcube.py @@ -137,7 +137,7 @@ def test_regular_badlims__fail(self, lims): @pytest.fixture(params=["int", "float", "i2", "i4", "i8", "f2", "f4", "f8"]) def arg_dtype(self, request): """Check all valid numeric argument types.""" - yield request.param + return request.param @staticmethod def f4_promoted_dtype(typename): diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shape.py b/lib/iris/tests/unit/util/test_mask_cube_from_shape.py index 93e7921509..ef7b658589 100644 --- a/lib/iris/tests/unit/util/test_mask_cube_from_shape.py +++ b/lib/iris/tests/unit/util/test_mask_cube_from_shape.py @@ -68,7 +68,7 @@ def test_mask_cube_from_shape_not_inplace(mock_cube, square_polygon): @pytest.mark.parametrize( - "minimum_weight, expected_output", + ("minimum_weight", "expected_output"), [ ( 0.0, diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py index a12e2b146a..845867ebae 100644 --- a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py +++ b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py @@ -57,7 +57,7 @@ def test_mask_cube_from_shapefile_not_inplace(mock_cube): @pytest.mark.parametrize( - "minimum_weight, expected_output", + ("minimum_weight", "expected_output"), [ ( 0.0, diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index 4ade2eb61c..578d1e0b8d 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -18,7 +18,7 @@ class Test: - @pytest.fixture() + @pytest.fixture def stock_cube(self): cube = stock.simple_2d_w_cell_measure_ancil_var() time = iris.coords.DimCoord([1], standard_name="time") diff --git a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py index bceffe700d..492309b60c 100644 --- a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py 
+++ b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py @@ -4,7 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.promote_aux_coord_to_dim_coord`.""" -import unittest +import re import pytest @@ -67,7 +67,10 @@ def test_coord_does_not_exist(self): cube_a = stock.simple_2d_w_multidim_and_scalars() coord = cube_a.coord("dim1").copy() coord.rename("new") - with pytest.raises(ValueError): + msg = re.escape( + "Attempting to promote an AuxCoord (new) which does not exist in the cube." + ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, coord) def test_argument_is_wrong_type(self): @@ -77,19 +80,25 @@ def test_argument_is_wrong_type(self): def test_trying_to_promote_a_multidim_coord(self): cube_a = stock.simple_2d_w_multidim_coords() - with pytest.raises(ValueError): + msg = re.escape( + "Attempting to promote an AuxCoord (bar) which is associated with 2 dimensions." + ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, "bar") def test_trying_to_promote_a_scalar_coord(self): cube_a = stock.simple_2d_w_multidim_and_scalars() - with pytest.raises(ValueError): + msg = re.escape( + "Attempting to promote an AuxCoord (an_other) which is associated with 0 dimensions." + ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, "an_other") def test_trying_to_promote_a_nonmonotonic_coord(self): cube_a = stock.hybrid_height() - with pytest.raises(ValueError): + msg = re.escape( + "Attempt to promote an AuxCoord (surface_altitude) fails when attempting to create a DimCoord " + "from the AuxCoord because: The 'surface_altitude' DimCoord points array must be strictly monotonic." 
+ ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, "surface_altitude") - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_rolling_window.py b/lib/iris/tests/unit/util/test_rolling_window.py index c2e5bdbb6c..da7a9b6ae0 100644 --- a/lib/iris/tests/unit/util/test_rolling_window.py +++ b/lib/iris/tests/unit/util/test_rolling_window.py @@ -100,18 +100,21 @@ def test_step(self): def test_window_too_short(self): # raise an error if the window length is less than 1 a = np.empty([5]) - with pytest.raises(ValueError): + msg = "`window` must be at least 1." + with pytest.raises(ValueError, match=msg): rolling_window(a, window=0) def test_window_too_long(self): # raise an error if the window length is longer than the # corresponding array dimension a = np.empty([7, 5]) - with pytest.raises(ValueError): + msg = "`window` is too long." + with pytest.raises(ValueError, match=msg): rolling_window(a, window=6, axis=1) def test_invalid_step(self): # raise an error if the step between windows is less than 1 a = np.empty([5]) - with pytest.raises(ValueError): + msg = "`step` must be at least 1." 
+ with pytest.raises(ValueError, match=msg): rolling_window(a, step=0) diff --git a/pyproject.toml b/pyproject.toml index fb9cdb3983..643fc661d7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,6 +112,11 @@ known-first-party = ["iris"] "D401", # 1 First line of docstring should be in imperative mood ] +# Deprecated unittest tests + +"lib/iris/tests/__init__.py" = ["PT"] +"lib/iris/tests/unit/tests/test_IrisTest.py" = ["PT"] + [tool.ruff.lint.pydocstyle] convention = "numpy" From 5d9144656b10ae0461ea54419af0ac7bb610216e Mon Sep 17 00:00:00 2001 From: Elias <110238618+ESadek-MO@users.noreply.github.com> Date: Wed, 18 Feb 2026 15:19:28 +0000 Subject: [PATCH 23/77] Deprecate the IrisTest class and WhatsNew (#6950) * init commit * A bunch more deprecation warnings and a whatsnew * A bunch more deprecation warnings and a whatsnew * Missed whatsnews from previous pytest work * corrected whatsnew formatting * Missed a backtick Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --------- Co-authored-by: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> --- docs/src/whatsnew/latest.rst | 9 +++- lib/iris/tests/__init__.py | 74 ++++++++++++++++++++++++++++- lib/iris/tests/graphics/__init__.py | 11 ++++- 3 files changed, 90 insertions(+), 4 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 7ee91f046e..ae7b5d3047 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -71,7 +71,9 @@ This document explains the changes made to Iris for this release 🔥 Deprecations =============== -#. N/A +#. `@ESadek-MO`_ has deprecated the :class:`~iris.tests.IrisTest` class, and other unittest-based + testing conveniences in favour of the conveniences found in :mod:`iris/tests/_shared_utils.py`. 
+   (:pull:`6950`) 🔗 Dependencies @@ -112,6 +114,11 @@ This document explains the changes made to Iris for this release :mod:`~iris.experimental.geovista` is currently only available for Python \<3.14. (:pull:`6816`, :issue:`6775`) +#. `@ESadek-MO`_, `@trexfeathers`_, `@bjlittle`_, `@HGWright`_, `@pp-mo`_, + `@stephenworsley`_ and `@ukmo-ccbunney`_ converted the entirety of the tests + from unittest to pytest. Iris is now also ruff-PT compliant, save for PT019. + (:issue:`6212`, :pull:`6939`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 9ac5dc5322..77b78701eb 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -168,7 +168,14 @@ def assert_masked_array_equal(a, b, strict=False): If False (default), the data array equality considers only unmasked elements. + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_masked_array_equal()` + """ + iris._deprecation.warn_deprecated( + "assert_masked_array_equal()` is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_shared_utils.assert_masked_array_equal()` instead." + ) _assert_masked_array(np.testing.assert_array_equal, a, b, strict) @@ -190,7 +197,14 @@ def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): :meth:`numpy.testing.assert_array_almost_equal`, with the meaning 'abs(desired-actual) < 0.5 * 10**(-decimal)' + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_masked_array_almost_equal()` + """ + iris._deprecation.warn_deprecated( + "assert_masked_array_almost_equal()` is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_shared_utils.assert_masked_array_almost_equal()` instead."
+ ) _assert_masked_array( np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal ) @@ -216,14 +230,30 @@ def assert_cml(cubes, reference_filename=None, checksum=True): When True, causes the CML to include a checksum for each Cube's data. Defaults to True. + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_CML()` + """ + iris._deprecation.warn_deprecated( + "`assert_cml` is now deprecated as part of the efforts " + "to convert from unittest to pytest. Please use `_shared_utils.assert_CML()` instead." + ) test = IrisTest() test.assertCML(cubes, reference_filename, checksum) class IrisTest(unittest.TestCase): - """A subclass of unittest.TestCase which provides Iris specific testing functionality.""" + """A subclass of unittest.TestCase which provides Iris specific testing functionality. + + .. deprecated:: v3.15.0 in favour of the private module `_shared_utils`, which contains + the majority of these methods converted to pytest-compliant functions. + """ + + iris._deprecation.warn_deprecated( + "IrisTest class is now deprecated as part of the efforts to migrate from unittest to pytest. " + "The majority of these methods can be found as functions (converted " + "to snake_case) in `_shared_utils`." + ) _assertion_counts: collections.defaultdict[str, int] = collections.defaultdict(int) def _assert_str_same( @@ -906,11 +936,25 @@ def assertEqualAndKind(self, value, expected): class GraphicsTest(graphics.GraphicsTestMixin, IrisTest): + """.. deprecated:: v3.15.0 in favour of `_shared_utils.GraphicsTest`.""" + + iris._deprecation.warn_deprecated( + "`GraphicsTest` has been moved to `_shared_utils` as part of the efforts to convert " + "from unittest to pytest." + ) pass class PPTest: - """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest.""" + """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest. + + .. 
deprecated:: v3.15.0 in favour of `_shared_utils.pp_cube_save_test()` + """ + + iris._deprecation.warn_deprecated( + "PPTest class is now deprecated as part of the efforts to migrate from unittest to pytest. " + "`cube_save_test()` has been moved to `_shared_utils` as `pp_cube_save_test()`" + ) @contextlib.contextmanager def cube_save_test( @@ -1005,7 +1049,14 @@ def skip_data(fn): class MyDataTests(tests.IrisTest): ... + .. deprecated:: v3.15.0 in favour of `_shared_utils.skip_data` + """ + iris._deprecation.warn_deprecated( + "`skip_data` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) + no_data = ( not iris.config.TEST_DATA_DIR or not os.path.isdir(iris.config.TEST_DATA_DIR) @@ -1026,7 +1077,13 @@ def skip_gdal(fn): class MyGeoTiffTests(test.IrisTest): ... + .. deprecated:: v3.15.0 in favour of `_shared_utils.skip_gdal` + """ + iris._deprecation.warn_deprecated( + "`skip_gdal` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) skip = unittest.skipIf(condition=not GDAL_AVAILABLE, reason="Test requires 'gdal'.") return skip(fn) @@ -1062,7 +1119,13 @@ def no_warnings(func): """Provides a decorator to ensure that there are no warnings raised within the test, otherwise the test will fail. + .. deprecated:: v3.15.0 in favour of `_shared_utils.no_warnings` + """ + iris._deprecation.warn_deprecated( + "`no_warnings` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) @functools.wraps(func) def wrapped(self, *args, **kwargs): @@ -1096,7 +1159,14 @@ def env_bin_path(exe_name: str | None = None) -> Path | None: For use in tests which spawn commands which should call executables within the Python environment, since many IDEs (Eclipse, PyCharm) don't automatically include this location in $PATH (as opposed to $PYTHONPATH). + + .. 
deprecated:: v3.15.0 in favour of `_shared_utils.env_bin_path` + """ + iris._deprecation.warn_deprecated( + "`env_bin_path` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) exe_path = Path(os.__file__) exe_path = (exe_path / "../../../bin").resolve() if exe_name is not None: diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 7fb2074ca0..2c9fc0b345 100644 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -23,6 +23,8 @@ import filelock import pytest +from iris._deprecation import warn_deprecated + # Test for availability of matplotlib. # (And remove matplotlib as an iris.tests dependency.) try: @@ -241,7 +243,14 @@ def _create_missing(phash: str) -> None: class GraphicsTestMixin: - # TODO: deprecate this in favour of check_graphic_caller. + """.. deprecated:: v3.15.0 in favour of `_check_graphic_caller()`.""" + + warn_deprecated( + "GraphicsTestMixin class is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_check_graphic_caller()` instead." + ) + def setUp(self) -> None: # Acquire threading non re-entrant blocking lock to ensure # thread-safe plotting. 
From da4e26db10b5017779734577d5a44643d1859233 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 23 Feb 2026 11:23:48 +0000 Subject: [PATCH 24/77] Updated environment lockfiles (#6954) Co-authored-by: Lockfile bot --- requirements/locks/py312-linux-64.lock | 58 +++++++++++++------------- requirements/locks/py313-linux-64.lock | 58 +++++++++++++------------- requirements/locks/py314-linux-64.lock | 50 +++++++++++----------- 3 files changed, 83 insertions(+), 83 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index dc4fbb95b8..56a43b85f7 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -6,7 +6,7 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2#bbf6f174dcd3254e19a2f5d2295ce808 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_3.conda#129e404c5b001f3ef5581316971e3ea0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1.conda#16c2a0e9c4a166e53632cfca4f68d020 https://conda.anaconda.org/conda-forge/noarch/python_abi-3.12-8_cp312.conda#c3efd25ac4d74b1584d2f7a57195ddf1 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 @@ -14,25 +14,25 @@ https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#998 
https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_17.conda#3c281169ea25b987311400d7a7e28445 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda#0aa00f03f9e39fb9876085dee11a85d4 https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda#d2ffd7602c02f2b316fd921d39876885 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.16-hb03c661_0.conda#f9f81ea472684d75b9dd8d0b328cf655 
https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda#b38117a3c920364aff79f870c984b4a3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda#e7f7ce06ec24cfcfb9e36d28cf82ba57 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_17.conda#1478bfa85224a65ab096d69ffd2af1e5 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_17.conda#202fdf8cad9eea704c2b0d823d1732bf +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda#d5e96b1ed75ca01906b3d2469b4ce493 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda#646855f357199a12f02a87382d429b75 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 @@ -40,7 +40,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d8 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b 
https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_17.conda#24c2fe35fa45cd71214beba6f337c071 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda#1b08cd684f34175e4514474793d44bcb https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 @@ -70,12 +70,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.co https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda#9063115da5bc35fdc3e1002e69b9ef6e https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 
+https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda#6235adb93d064ecdf3d44faee6f468de https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda#b4ecbefe517ed0157c37f8182768271c https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 @@ -103,7 +103,7 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_1.conda#5b5846bc2b23e07a1d61b89dcb67fcf0 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_0.conda#b7113551db5a3e2403cdd052c66e9999 https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 @@ -128,8 +128,8 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8c https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f 
-https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_1.conda#5ebd79c20c7ecf979f20e26fedc0a4fd +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_0.conda#70a09b6817c7ad694ef4543204c59c25 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -166,7 +166,8 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#0 https://conda.anaconda.org/conda-forge/noarch/docstring_parser-0.17.0-pyhd8ed1ab_0.conda#ce49d3e5a7d20be2ba57a2c670bdd82e https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.21.2-pyhd8ed1ab_0.conda#0b3041d2b101e48bee2df46dffd6f047 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.24.3-pyhd8ed1ab_0.conda#9dbb20eec24beb026291c20a35ce1ff9 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda#867127763fbe935bab59815b6e0b7b5c https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda#63e20cf7b7460019b423fc06abb96c60 https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 @@ -193,12 +194,12 @@ 
https://conda.anaconda.org/conda-forge/noarch/loguru-0.7.3-pyh707e725_0.conda#49 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py312h8a5da7c_0.conda#f775a43412f7f3d7ed218113ad233869 https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py312hd9148b4_1.conda#2e489969e38f0b428c39492619b5e6e5 -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py312h8a5da7c_0.conda#9fe4c848dd01cde9b8d0073744d4eef8 +https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.1-py312h8a5da7c_0.conda#17c77acc59407701b54404cfd3639cac https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py312h50c33e8_0.conda#c5eff3ada1a829f0bdb780dc4b62bbae -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.7.0-pyhcf101f3_0.conda#2157d0900a4bc2e9a0ba3cccb8497e8c +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.2-pyhcf101f3_0.conda#4fefefb892ce9cc1539405bec2f1a6cd https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py312h178313f_0.conda#0cf580c1b73146bb9ff1bbdb4d4c8cf9 @@ -238,6 +239,7 @@ https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30c https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 
https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.4.0-pyhd8ed1ab_0.conda#421a865222cd0c9d83ff08bc78bf3a61 https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py312h460c074_1.conda#648ee28dcd4e07a1940a17da62eccd40 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 @@ -245,7 +247,6 @@ https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py312h8a5da7c_0.conda#a8df7f0812ac4fa6bbc7135556d3e2c4 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_1.conda#693cda60b9223f55d0836c885621611b https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.61.1-py312h8a5da7c_0.conda#3bf8fb959dc598c67dac0430b4aff57a https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea943ca4f0d01d6eec6a60d24415dc5 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 @@ -253,6 +254,7 @@ https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 
https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 https://conda.anaconda.org/conda-forge/linux-64/libpq-18.2-hb80d175_0.conda#fa63c385ddb50957d93bdb394e355be8 @@ -267,7 +269,6 @@ https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py312h9b6a7d9_2.con https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.1-pyhd8ed1ab_0.conda#6b0259cea8ffa6b66b35bae0ca01c447 https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda#bdbd7385b4a67025ac2dba4ef8cb6a8f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py312h8a5da7c_0.conda#6a3fd177315aaafd4366930d440e4430 @@ -275,15 +276,14 @@ https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py312h5d8c7f2_0.c https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e 
-https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py312h4f23490_1.conda#84bf349fad55056ed326fc550671b65c https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312h0a2e395_4.conda#43c2bc96af3ae5ed9e8a10ded942aa50 https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_0.conda#367b1cd271b8aa39170f68826a94d65b +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h4f23490_2.conda#cec5bc5f7d374f8f8095f8e28e31f6cb https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py312hf79963d_1.conda#e597b3e812d9613f659b7d87ad252d18 @@ -291,46 +291,46 @@ https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda#67bd https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.conda#1af24b0eb99b1158c0d8c64a4d6c5d79 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 
https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py312h4f23490_2.conda#ab856c36638ab1acf90e70349c525cf9 -https://conda.anaconda.org/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda#33950a076fd589a7655c6888cc3d2b34 +https://conda.anaconda.org/conda-forge/noarch/rich-14.3.3-pyhcf101f3_0.conda#7a6289c50631d620652f5045a63eb573 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py312h54fa4ab_1.conda#828eb07c4c87c38ed8c6560c25893280 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py312h383787d_2.conda#69e400d3deca12ee7afd4b73a5596905 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py312hd9148b4_0.conda#55fd03988b1b1bc6faabbfb5b481ecd7 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.38.0-pyhcf101f3_0.conda#eaf8f08a04ab2d8612e45aa4f4c33357 https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py312h4f23490_0.conda#6aef45ba3c0123547eb7b0f15852cac9 -https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_0.conda#1eac93a6257796dd348d366a85f7f283 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_1.conda#3c155e2914169b807ebb4027a8c0999c https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 
-https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py312he3d6523_0.conda#b8dc157bbbb69c1407478feede8b7b42 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py312h25f8dc5_102.conda#99217b58c029977345b72bb36a1f6596 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py312h4c3975b_3.conda#b0610b4174af97290f5f466a72583071 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py312hcedc861_0.conda#f0d110978a87b200a06412b56b26407c https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 
https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py312hf79963d_1.conda#6c913a686cb4060cbd7639a36fa144f0 https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e -https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.1-pyhcf101f3_0.conda#657c51cee8d0d5893329ec30a0e1a5a4 +https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.4-pyhcf101f3_0.conda#11e433386dd008eca6e25204894e8f94 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 +https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-ha5ea40c_7.conda#f605332e1e4d9ff5c599933ae81db57d +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda#d62da3d560992bfa2feb611d7be813b8 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py312h6fba518_7.conda#2edca3790f2a372db44ff1aa159769fc https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e 
https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.0-pyhd8ed1ab_0.conda#dd7ebc742e6a766322ed77931123631e diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index 03f559315b..d8ad15deb8 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -6,7 +6,7 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2#bbf6f174dcd3254e19a2f5d2295ce808 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_3.conda#129e404c5b001f3ef5581316971e3ea0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1.conda#16c2a0e9c4a166e53632cfca4f68d020 https://conda.anaconda.org/conda-forge/noarch/python_abi-3.13-8_cp313.conda#94305520c52a4aa3f6c2b1ff6008d9f8 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 @@ -14,25 +14,25 @@ https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#998 https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 
+https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_17.conda#3c281169ea25b987311400d7a7e28445 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda#0aa00f03f9e39fb9876085dee11a85d4 https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda#d2ffd7602c02f2b316fd921d39876885 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.16-hb03c661_0.conda#f9f81ea472684d75b9dd8d0b328cf655 https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda#b38117a3c920364aff79f870c984b4a3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 
-https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda#e7f7ce06ec24cfcfb9e36d28cf82ba57 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_17.conda#1478bfa85224a65ab096d69ffd2af1e5 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_17.conda#202fdf8cad9eea704c2b0d823d1732bf +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda#d5e96b1ed75ca01906b3d2469b4ce493 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda#646855f357199a12f02a87382d429b75 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 @@ -41,7 +41,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d8 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_17.conda#24c2fe35fa45cd71214beba6f337c071 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda#1b08cd684f34175e4514474793d44bcb https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 
https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 @@ -71,12 +71,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.co https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda#9063115da5bc35fdc3e1002e69b9ef6e https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda#6235adb93d064ecdf3d44faee6f468de https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda#b4ecbefe517ed0157c37f8182768271c https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 @@ -104,7 +104,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_1.conda#5b5846bc2b23e07a1d61b89dcb67fcf0 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_0.conda#b7113551db5a3e2403cdd052c66e9999 https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 @@ -129,8 +129,8 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8c https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_1.conda#5ebd79c20c7ecf979f20e26fedc0a4fd +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_0.conda#70a09b6817c7ad694ef4543204c59c25 
https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -167,7 +167,8 @@ https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#0 https://conda.anaconda.org/conda-forge/noarch/docstring_parser-0.17.0-pyhd8ed1ab_0.conda#ce49d3e5a7d20be2ba57a2c670bdd82e https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.21.2-pyhd8ed1ab_0.conda#0b3041d2b101e48bee2df46dffd6f047 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.24.3-pyhd8ed1ab_0.conda#9dbb20eec24beb026291c20a35ce1ff9 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda#867127763fbe935bab59815b6e0b7b5c https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py313h6b9daa2_0.conda#3a0be7abedcbc2aee92ea228efea8eba https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 @@ -194,13 +195,13 @@ https://conda.anaconda.org/conda-forge/noarch/loguru-0.7.3-pyh707e725_0.conda#49 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py313h3dea7bd_0.conda#c14389156310b8ed3520d84f854be1ee https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py313h7037e92_1.conda#cd1cfde0ea3bca6c805c73ffa988b12a 
-https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py313h3dea7bd_0.conda#d182804a222acc8f2c7e215f344d229f +https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.1-py313h3dea7bd_0.conda#4f3e7bf5a9fc60a7d39047ba9e84c84c https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py313h80991f8_0.conda#2d5ee4938cdde91a8967f3eea686c546 https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.7.0-pyhcf101f3_0.conda#2157d0900a4bc2e9a0ba3cccb8497e8c +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.2-pyhcf101f3_0.conda#4fefefb892ce9cc1539405bec2f1a6cd https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py313h8060acc_0.conda#b62867739241368f43f164889b45701b @@ -239,6 +240,7 @@ https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30c https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.4.0-pyhd8ed1ab_0.conda#421a865222cd0c9d83ff08bc78bf3a61 https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 
https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py313hf46b229_1.conda#d0616e7935acab407d1543b28c446f6f https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 @@ -246,7 +248,6 @@ https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py313h3dea7bd_0.conda#77e1fc7133e03ccd62070f2405c82ea9 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_1.conda#bcca9afd203fe05d9582249ac12762da https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.61.1-py313h3dea7bd_0.conda#c0f36dfbb130da4f6ce2df31f6b25ea8 https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea943ca4f0d01d6eec6a60d24415dc5 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 @@ -254,6 +255,7 @@ https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 
https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 https://conda.anaconda.org/conda-forge/linux-64/libpq-18.2-hb80d175_0.conda#fa63c385ddb50957d93bdb394e355be8 @@ -269,29 +271,27 @@ https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py313h77f6078_2.con https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.1-pyhd8ed1ab_0.conda#6b0259cea8ffa6b66b35bae0ca01c447 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py313h3dea7bd_0.conda#e9415b0f7b43d2e32a3f24fd889c9e70 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py313hd6074c6_0.conda#684fb9c78db5024b939a1ed0a107f464 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py313h29aa505_1.conda#c63d5f9d63fe2f48b0ad75005fcae7ba https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py313hc8edb43_4.conda#33639459bc29437315d4bff9ed5bc7a7 
https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_0.conda#367b1cd271b8aa39170f68826a94d65b +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py313h29aa505_2.conda#ad53894d278895bf15c8fc324727d224 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py313h08cd8bf_2.conda#8a69ea71fdd37bfe42a28f0967dbb75a https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py313h29aa505_2.conda#e3a598d20bf2fa7b03a771e9e4471be9 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py313h29aa505_2.conda#60f5d1c039da10fe89a530cc93ea50ac -https://conda.anaconda.org/conda-forge/noarch/rich-14.3.2-pyhcf101f3_0.conda#33950a076fd589a7655c6888cc3d2b34 +https://conda.anaconda.org/conda-forge/noarch/rich-14.3.3-pyhcf101f3_0.conda#7a6289c50631d620652f5045a63eb573 https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py313h4b8bb8b_1.conda#2b18fe5b4b2d1611ddf8c2f080a46563 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 
https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py313had47c43_2.conda#6e550dd748e9ac9b2925411684e076a1 @@ -299,37 +299,37 @@ https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py313h7037e92_0.conda#cb423e0853b3dde2b3738db4dedf5ba2 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.38.0-pyhcf101f3_0.conda#eaf8f08a04ab2d8612e45aa4f4c33357 https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py313h29aa505_0.conda#3942b6a86fe92d0888b3373f2c1e1676 -https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_0.conda#1eac93a6257796dd348d366a85f7f283 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_1.conda#3c155e2914169b807ebb4027a8c0999c https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py313h683a580_0.conda#ffe67570e1a9192d2f4c189b27f75f89 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py313h16051e2_102.conda#20ae46c5e9c7106bdb2cac6b44b7d845 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py313h07c4f96_3.conda#b7810803a3481e22968022a94107ed93 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py313h2005660_0.conda#d551bd1d2fcfac36674dbe2be4b0a410 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py313h08cd8bf_1.conda#a0d8dc5c90850d9f1a79f69c98aef0ff https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e -https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.1-pyhcf101f3_0.conda#657c51cee8d0d5893329ec30a0e1a5a4 +https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.4-pyhcf101f3_0.conda#11e433386dd008eca6e25204894e8f94 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 
+https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-ha5ea40c_7.conda#f605332e1e4d9ff5c599933ae81db57d +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda#d62da3d560992bfa2feb611d7be813b8 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 -https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py313hbb97348_7.conda#03c6ddd039b6877278b5c4df20b61f29 https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.0-pyhd8ed1ab_0.conda#dd7ebc742e6a766322ed77931123631e diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock index a0c6d9652c..94f3433de3 100644 --- a/requirements/locks/py314-linux-64.lock +++ b/requirements/locks/py314-linux-64.lock @@ -6,36 +6,36 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb 
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2#bbf6f174dcd3254e19a2f5d2295ce808 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_3.conda#129e404c5b001f3ef5581316971e3ea0 https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda#0539938c55b6b1a59b560e843ad864a4 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_17.conda#51b78c6a757575c0d12f4401ffc67029 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_17.conda#3c281169ea25b987311400d7a7e28445 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda#0aa00f03f9e39fb9876085dee11a85d4 https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc 
+https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda#d2ffd7602c02f2b316fd921d39876885 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.16-hb03c661_0.conda#f9f81ea472684d75b9dd8d0b328cf655 https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda#b38117a3c920364aff79f870c984b4a3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda#e7f7ce06ec24cfcfb9e36d28cf82ba57 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_17.conda#1478bfa85224a65ab096d69ffd2af1e5 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_17.conda#202fdf8cad9eea704c2b0d823d1732bf +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda#d5e96b1ed75ca01906b3d2469b4ce493 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda#646855f357199a12f02a87382d429b75 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 
https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda#2c21e66f50753a083cbe6b80f38268fa https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_17.conda#24c2fe35fa45cd71214beba6f337c071 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda#1b08cd684f34175e4514474793d44bcb https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 @@ -60,11 +60,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.co https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_17.conda#a6c682ac611cb1fa4d73478f9e6efb06 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda#9063115da5bc35fdc3e1002e69b9ef6e https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 
-https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_17.conda#ea12f5a6bf12c88c06750d9803e1a570 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda#6235adb93d064ecdf3d44faee6f468de https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba @@ -86,7 +86,7 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_1.conda#5b5846bc2b23e07a1d61b89dcb67fcf0 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_0.conda#b7113551db5a3e2403cdd052c66e9999 https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 @@ -105,8 +105,8 @@ https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda# https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 
https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_0.conda#e3bcef76c3ecb25823c503ce11783d85 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_1.conda#5ebd79c20c7ecf979f20e26fedc0a4fd +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_0.conda#70a09b6817c7ad694ef4543204c59c25 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -139,7 +139,8 @@ https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py314h1807b08_0.con https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.21.2-pyhd8ed1ab_0.conda#0b3041d2b101e48bee2df46dffd6f047 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.24.3-pyhd8ed1ab_0.conda#9dbb20eec24beb026291c20a35ce1ff9 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda#867127763fbe935bab59815b6e0b7b5c https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 
https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 @@ -164,7 +165,7 @@ https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#3 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py314h8ec4b1a_0.conda#79678378ae235e24b3aa83cee1b38207 https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.7.0-pyhcf101f3_0.conda#2157d0900a4bc2e9a0ba3cccb8497e8c +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.2-pyhcf101f3_0.conda#4fefefb892ce9cc1539405bec2f1a6cd https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py314h0f05182_0.conda#4f225a966cfee267a79c5cb6382bd121 @@ -200,17 +201,18 @@ https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py314h4a8dc5f_1.conda#cf45f4278afd6f4e6d03eda0f435d527 https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 
https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py314h67df5f8_0.conda#6c7efc167cee337d9c41200506d022b8 https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_1.conda#51b0391b0ce96be49b1174e9a3e4a279 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/noarch/fonttools-4.61.1-pyh7db6752_0.conda#d5da976e963e70364b9e3ff270842b9f https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f @@ -221,18 +223,16 @@ https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py314h24aeaa0_2.conda#b46a7e6a2b8c064488576c3e42d85df0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 
-https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.1-pyhd8ed1ab_0.conda#6b0259cea8ffa6b66b35bae0ca01c447 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py314hc02f841_1.conda#552b5d9d8a2a4be882e1c638953e7281 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py314h97ea11e_4.conda#95bede9cdb7a30a4b611223d52a01aa4 https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_0.conda#367b1cd271b8aa39170f68826a94d65b +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py314hc02f841_2.conda#55ac6d85f5dd8ec5e9919e7762fcb31a https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py314ha0b5721_2.conda#fe3a5c8be07a7b82058bdeb39d33d93b @@ 
-245,16 +245,17 @@ https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py314h9891dd4_0.conda#5d3c008e54c7f49592fca9c32896a76f https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.38.0-pyhcf101f3_0.conda#eaf8f08a04ab2d8612e45aa4f4c33357 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py314hc02f841_0.conda#de50a60eab348de04809a33e180b4b01 -https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_0.conda#1eac93a6257796dd348d366a85f7f283 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_1.conda#3c155e2914169b807ebb4027a8c0999c https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py314h1194b4b_0.conda#b8683e6068099b69c10dbfcf7204203f https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py314h4ae7121_102.conda#cf495d9fc5e01a2ee10e0867ce957a44 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 
https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 @@ -263,11 +264,10 @@ https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py314ha1f92a4_0.c https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py314ha0b5721_1.conda#fe89c5fa422f215b0d75046ecd4667de https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 +https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-ha5ea40c_7.conda#f605332e1e4d9ff5c599933ae81db57d +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda#d62da3d560992bfa2feb611d7be813b8 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 -https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb From d79be4e68e8f7ea36d37251d0940b09d09ee2527 Mon Sep 
17 00:00:00 2001 From: Patrick Peglar Date: Tue, 24 Feb 2026 19:52:08 +0000 Subject: [PATCH 25/77] Add docs page on how to use S3 data. (#6951) * Update docs/src/further_topics/s3_io.rst Co-authored-by: Chris Bunney <48915820+ukmo-ccbunney@users.noreply.github.com> --- docs/src/further_topics/index.rst | 1 + docs/src/further_topics/s3_io.rst | 260 ++++++++++++++++++++++++++++++ docs/src/whatsnew/latest.rst | 3 + 3 files changed, 264 insertions(+) create mode 100644 docs/src/further_topics/s3_io.rst diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst index 122f1746a9..25c472c045 100644 --- a/docs/src/further_topics/index.rst +++ b/docs/src/further_topics/index.rst @@ -17,6 +17,7 @@ Extra information on specific technical issues. missing_data_handling dataless_cubes netcdf_io + s3_io dask_best_practices/index ugrid/index which_regridder_to_use diff --git a/docs/src/further_topics/s3_io.rst b/docs/src/further_topics/s3_io.rst new file mode 100644 index 0000000000..26774113aa --- /dev/null +++ b/docs/src/further_topics/s3_io.rst @@ -0,0 +1,260 @@ +.. _s3_io: + +Loading From and Saving To S3 Buckets +===================================== + +For cloud computing, it is natural to want to access data storage based on URIs. +At the present time, by far the most widely used platform for this is +`Amazon S3 "buckets" `_. + +It is common to treat an S3 bucket like a "disk", storing files as individual S3 +objects. S3 access URLs can also contain a nested +`'prefix string' `_ +structure, which naturally mirrors sub-directories in a file-system. + +While it would be possible for Iris to support S3 access directly, as it does the +"OpenDAP" protocol for netCDF data, this approach has some serious limitations : most +notably, each supported file format would have to be separately extended to support S3 +URLs in the place of file paths for loading and saving. 
+ +Instead, we have found that it is most practical to perform this access using a virtual +file system approach. However, one drawback is that this is best controlled *outside* +the Python code -- see details below. + + +TL;DR +----- +Install s3-fuse and use its ``s3fs`` command, to create a file-system mount which maps +to an S3 bucket. S3 objects can then be accessed as regular files (read and write). + + +Fsspec, S3-fs, fuse and s3-fuse +-------------------------------- +This approach depends on a set of related code solutions, as follows: + +`fsspec `_ +is a general framework for implementing Python-file-like access to alternative storage +resources. + +`s3fs `_ +is a package based on fsspec, which enables Python to "open" S3 data objects as Python +file-like objects for reading and writing. + +`fuse `_ +is an interface library that enables a data resource to be "mounted" as a Linux +filesystem, with user (not root) privilege. + +`s3-fuse `_ +is a utility based on s3fs and fuse, which provides a POSIX-compatible "mount" so that +an S3 bucket can be accessed as a regular Unix file system. + + +Practical usage +--------------- +Of the above, the only thing you actually need to know about is **s3-fuse**. + +There is an initial one-time setup, and also actions to take in advance of launching +Python, and after exit, each time you want to access S3 from Python. + +Prior requirements +^^^^^^^^^^^^^^^^^^ + +Install "s3-fuse" +~~~~~~~~~~~~~~~~~ +The most reliable method is to install into your Linux O.S. See +`installation instructions `_ . +This presumes that you perform a system installation with ``apt``, ``yum`` or similar. + +If you do not have necessary 'sudo' or root access permissions, we have found that it +is sufficient to install only **into your Python environment**, using conda. +Though not suggested, this appears to work on Unix systems where we have tried it. + +For this, you can use conda -- e.g. + +..
code-block:: bash + + $ conda install s3-fuse + +( Or better, put it into a reusable 'spec file', with all other requirements, and then +use ``$ conda create --file ...`` +). + +.. note:: + + It is **not** possible to install s3fs-fuse into a Python environment with ``pip``, + as it is not a Python package. + + +Create an empty mount directory +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +You need an empty directory in your existing filesystem tree, that you will map your +S3 bucket **onto** -- e.g. + +.. code-block:: bash + + $ mkdir /home/self.me/s3_root/testbucket_mountpoint + + +Setup AWS credentials +~~~~~~~~~~~~~~~~~~~~~ +Provide S3 access credentials in an AWS credentials file, as described +`here in the s3-fuse documentation `_. + +There is a general introduction to AWS credentials +`here in the AWS documentation `_ +which should explain what you need here. + + +Before use (before each Python invocation) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Activate your Python environment, which then gives access to the **s3-fuse** Linux +command "s3fs". + +Map your S3 bucket "into" the chosen empty directory -- e.g. + +.. code-block:: bash + + $ s3fs my-test-bucket /home/self.me/s3_root/testbucket_mountpoint + +.. note:: + + You can now freely list/access contents of your bucket at this path + -- including updating or writing files. + +.. note:: + + This performs a Unix file-system "mount" operation, which temporarily + modifies your system. This change is not part of the current environment, and is not + limited to the scope of the current process. + + If you reboot, the mount will disappear. If you logout and login again, there can + be problems : ideally you should avoid this by always "unmounting" (see below). + +.. note:: + + The command for mounting an s3-fuse filesystem is ``s3fs`` - this should not be + confused with the similarly named s3fs python package.
+ + +Within Python code +^^^^^^^^^^^^^^^^^^ +You can now access objects at the remote S3 URL via the mount point on your local file +system you just created with `s3fs`, e.g. + +.. code-block:: python + + >>> path = "/home/self.me/s3_root/testbucket_mountpoint/sub_dir/a_file.nc" + >>> cubes = iris.load(path) + + +After use (after Python exit) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +When you have finished accessing the S3 objects in the mounted virtual filesystem, it +is a good idea to **unmount** it. Before doing this, make sure that all file handles to +the objects have been closed and there are no terminals open in that directory. + +.. code-block:: bash + + $ umount /home/self.me/s3_root/testbucket_mountpoint + +.. note:: + + The ``umount`` command is a standard Unix command. It may not always succeed, in which case + some kind of retry may be needed -- see detail notes below. + + The mount created will not survive a system reboot, nor does it function correctly + if the user logs out + logs in again. + + Presumably, problems could occur if repeated operation were to create a very large + number of mounts, so unmounting after use does seem advisable. + + +Some Pros and Cons of this approach +----------------------------------- + +PROs +^^^^ + +* **s3fs** supports random access to "parts" of a file, allowing efficient handling of + datasets larger than memory without requiring the data to be explicitly sharded + in storage. + +* **s3-fuse** is transparent to file access within Python, including Iris load+save or + other files accessed via a Python 'open' : the S3 data appears to be files in a + regular file-system. + +* the file-system virtualisation approach works for all file formats, since the + mapping occurs in the O.S. rather than in Iris, or Python. + +* "mounting" avoids the need for the Python code to dynamically connect to / + disconnect from an S3 bucket. 
+ +* the "unmount problem" (see below) is managed at the level of the operating system, + where it occurs, instead of trying to allow for it in Python code. This means it + could be managed differently in different operating systems, if needed. + +* it does also work with many other cloud object-storage platforms, though with extra + required dependencies in some cases. + See the s3fs-fuse `Non-Amazon S3`_ docs page for details. + +CONs +^^^^ + +* only works on Unix-like O.S. + +* requires the "fuse" kernel module to be supported in your O.S. + This is usually installed by default, but may not always be. + See `'fuse' kernel module `_ + for more detail. + +* the file-system virtualisation may not be perfect : some file-system operations + might not behave as expected, e.g. with regard to file permissions or system + information. + +* it requires user actions *outside* the Python code. + +* the user must manage the mount/umount context. + +* some similar cloud object-storage platforms are *not* supported. + See the s3fs-fuse `Non-Amazon S3`_ docs page for details of those which are. + + +Background Notes and Details +---------------------------- + +* The file-like objects provided by **fsspec** replicate nearly *all* the behaviours + of a regular Python file. + + However, this is still hard to integrate with regular file access, since you + cannot create one from a regular Python "open" call -- still less + when opening a file with an underlying file-format such as netCDF4 or HDF5 + (since these are usually implemented in other languages such as C). + Nor can you interrogate file paths or system metadata, e.g. permissions. + + So, the key benefit offered by **s3-fuse** is that all functions are mapped + onto regular O.S. file-system calls -- so the file-format never needs to + know that the data is not a "real" file. 
+ +* It would be possible, instead, to copy data into an *actual* file on disk, but the + s3-fuse approach avoids the need for copying, and thus in a cloud environment also + the cost and maintenance of a "local disk". + + s3fs also allows the software to access only *required* parts of a file, without + copying the whole content. This is obviously essential for efficient use of large + datasets, e.g. when larger than available memory. + +* It is also possible to use **s3-fuse** to establish the mounts *from within Python*. + However, we have considered integrating this into Iris and rejected it because of + unavoidable problems : namely, the "umount problem" (see below). + For details, see : https://github.com/SciTools/iris/pull/6731 + +* "Unmounting" must be done via a shell ``umount`` command, and there is no easy way to + guarantee that this succeeds, since it can often get a "target is busy" error. + + This "umount problem" is a known problem in Unix generally : see + `here `_ . + + It can only be resolved by a delay + retry. + + +.. _Non-Amazon S3: https://github.com/s3fs-fuse/s3fs-fuse/wiki/Non-Amazon-S3 diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index ae7b5d3047..bf0de99703 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -93,6 +93,9 @@ This document explains the changes made to Iris for this release #. :user:`bjlittle` added the ``:user:`` `extlinks`_ ``github`` user convenience. (:pull:`6931`) +#. `@pp-mo`_ added a page on how to access datafiles in S3 buckets. 
+ (:issue:`6374`, :pull:`6951`) + 💼 Internal =========== From 207d28d6dc0a7523fd86d8caea3d708aed2a1aba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Feb 2026 10:59:49 +0000 Subject: [PATCH 26/77] Bump lycheeverse/lychee-action in the gha group across 1 directory (#6958) Bumps the gha group with 1 update in the / directory: [lycheeverse/lychee-action](https://github.com/lycheeverse/lychee-action). Updates `lycheeverse/lychee-action` from 2.7.0 to 2.8.0 - [Release notes](https://github.com/lycheeverse/lychee-action/releases) - [Commits](https://github.com/lycheeverse/lychee-action/compare/a8c4c7cb88f0c7386610c35eb25108e448569cb0...8646ba30535128ac92d33dfc9133794bfdd9b411) --- updated-dependencies: - dependency-name: lycheeverse/lychee-action dependency-version: 2.8.0 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: gha ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-linkchecks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-linkchecks.yml b/.github/workflows/ci-linkchecks.yml index 83ef091a19..6ffdd3df93 100644 --- a/.github/workflows/ci-linkchecks.yml +++ b/.github/workflows/ci-linkchecks.yml @@ -23,7 +23,7 @@ jobs: - name: Link Checker id: lychee - uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 + uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 with: token: ${{secrets.GITHUB_TOKEN}} fail: false From 7da9f80f4bf7d6a8184813108ef26797beebf7f1 Mon Sep 17 00:00:00 2001 From: Hamish Steptoe Date: Thu, 26 Feb 2026 16:13:22 +0000 Subject: [PATCH 27/77] Fixes for pandas v3 compatibility (#6948) * Update env reqs to pandas 3 * Update env reqs to pandas 3 * Add copy kwarg checking * Add pylance ignores for decorator * Improve check copy decorator * Updates and fixes to 
test_pandas.py * Type checking fixes * Ruff fixes * Type checking fixes * Remove extraneous lock file * Further mypy clarification fixes * Yet more mypy-pytest interaction fixing * Rebuild lock files * Test fixes * Whatsnew update * Whatsnew update 2 * MORE type fixing * Revert get_dimensional_metadata check * Code review responses * Update Whats New to reflect code reviewer * Further clarification of typing comment --- docs/src/whatsnew/latest.rst | 15 +- lib/iris/_deprecation.py | 74 +++++++++ lib/iris/pandas.py | 189 +++++++++++++++------- lib/iris/tests/unit/pandas/test_pandas.py | 180 +++++++-------------- requirements/locks/py313-linux-64.lock | 6 +- requirements/locks/py314-linux-64.lock | 6 +- requirements/py312.yml | 2 +- requirements/py313.yml | 2 +- requirements/py314.yml | 2 +- 9 files changed, 285 insertions(+), 191 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index bf0de99703..a655b3f5e8 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -75,6 +75,9 @@ This document explains the changes made to Iris for this release testing conveniences in favour of the conveniences found in :mod:`iris/tests/_shared_utils.py`. (:pull:`6950`) +#. `@hsteptoe`_ has deprecated the use of the `copy` kwarg across :mod:`iris.pandas` to reflect changes + to the default behaviour of pandas v3 `New pandas v3 copy behaviour`_. (:pull:`6948`) + 🔗 Dependencies =============== @@ -122,13 +125,21 @@ This document explains the changes made to Iris for this release from unittest to pytest. Iris is now also ruff-PT compliant, save for PT019. (:issue:`6212`, :pull:`6939`) +#. `@hsteptoe`_ and `@ESadek-MO`_ (reviewer) updated chained assignment usage within the tests + associated with :mod:`iris.pandas` to reflect changes in pandas v3 `New pandas v3 copy behaviour`_. + (:pull:`6948`, :issue:`6761`) + +#. `@hsteptoe`_ and `@ESadek-MO`_ (reviewer) added static type hinting to :mod:`iris.pandas`. (:pull:`6948`) + ..
comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: .. _@hdyson: https://github.com/hdyson - -.. _SPEC0 Minimum Supported Dependencies: https://scientific-python.org/specs/spec-0000/ +.. _@hsteptoe: https://github.com/hsteptoe .. comment Whatsnew resources in alphabetical order: + +.. _New pandas v3 copy behaviour: https://pandas.pydata.org/docs/whatsnew/v3.0.0.html#consistent-copy-view-behaviour-with-copy-on-write +.. _SPEC0 Minimum Supported Dependencies: https://scientific-python.org/specs/spec-0000/ diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py index b771883a71..27127ecf5b 100644 --- a/lib/iris/_deprecation.py +++ b/lib/iris/_deprecation.py @@ -4,8 +4,82 @@ # See LICENSE in the root of the repository for full licensing details. """Utilities for producing runtime deprecation messages.""" +from functools import wraps +import inspect import warnings +from iris.warnings import IrisUserWarning + + +def explicit_copy_checker(f): + """Check for explicitly set parameters in a function. + + This is intended to be used as a decorator for functions that take a + variable number of parameters, to allow the function to determine which + parameters were explicitly set by the caller. + + This can be helpful when wanting raise DeprecationWarning of function + parameters, but only when they are explicitly set by the caller, and not + when they are left at their default value. + + Parameters + ---------- + f : function + The function to be decorated. The function must have a signature that + allows for variable parameters (e.g. ``*args`` and/or ``**kwargs``), and + the parameters to be checked must be explicitly listed in the function + signature (i.e. not just passed via ``**kwargs``). + + Returns + ------- + function + The decorated function, which will have an additional keyword argument + ``explicit_params`` added to its signature. 
This argument will be a set + of the names of the parameters that were explicitly set by the caller when + calling the function. + + Examples + -------- + The following example shows how to use the ``explicit_copy_checker`` decorator to + check for explicitly set parameters in a function, and raise a DeprecationWarning + if a deprecated parameter is explicitly set by the caller. + + >>> from iris._deprecation import explicit_copy_checker, IrisDeprecation + >>> @explicit_copy_checker + ... def my_function(a, b=1): + ... print(f"a={a}, b={b}") + ... if "b" in kwargs["explicit_params"]: + ... warnings.warn("Parameter 'b' is deprecated.", IrisDeprecation) + >>> my_function(1) # No warning, 'b' is not explicitly set + >>> my_function(1, b=3) # Warning, 'b' is explicitly set + + """ + varnames = inspect.getfullargspec(f)[0] + + @wraps(f) + def wrapper(*a, **kw): + explicit_params = set(list(varnames[: len(a)]) + list(kw.keys())) + if "copy" in explicit_params: + if kw["copy"] is False: + msg = ( + "Pandas v3 behaviour defaults to copy=True. The `copy`" + f" parameter in `{f.__name__}` is deprecated and" + "will be removed in a future release." + ) + warnings.warn(msg, category=IrisUserWarning) + else: + msg = ( + f"The `copy` parameter in `{f.__name__}` is deprecated and" + " will be removed in a future release. The function will" + " always make a copy of the data array, to ensure that the" + " returned Cubes are independent of the input pandas data." + ) + warn_deprecated(msg) + else: + return f(*a, **kw) + + return wrapper + class IrisDeprecation(UserWarning): """An Iris deprecation warning. 
diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index b23b31dff6..6d95143a01 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -10,6 +10,7 @@ import datetime from itertools import chain, combinations +from typing import TYPE_CHECKING, Optional import warnings import cf_units @@ -18,21 +19,27 @@ import numpy as np import numpy.ma as ma import pandas as pd - -try: - from pandas.core.indexes.datetimes import DatetimeIndex # pandas >=0.20 -except ImportError: - from pandas.tseries.index import DatetimeIndex # pandas <0.20 +from pandas import Index as pd_index import iris -from iris._deprecation import warn_deprecated +from iris._deprecation import explicit_copy_checker, warn_deprecated from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube, CubeList -from iris.util import new_axis +from iris.util import monotonic, new_axis from iris.warnings import IrisIgnoringWarning +try: + from pandas.core.indexes.datetimes import DatetimeIndex # pandas >=0.20 +except ImportError: + from pandas.tseries.index import DatetimeIndex # pandas <0.20 + -def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): +def _get_dimensional_metadata( + name: str, + values: np.ndarray | DatetimeIndex | pd_index, + calendar: Optional[str] = None, + dm_class: Optional[AuxCoord | DimCoord] = None, +) -> AuxCoord | DimCoord: """Create a Coord or other dimensional metadata from a Pandas index or columns array. If no calendar is specified for a time series, Standard is assumed. @@ -45,7 +52,9 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): # Getting everything into a single datetime format is hard! # Convert out of NumPy's own datetime format. - if np.issubdtype(values.dtype, np.datetime64): + if isinstance(values.dtype, np.dtype) and np.issubdtype( + values.dtype, np.datetime64 + ): values = pd.to_datetime(values) # Convert pandas datetime objects to python datetime objects. 
@@ -61,15 +70,14 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): values = np.array(values) - if dm_class is None: - if np.issubdtype(values.dtype, np.number) and iris.util.monotonic( - values, strict=True - ): - dm_class = DimCoord + if dm_class is not None: + instance = dm_class(values, units=units) # type: ignore[operator] + else: + if np.issubdtype(values.dtype, np.number) and monotonic(values, strict=True): + instance = DimCoord(values, units=units) else: - dm_class = AuxCoord + instance = AuxCoord(values, units=units) - instance = dm_class(values, units=units) if name is not None: # Use rename() to attempt standard_name but fall back on long_name. instance.rename(str(name)) @@ -77,19 +85,25 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): return instance -def _add_iris_coord(cube, name, points, dim, calendar=None): +def _add_iris_coord( + cube: Cube, + name: str, + points: np.ndarray | DatetimeIndex | pd_index, + dim: int, + calendar: Optional[str] = None, +) -> None: """Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array.""" # Most functionality has been abstracted to _get_dimensional_metadata, # allowing reuse in as_cube() and as_cubes(). coord = _get_dimensional_metadata(name, points, calendar) - if coord.__class__ == DimCoord: + if isinstance(coord, DimCoord): cube.add_dim_coord(coord, dim) else: cube.add_aux_coord(coord, dim) -def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...] | None: +def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...]: """Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. Iterates through grouping single index levels, then combinations of 2 @@ -97,7 +111,10 @@ def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...] | None: group are found. Returns a ``tuple`` of the index levels that group to produce single values, as soon as one is found. 
- Returns ``None`` if no index level combination produces single values. + Raises + ------ + ValueError + If no unique grouping can be found. This would cause problems defining iris coordinates later on. """ unique_number = pandas_series.nunique() @@ -116,14 +133,24 @@ def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...] | None: result = lc # Escape as early as possible - heavy operation. break + # Protect against the possibility of no unique grouping being found, which would cause problems + # defining iris coordinates later on. + if result is None: + message = ( + "No unique index grouping could be found for this Series. " + "Consider resetting the index or adding a unique index level." + ) + raise ValueError(message) + return result +@explicit_copy_checker def as_cube( - pandas_array, - copy=True, - calendars=None, -): + pandas_array: pd.Series | pd.DataFrame, + copy: bool = True, + calendars: Optional[dict] = None, +) -> Cube: """Convert a Pandas Series/DataFrame into a 1D/2D Iris Cube. Parameters @@ -133,6 +160,14 @@ def as_cube( copy : bool, default=True Whether to copy `pandas_array`, or to create array views where possible. Provided in case of memory limit concerns. + + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned Cube is independent + of the input pandas data and to be consistent with pandas v3 + behaviour. + calendars : dict, optional A dict mapping a dimension to a calendar. Required to convert datetime indices/columns. 
@@ -188,14 +223,15 @@ def as_cube( return cube +@explicit_copy_checker def as_cubes( - pandas_structure, - copy=True, - calendars=None, - aux_coord_cols=None, - cell_measure_cols=None, - ancillary_variable_cols=None, -): + pandas_structure: pd.DataFrame | pd.Series, + copy: bool = True, + calendars: Optional[dict] = None, + aux_coord_cols: Optional[list[str]] = None, + cell_measure_cols: Optional[list[str]] = None, + ancillary_variable_cols: Optional[list[str]] = None, +) -> CubeList: r"""Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. The index of `pandas_structure` will be used for generating the @@ -212,6 +248,14 @@ def as_cubes( `pandas_structure` column, or a view of the same array. Arrays other than the data (coords etc.) are always copies. This option is provided to help with memory size concerns. + + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned Cube is independent + of the input pandas data and to be consistent with pandas v3 + behaviour. + calendars : dict, optional Calendar conversions for individual date-time coordinate columns/index-levels e.g. ``{"my_column": cf_units.CALENDAR_360_DAY}``. @@ -396,7 +440,7 @@ def as_cubes( ) raise ValueError(message) - cube_kwargs = {} + cube_kwargs: dict = {} def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): # Common convenience to get the right DM in the right format for @@ -408,7 +452,7 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): # DimCoords. 
dim_coord_kwarg = [] for ix, dim_name in enumerate(pandas_index.names): - if hasattr(pandas_index, "levels"): + if isinstance(pandas_index, pd.MultiIndex): coord_points = pandas_index.levels[ix] else: coord_points = pandas_index @@ -453,7 +497,9 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): # for this object. _series_index_unique should have ensured # that we are indeed removing the duplicates. shaped = content.reshape(cube_shape) - indices = [0] * len(cube_shape) + # Static typing `indices` needed to avoid mypy call-overload error + # from assuming int instead of list for later slicing + indices: list = [0] * len(cube_shape) for dim in dimensions: indices[dim] = slice(None) collapsed = shaped[tuple(indices)] @@ -486,8 +532,8 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): return cubes -def _as_pandas_coord(coord): - """Convert an Iris Coord into a Pandas index or columns array.""" +def _as_pandas_coord(coord: DimCoord | AuxCoord) -> np.ndarray: + """Convert an Iris Coord into a numpy array.""" index = coord.points if coord.units.is_time_reference(): index = coord.units.num2date(index) @@ -511,25 +557,25 @@ def _get_base(array): raise AssertionError(msg) -def _make_dim_coord_list(cube): +def _make_dim_coord_list(cube: Cube) -> list: """Get Dimension coordinates.""" outlist = [] for dimn in range(cube.ndim): dimn_coord = cube.coords(dimensions=dimn, dim_coords=True) if dimn_coord: - outlist += [[dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])]] + outlist += [[dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])]] # type: ignore[AttributeAccessIssue] else: outlist += [[f"dim{dimn}", range(cube.shape[dimn])]] return list(zip(*outlist)) -def _make_aux_coord_list(cube): +def _make_aux_coord_list(cube: Cube) -> list: """Get Auxiliary coordinates.""" outlist = [] for aux_coord in cube.coords(dim_coords=False): outlist += [ [ - aux_coord.name(), + aux_coord.name(), # type: ignore[AttributeAccessIssue] 
cube.coord_dims(aux_coord), _as_pandas_coord(aux_coord), ] @@ -537,13 +583,13 @@ def _make_aux_coord_list(cube): return list(chain.from_iterable([outlist])) -def _make_ancillary_variables_list(cube): +def _make_ancillary_variables_list(cube: Cube) -> list: """Get Ancillary variables.""" outlist = [] for ancil_var in cube.ancillary_variables(): outlist += [ [ - ancil_var.name(), + ancil_var.name(), # type: ignore[AttributeAccessIssue] cube.ancillary_variable_dims(ancil_var), ancil_var.data, ] @@ -551,13 +597,13 @@ def _make_ancillary_variables_list(cube): return list(chain.from_iterable([outlist])) -def _make_cell_measures_list(cube): +def _make_cell_measures_list(cube: Cube) -> list: """Get cell measures.""" outlist = [] for cell_measure in cube.cell_measures(): outlist += [ [ - cell_measure.name(), + cell_measure.name(), # type: ignore[AttributeAccessIssue] cube.cell_measure_dims(cell_measure), cell_measure.data, ] @@ -565,7 +611,8 @@ def _make_cell_measures_list(cube): return list(chain.from_iterable([outlist])) -def as_series(cube, copy=True): +@explicit_copy_checker +def as_series(cube: Cube, copy: bool = True) -> pd.Series: """Convert a 1D cube to a Pandas Series. Parameters @@ -576,6 +623,13 @@ def as_series(cube, copy=True): Whether to make a copy of the data. Defaults to True. Must be True for masked data. + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned Cube is independent + of the input pandas data and to be consistent with pandas v3 + behaviour. + Notes ----- This function will copy your data by default. 
@@ -601,7 +655,7 @@ def as_series(cube, copy=True): if ma.isMaskedArray(data): if not copy: raise ValueError("Masked arrays must always be copied.") - data = data.astype("f").filled(np.nan) + data = data.astype("f").filled(np.nan) # type: ignore[AttributeAccessIssue] elif copy: data = data.copy() index = None @@ -613,13 +667,14 @@ def as_series(cube, copy=True): return series +@explicit_copy_checker def as_data_frame( - cube, - copy=True, - add_aux_coords=False, - add_cell_measures=False, - add_ancillary_variables=False, -): + cube: Cube, + copy: bool = True, + add_aux_coords: bool = False, + add_cell_measures: bool = False, + add_ancillary_variables: bool = False, +) -> pd.DataFrame: r"""Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`. :attr:`~iris.cube.Cube.dim_coords` and :attr:`~iris.cube.Cube.data` are @@ -635,6 +690,14 @@ def as_data_frame( Whether the :class:`pandas.DataFrame` is a copy of the the Cube :attr:`~iris.cube.Cube.data`. This option is provided to help with memory size concerns. + + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned Cube is independent + of the input pandas data and to be consistent with pandas v3 + behaviour. + add_aux_coords : bool, default=False If True, add all :attr:`~iris.cube.Cube.aux_coords` (including scalar coordinates) to the returned :class:`pandas.DataFrame`. @@ -672,6 +735,12 @@ def as_data_frame( #. Where the :class:`~iris.cube.Cube` contains masked values, these become :data:`numpy.nan` in the returned :class:`~pandas.DataFrame`. + #. If `copy` parameter is explicitly set to True or False, a DeprecationWarning + is raised, as this parameter will be removed in a future release. + This function will always make a copy of the data array, to ensure that the + returned Cube is independent of the input pandas data and to be consistent + with pandas v3 behaviour. 
+ Notes ----- :class:`dask.dataframe.DataFrame` are not supported. @@ -800,8 +869,11 @@ def as_data_frame( Name: surface_temperature, Length: 419904, dtype: float32 """ + data_frame: pd.DataFrame - def merge_metadata(meta_var_list): + def merge_metadata( + meta_var_list: list[tuple[str, list[int], np.ndarray]], + ) -> pd.DataFrame: """Add auxiliary cube metadata to the DataFrame.""" nonlocal data_frame for meta_var_name, meta_var_index, meta_var in meta_var_list: @@ -828,13 +900,14 @@ def merge_metadata(meta_var_list): ) return data_frame - if getattr(cube, "ndim", None) is not None and (is_scalar := cube.ndim == 0): + is_scalar = (getattr(cube, "ndim", None) is not None) and (cube.ndim == 0) + if is_scalar: # promote the scalar cube to a 1D cube, and convert in the same way as a 1D cube cube = new_axis(cube) if iris.FUTURE.pandas_ndim: # Checks - if not isinstance(cube, iris.cube.Cube): + if not isinstance(cube, Cube): raise TypeError( f"Expected input to be iris.cube.Cube instance, got: {type(cube)}" ) @@ -845,13 +918,13 @@ def merge_metadata(meta_var_list): if ma.isMaskedArray(data): if not copy: raise ValueError("Masked arrays must always be copied.") - data = data.astype("f").filled(np.nan) + data = data.astype("f").filled(np.nan) # type: ignore[AttributeAccessIssue] # Extract dim coord information: separate lists for dim names and dim values coord_names, coords = _make_dim_coord_list(cube) # Make base DataFrame index = pd.MultiIndex.from_product(coords, names=coord_names) - data_frame = pd.DataFrame(data.ravel(), columns=[cube.name()], index=index) + data_frame = pd.DataFrame(data.ravel(), columns=[cube.name()], index=index) # type: ignore[AttributeAccessIssue] if add_aux_coords: data_frame = merge_metadata(_make_aux_coord_list(cube)) @@ -882,7 +955,7 @@ def merge_metadata(meta_var_list): if ma.isMaskedArray(data): if not copy: raise ValueError("Masked arrays must always be copied.") - data = data.astype("f").filled(np.nan) + data = 
data.astype("f").filled(np.nan) # type: ignore[AttributeAccessIssue] elif copy: data = data.copy() diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index f83004a31b..5fed3ee956 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -18,6 +18,7 @@ import iris from iris._deprecation import IrisDeprecation from iris.tests import _shared_utils +from iris.warnings import IrisUserWarning # Importing pandas has the side-effect of messing with the formatters # used by matplotlib for handling dates. @@ -122,24 +123,6 @@ def test_copy_true(self): series[0] = 99 assert cube.data[0] == 0 - def test_copy_int32_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - assert cube.data[0] == 99 - - def test_copy_int64_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int64), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - assert cube.data[0] == 99 - - def test_copy_float_false(self): - cube = Cube(np.array([0, 1, 2, 3.3, 4]), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - assert cube.data[0] == 99 - def test_copy_masked_true(self): data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) cube = Cube(data, long_name="foo") @@ -147,12 +130,6 @@ def test_copy_masked_true(self): series[0] = 99 assert cube.data[0] == 0 - def test_copy_masked_false(self): - data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) - cube = Cube(data, long_name="foo") - with pytest.raises(ValueError, match="Masked arrays must always be copied"): - _ = iris.pandas.as_series(cube, copy=False) - @skip_pandas @pytest.mark.filterwarnings( @@ -270,33 +247,9 @@ def test_time_360(self): def test_copy_true(self): cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo") data_frame = 
iris.pandas.as_data_frame(cube) - data_frame[0][0] = 99 + data_frame.iloc[0, 0] = 99 assert cube.data[0, 0] == 0 - def test_copy_int32_false(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int32), - long_name="foo", - ) - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - - def test_copy_int64_false(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int64), - long_name="foo", - ) - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - - def test_copy_float_false(self): - cube = Cube(np.array([[0, 1, 2, 3, 4.4], [5, 6, 7, 8, 9]]), long_name="foo") - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - def test_copy_masked_true(self): data = np.ma.MaskedArray( [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], @@ -304,25 +257,9 @@ def test_copy_masked_true(self): ) cube = Cube(data, long_name="foo") data_frame = iris.pandas.as_data_frame(cube) - data_frame[0][0] = 99 + data_frame.iloc[0, 0] = 99 assert cube.data[0, 0] == 0 - def test_copy_masked_false(self): - data = np.ma.MaskedArray( - [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], - mask=[[0, 1, 0, 1, 0], [1, 0, 1, 0, 1]], - ) - cube = Cube(data, long_name="foo") - with pytest.raises(ValueError, match="Masked arrays must always be copied"): - _ = iris.pandas.as_data_frame(cube, copy=False) - - def test_copy_false_with_cube_view(self): - data = np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) - cube = Cube(data[:], long_name="foo") - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - @skip_pandas class TestAsDataFrameNDim: @@ -450,17 +387,11 @@ def test_simple3_d(self): data_frame.index.get_level_values("kid"), expected_kid ) - def test_copy_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - data_frame = 
iris.pandas.as_data_frame(cube, copy=False) - cube.data[2] = 99 - assert cube.data[2] == data_frame.foo[2] - - def test_copy_true(self): + def test_implicit_copy_true(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - data_frame = iris.pandas.as_data_frame(cube, copy=True) + data_frame = iris.pandas.as_data_frame(cube) cube.data[2] = 99 - assert cube.data[2] != data_frame.foo[2] + assert cube.data[2] != data_frame.loc[2, "foo"].values def test_time_standard(self): cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts") @@ -709,18 +640,12 @@ def test_series_cftime_360(self, request): ), ) - def test_copy_true(self): + def test_implicit_copy_true(self): series = pd.Series([0, 1, 2, 3, 4], index=[5, 6, 7, 8, 9]) cube = iris.pandas.as_cube(series) cube.data[0] = 99 assert series[5] == 0 - def test_copy_false(self): - series = pd.Series([0, 1, 2, 3, 4], index=[5, 6, 7, 8, 9]) - cube = iris.pandas.as_cube(series, copy=False) - cube.data[0] = 99 - assert series[5] == 99 - @skip_pandas @pytest.mark.filterwarnings( @@ -818,17 +743,11 @@ def test_data_frame_datetime_standard(self, request): ), ) - def test_copy_true(self): + def test_implicit_copy_true(self): data_frame = pd.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) cube = iris.pandas.as_cube(data_frame) cube.data[0, 0] = 99 - assert data_frame[0][0] == 0 - - def test_copy_false(self): - data_frame = pd.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) - cube = iris.pandas.as_cube(data_frame, copy=False) - cube.data[0, 0] = 99 - assert data_frame[0][0] == 99 + assert data_frame.iloc[0, 0] == 0 @skip_pandas @@ -856,6 +775,49 @@ def test_as_dataframe_no_future_warning(self, activate_pandas_ndim): warnings.simplefilter("error", FutureWarning) _ = iris.pandas.as_data_frame(cube) + @pytest.mark.parametrize( + ("test_function", "test_input"), + [ + (iris.pandas.as_cube, pd.DataFrame()), + (iris.pandas.as_cubes, pd.DataFrame()), + ( + iris.pandas.as_series, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 
8, 9]]), long_name="foo"), + ), + ( + iris.pandas.as_data_frame, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"), + ), + ], + ) + def test_explicit_copy_true_error(self, test_function, test_input): + with pytest.warns( + IrisDeprecation, + match=f"The `copy` parameter in `{test_function.__name__}` is deprecated", + ): + _ = test_function(test_input, copy=True) + + @pytest.mark.parametrize( + ("test_function", "test_input"), + [ + (iris.pandas.as_cube, pd.DataFrame()), + (iris.pandas.as_cubes, pd.DataFrame()), + ( + iris.pandas.as_series, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"), + ), + ( + iris.pandas.as_data_frame, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"), + ), + ], + ) + def test_explicit_copy_false_error(self, test_function, test_input): + with pytest.warns( + IrisUserWarning, match="Pandas v3 behaviour defaults to copy=True." + ): + _ = test_function(test_input, copy=False) + @skip_pandas class TestPandasAsCubes: @@ -948,26 +910,24 @@ def test_3d_series(self): def test_non_unique_index(self): df = self._create_pandas(index_levels=1) - new_index = df.index.values + new_index = df.index.values.copy() new_index[1] = new_index[0] - df.set_index(new_index) + df.set_index(new_index, inplace=True) with pytest.raises(ValueError, match="not unique per row"): _ = iris.pandas.as_cubes(df) def test_non_monotonic_index(self): df = self._create_pandas(index_levels=1) - new_index = df.index.values + new_index = df.index.values.copy() new_index[:2] = new_index[1::-1] - df.set_index(new_index) - + df.set_index(new_index, inplace=True) with pytest.raises(ValueError, match="not monotonic"): _ = iris.pandas.as_cubes(df) def test_missing_rows(self): df = self._create_pandas(index_levels=2) df = df[:-1] - with pytest.raises( ValueError, match="Not all index values have a corresponding row" ): @@ -1186,39 +1146,15 @@ def test_series_with_col_args(self): with pytest.warns(Warning, match="is a Series; 
ignoring"): _ = iris.pandas.as_cubes(series, aux_coord_cols=["some_column"]) - def test_phenom_view(self): - df = self._create_pandas() - result = iris.pandas.as_cubes(df, copy=False) - - # Modify AFTER creating the Cube(s). - df[0][0] += 1 - - (result_cube,) = result - assert result_cube.data[0] == df[0][0] - def test_phenom_copy(self): df = self._create_pandas() result = iris.pandas.as_cubes(df) # Modify AFTER creating the Cube(s). - df[0][0] += 1 - - (result_cube,) = result - assert result_cube.data[0] != df[0][0] - - def test_coord_never_view(self): - # Using AuxCoord - DimCoords and Pandas indices are immutable. - df = self._create_pandas() - coord_name = "foo" - df[coord_name] = df.index.values - result = iris.pandas.as_cubes(df, copy=False, aux_coord_cols=[coord_name]) - - # Modify AFTER creating the Cube(s). - df[coord_name][0] += 1 + df.iloc[0, 0] += 1 (result_cube,) = result - result_coord = result_cube.coord(coord_name) - assert result_coord.points[0] != df[coord_name][0] + assert result_cube.data[0] != df.iloc[0, 0] def _test_dates_common(self, mode=None, alt_calendar=False): df = self._create_pandas() diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index d8ad15deb8..182c932917 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 9e22298d3c86ab9a2d785adbe961656f88dda327f4b1b70155fd64231d47d1f3 +# input_hash: b214c8fa287dfc2dc1dce58414771bc08704b4ae09ebb02359517434384613bb @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -211,7 +211,6 @@ https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py313heab5758_1.conda#82df5d372f2796c389fcbe5104664f5a https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py313h3dea7bd_1.conda#f256753e840c3cd3766488c9437a8f8b @@ -287,7 +286,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.co https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py313h29aa505_2.conda#ad53894d278895bf15c8fc324727d224 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py313h08cd8bf_2.conda#8a69ea71fdd37bfe42a28f0967dbb75a 
+https://conda.anaconda.org/conda-forge/linux-64/pandas-3.0.1-py313hbfd7664_0.conda#1c8807728f0333228766dee685394e16 https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py313h29aa505_2.conda#e3a598d20bf2fa7b03a771e9e4471be9 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py313h29aa505_2.conda#60f5d1c039da10fe89a530cc93ea50ac @@ -345,3 +344,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 + diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock index 94f3433de3..420e625a9c 100644 --- a/requirements/locks/py314-linux-64.lock +++ b/requirements/locks/py314-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 51877f045987ca3eb18cf2b23a50d599952703dc6a6fe8a5f1fcbcdce93433ab +# input_hash: 5140d73d0e22ca24039e1c88bb70517169c79003d4d42a5bb2c726d5e0867d54 @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -174,7 +174,6 @@ https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py314he82b845_1.conda#21dce7c80bbdb9785633011ad348e530 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py314h67df5f8_1.conda#2035f68f96be30dc60a5dfd7452c7941 @@ -235,7 +234,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.co https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py314hc02f841_2.conda#55ac6d85f5dd8ec5e9919e7762fcb31a https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py314ha0b5721_2.conda#fe3a5c8be07a7b82058bdeb39d33d93b 
+https://conda.anaconda.org/conda-forge/linux-64/pandas-3.0.1-py314hb4ffadd_0.conda#23fc526360815090f6bfcd7c6c8e4954 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py314hc02f841_2.conda#5be92985870940eac3f3b8cda57002cc https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py314hf07bd8e_1.conda#c7df812186fb1290bc00d9b7b5a50b18 @@ -280,3 +279,4 @@ https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 + diff --git a/requirements/py312.yml b/requirements/py312.yml index cd3cfb7033..454bfd7e79 100644 --- a/requirements/py312.yml +++ b/requirements/py312.yml @@ -31,7 +31,7 @@ dependencies: - iris-sample-data >=2.4.0 - mo_pack - nc-time-axis >=1.4 - - pandas <3 + - pandas - pip - python-stratify - rasterio diff --git a/requirements/py313.yml b/requirements/py313.yml index eaf556b564..32b606c02a 100644 --- a/requirements/py313.yml +++ b/requirements/py313.yml @@ -31,7 +31,7 @@ dependencies: - iris-sample-data >=2.4.0 - mo_pack - nc-time-axis >=1.4 - - pandas <3 + - pandas - pip - python-stratify - rasterio diff --git a/requirements/py314.yml b/requirements/py314.yml index f1612564ce..706f697e20 100644 --- a/requirements/py314.yml +++ b/requirements/py314.yml @@ -31,7 +31,7 @@ dependencies: - iris-sample-data >=2.4.0 - mo_pack - nc-time-axis >=1.4 - - pandas <3 + - pandas - pip - python-stratify - rasterio From 995932da8a8d726ca3f3f1aa6a11f54e93e7ea38 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: 
Fri, 27 Feb 2026 09:40:45 +0000 Subject: [PATCH 28/77] Adopt Diataxis (#6868) * Introduce user_manual. * Reader-level restructure. * Add sphinx-needs to dependencies. * Add licence header to user_manual_directives.py. * Address Sphinx warnings. * Populate the explanation and how-to directories. * Populate the reference and tutorial directories. * Fix some references I missed before. * Refactor of get_started. * Remove defunct IEP directory. * Itemise all of Iris public API. * Rendering improvements. * Itemise all of the Iris docs pages. * Itemise all of the Gallery pages. * Topic descriptions. * user_manual_directives.py code quality. * Needs item validation routine. * Remove column titles. * Fix doctests. * Implement redirects. * Update lock files. * Better use of inbuilt indenting. * Remove Get Started and Iris API from top level toctree. * Clearer wording about the purpose of the User Manual and User Guide. * Page summary improvements. * Topic tag improvements. * Fix admonition. * Update lock files. * More accurate caption for plot_atlantic_profiles. * Diataxis metadata for s3_io.rst. * Review actions. * Rename topic_statistics. * Less aggressive phrasing about how to navigate. * Update lock files. * Adapt to sphinx-needs v7. * What's New entry. 
--- .lycheeignore | 11 +- .../general/plot_SOI_filtering.py | 5 + .../general/plot_anomaly_log_colouring.py | 5 + docs/gallery_code/general/plot_coriolis.py | 5 + .../general/plot_cross_section.py | 5 + .../general/plot_custom_aggregation.py | 5 + .../general/plot_custom_file_loading.py | 5 + docs/gallery_code/general/plot_global_map.py | 5 + docs/gallery_code/general/plot_inset.py | 5 + .../general/plot_lineplot_with_legend.py | 5 + .../gallery_code/general/plot_polar_stereo.py | 5 + .../general/plot_polynomial_fit.py | 5 + .../plot_projections_and_annotations.py | 5 + .../general/plot_rotated_pole_mapping.py | 5 + docs/gallery_code/general/plot_zonal_means.py | 5 + docs/gallery_code/meteorology/plot_COP_1d.py | 5 + .../gallery_code/meteorology/plot_COP_maps.py | 5 + docs/gallery_code/meteorology/plot_TEC.py | 5 + .../meteorology/plot_deriving_phenomena.py | 5 + .../meteorology/plot_hovmoller.py | 5 + .../meteorology/plot_lagged_ensemble.py | 5 + .../meteorology/plot_wind_barbs.py | 5 + .../meteorology/plot_wind_speed.py | 5 + .../oceanography/plot_atlantic_profiles.py | 5 + .../oceanography/plot_load_nemo.py | 5 + .../oceanography/plot_orca_projection.py | 5 + docs/src/IEP/IEP001.adoc | 193 -------------- docs/src/_templates/tags_links.need | 11 + docs/src/common_links.inc | 1 + docs/src/conf.py | 140 ++++++++++ docs/src/further_topics/index.rst | 25 -- docs/src/getting_started.rst | 15 -- docs/src/index.rst | 35 +-- docs/src/sphinxext/user_manual_directives.py | 251 ++++++++++++++++++ .../explanation}/dataless_cubes.rst | 5 + .../images/data_structured_grid.svg | 0 .../explanation}/images/data_ugrid_mesh.svg | 0 .../explanation}/images/geovistalogo.svg | 0 .../explanation}/images/iris-esmf-regrid.svg | 0 .../explanation/images}/multi_array.svg | 0 .../images}/multi_array_to_cube.svg | 0 .../explanation}/images/ugrid_edge_data.svg | 0 .../images/ugrid_element_centres.svg | 0 .../images/ugrid_node_independence.svg | 0 .../images/ugrid_variable_faces.svg | 0 
.../explanation}/iris_cubes.rst | 9 +- .../explanation}/iris_philosophy.rst | 7 +- .../explanation}/iris_xarray.rst | 9 +- .../explanation}/lenient_maths.rst | 5 + .../explanation}/lenient_metadata.rst | 5 + .../explanation/mesh_data_model.rst} | 7 +- .../explanation/mesh_partners.rst} | 7 +- .../explanation}/metadata.rst | 6 +- .../explanation}/missing_data_handling.rst | 5 + .../explanation}/netcdf_io.rst | 5 + .../explanation}/real_and_lazy_data.rst | 6 +- .../explanation}/um_files_loading.rst | 5 + .../explanation}/ux_guide.rst | 7 +- .../explanation}/which_regridder_to_use.rst | 9 +- .../explanation}/why_iris.rst | 5 + .../how_to}/filtering_warnings.rst | 21 +- .../how_to}/images/fesom_mesh.png | Bin .../how_to}/images/orca_grid.png | Bin .../how_to}/images/plotting.png | Bin .../how_to}/images/smc_mesh.png | Bin .../{ => user_manual/how_to}/installing.rst | 5 + .../how_to/mesh_conversions.rst} | 7 +- .../how_to/mesh_operations.rst} | 15 +- .../how_to}/navigating_a_cube.rst | 9 +- .../how_to}/plugins.rst | 5 + docs/src/user_manual/index.rst | 199 ++++++++++++++ .../reference}/citation.rst | 5 + .../reference}/glossary.rst | 31 ++- .../reference}/phrasebook.rst | 7 +- .../section_indexes/community.rst} | 14 +- .../section_indexes/dask_best_practices.rst} | 16 +- .../user_manual/section_indexes/general.rst | 28 ++ .../section_indexes/get_started.rst | 29 ++ .../section_indexes/mesh_support.rst} | 16 +- .../section_indexes/metadata_arithmetic.rst | 12 + .../user_manual/section_indexes/userguide.rst | 46 ++++ .../tutorial}/controlling_merge.rst | 5 + .../tutorial}/cube_maths.rst | 7 +- .../tutorial}/cube_statistics.rst | 5 + .../tutorial}/dask_bags_and_greed.rst | 5 + .../tutorial}/dask_parallel_loop.rst | 5 + .../tutorial}/dask_pp_to_netcdf.rst | 6 + .../tutorial/images}/concat.svg | 0 .../tutorial}/images/grib-bottleneck.png | Bin .../loop_third_party_kapture_results.png | Bin .../tutorial/images}/merge.svg | 0 .../tutorial/images}/merge_and_concat.svg | 0 
.../interpolation_and_regridding.rst | 19 +- .../tutorial}/loading_iris_cubes.rst | 10 +- .../tutorial}/merge_and_concat.rst | 11 +- .../tutorial}/plotting_a_cube.rst | 21 +- .../plotting_examples/1d_quickplot_simple.py | 0 .../tutorial}/plotting_examples/1d_simple.py | 0 .../plotting_examples/1d_with_legend.py | 0 .../tutorial}/plotting_examples/brewer.py | 0 .../plotting_examples/cube_blockplot.py | 0 .../cube_brewer_cite_contourf.py | 0 .../plotting_examples/cube_brewer_contourf.py | 0 .../plotting_examples/cube_contour.py | 0 .../plotting_examples/cube_contourf.py | 0 .../plotting_examples/masking_brazil_plot.py | 0 .../masking_stereographic_plot.py | 0 .../regridding_plots/interpolate_column.py | 0 .../regridding_plots/regridded_to_global.py | 0 .../regridded_to_global_area_weighted.py | 0 .../regridding_plots/regridded_to_rotated.py | 0 .../regridding_plots/regridding_plot.py | 0 .../tutorial}/s3_io.rst | 5 + .../tutorial}/saving_iris_cubes.rst | 5 + .../tutorial}/subsetting_a_cube.rst | 11 +- .../src/userguide/change_management_goals.txt | 9 - docs/src/userguide/index.rst | 45 ---- docs/src/voted_issues.rst | 2 + docs/src/whatsnew/1.7.rst | 4 +- docs/src/whatsnew/1.8.rst | 10 +- docs/src/whatsnew/1.9.rst | 6 +- docs/src/whatsnew/2.0.rst | 6 +- docs/src/whatsnew/3.0.rst | 2 +- docs/src/whatsnew/3.5.rst | 4 +- docs/src/whatsnew/3.7.rst | 2 +- docs/src/whatsnew/3.8.rst | 4 +- docs/src/whatsnew/latest.rst | 11 +- lib/iris/__init__.py | 7 +- lib/iris/analysis/__init__.py | 9 +- lib/iris/analysis/_grid_angles.py | 2 +- lib/iris/analysis/calculus.py | 13 +- lib/iris/analysis/cartography.py | 20 +- lib/iris/analysis/geometry.py | 7 +- lib/iris/analysis/maths.py | 32 ++- lib/iris/analysis/stats.py | 8 +- lib/iris/analysis/trajectory.py | 10 +- lib/iris/aux_factory.py | 8 +- lib/iris/common/__init__.py | 8 +- lib/iris/common/lenient.py | 8 +- lib/iris/common/metadata.py | 8 +- lib/iris/common/mixin.py | 8 +- lib/iris/common/resolve.py | 5 + lib/iris/config.py | 5 + 
lib/iris/coord_categorisation.py | 5 + lib/iris/coord_systems.py | 8 +- lib/iris/coords.py | 8 +- lib/iris/cube.py | 10 +- lib/iris/exceptions.py | 8 +- lib/iris/experimental/__init__.py | 5 + lib/iris/experimental/animate.py | 5 + lib/iris/experimental/geovista.py | 8 +- lib/iris/experimental/raster.py | 5 + lib/iris/experimental/regrid.py | 5 + lib/iris/experimental/regrid_conservative.py | 5 + lib/iris/experimental/representation.py | 8 +- lib/iris/experimental/stratify.py | 8 +- lib/iris/experimental/ugrid.py | 5 + lib/iris/fileformats/__init__.py | 8 +- lib/iris/fileformats/abf.py | 5 + lib/iris/fileformats/cf.py | 5 + lib/iris/fileformats/dot.py | 8 +- lib/iris/fileformats/name.py | 8 +- lib/iris/fileformats/name_loaders.py | 8 +- lib/iris/fileformats/netcdf/__init__.py | 5 + lib/iris/fileformats/netcdf/loader.py | 5 + lib/iris/fileformats/netcdf/saver.py | 5 + lib/iris/fileformats/netcdf/ugrid_load.py | 5 + lib/iris/fileformats/nimrod.py | 8 +- lib/iris/fileformats/nimrod_load_rules.py | 8 +- lib/iris/fileformats/pp.py | 8 +- lib/iris/fileformats/pp_load_rules.py | 8 +- lib/iris/fileformats/pp_save_rules.py | 8 +- lib/iris/fileformats/rules.py | 8 +- lib/iris/fileformats/um/__init__.py | 5 + lib/iris/fileformats/um_cf_map.py | 4 + lib/iris/io/__init__.py | 12 +- lib/iris/io/format_picker.py | 5 + lib/iris/iterate.py | 10 +- lib/iris/loading.py | 9 +- lib/iris/mesh/__init__.py | 5 + lib/iris/mesh/components.py | 5 + lib/iris/mesh/utils.py | 8 +- lib/iris/palette.py | 7 +- lib/iris/pandas.py | 5 + lib/iris/plot.py | 41 +-- lib/iris/quickplot.py | 25 +- lib/iris/symbols.py | 8 +- lib/iris/tests/test_coding_standards.py | 4 +- lib/iris/time.py | 8 +- lib/iris/util.py | 60 +++-- lib/iris/warnings.py | 5 + pyproject.toml | 2 +- requirements/locks/py312-linux-64.lock | 35 ++- requirements/locks/py313-linux-64.lock | 34 ++- requirements/locks/py314-linux-64.lock | 29 +- requirements/py312.yml | 2 + requirements/py313.yml | 2 + requirements/py314.yml | 2 + 
tools/generate_std_names.py | 5 + 199 files changed, 1707 insertions(+), 585 deletions(-) delete mode 100644 docs/src/IEP/IEP001.adoc create mode 100644 docs/src/_templates/tags_links.need delete mode 100644 docs/src/further_topics/index.rst delete mode 100644 docs/src/getting_started.rst create mode 100644 docs/src/sphinxext/user_manual_directives.py rename docs/src/{further_topics => user_manual/explanation}/dataless_cubes.rst (96%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/data_structured_grid.svg (100%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/data_ugrid_mesh.svg (100%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/geovistalogo.svg (100%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/iris-esmf-regrid.svg (100%) rename docs/src/{userguide => user_manual/explanation/images}/multi_array.svg (100%) rename docs/src/{userguide => user_manual/explanation/images}/multi_array_to_cube.svg (100%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/ugrid_edge_data.svg (100%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/ugrid_element_centres.svg (100%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/ugrid_node_independence.svg (100%) rename docs/src/{further_topics/ugrid => user_manual/explanation}/images/ugrid_variable_faces.svg (100%) rename docs/src/{userguide => user_manual/explanation}/iris_cubes.rst (98%) rename docs/src/{userguide => user_manual/explanation}/iris_philosophy.rst (98%) rename docs/src/{community => user_manual/explanation}/iris_xarray.rst (97%) rename docs/src/{further_topics => user_manual/explanation}/lenient_maths.rst (98%) rename docs/src/{further_topics => user_manual/explanation}/lenient_metadata.rst (99%) rename docs/src/{further_topics/ugrid/data_model.rst => user_manual/explanation/mesh_data_model.rst} (99%) rename 
docs/src/{further_topics/ugrid/partner_packages.rst => user_manual/explanation/mesh_partners.rst} (94%) rename docs/src/{further_topics => user_manual/explanation}/metadata.rst (99%) rename docs/src/{further_topics => user_manual/explanation}/missing_data_handling.rst (94%) rename docs/src/{further_topics => user_manual/explanation}/netcdf_io.rst (99%) rename docs/src/{userguide => user_manual/explanation}/real_and_lazy_data.rst (98%) rename docs/src/{further_topics => user_manual/explanation}/um_files_loading.rst (99%) rename docs/src/{further_topics => user_manual/explanation}/ux_guide.rst (86%) rename docs/src/{further_topics => user_manual/explanation}/which_regridder_to_use.rst (99%) rename docs/src/{ => user_manual/explanation}/why_iris.rst (94%) rename docs/src/{further_topics => user_manual/how_to}/filtering_warnings.rst (93%) rename docs/src/{further_topics/ugrid => user_manual/how_to}/images/fesom_mesh.png (100%) rename docs/src/{further_topics/ugrid => user_manual/how_to}/images/orca_grid.png (100%) rename docs/src/{further_topics/ugrid => user_manual/how_to}/images/plotting.png (100%) rename docs/src/{further_topics/ugrid => user_manual/how_to}/images/smc_mesh.png (100%) rename docs/src/{ => user_manual/how_to}/installing.rst (97%) rename docs/src/{further_topics/ugrid/other_meshes.rst => user_manual/how_to/mesh_conversions.rst} (98%) rename docs/src/{further_topics/ugrid/operations.rst => user_manual/how_to/mesh_operations.rst} (98%) rename docs/src/{userguide => user_manual/how_to}/navigating_a_cube.rst (98%) rename docs/src/{community => user_manual/how_to}/plugins.rst (93%) create mode 100644 docs/src/user_manual/index.rst rename docs/src/{userguide => user_manual/reference}/citation.rst (88%) rename docs/src/{userguide => user_manual/reference}/glossary.rst (86%) rename docs/src/{community => user_manual/reference}/phrasebook.rst (92%) rename docs/src/{community/index.rst => user_manual/section_indexes/community.rst} (83%) rename 
docs/src/{further_topics/dask_best_practices/index.rst => user_manual/section_indexes/dask_best_practices.rst} (95%) create mode 100644 docs/src/user_manual/section_indexes/general.rst create mode 100644 docs/src/user_manual/section_indexes/get_started.rst rename docs/src/{further_topics/ugrid/index.rst => user_manual/section_indexes/mesh_support.rst} (75%) create mode 100644 docs/src/user_manual/section_indexes/metadata_arithmetic.rst create mode 100644 docs/src/user_manual/section_indexes/userguide.rst rename docs/src/{further_topics => user_manual/tutorial}/controlling_merge.rst (98%) rename docs/src/{userguide => user_manual/tutorial}/cube_maths.rst (98%) rename docs/src/{userguide => user_manual/tutorial}/cube_statistics.rst (99%) rename docs/src/{further_topics/dask_best_practices => user_manual/tutorial}/dask_bags_and_greed.rst (98%) rename docs/src/{further_topics/dask_best_practices => user_manual/tutorial}/dask_parallel_loop.rst (97%) rename docs/src/{further_topics/dask_best_practices => user_manual/tutorial}/dask_pp_to_netcdf.rst (94%) rename docs/src/{userguide => user_manual/tutorial/images}/concat.svg (100%) rename docs/src/{further_topics/dask_best_practices => user_manual/tutorial}/images/grib-bottleneck.png (100%) rename docs/src/{further_topics/dask_best_practices => user_manual/tutorial}/images/loop_third_party_kapture_results.png (100%) rename docs/src/{userguide => user_manual/tutorial/images}/merge.svg (100%) rename docs/src/{userguide => user_manual/tutorial/images}/merge_and_concat.svg (100%) rename docs/src/{userguide => user_manual/tutorial}/interpolation_and_regridding.rst (96%) rename docs/src/{userguide => user_manual/tutorial}/loading_iris_cubes.rst (98%) rename docs/src/{userguide => user_manual/tutorial}/merge_and_concat.rst (99%) rename docs/src/{userguide => user_manual/tutorial}/plotting_a_cube.rst (94%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/1d_quickplot_simple.py (100%) rename docs/src/{userguide 
=> user_manual/tutorial}/plotting_examples/1d_simple.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/1d_with_legend.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/brewer.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/cube_blockplot.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/cube_brewer_cite_contourf.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/cube_brewer_contourf.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/cube_contour.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/cube_contourf.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/masking_brazil_plot.py (100%) rename docs/src/{userguide => user_manual/tutorial}/plotting_examples/masking_stereographic_plot.py (100%) rename docs/src/{userguide => user_manual/tutorial}/regridding_plots/interpolate_column.py (100%) rename docs/src/{userguide => user_manual/tutorial}/regridding_plots/regridded_to_global.py (100%) rename docs/src/{userguide => user_manual/tutorial}/regridding_plots/regridded_to_global_area_weighted.py (100%) rename docs/src/{userguide => user_manual/tutorial}/regridding_plots/regridded_to_rotated.py (100%) rename docs/src/{userguide => user_manual/tutorial}/regridding_plots/regridding_plot.py (100%) rename docs/src/{further_topics => user_manual/tutorial}/s3_io.rst (98%) rename docs/src/{userguide => user_manual/tutorial}/saving_iris_cubes.rst (97%) rename docs/src/{userguide => user_manual/tutorial}/subsetting_a_cube.rst (98%) delete mode 100644 docs/src/userguide/change_management_goals.txt delete mode 100644 docs/src/userguide/index.rst diff --git a/.lycheeignore b/.lycheeignore index 1cdb78f91f..6b3103f7c8 100644 --- a/.lycheeignore +++ b/.lycheeignore @@ -4,16 +4,17 @@ file:/// # DEAD : legacy in various old whatsnews 
https://biggus.readthedocs.io -# unkown problem, works in browser : used in further_topics/ugrid/data_model +# unknown problem, works in browser : used in +# docs/src/user_manual/explanation/mesh_data_model.rst https://doi.org/10.3390/jmse2010194 -# DEAD, todo:remove, used in docs/src/userguide/plotting_a_cube.rst +# DEAD, todo:remove, used in docs/src/user_manual/tutorial/plotting_a_cube.rst https://effbot.org # nonfunctional, found in some code examples https://foo/ -# DEAD, todo:remove, used in docs/src/further_topics/ugrid/data_model.rst +# DEAD, todo:remove, used in docs/src/user_manual/explanation/mesh_data_model.rst https://ibm-design-language.eu-de.mybluemix.net/design/language/resources/color-library # DEAD, legacy in whatsnew/1.4.rst @@ -45,14 +46,14 @@ https://stickler-ci.com # DEAD, todo:remove, used in lib/iris/symbols.py https://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf -# DEAD, todo:remove, used in docs/src/userguide/plotting_a_cube.rst +# DEAD, todo:remove, used in docs/src/user_manual/tutorial/plotting_a_cube.rst # unkown problem, works in browser : used in docs/src/index.rst https://www.flaticon.com # nonfunctional example, used in lib/iris/io/__init__.py https://www.thing.com -# DEAD, todo:remove, used in docs/src/userguide/plotting_a_cube.rst +# DEAD, todo:remove, used in docs/src/user_manual/tutorial/plotting_a_cube.rst https://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html # nonfunctional, found in some code examples diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index 4b256c894c..68b2812ccd 100644 --- a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -2,6 +2,11 @@ Applying a Filter to a Time-Series ================================== +.. 
how-to:: Applying a Filter to a Time-Series + :tags: topic_plotting;topic_maths_stats + + How to apply a low pass filter to an Iris Cube via rolling_window(). + This example demonstrates low pass filtering a time-series by applying a weighted running mean over the time dimension. diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index cd11161041..8ac8c76e0d 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -2,6 +2,11 @@ Colouring Anomaly Data With Logarithmic Scaling =============================================== +.. how-to:: Colouring Anomaly Data With Logarithmic Scaling + :tags: topic_plotting;topic_maths_stats + + How to visualise values using a logarithmic scale. + In this example, we need to plot anomaly data where the values have a "logarithmic" significance -- i.e. we want to give approximately equal ranges of colour between data values of, say, 1 and 10 as between 10 and 100. diff --git a/docs/gallery_code/general/plot_coriolis.py b/docs/gallery_code/general/plot_coriolis.py index 905108abfd..d435023574 100644 --- a/docs/gallery_code/general/plot_coriolis.py +++ b/docs/gallery_code/general/plot_coriolis.py @@ -2,6 +2,11 @@ Deriving the Coriolis Frequency Over the Globe ============================================== +.. how-to:: Deriving the Coriolis Frequency Over the Globe + :tags: topic_plotting;topic_data_model + + How to create your own Cube from computed data and visualise it. + This code computes the Coriolis frequency and stores it in a cube with associated metadata. It then plots the Coriolis frequency on an orthographic projection. 
diff --git a/docs/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py index 8e5bee85ed..8309d6d5cf 100644 --- a/docs/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -2,6 +2,11 @@ Cross Section Plots =================== +.. how-to:: Cross Section Plots + :tags: topic_plotting;topic_slice_combine + + How to visualise cross-sections of multi-dimensional Cubes. + This example demonstrates contour plots of a cross-sectioned multi-dimensional cube which features a hybrid height vertical coordinate system. diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py index 65fadfb473..afeb359409 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -2,6 +2,11 @@ Calculating a Custom Statistic ============================== +.. how-to:: Calculating a Custom Statistic + :tags: topic_plotting;topic_maths_stats + + How to define and use a custom aggregation operation, including visualisation. + This example shows how to define and use a custom :class:`iris.analysis.Aggregator`, that provides a new statistical operator for use with cube aggregation functions such as :meth:`~iris.cube.Cube.collapsed`, diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 06de887614..670575d124 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -2,6 +2,11 @@ Loading a Cube From a Custom File Format ======================================== +.. how-to:: Loading a Cube From a Custom File Format + :tags: topic_plotting;topic_load_save + + How to visualise data from a file Iris does not natively support. + This example shows how a custom text file can be loaded using the standard Iris load mechanism. 
diff --git a/docs/gallery_code/general/plot_global_map.py b/docs/gallery_code/general/plot_global_map.py index 60ac200a43..7e61565f44 100644 --- a/docs/gallery_code/general/plot_global_map.py +++ b/docs/gallery_code/general/plot_global_map.py @@ -2,6 +2,11 @@ Quickplot of a 2D Cube on a Map =============================== +.. how-to:: Quickplot of a 2D Cube on a Map + :tags: topic_plotting + + A demonstration of basic iris.quickplot use. + This example demonstrates a contour plot of global air temperature. The plot title and the labels for the axes are automatically derived from the metadata. diff --git a/docs/gallery_code/general/plot_inset.py b/docs/gallery_code/general/plot_inset.py index 5edd375743..6ea04ffebb 100644 --- a/docs/gallery_code/general/plot_inset.py +++ b/docs/gallery_code/general/plot_inset.py @@ -2,6 +2,11 @@ Test Data Showing Inset Plots ============================= +.. how-to:: Test Data Showing Inset Plots + :tags: topic_plotting;topic_maths_stats + + How to create inset plots within a main plot. + This example demonstrates the use of a single 3D data cube with time, latitude and longitude dimensions to plot a temperature series for a single latitude coordinate, with an inset plot of the data region. diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py index d1b3acd912..bfba085ef2 100644 --- a/docs/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -2,6 +2,11 @@ Multi-Line Temperature Profile Plot =================================== +.. how-to:: Multi-Line Temperature Profile Plot + :tags: topic_plotting + + How to plot multiple lines on a single plot with a legend. 
+ """ # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polar_stereo.py b/docs/gallery_code/general/plot_polar_stereo.py index 99abbd0ae0..07f0a4c01b 100644 --- a/docs/gallery_code/general/plot_polar_stereo.py +++ b/docs/gallery_code/general/plot_polar_stereo.py @@ -2,6 +2,11 @@ Example of a Polar Stereographic Plot ===================================== +.. how-to:: Example of a Polar Stereographic Plot + :tags: topic_plotting + + A demonstration of plotting data defined on an alternative map projection. + Demonstrates plotting data that are defined on a polar stereographic projection. diff --git a/docs/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py index 37cc4e283b..adaaf4b94f 100644 --- a/docs/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -2,6 +2,11 @@ Fitting a Polynomial ==================== +.. how-to:: Fitting a Polynomial + :tags: topic_plotting;topic_maths_stats;topic_data_model + + How to compute and plot a polynomial fit to 1D data in an Iris cube. + This example demonstrates computing a polynomial fit to 1D data from an Iris cube, adding the fit to the cube's metadata, and plotting both the 1D data and the fit. diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 6e8ba5a5af..441dee245e 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -2,6 +2,11 @@ Plotting in Different Projections ================================= +.. how-to:: Plotting in Different Projections + :tags: topic_plotting;topic_interoperability + + How to overlay data from two different map projections and add graphics. 
+ This example shows how to overlay data and graphics in different projections, demonstrating various features of Iris, Cartopy and matplotlib. diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index e9e3656184..a43a06e7a3 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -2,6 +2,11 @@ Rotated Pole Mapping ==================== +.. how-to:: Rotated Pole Mapping + :tags: topic_plotting + + How to visualise data via different methods and coordinate systems. + This example uses several visualisation methods to achieve an array of differing images, including: diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py index d4ec1eb1fc..af3e83ce41 100644 --- a/docs/gallery_code/general/plot_zonal_means.py +++ b/docs/gallery_code/general/plot_zonal_means.py @@ -2,6 +2,11 @@ Zonal Mean Diagram of Air Temperature ===================================== +.. how-to:: Zonal Mean Diagram of Air Temperature + :tags: topic_plotting;topic_maths_stats + + How to use aligned plots to visualise collapsed dimensional statistics. + This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index 84addd140a..099f3f80e9 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -2,6 +2,11 @@ Global Average Annual Temperature Plot ====================================== +.. how-to:: Global Average Annual Temperature Plot + :tags: topic_plotting;topic_slice_combine;topic_maths_stats + + How to spatially constrain data, compute statistics and visualise a comparison. + Produces a time-series plot of North American temperature forecasts for 2 different emission scenarios. 
Constraining data to a limited spatial area also features in this example. diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 1c5e865a8f..fca40dc373 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -2,6 +2,11 @@ Global Average Annual Temperature Maps ====================================== +.. how-to:: Global Average Annual Temperature Maps + :tags: topic_plotting + + How to produce comparative maps of two files with a shared colour bar. + Produces maps of global temperature forecasts from the A1B and E1 scenarios. The data used comes from the HadGEM2-AO model simulations for the A1B and E1 diff --git a/docs/gallery_code/meteorology/plot_TEC.py b/docs/gallery_code/meteorology/plot_TEC.py index e6269eaf9b..67a2b2722e 100644 --- a/docs/gallery_code/meteorology/plot_TEC.py +++ b/docs/gallery_code/meteorology/plot_TEC.py @@ -2,6 +2,11 @@ Ionosphere Space Weather ======================== +.. how-to:: Ionosphere Space Weather + :tags: topic_plotting + + How to mask out values below a threshold in a plot. + This space weather example plots a filled contour of rotated pole point data with a shaded relief image underlay. The plot shows aggregated vertical electron content in the ionosphere. diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py index 81a05be9b9..cc9e1c93ca 100644 --- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py +++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py @@ -2,6 +2,11 @@ Deriving Exner Pressure and Air Temperature =========================================== +.. how-to:: Deriving Exner Pressure and Air Temperature + :tags: topic_plotting;topic_maths_stats + + How to use Iris arithmetic to derive phenomena from existing cubes and plot them. 
+ This example shows some processing of cubes in order to derive further related cubes; in this case the derived cubes are Exner pressure and air temperature which are calculated by combining air pressure, air potential temperature and diff --git a/docs/gallery_code/meteorology/plot_hovmoller.py b/docs/gallery_code/meteorology/plot_hovmoller.py index 829b370d78..ad8297ba79 100644 --- a/docs/gallery_code/meteorology/plot_hovmoller.py +++ b/docs/gallery_code/meteorology/plot_hovmoller.py @@ -2,6 +2,11 @@ Hovmoller Diagram of Monthly Surface Temperature ================================================ +.. how-to:: Hovmoller Diagram of Monthly Surface Temperature + :tags: topic_plotting;topic_maths_stats + + How to collapse and plot Cubes to create a Hovmoller diagram. + This example demonstrates the creation of a Hovmoller diagram with fine control over plot ticks and labels. The data comes from the Met Office OSTIA project and has been pre-processed to calculate the monthly mean sea surface diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index 7c34572136..4cb7e9e6ad 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -2,6 +2,11 @@ Seasonal Ensemble Model Plots ============================= +.. how-to:: Seasonal Ensemble Model Plots + :tags: topic_plotting;topic_data_model;topic_maths_stats;topic_slice_combine + + How to use Iris in a complex real-world analysis scenario. 
+ This example demonstrates the loading of a lagged ensemble dataset from the GloSea4 model, which is then used to produce two types of plot: diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index f11c9a7b50..9dce0caad2 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -2,6 +2,11 @@ Plotting Wind Direction Using Barbs =================================== +.. how-to:: Plotting Wind Direction Using Barbs + :tags: topic_plotting;topic_maths_stats + + How to use Iris to derive and plot wind barbs. + This example demonstrates using barbs to plot wind speed contours and wind direction barbs from wind vector component input data. The vector components are co-located in space in this case. diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index 5310ad937d..beccb217c2 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -2,6 +2,11 @@ Plotting Wind Direction Using Quiver ==================================== +.. how-to:: Plotting Wind Direction Using Quiver + :tags: topic_plotting + + How to use Iris to plot wind quivers. + This example demonstrates using quiver to plot wind speed contours and wind direction arrows from wind vector component input data. The vector components are co-located in space in this case. diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py index a43fb7f8cb..bd57e1dfe8 100644 --- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -2,6 +2,11 @@ Oceanographic Profiles and T-S Diagrams ======================================= +.. 
how-to:: Oceanographic Profiles and T-S Diagrams + :tags: topic_plotting;topic_slice_combine + + How to use Iris for visualising oceanographic profile data, including scatter plotting. + This example demonstrates how to plot vertical profiles of different variables in the same axes, and how to make a scatter plot of two variables. There is an oceanographic theme but the same techniques are diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index aac89fec0e..a26b97da31 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -2,6 +2,11 @@ Load a Time Series of Data From the NEMO Model ============================================== +.. how-to:: Load a Time Series of Data From the NEMO Model + :tags: topic_plotting;topic_load_save;topic_data_model;topic_slice_combine + + How to concatenate data from multiple NEMO files. + This example demonstrates how to load multiple files containing data output by the NEMO model and combine them into a time series in a single cube. The different time dimensions in these files can prevent Iris from concatenating diff --git a/docs/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py index bb68056cb3..065078e5df 100644 --- a/docs/gallery_code/oceanography/plot_orca_projection.py +++ b/docs/gallery_code/oceanography/plot_orca_projection.py @@ -2,6 +2,11 @@ Tri-Polar Grid Projected Plotting ================================= +.. how-to:: Tri-Polar Grid Projected Plotting + :tags: topic_plotting + + How to visualise data defined on a tri-polar grid using different map projections. + This example demonstrates cell plots of data on the semi-structured ORCA2 model grid.
diff --git a/docs/src/IEP/IEP001.adoc b/docs/src/IEP/IEP001.adoc deleted file mode 100644 index 2daef2363a..0000000000 --- a/docs/src/IEP/IEP001.adoc +++ /dev/null @@ -1,193 +0,0 @@ -# IEP 1 - Enhanced indexing - -## Background - -Currently, to select a subset of a Cube based on coordinate values we use something like: -[source,python] ----- -cube.extract(iris.Constraint(realization=3, - model_level_number=[1, 5], - latitude=lambda cell: 40 <= cell <= 60)) ----- -On the plus side, this works irrespective of the dimension order of the data, but the drawbacks with this form of indexing include: - -* It uses a completely different syntax to position-based indexing, e.g. `cube[4, 0:6]`. -* It uses a completely different syntax to pandas and xarray value-based indexing, e.g. `df.loc[4, 0:6]`. -* It is long-winded and requires the use of an additional class. -* It requires the use of lambda functions even when just selecting a range. - -Arguably, the situation when subsetting using positional indices but where the dimension order is unknown is even worse - it has no standard syntax _at all_! Instead it requires code akin to: -[source,python] ----- -key = [slice(None)] * cube.ndim -key[cube.coord_dims('model_level_number')[0]] = slice(3, 9, 2) -cube[tuple(key)] ----- - -The only form of indexing that is well supported is indexing by position where the dimension order is known: -[source,python] ----- -cube[4, 0:6, 30:] ----- - -## Proposal - -Provide indexing helpers on the Cube to extend explicit support to all permutations of: - -* implicit dimension vs. named coordinate, -* and positional vs. coordinate-value based selection. - -### Helper syntax options - -Commonly, the names of coordinates are also valid Python identifiers. -For names where this is not true, the names can expressed through either the `helper[...]` or `helper(...)` syntax by constructing an explicit dict. -For example: `cube.loc[{'12': 0}]` or `cube.loc(**{'12': 0})`. 
- -#### Extended pandas style - -Use a single helper for index by position, and a single helper for index by value. Helper names taken from pandas, but their behaviour is extended by making them callable to support named coordinates. - -|=== -.2+| 2+h|Index by -h|Position h|Value - -h|Implicit dimension - -a|[source,python] ----- -cube[:, 2] # No change -cube.iloc[:, 2] ----- - -a|[source,python] ----- -cube.loc[:, 1.5] ----- - -h|Coordinate name - -a|[source,python] ----- -cube[dict(height=2)] -cube.iloc[dict(height=2)] -cube.iloc(height=2) ----- - -a|[source,python] ----- -cube.loc[dict(height=1.5)] -cube.loc(height=1.5) ----- -|=== - -#### xarray style - -xarray introduces a second set of helpers for accessing named dimensions that provide the callable syntax `(foo=...)`. - -|=== -.2+| 2+h|Index by -h|Position h|Value - -h|Implicit dimension - -a|[source,python] ----- -cube[:, 2] # No change ----- - -a|[source,python] ----- -cube.loc[:, 1.5] ----- - -h|Coordinate name - -a|[source,python] ----- - cube[dict(height=2)] - cube.isel(height=2) ----- - -a|[source,python] ----- -cube.loc[dict(height=1.5)] -cube.sel(height=1.5) ----- -|=== - -### Slices - -The semantics of position-based slices will continue to match that of normal Python slices. The start position is included, the end position is excluded. - -Value-based slices will be strictly inclusive, with both the start and end values included. This behaviour differs from normal Python slices but is in common with pandas. - -Just as for normal Python slices, we do not need to provide the ability to control the include/exclude behaviour for slicing. - -### Value-based indexing - -#### Equality - -Should the behaviour of value-based equality depend on the data type of the coordinate? - -* integer: exact match -* float: tolerance match, tolerance determined by bit-width -* string: exact match - -#### Scalar/category - -If/how to deal with category selection `cube.loc(season='JJA')`? Defer to `groupby()`? 
- -`cube.loc[12]` - must always match a single value or raise KeyError, corresponding dimension will be removed -`cube.loc[[12]]` - may match any number of values? (incl. zero?), dimension will be retained - -### Out of scope - -* Deliberately enhancing the performance. -This is a very valuable topic and should be addressed by subsequent efforts. - -* Time/date values as strings. -Providing pandas-style string representations for convenient representation of partial date/times should be addressed in a subsequent effort - perhaps in conjunction with an explicit performance test suite. -There is a risk that this topic could bog down when dealing with non-standard calendars and climatological date ranges. - -## Work required - -* Implementations for each of the new helper objects. -* An update to the documentation to demonstrate best practice. Known impacted areas include: -** The "Subsetting a Cube" chapter of the user guide. - -### TODO -* Multi-dimensional coordinates -* Non-orthogonal coordinates -* Bounds -* Boolean array indexing -* Lambdas? -* What to do about constrained loading? -* Relationship to https://scitools.org.uk/iris/docs/v1.9.2/iris/iris/cube.html#iris.cube.Cube.intersection[iris.cube.Cube.intersection]? -* Relationship to interpolation (especially nearest-neighbour)? -** e.g. What to do about values that don't exist? -*** pandas throws a KeyError -*** xarray supports (several) nearest-neighbour schemes via https://xarray.pydata.org/en/stable/indexing.html#nearest-neighbor-lookups[`data.sel()`] -*** Apparently https://holoviews.org/[holoviews] does nearest-neighbour interpolation. -* multi-dimensional coordinate => unroll? -* var_name only selection? `cube.vloc(t0=12)` -* Orthogonal only? Or also independent? `cube.loc_points(lon=[1, 1, 5], lat=[31, 33, 32])` - ** This seems quite closely linked to interpolation. Is the interpolation scheme orthogonal to cross-product vs. independent? 
-+ -[source,python] ----- -cube.interpolate( - scheme='nearest', - mesh=dict(lon=[5, 10, 15], lat=[40, 50])) -cube.interpolate( - scheme=Nearest(mode='spherical'), - locations=Ortho(lon=[5, 10, 15], lat=[40, 50])) ----- - -## References -. Iris - * https://scitools.org.uk/iris/docs/v1.9.2/iris/iris.html#iris.Constraint[iris.Constraint] - * https://scitools.org.uk/iris/docs/v1.9.2/userguide/subsetting_a_cube.html[Subsetting a cube] -. https://pandas.pydata.org/pandas-docs/stable/indexing.html[pandas indexing] -. https://xarray.pydata.org/en/stable/indexing.html[xarray indexing] -. https://legacy.python.org/dev/peps/pep-0472/[PEP 472 - Support for indexing with keyword arguments] -. https://nbviewer.jupyter.org/gist/rsignell-usgs/13d7ce9d95fddb4983d4cbf98be6c71d[Time slicing NetCDF or OPeNDAP datasets] - Rich Signell's xarray/iris comparison focussing on time handling and performance diff --git a/docs/src/_templates/tags_links.need b/docs/src/_templates/tags_links.need new file mode 100644 index 0000000000..43c3ca39ce --- /dev/null +++ b/docs/src/_templates/tags_links.need @@ -0,0 +1,11 @@ +{# Render plain clickable text links for each tag #} +{% if tags %} +:strong:`Tags:` {{ " " }} {%- for t in tags -%} + {%- if t and t.startswith('topic_') -%} + :ref:`{{ t }} <{{ t }}>` + {%- else -%} + {{ t }} + {%- endif -%} + {%- if not loop.last %} | {% endif -%} +{%- endfor %} +{% endif %} diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index 247372cc10..0f31261131 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -48,6 +48,7 @@ .. _netCDF4: https://github.com/Unidata/netcdf4-python .. _SciTools Contributor's License Agreement (CLA): https://cla-assistant.io/SciTools/ .. _extlinks: https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html +.. _Diataxis: https://diataxis.fr/ .. 
comment diff --git a/docs/src/conf.py b/docs/src/conf.py index 8f45de51c5..ad80752223 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -167,6 +167,9 @@ def _dotv(version): "sphinx_gallery.gen_gallery", "matplotlib.sphinxext.mathmpl", "matplotlib.sphinxext.plot_directive", + "sphinx_needs", + "user_manual_directives", + "sphinx_reredirects", ] if skip_api == "1": @@ -584,8 +587,145 @@ def gallery_carousel( # ============================================================================ +# -- sphinx-reredirects config ------------------------------------------------ + +redirects = { + # explanation + "further_topics/dataless_cubes": "/user_manual/explanation/dataless_cubes.html", + "userguide/iris_cubes": "/user_manual/explanation/iris_cubes.html", + "userguide/iris_philosophy": "/user_manual/explanation/iris_philosophy.html", + "community/iris_xarray": "/user_manual/explanation/iris_xarray.html", + "further_topics/lenient_maths": "/user_manual/explanation/lenient_maths.html", + "further_topics/lenient_metadata": "/user_manual/explanation/lenient_metadata.html", + "further_topics/ugrid/data_model": "/user_manual/explanation/mesh_data_model.html", + "further_topics/ugrid/partner_packages": "/user_manual/explanation/mesh_partners.html", + "further_topics/metadata": "/user_manual/explanation/metadata.html", + "further_topics/missing_data_handling": "/user_manual/explanation/missing_data_handling.html", + "further_topics/netcdf_io": "/user_manual/explanation/netcdf_io.html", + "userguide/real_and_lazy_data": "/user_manual/explanation/real_and_lazy_data.html", + "further_topics/um_files_loading": "/user_manual/explanation/um_files_loading.html", + "further_topics/ux_guide": "/user_manual/explanation/ux_guide.html", + "further_topics/which_regridder_to_use": "/user_manual/explanation/which_regridder_to_use.html", + "why_iris": "/user_manual/explanation/why_iris.html", + # how_to + "further_topics/filtering_warnings": "/user_manual/how_to/filtering_warnings.html", + 
"installing": "/user_manual/how_to/installing.html", + "further_topics/ugrid/other_meshes": "/user_manual/how_to/mesh_conversions.html", + "further_topics/ugrid/operations": "/user_manual/how_to/mesh_operations.html", + "userguide/navigating_a_cube": "/user_manual/how_to/navigating_a_cube.html", + "community/plugins": "/user_manual/how_to/plugins.html", + # reference + "userguide/citation": "/user_manual/reference/citation.html", + "userguide/glossary": "/user_manual/reference/glossary.html", + "community/phrasebook": "/user_manual/reference/phrasebook.html", + # section indexes + "community/index": "/user_manual/section_indexes/community.html", + "further_topics/dask_best_practices/index": "/user_manual/section_indexes/dask_best_practices.html", + "further_topics/ugrid/index": "/user_manual/section_indexes/mesh_support.html", + "userguide/index": "/user_manual/section_indexes/userguide.html", + # tutorial + "further_topics/controlling_merge": "/user_manual/tutorial/controlling_merge.html", + "userguide/cube_maths": "/user_manual/tutorial/cube_maths.html", + "userguide/cube_statistics": "/user_manual/tutorial/cube_statistics.html", + "further_topics/dask_best_practices/dask_bags_and_greed": "/user_manual/tutorial/dask_bags_and_greed.html", + "further_topics/dask_best_practices/dask_parallel_loop": "/user_manual/tutorial/dask_parallel_loop.html", + "further_topics/dask_best_practices/dask_pp_to_netcdf": "/user_manual/tutorial/dask_pp_to_netcdf.html", + "userguide/interpolation_and_regridding": "/user_manual/tutorial/interpolation_and_regridding.html", + "userguide/loading_iris_cubes": "/user_manual/tutorial/loading_iris_cubes.html", + "userguide/merge_and_concat": "/user_manual/tutorial/merge_and_concat.html", + "userguide/plotting_a_cube": "/user_manual/tutorial/plotting_a_cube.html", + "userguide/saving_iris_cubes": "/user_manual/tutorial/saving_iris_cubes.html", + "userguide/subsetting_a_cube": "/user_manual/tutorial/subsetting_a_cube.html", +} + +# -- 
sphinx-needs config ------------------------------------------------------ +# See https://sphinx-needs.readthedocs.io/en/latest/configuration.html + +# TODO: namespace these types as Diataxis for max clarity? +needs_types = [ + { + "directive": "tutorial", + "title": "Tutorial", + "prefix": "", + "color": "", + "style": "node", + }, + { + "directive": "how-to", + "title": "How To", + "prefix": "", + "color": "", + "style": "node", + }, + { + "directive": "explanation", + "title": "Explanation", + "prefix": "", + "color": "", + "style": "node", + }, + { + # z_ prefix to force to the end of sorted lists. + "directive": "z_reference", + "title": "Reference", + "prefix": "", + "color": "", + "style": "node", + }, +] +# The layout whenever a 'need item' directive is used. I.e. at the top of each +# user manual page. +needs_default_layout = "focus" +# The `tags_links` jinja template displays a list of tags where every topic_* +# tag is a link to the relevant section in user_manual/index.rst. +needs_template_folder = "_templates" +needs_fields = { + "post_template": {"default": "tags_links"}, +} + +from sphinx_needs.data import NeedsCoreFields + +# Known bug in sphinx-needs pre v6.0. 
+# https://github.com/useblocks/sphinx-needs/issues/1420 +if "allow_default" not in NeedsCoreFields["post_template"]: + NeedsCoreFields["post_template"]["allow_default"] = "str" + + +# ------------------------------------------------------------------------------ + + def setup(app: Sphinx) -> None: """Configure sphinx application.""" + # Monkeypatch for https://github.com/useblocks/sphinx-needs/issues/723 + import sphinx_needs.directives.needtable as nt + + orig_row_col_maker = nt.row_col_maker + + def row_col_maker_link_title( + app, + fromdocname, + all_needs, + need_info, + need_key, + make_ref=False, + ref_lookup=False, + prefix="", + ): + if need_key == "title": + make_ref = True + return orig_row_col_maker( + app, + fromdocname, + all_needs, + need_info, + need_key, + make_ref, + ref_lookup, + prefix, + ) + + nt.row_col_maker = row_col_maker_link_title + # we require the output of this extension app.setup_extension("sphinx_gallery.gen_gallery") diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst deleted file mode 100644 index 25c472c045..0000000000 --- a/docs/src/further_topics/index.rst +++ /dev/null @@ -1,25 +0,0 @@ -.. _further_topics_index: - - -Further Topics -=============== - -Extra information on specific technical issues. - -.. toctree:: - :maxdepth: 1 - - filtering_warnings - metadata - lenient_metadata - lenient_maths - um_files_loading - missing_data_handling - dataless_cubes - netcdf_io - s3_io - dask_best_practices/index - ugrid/index - which_regridder_to_use - controlling_merge - ux_guide \ No newline at end of file diff --git a/docs/src/getting_started.rst b/docs/src/getting_started.rst deleted file mode 100644 index 24299a4060..0000000000 --- a/docs/src/getting_started.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. 
_getting_started_index: - -Getting Started -=============== - -To get started with Iris we recommend reading :ref:`why_iris` was created and to -explore the examples in the :ref:`gallery_index` after :ref:`installing_iris` -Iris. - -.. toctree:: - :maxdepth: 1 - - why_iris - installing - generated/gallery/index \ No newline at end of file diff --git a/docs/src/index.rst b/docs/src/index.rst index 2854e2e214..5059bcd062 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -30,10 +30,10 @@ For more information see :ref:`why_iris`. Installing and gallery examples. - .. grid-item-card:: User Guide + .. grid-item-card:: User Manual :class-title: custom-title :class-body: custom-body - :link: getting_started_index + :link: user_manual_index :link-type: ref :img-top: _static/icon_instructions.svg :class-img-top: dark-light @@ -114,20 +114,12 @@ The legacy support resources: .. toctree:: - :caption: Getting Started - :maxdepth: 1 - :hidden: - - getting_started - - -.. toctree:: - :caption: User Guide + :caption: User Manual :maxdepth: 1 - :name: userguide_index + :name: user_manual :hidden: - userguide/index + user_manual/index .. toctree:: @@ -139,15 +131,6 @@ The legacy support resources: developers_guide/contributing_getting_involved -.. toctree:: - :caption: Community - :maxdepth: 1 - :name: community_index - :hidden: - - Community - - .. toctree:: :caption: What's New in Iris :maxdepth: 1 @@ -157,12 +140,4 @@ The legacy support resources: whatsnew/index -.. toctree:: - :caption: Iris API - :maxdepth: 1 - :hidden: - - Iris API - - .. todolist:: diff --git a/docs/src/sphinxext/user_manual_directives.py b/docs/src/sphinxext/user_manual_directives.py new file mode 100644 index 0000000000..fa90a8bccf --- /dev/null +++ b/docs/src/sphinxext/user_manual_directives.py @@ -0,0 +1,251 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Sphinx customisations for a Diataxis User Manual (see diataxis.fr).""" + +import enum +from pathlib import Path +import re +from textwrap import indent +import typing + +from docutils import nodes # type: ignore[import-untyped] +from docutils.parsers.rst import Directive # type: ignore[import-untyped] +from docutils.statemachine import StringList # type: ignore[import-untyped] +from sphinx.application import Sphinx +from sphinx.builders import Builder +from sphinx.util import logging as sphinx_logging +from sphinx_needs.api import get_needs_view + +if typing.TYPE_CHECKING: + from sphinx_needs.api.need import NeedsInfoType + +logger = sphinx_logging.getLogger(__name__) + + +class Diataxis(enum.StrEnum): + """The Diataxis-inspired sphinx-needs directives configured in conf.py.""" + + ALL = "all" + TUTORIAL = "tutorial" + EXPLANATION = "explanation" + HOW_TO = "how-to" + # z_ prefix to force to the end of sorted lists. + REFERENCE = "z_reference" + + +DIATAXIS_CAPTIONS = { + Diataxis.TUTORIAL: "Guided lessons for understanding a topic.\n\n(Supports **study**, via **action**)", + Diataxis.EXPLANATION: "In-depth discussion for understanding concepts.\n\n(Supports **study**, via **theory**)", + Diataxis.HOW_TO: "Step by step instructions for achieving a specific goal.\n\n(Supports **work**, via **action**)", + Diataxis.REFERENCE: "Concise information to look up when needed.\n\n(Supports **work**, via **theory**)", +} +"""Text to be displayed at the top of each Diataxis tab.""" + + +class DiataxisDirective(Directive): + """A topic-filtered tab-set block with Diataxis tab-items and topic navigation badges.""" + + has_content = True + """Content = the topic tag to filter by, e.g. 
`topic_about`.""" + + @staticmethod + def _indent(text: str) -> str: + return indent(text, " ") + + def _needtable(self, types: Diataxis, tags: str) -> str: + """Construct a single sphinx-needs needtable directive string.""" + options = [ + ':columns: title as " ";content as " "', + ":colwidths: 30;60", + ":style: table", + ":sort: type", + ":filter_warning: No pages for this filter.", + ] + if types is not Diataxis.ALL: + options.append(f":types: {types}") + if tags != "topic_all": + options.append(f":tags: {tags}") + options_str = "\n".join(options) + needtable = "\n".join( + [ + ".. needtable::", + self._indent(options_str), + ] + ) + return needtable + + def _tab_item(self, diataxis: Diataxis, tags: str) -> str: + """Construct a single tab-item string for the given Diataxis type.""" + needtable = self._needtable(types=diataxis, tags=tags) + + # Convert the Diataxis directive name to a pretty title. + tab_item_title = str(diataxis) + tab_item_title = tab_item_title.removeprefix("z_") + tab_item_title = tab_item_title.capitalize() + + caption = DIATAXIS_CAPTIONS.get(diataxis, "") + content = [ + # sync means all tab-sets on this page switch tabs together. + f":sync: {diataxis}", + "", + caption, + "", + needtable, + ] + content_str = "\n".join(content) + tab_item = "\n".join( + [ + f".. tab-item:: {tab_item_title}", + self._indent(content_str), + ] + ) + return tab_item + + def run(self): + """Construct the navigation badges followed by the Diataxis tab-set.""" + # Enforce the only valid location for this directive. + rst_path = Path(self.state.document["source"]) + if not (rst_path.parent.name == "user_manual" and rst_path.name == "index.rst"): + message = "Expected directive to only be used in user_manual/index.rst" + error = self.state_machine.reporter.error(message, line=self.lineno) + return [error] + + # Find all the topic labels in this file and construct navigation badges + # for them. + label_pattern = re.compile(r"^\.\. 
_(topic_.+):$", re.MULTILINE) + topic_labels = label_pattern.findall(rst_path.read_text()) + # The 'current' topic is highlighted differently. + badges = { + label: "bdg-ref-primary" + if label == self.content[0] + else "bdg-ref-primary-line" + for label in topic_labels + } + # Parse the badges as RST. + node = nodes.Element() + self.state.nested_parse( + StringList([f":{badge}:`{label}`" for label, badge in badges.items()]), + self.content_offset, + node, + ) + + # Construct the Diataxis tab-set. + tab_items = [ + self._tab_item(diataxis=diataxis, tags=self.content[0]) + for diataxis in Diataxis + ] + tab_items_str = "\n\n".join(tab_items) + tab_set = "\n".join( + [ + ".. tab-set::", + "", + self._indent(tab_items_str), + ] + ) + # Parse the tab set as RST. + self.state.nested_parse( + StringList(tab_set.splitlines()), self.content_offset, node + ) + + return node.children + + +def validate_items(app: Sphinx, builder: Builder) -> None: + """Validate that each user manual page has a single correctly configured item.""" + env = app.env + found_docs: typing.Iterable[str] = env.found_docs + + # Read-only iterable of all sphinx-needs items; only valid in the write phase. 
+ needs_view = get_needs_view(app) + # Group needs by docname + by_doc: dict[Path, list[NeedsInfoType]] = {} + for need_id in needs_view: + need = needs_view[need_id] + doc_name = need.get("docname") + if not doc_name: + # External/imported needs may have no docname; skip page accounting + continue + by_doc.setdefault(Path(doc_name), []).append(need) + + def _get_expected_type(doc_path: Path) -> typing.Optional[Diataxis]: + """Get the expected Diataxis type for the given document path.""" + parents_and_diataxis = [ + (Path("generated/api"), Diataxis.REFERENCE), + (Path("generated/gallery"), Diataxis.HOW_TO), + (Path("user_manual/tutorial"), Diataxis.TUTORIAL), + (Path("user_manual/explanation"), Diataxis.EXPLANATION), + (Path("user_manual/how_to"), Diataxis.HOW_TO), + (Path("user_manual/reference"), Diataxis.REFERENCE), + ] + expected = None + for parent, diataxis in parents_and_diataxis: + if parent in doc_path.parents: + expected = diataxis + break + if Path("generated/gallery") in doc_path.parents and doc_path.name == "index": + expected = None + if doc_path.name == "sg_execution_times": + expected = None + return expected + + for doc_name in found_docs: + doc_path = Path(doc_name) + expected_type = _get_expected_type(doc_path) + if expected_type is not None: + problem_prefix = "Page expected to have exactly 1 sphinx-needs item;" + try: + (page_need,) = by_doc[doc_path] + except KeyError: + problem = f"{problem_prefix} found 0." + logger.error(problem, location=doc_name) + continue + except ValueError: + count = len(by_doc[doc_path]) + problem = f"{problem_prefix} found {count}." + logger.error(problem, location=doc_name) + continue + + if (page_type := page_need["type"]) != expected_type: + problem = ( + "sphinx-needs item expected to have type " + f"'{expected_type}'; found type '{page_type}'." 
+ ) + logger.error(problem, location=doc_name) + + if (line_no := page_need.get("lineno")) > 25: + # Ensures that links to the needs directive take reader to the + # start of the page. + problem = ( + "sphinx-needs item expected to be defined within " + f"first 25 lines; found at line {line_no}." + ) + logger.error(problem, location=doc_name) + + # Title is not validated as it is always populated. + + if page_need["content"] == "": + problem = "sphinx-needs item must have non-empty content section." + logger.error(problem, location=doc_name) + + tags = page_need.get("tags", []) + if [tag for tag in tags if tag.startswith("topic_")] == []: + problem = ( + "sphinx-needs item must have at least one 'topic_xxx' tag " + "in its 'tags' field." + ) + logger.error(problem, location=doc_name) + + +def setup(app: Sphinx): + """Set up the Sphinx extension. + + This function is expected by Sphinx to register the extension. + """ + # Connect at write-started so needs are fully collected & resolved. + app.connect("write-started", validate_items) + + app.add_directive("diataxis-page-list", DiataxisDirective) + + return {"version": "0.1"} diff --git a/docs/src/further_topics/dataless_cubes.rst b/docs/src/user_manual/explanation/dataless_cubes.rst similarity index 96% rename from docs/src/further_topics/dataless_cubes.rst rename to docs/src/user_manual/explanation/dataless_cubes.rst index d0b592dfb6..1c4ec64a2c 100644 --- a/docs/src/further_topics/dataless_cubes.rst +++ b/docs/src/user_manual/explanation/dataless_cubes.rst @@ -1,3 +1,8 @@ +.. explanation:: Dataless Cubes + :tags: topic_data_model + + Read about Iris' support for Cubes with no data payload. + .. 
_dataless-cubes: ============== diff --git a/docs/src/further_topics/ugrid/images/data_structured_grid.svg b/docs/src/user_manual/explanation/images/data_structured_grid.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/data_structured_grid.svg rename to docs/src/user_manual/explanation/images/data_structured_grid.svg diff --git a/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg b/docs/src/user_manual/explanation/images/data_ugrid_mesh.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg rename to docs/src/user_manual/explanation/images/data_ugrid_mesh.svg diff --git a/docs/src/further_topics/ugrid/images/geovistalogo.svg b/docs/src/user_manual/explanation/images/geovistalogo.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/geovistalogo.svg rename to docs/src/user_manual/explanation/images/geovistalogo.svg diff --git a/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg b/docs/src/user_manual/explanation/images/iris-esmf-regrid.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg rename to docs/src/user_manual/explanation/images/iris-esmf-regrid.svg diff --git a/docs/src/userguide/multi_array.svg b/docs/src/user_manual/explanation/images/multi_array.svg similarity index 100% rename from docs/src/userguide/multi_array.svg rename to docs/src/user_manual/explanation/images/multi_array.svg diff --git a/docs/src/userguide/multi_array_to_cube.svg b/docs/src/user_manual/explanation/images/multi_array_to_cube.svg similarity index 100% rename from docs/src/userguide/multi_array_to_cube.svg rename to docs/src/user_manual/explanation/images/multi_array_to_cube.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_edge_data.svg b/docs/src/user_manual/explanation/images/ugrid_edge_data.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_edge_data.svg rename to 
docs/src/user_manual/explanation/images/ugrid_edge_data.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg b/docs/src/user_manual/explanation/images/ugrid_element_centres.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_element_centres.svg rename to docs/src/user_manual/explanation/images/ugrid_element_centres.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg b/docs/src/user_manual/explanation/images/ugrid_node_independence.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_node_independence.svg rename to docs/src/user_manual/explanation/images/ugrid_node_independence.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg b/docs/src/user_manual/explanation/images/ugrid_variable_faces.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg rename to docs/src/user_manual/explanation/images/ugrid_variable_faces.svg diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/user_manual/explanation/iris_cubes.rst similarity index 98% rename from docs/src/userguide/iris_cubes.rst rename to docs/src/user_manual/explanation/iris_cubes.rst index 03b5093efc..4b615ba21e 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/user_manual/explanation/iris_cubes.rst @@ -1,3 +1,8 @@ +.. explanation:: Iris Data Structures + :tags: topic_data_model + + Read about the core Iris data model. + .. _iris_data_structures: ==================== @@ -105,7 +110,7 @@ Suppose we have some gridded data which has 24 air temperature readings (in Kelvin) which is located at 4 different longitudes, 2 different latitudes and 3 different heights. Our data array can be represented pictorially: -.. image:: multi_array.svg +.. image:: images/multi_array.svg Where dimensions 0, 1, and 2 have lengths 3, 2 and 4 respectively. 
@@ -135,7 +140,7 @@ The Iris cube to represent this data would consist of: Pictorially the cube has taken on more information than a simple array: -.. image:: multi_array_to_cube.svg +.. image:: images/multi_array_to_cube.svg Additionally further information may be optionally attached to the cube. diff --git a/docs/src/userguide/iris_philosophy.rst b/docs/src/user_manual/explanation/iris_philosophy.rst similarity index 98% rename from docs/src/userguide/iris_philosophy.rst rename to docs/src/user_manual/explanation/iris_philosophy.rst index 4005d915f0..e3f7618f88 100644 --- a/docs/src/userguide/iris_philosophy.rst +++ b/docs/src/user_manual/explanation/iris_philosophy.rst @@ -1,11 +1,14 @@ +.. explanation:: Iris' Philosophy + :tags: topic_about + + Read about how and why Iris is made the way it is. + .. _iris-philosophy: **************** Iris' Philosophy **************** -.. todo:: https://github.com/SciTools/iris/issues/6511; this page belongs in 'Explanation' - .. _code-maintenance: Code Maintenance diff --git a/docs/src/community/iris_xarray.rst b/docs/src/user_manual/explanation/iris_xarray.rst similarity index 97% rename from docs/src/community/iris_xarray.rst rename to docs/src/user_manual/explanation/iris_xarray.rst index f64f64bb32..12d13a08a5 100644 --- a/docs/src/community/iris_xarray.rst +++ b/docs/src/user_manual/explanation/iris_xarray.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. explanation:: Iris ❤️ Xarray + :tags: topic_interoperability + + Read about the similarities and differences between Iris and Xarray. + +.. include:: ../../common_links.inc ====================== Iris ❤️ :term:`Xarray` @@ -7,7 +12,7 @@ Iris ❤️ :term:`Xarray` There is a lot of overlap between Iris and :term:`Xarray`, but some important differences too. Below is a summary of the most important differences, so that you can be prepared, and to help you choose the best package for your use case. -See :doc:`phrasebook` for a broad comparison of terminology. 
+See :doc:`../reference/phrasebook` for a broad comparison of terminology. Overall Experience ------------------ diff --git a/docs/src/further_topics/lenient_maths.rst b/docs/src/user_manual/explanation/lenient_maths.rst similarity index 98% rename from docs/src/further_topics/lenient_maths.rst rename to docs/src/user_manual/explanation/lenient_maths.rst index 51f77fb956..bf297e7e58 100644 --- a/docs/src/further_topics/lenient_maths.rst +++ b/docs/src/user_manual/explanation/lenient_maths.rst @@ -1,3 +1,8 @@ +.. explanation:: Lenient Cube Maths + :tags: topic_data_model;topic_maths_stats + + Read about the options for handling metadata differences during Cube maths. + .. _lenient maths: Lenient Cube Maths diff --git a/docs/src/further_topics/lenient_metadata.rst b/docs/src/user_manual/explanation/lenient_metadata.rst similarity index 99% rename from docs/src/further_topics/lenient_metadata.rst rename to docs/src/user_manual/explanation/lenient_metadata.rst index 5de9ad70c4..7e1b6b26e9 100644 --- a/docs/src/further_topics/lenient_metadata.rst +++ b/docs/src/user_manual/explanation/lenient_metadata.rst @@ -1,3 +1,8 @@ +.. explanation:: Lenient Metadata + :tags: topic_data_model + + Read about the options for handling metadata differences between Cubes. + .. _lenient metadata: Lenient Metadata diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/user_manual/explanation/mesh_data_model.rst similarity index 99% rename from docs/src/further_topics/ugrid/data_model.rst rename to docs/src/user_manual/explanation/mesh_data_model.rst index 1660f6d08c..bbcfd05f64 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/user_manual/explanation/mesh_data_model.rst @@ -1,3 +1,8 @@ +.. explanation:: The Mesh Data Model + :tags: topic_mesh;topic_data_model + + Read about how Iris represents unstructured mesh data. + .. include:: ../../common_links.inc .. _ugrid model: @@ -269,7 +274,7 @@ using packages such as Dask. 
Spatial operations on mesh data are more complex ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Detail: :doc:`operations` +Detail: :doc:`../how_to/mesh_operations` Indexing a mesh data array cannot be used for: diff --git a/docs/src/further_topics/ugrid/partner_packages.rst b/docs/src/user_manual/explanation/mesh_partners.rst similarity index 94% rename from docs/src/further_topics/ugrid/partner_packages.rst rename to docs/src/user_manual/explanation/mesh_partners.rst index f69546446c..0e1f3e341d 100644 --- a/docs/src/further_topics/ugrid/partner_packages.rst +++ b/docs/src/user_manual/explanation/mesh_partners.rst @@ -1,3 +1,8 @@ +.. explanation:: Mesh Partner Packages + :tags: topic_mesh;topic_interoperability + + Read about Python packages you can use alongside Iris to work with mesh data. + .. include:: ../../common_links.inc .. _ugrid partners: @@ -34,7 +39,7 @@ reasons: Below you can learn more about the partner packages and how they are useful. Specifics of what operations would require their installation can be found in: -:doc:`operations`. +:doc:`../how_to/mesh_operations`. .. important:: **Experimental** diff --git a/docs/src/further_topics/metadata.rst b/docs/src/user_manual/explanation/metadata.rst similarity index 99% rename from docs/src/further_topics/metadata.rst rename to docs/src/user_manual/explanation/metadata.rst index f66f253a90..589df672b4 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/user_manual/explanation/metadata.rst @@ -1,4 +1,8 @@ -.. _further topics: +.. explanation:: Metadata + :tags: topic_data_model + + Read about metadata in the Iris data model e.g. names, units. + .. 
_metadata: Metadata diff --git a/docs/src/further_topics/missing_data_handling.rst b/docs/src/user_manual/explanation/missing_data_handling.rst similarity index 94% rename from docs/src/further_topics/missing_data_handling.rst rename to docs/src/user_manual/explanation/missing_data_handling.rst index a461a44456..fe23787bc1 100644 --- a/docs/src/further_topics/missing_data_handling.rst +++ b/docs/src/user_manual/explanation/missing_data_handling.rst @@ -1,3 +1,8 @@ +.. explanation:: Missing Data Handling in Iris + :tags: topic_data_model;topic_maths_stats;topic_load_save + + Read about how Iris handles missing/masked data during Cube load/save/modification. + ============================= Missing Data Handling in Iris ============================= diff --git a/docs/src/further_topics/netcdf_io.rst b/docs/src/user_manual/explanation/netcdf_io.rst similarity index 99% rename from docs/src/further_topics/netcdf_io.rst rename to docs/src/user_manual/explanation/netcdf_io.rst index 1e94123fdf..d3fbf00b10 100644 --- a/docs/src/further_topics/netcdf_io.rst +++ b/docs/src/user_manual/explanation/netcdf_io.rst @@ -1,3 +1,8 @@ +.. explanation:: NetCDF I/O Handling in Iris + :tags: topic_load_save + + Read about how Iris loads and saves NetCDF files. + .. testsetup:: chunk_control import iris diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/user_manual/explanation/real_and_lazy_data.rst similarity index 98% rename from docs/src/userguide/real_and_lazy_data.rst rename to docs/src/user_manual/explanation/real_and_lazy_data.rst index 2b3ecf9e64..275c870252 100644 --- a/docs/src/userguide/real_and_lazy_data.rst +++ b/docs/src/user_manual/explanation/real_and_lazy_data.rst @@ -1,3 +1,7 @@ +.. explanation:: Real and Lazy Data + :tags: topic_data_model;topic_lazy_data + + Read about how Iris defers data loading from disk. ..
_real_and_lazy_data: @@ -14,7 +18,7 @@ Real and Lazy Data ================== -We have seen in the :doc:`iris_cubes` section of the user guide that +We have seen in the :doc:`../explanation/iris_cubes` section of the user guide that Iris cubes contain data and metadata about a phenomenon. The data element of a cube is always an array, but the array may be either "real" or "lazy". diff --git a/docs/src/further_topics/um_files_loading.rst b/docs/src/user_manual/explanation/um_files_loading.rst similarity index 99% rename from docs/src/further_topics/um_files_loading.rst rename to docs/src/user_manual/explanation/um_files_loading.rst index 2d2eb973e4..8c6718805a 100644 --- a/docs/src/further_topics/um_files_loading.rst +++ b/docs/src/user_manual/explanation/um_files_loading.rst @@ -1,3 +1,8 @@ +.. explanation:: Iris Handling of PP and Fieldsfiles + :tags: topic_load_save + + Read about how Iris represents Met Office UM PP and Fieldsfiles data. + .. testsetup:: import numpy as np diff --git a/docs/src/further_topics/ux_guide.rst b/docs/src/user_manual/explanation/ux_guide.rst similarity index 86% rename from docs/src/further_topics/ux_guide.rst rename to docs/src/user_manual/explanation/ux_guide.rst index 6b0599c757..3b62e036f4 100644 --- a/docs/src/further_topics/ux_guide.rst +++ b/docs/src/user_manual/explanation/ux_guide.rst @@ -1,9 +1,14 @@ +.. explanation:: Reviewing the Iris User Experience + :tags: topic_about + + Read about how we plan to review and improve the user experience of Iris. + .. _ux_guide: Reviewing the Iris User Experience ********************************** -.. todo:: https://github.com/SciTools/iris/issues/6511; this page belongs in 'Explanation' +.. todo:: https://github.com/SciTools/iris/issues/6867; this page belongs in 'Get Involved' Often, improving and updating the existing user experience can fall behind fixing create new features, or quashing pesky bugs. 
To combat this, we plan to have regular development discussions to ensure diff --git a/docs/src/further_topics/which_regridder_to_use.rst b/docs/src/user_manual/explanation/which_regridder_to_use.rst similarity index 99% rename from docs/src/further_topics/which_regridder_to_use.rst rename to docs/src/user_manual/explanation/which_regridder_to_use.rst index dae273252d..cd1e227872 100644 --- a/docs/src/further_topics/which_regridder_to_use.rst +++ b/docs/src/user_manual/explanation/which_regridder_to_use.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. explanation:: Which Regridder to Use + :tags: topic_regrid + + Read about the different regridders available in Iris. + +.. include:: ../../common_links.inc .. _which_regridder_to_use: @@ -38,7 +43,7 @@ the following form: | System** | ``src``/``tgt`` cube coordinates. | +-----------------+-----------------------------------------------------------+ | **Lazy | If the result is calculated lazily. See | -| Regridding** | :doc:`real and lazy data `.| +| Regridding** | :doc:`real and lazy data `. | +-----------------+-----------------------------------------------------------+ | **Weights | See `regridder performance`_. | | Caching** | | diff --git a/docs/src/why_iris.rst b/docs/src/user_manual/explanation/why_iris.rst similarity index 94% rename from docs/src/why_iris.rst rename to docs/src/user_manual/explanation/why_iris.rst index a5f137b9b8..d7df72d8ad 100644 --- a/docs/src/why_iris.rst +++ b/docs/src/user_manual/explanation/why_iris.rst @@ -1,3 +1,8 @@ +.. explanation:: Why Iris + :tags: topic_about + + Read about the Iris Python package and why you might want to use it. + .. 
_why_iris: Why Iris diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/user_manual/how_to/filtering_warnings.rst similarity index 93% rename from docs/src/further_topics/filtering_warnings.rst rename to docs/src/user_manual/how_to/filtering_warnings.rst index 204049942b..d2217f326e 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/user_manual/how_to/filtering_warnings.rst @@ -1,3 +1,8 @@ +.. how-to:: Filtering Warnings + :tags: topic_troubleshooting + + How to customise Iris' warnings to only see those you need. + .. _filtering-warnings: ================== @@ -49,9 +54,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:451: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) - iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:777: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -112,7 +117,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:451: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. 
To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) :: @@ -127,16 +132,16 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=445) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=451) ... my_operation() ... - iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:777: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: - python -W ignore:::iris.coord_systems:445 - export PYTHONWARNINGS=ignore:::iris.coord_systems:445 + python -W ignore:::iris.coord_systems:451 + export PYTHONWARNINGS=ignore:::iris.coord_systems:451 Warnings from a Common Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -190,7 +195,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:451: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) ---- diff --git a/docs/src/further_topics/ugrid/images/fesom_mesh.png b/docs/src/user_manual/how_to/images/fesom_mesh.png similarity index 100% rename from docs/src/further_topics/ugrid/images/fesom_mesh.png rename to docs/src/user_manual/how_to/images/fesom_mesh.png diff --git a/docs/src/further_topics/ugrid/images/orca_grid.png b/docs/src/user_manual/how_to/images/orca_grid.png similarity index 100% rename from docs/src/further_topics/ugrid/images/orca_grid.png rename to docs/src/user_manual/how_to/images/orca_grid.png diff --git a/docs/src/further_topics/ugrid/images/plotting.png b/docs/src/user_manual/how_to/images/plotting.png similarity index 100% rename from docs/src/further_topics/ugrid/images/plotting.png rename to docs/src/user_manual/how_to/images/plotting.png diff --git a/docs/src/further_topics/ugrid/images/smc_mesh.png b/docs/src/user_manual/how_to/images/smc_mesh.png similarity index 100% rename from docs/src/further_topics/ugrid/images/smc_mesh.png rename to docs/src/user_manual/how_to/images/smc_mesh.png diff --git a/docs/src/installing.rst b/docs/src/user_manual/how_to/installing.rst similarity index 97% rename from docs/src/installing.rst rename to docs/src/user_manual/how_to/installing.rst index a0a3fd2c62..d7832733d4 100644 --- a/docs/src/installing.rst +++ b/docs/src/user_manual/how_to/installing.rst @@ -1,3 +1,8 @@ +.. how-to:: Installing + :tags: topic_about + + How to install the Iris Python package. + .. _installing_iris: Installing diff --git a/docs/src/further_topics/ugrid/other_meshes.rst b/docs/src/user_manual/how_to/mesh_conversions.rst similarity index 98% rename from docs/src/further_topics/ugrid/other_meshes.rst rename to docs/src/user_manual/how_to/mesh_conversions.rst index 19f220be82..c465f9e9da 100644 --- a/docs/src/further_topics/ugrid/other_meshes.rst +++ b/docs/src/user_manual/how_to/mesh_conversions.rst @@ -1,10 +1,15 @@ +.. 
how-to:: Converting Other Mesh Formats + :tags: topic_mesh;topic_interoperability + + How to convert other mesh formats into Iris' Mesh Data Model. + .. _other_meshes: Converting Other Mesh Formats ***************************** Iris' Mesh Data Model is based primarily on the CF-UGRID conventions (see -:doc:`data_model`), but other mesh formats can be converted to fit into this +:doc:`../../user_manual/explanation/mesh_data_model`), but other mesh formats can be converted to fit into this model, **enabling use of Iris' specialised mesh support**. Below are some examples demonstrating how this works for various mesh formats. diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/user_manual/how_to/mesh_operations.rst similarity index 98% rename from docs/src/further_topics/ugrid/operations.rst rename to docs/src/user_manual/how_to/mesh_operations.rst index 59b178559e..34cd650b91 100644 --- a/docs/src/further_topics/ugrid/operations.rst +++ b/docs/src/user_manual/how_to/mesh_operations.rst @@ -1,10 +1,15 @@ +.. how-to:: Working with Mesh Data + :tags: topic_mesh;topic_load_save;topic_plotting;topic_regrid;topic_maths_stats + + How to perform common Iris operations on unstructured mesh data. + .. _ugrid operations: Working with Mesh Data ********************** .. note:: Several of the operations below rely on the optional dependencies - mentioned in :doc:`partner_packages`. + mentioned in :doc:`../../user_manual/explanation/mesh_partners`. Operations Summary ------------------ @@ -529,7 +534,7 @@ Region Extraction .. rubric:: |tagline: region extraction| -As described in :doc:`data_model`, indexing for a range along a +As described in :doc:`../../user_manual/explanation/mesh_data_model`, indexing for a range along a :class:`~iris.cube.Cube`\'s :meth:`~iris.cube.Cube.mesh_dim` will not provide a contiguous region, since **position on the unstructured dimension is unrelated to spatial position**. This means that subsetted @@ -819,7 +824,7 @@ user. 
Keep an eye on memory demand when comparing large :class:`~iris.mesh.MeshXY`\es, but note that :class:`~iris.mesh.MeshXY`\ equality is enabled for lazy - processing (:doc:`/userguide/real_and_lazy_data`), so if the + processing (:doc:`/user_manual/explanation/real_and_lazy_data`), so if the :class:`~iris.mesh.MeshXY`\es being compared are lazy the process will use less memory than their total size. @@ -830,7 +835,7 @@ Combining Cubes .. rubric:: |tagline: combining cubes| Merging or concatenating :class:`~iris.cube.Cube`\s (described in -:doc:`/userguide/merge_and_concat`) with two different +:doc:`../tutorial/merge_and_concat`) with two different :class:`~iris.mesh.MeshXY`\es is not possible - a :class:`~iris.cube.Cube` must be associated with just a single :class:`~iris.mesh.MeshXY`, and merge/concatenate are not yet @@ -856,7 +861,7 @@ Arithmetic .. rubric:: |tagline: arithmetic| -Cube Arithmetic (described in :doc:`/userguide/cube_maths`) +Cube Arithmetic (described in :doc:`../tutorial/cube_maths`) has been extended to handle :class:`~iris.cube.Cube`\s that include :class:`~iris.mesh.MeshCoord`\s, and hence have a ``cube.mesh``. diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/user_manual/how_to/navigating_a_cube.rst similarity index 98% rename from docs/src/userguide/navigating_a_cube.rst rename to docs/src/user_manual/how_to/navigating_a_cube.rst index ec3cd8e0dc..2e4f3c0ca9 100644 --- a/docs/src/userguide/navigating_a_cube.rst +++ b/docs/src/user_manual/how_to/navigating_a_cube.rst @@ -1,3 +1,8 @@ +.. how-to:: Navigating a Cube + :tags: topic_data_model + + How to access the properties of a Cube. + ================= Navigating a Cube ================= @@ -66,7 +71,7 @@ and :attr:`Cube.units ` respectively:: Interrogating these with the standard :func:`type` function will tell you that ``standard_name`` and ``long_name`` are either a string or ``None``, and ``units`` is an instance of :class:`iris.unit.Unit`. 
A more in depth discussion on -the cube units and their functional effects can be found at the end of :doc:`cube_maths`. +the cube units and their functional effects can be found at the end of :doc:`../tutorial/cube_maths`. You can access a string representing the "name" of a cube with the :meth:`Cube.name() ` method:: @@ -94,7 +99,7 @@ Each cube also has a :mod:`numpy` array which represents the phenomenon of the c print(cube.ndim) For more on the benefits, handling and uses of lazy data, see - :doc:`Real and Lazy Data ` + :doc:`Real and Lazy Data ` You can change the units of a cube using the :meth:`~iris.cube.Cube.convert_units` method. For example:: diff --git a/docs/src/community/plugins.rst b/docs/src/user_manual/how_to/plugins.rst similarity index 93% rename from docs/src/community/plugins.rst rename to docs/src/user_manual/how_to/plugins.rst index 0d79d64623..ba993eceeb 100644 --- a/docs/src/community/plugins.rst +++ b/docs/src/user_manual/how_to/plugins.rst @@ -1,3 +1,8 @@ +.. how-to:: Plugins + :tags: topic_interoperability;topic_about + + How to create and use plugins to extend Iris' functionality. + .. _namespace package: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/ .. _community_plugins: diff --git a/docs/src/user_manual/index.rst b/docs/src/user_manual/index.rst new file mode 100644 index 0000000000..0d57721609 --- /dev/null +++ b/docs/src/user_manual/index.rst @@ -0,0 +1,199 @@ +.. include:: /common_links.inc + +.. comment: + now that User Manual is the official top-level, and the User Guide is a + sub-section, the original labels have been relocated here. + +.. _user_guide_index: +.. _user_guide_introduction: +.. _user_manual_index: + +User Manual +=========== + +.. hint:: + + If you are new to Iris: check out :ref:`getting_started_index` first. + +Welcome to the Iris User Manual! + +This is designed as a searchable index of **all** our user documentation. 
Try +the Topic and `Diataxis`_ filters below to find the information you need today. +Alternatively, you can use the sidebar to navigate by section. + +.. tip:: + + - :doc:`/user_manual/index`: a searchable index of **all** user + documentation. + - :doc:`User Guide `: a linear + narrative introduction to Iris' data model and functionality. + +.. comment: + The tree structure for user_manual is specified here. As mentioned in the + text, we prefer readers to use the tabbed sections below, so the toctree is + hidden - not rendered in the text, only in the sidebar. This toctree is + expected to be section_indexes/* pages; with each of those pages + providing the remaining sub-structure. + + +.. toctree:: + :maxdepth: 1 + :hidden: + + section_indexes/get_started + section_indexes/userguide + /generated/gallery/index + Iris API + section_indexes/dask_best_practices + section_indexes/mesh_support + section_indexes/metadata_arithmetic + section_indexes/community + section_indexes/general + +.. _topic_all: + +All +--- + +.. diataxis-page-list:: topic_all + +By Topic +-------- + +.. _topic_data_model: + +topic: ``data_model`` +^^^^^^^^^^^^^^^^^^^^^ + +Pages about the :class:`~iris.cube.Cube` class and its associated components +such as :class:`~iris.coords.Coord` and :class:`~iris.mesh.MeshXY`. + +.. diataxis-page-list:: topic_data_model + + +.. _topic_slice_combine: + +topic: ``slice_combine`` +^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about subsetting and combining :class:`~iris.cube.Cube` and +:class:`~iris.cube.CubeList` data. Examples include slicing, indexing, merging, +concatenating. + +.. diataxis-page-list:: topic_slice_combine + + +.. _topic_load_save: + +topic: ``load_save`` +^^^^^^^^^^^^^^^^^^^^ + +Pages about reading from files into the data model, and writing from the data +model to files. + +.. diataxis-page-list:: topic_load_save + + +.. 
_topic_lazy_data: + +topic: ``lazy_data`` +^^^^^^^^^^^^^^^^^^^^ + +Pages about Iris' implementation of parallel and out-of-core data handling, via +Dask. See :term:`Lazy Data`. + +.. diataxis-page-list:: topic_lazy_data + + +.. _topic_plotting: + +topic: ``plotting`` +^^^^^^^^^^^^^^^^^^^ + +Pages about Iris' use of :term:`Cartopy` or :ref:`ugrid geovista` to plot +:class:`~iris.cube.Cube` data. + +.. diataxis-page-list:: topic_plotting + + +.. _topic_maths_stats: + +topic: ``maths_stats`` +^^^^^^^^^^^^^^^^^^^^^^ + +Pages about statistical and mathematical operations on :class:`~iris.cube.Cube` +data, e.g. computing means, differences, etc. + +.. diataxis-page-list:: topic_maths_stats + + +.. _topic_regrid: + +topic: ``regrid`` +^^^^^^^^^^^^^^^^^ + +Pages about regridding (2D to 2D) and interpolation (ND to 1D) of data from one +set of coordinates to another. Commonly used to move between different XY grids. + +.. diataxis-page-list:: topic_regrid + + +.. _topic_customisation: + +topic: ``customisation`` +^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about configurable Iris behaviour. + +.. diataxis-page-list:: topic_customisation + + +.. _topic_troubleshooting: + +topic: ``troubleshooting`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about problems/exceptions you may encounter when using Iris, and how to +best handle them. + +.. diataxis-page-list:: topic_troubleshooting + + +.. _topic_experimental: + +topic: ``experimental`` +^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about API that is still subject to change. + +.. diataxis-page-list:: topic_experimental + + +.. _topic_interoperability: + +topic: ``interoperability`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about using Iris alongside other libraries and tools. + +.. diataxis-page-list:: topic_interoperability + + +.. _topic_mesh: + +topic: ``mesh`` +^^^^^^^^^^^^^^^ + +Pages about Iris' support for unstructured mesh data. + +.. diataxis-page-list:: topic_mesh + + +.. 
_topic_about: + +topic: ``about`` +^^^^^^^^^^^^^^^^ + +Pages about the non-code aspects of Iris: philosophy, installation, etc. + +.. diataxis-page-list:: topic_about diff --git a/docs/src/userguide/citation.rst b/docs/src/user_manual/reference/citation.rst similarity index 88% rename from docs/src/userguide/citation.rst rename to docs/src/user_manual/reference/citation.rst index d0496f4876..00991e1a70 100644 --- a/docs/src/userguide/citation.rst +++ b/docs/src/user_manual/reference/citation.rst @@ -1,3 +1,8 @@ +.. z_reference:: Citing Iris + :tags: topic_about + + Information on the correct way to cite the Iris Python package. + .. _Citing_Iris: =========== diff --git a/docs/src/userguide/glossary.rst b/docs/src/user_manual/reference/glossary.rst similarity index 86% rename from docs/src/userguide/glossary.rst rename to docs/src/user_manual/reference/glossary.rst index 7de88462e2..3c04b1756b 100644 --- a/docs/src/userguide/glossary.rst +++ b/docs/src/user_manual/reference/glossary.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. z_reference:: Glossary + :tags: topic_data_model;topic_about;topic_load_save;topic_lazy_data;topic_plotting;topic_maths_stats;topic_regrid;topic_customisation;topic_troubleshooting;topic_slice_combine + + Information on common terms used within Iris documentation. + +.. include:: ../../common_links.inc .. _glossary: @@ -40,7 +45,7 @@ Glossary representing the cube as a whole. | **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Cube @@ -55,7 +60,7 @@ Glossary - :term:`Coordinate Factories ` | **Related:** :term:`NumPy` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Cell Method @@ -64,7 +69,7 @@ Glossary MEAN or SUM operation. 
| **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Coordinate Factory @@ -75,7 +80,7 @@ Glossary "height above ground level" coordinate. | **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | @@ -85,7 +90,7 @@ Glossary so that not all data is in RAM at once. | **Related:** :term:`Lazy Data` **|** :term:`NumPy` - | **More information:** :doc:`real_and_lazy_data` + | **More information:** :doc:`../explanation/real_and_lazy_data` | Fields File (FF) Format @@ -111,7 +116,7 @@ Glossary thanks to parallel processing. | **Related:** :term:`Dask` **|** :term:`Real Data` - | **More information:** :doc:`real_and_lazy_data` + | **More information:** :doc:`../explanation/real_and_lazy_data` | Long Name @@ -119,7 +124,7 @@ Glossary the same restraints as :term:`standard name`. | **Related:** :term:`Standard Name` **|** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Matplotlib @@ -137,7 +142,7 @@ Glossary e.g. :term:`units ` or :term:`Cell Methods ` | **Related:** :term:`Phenomenon` **|** :term:`Cube` - | **More information:** :doc:`../further_topics/metadata` + | **More information:** :doc:`../explanation/metadata` | NetCDF Format @@ -167,7 +172,7 @@ Glossary | **Related:** :term:`Metadata` **|** :term:`Standard Name` **|** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Post Processing (PP) Format @@ -183,7 +188,7 @@ Glossary on the hard drive. | **Related:** :term:`Lazy Data` **|** :term:`NumPy` - | **More information:** :doc:`real_and_lazy_data` + | **More information:** :doc:`../explanation/real_and_lazy_data` | Standard Name @@ -191,14 +196,14 @@ Glossary defined at `CF Standard Names `_. 
| **Related:** :term:`Long Name` **|** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Unit The unit with which the :term:`phenomenon` is measured e.g. m / sec. | **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Xarray diff --git a/docs/src/community/phrasebook.rst b/docs/src/user_manual/reference/phrasebook.rst similarity index 92% rename from docs/src/community/phrasebook.rst rename to docs/src/user_manual/reference/phrasebook.rst index bcd91cca83..c952988c82 100644 --- a/docs/src/community/phrasebook.rst +++ b/docs/src/user_manual/reference/phrasebook.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. z_reference:: Phrasebook + :tags: topic_interoperability + + Information on terminology differences between Iris and similar packages. + +.. include:: ../../common_links.inc .. _phrasebook: diff --git a/docs/src/community/index.rst b/docs/src/user_manual/section_indexes/community.rst similarity index 83% rename from docs/src/community/index.rst rename to docs/src/user_manual/section_indexes/community.rst index 1462f881a8..f2bc926b1e 100644 --- a/docs/src/community/index.rst +++ b/docs/src/user_manual/section_indexes/community.rst @@ -1,4 +1,4 @@ -.. include:: ../common_links.inc +.. include:: ../../common_links.inc .. todo: consider scientific-python.org @@ -7,6 +7,8 @@ Iris in the Community ===================== +.. todo:: https://github.com/SciTools/iris/issues/6867 - this section belongs in 'Get Involved' + Iris aims to be a valuable member of the open source scientific Python community. @@ -39,15 +41,15 @@ smoother interoperability: .. not using toctree due to combination of child pages and cross-references. * The :mod:`iris.pandas` module -* :doc:`iris_xarray` -* :doc:`phrasebook` +* :doc:`../explanation/iris_xarray` +* :doc:`../reference/phrasebook` .. 
toctree:: :maxdepth: 1 :hidden: - iris_xarray - phrasebook + ../explanation/iris_xarray + ../reference/phrasebook Plugins ------- @@ -57,4 +59,4 @@ Iris can be extended with **plugins**! See below for further information: .. toctree:: :maxdepth: 2 - plugins + ../how_to/plugins diff --git a/docs/src/further_topics/dask_best_practices/index.rst b/docs/src/user_manual/section_indexes/dask_best_practices.rst similarity index 95% rename from docs/src/further_topics/dask_best_practices/index.rst rename to docs/src/user_manual/section_indexes/dask_best_practices.rst index 79de2692bd..109d328b82 100644 --- a/docs/src/further_topics/dask_best_practices/index.rst +++ b/docs/src/user_manual/section_indexes/dask_best_practices.rst @@ -102,7 +102,7 @@ this with:: dask.config.set(num_workers=N) -For an example, see :doc:`dask_bags_and_greed`. +For an example, see :doc:`../tutorial/dask_bags_and_greed`. Alternatively, when there is only one CPU allocated, it may actually be more efficient to use a "synchronous" scheduler instead, with:: @@ -194,7 +194,7 @@ If the file(s) being loaded contain multiple fields, this can lead to an excessive amount of chunks which will result in poor performance. When the default chunking is not appropriate, it is possible to rechunk. -:doc:`dask_pp_to_netcdf` provides a detailed demonstration of how Dask can optimise +:doc:`../tutorial/dask_pp_to_netcdf` provides a detailed demonstration of how Dask can optimise that process. @@ -208,14 +208,14 @@ If you feel you have an example of a Dask best practice that you think may be he please share them with us by raising a new `discussion on the Iris repository `_. -* :doc:`dask_pp_to_netcdf` -* :doc:`dask_parallel_loop` -* :doc:`dask_bags_and_greed` +* :doc:`../tutorial/dask_pp_to_netcdf` +* :doc:`../tutorial/dask_parallel_loop` +* :doc:`../tutorial/dask_bags_and_greed` .. 
toctree:: :hidden: :maxdepth: 1 - dask_pp_to_netcdf - dask_parallel_loop - dask_bags_and_greed + ../tutorial/dask_pp_to_netcdf + ../tutorial/dask_parallel_loop + ../tutorial/dask_bags_and_greed diff --git a/docs/src/user_manual/section_indexes/general.rst b/docs/src/user_manual/section_indexes/general.rst new file mode 100644 index 0000000000..f3d28824a1 --- /dev/null +++ b/docs/src/user_manual/section_indexes/general.rst @@ -0,0 +1,28 @@ +.. include:: /common_links.inc + +General +======= + +Below are any pages not belonging to any other User Manual section. + +.. tip:: + + To maximise discoverability of **all** pages, the primary design of the + User Manual is based on navigating with the Topic and `Diataxis`_ + filters in the top page: :doc:`../index`. + +.. toctree:: + :maxdepth: 1 + + ../reference/citation + ../tutorial/controlling_merge + ../explanation/dataless_cubes + ../how_to/filtering_warnings + ../reference/glossary + ../explanation/iris_philosophy + ../explanation/missing_data_handling + ../explanation/netcdf_io + ../tutorial/s3_io + ../explanation/um_files_loading + ../explanation/ux_guide + ../explanation/which_regridder_to_use diff --git a/docs/src/user_manual/section_indexes/get_started.rst b/docs/src/user_manual/section_indexes/get_started.rst new file mode 100644 index 0000000000..f95bbf9635 --- /dev/null +++ b/docs/src/user_manual/section_indexes/get_started.rst @@ -0,0 +1,29 @@ +.. _getting_started_index: + +Get Started +=========== + +Quick Start +----------- + +This will get you up and running with just 5 minutes of reading: + +- :ref:`installing_iris` +- :ref:`iris_data_structures` +- The first part of: :ref:`loading_iris_cubes` + +Base Understanding +------------------ + +If you prefer to begin with a richer understanding, these are the pages to read: + +- :ref:`why_iris` +- Browse for an idea of Iris' capabilities: :doc:`/generated/gallery/index` +- :doc:`userguide` + +.. 
toctree:: + :maxdepth: 1 + :hidden: + + ../how_to/installing + ../explanation/why_iris diff --git a/docs/src/further_topics/ugrid/index.rst b/docs/src/user_manual/section_indexes/mesh_support.rst similarity index 75% rename from docs/src/further_topics/ugrid/index.rst rename to docs/src/user_manual/section_indexes/mesh_support.rst index c247a9dc6d..eb046e6c8a 100644 --- a/docs/src/further_topics/ugrid/index.rst +++ b/docs/src/user_manual/section_indexes/mesh_support.rst @@ -35,10 +35,10 @@ Iris' mesh support is experimental Read on to find out more... -* :doc:`data_model` - learn why the mesh experience is so different. -* :doc:`partner_packages` - meet some optional dependencies that provide powerful mesh operations. -* :doc:`operations` - experience how your workflows will look when written for mesh data. -* :doc:`other_meshes` - check out some examples of converting various mesh formats into Iris' mesh format. +* :doc:`../explanation/mesh_data_model` - learn why the mesh experience is so different. +* :doc:`../explanation/mesh_partners` - meet some optional dependencies that provide powerful mesh operations. +* :doc:`../how_to/mesh_operations` - experience how your workflows will look when written for mesh data. +* :doc:`../how_to/mesh_conversions` - check out some examples of converting various mesh formats into Iris' mesh format. .. Need an actual TOC to get Sphinx working properly, but have hidden it in @@ -48,9 +48,9 @@ Read on to find out more... 
:hidden: :maxdepth: 1 - data_model - partner_packages - operations - other_meshes + ../explanation/mesh_data_model + ../explanation/mesh_partners + ../how_to/mesh_operations + ../how_to/mesh_conversions __ CF-UGRID_ diff --git a/docs/src/user_manual/section_indexes/metadata_arithmetic.rst b/docs/src/user_manual/section_indexes/metadata_arithmetic.rst new file mode 100644 index 0000000000..1f5e2c044e --- /dev/null +++ b/docs/src/user_manual/section_indexes/metadata_arithmetic.rst @@ -0,0 +1,12 @@ +Metadata and Arithmetic +======================= + +A small series of detailed pages on how Iris handles metadata, especially when +combining :class:`~iris.cube.Cube` instances via arithmetic operations. + +.. toctree:: + :maxdepth: 1 + + ../explanation/metadata + ../explanation/lenient_metadata + ../explanation/lenient_maths diff --git a/docs/src/user_manual/section_indexes/userguide.rst b/docs/src/user_manual/section_indexes/userguide.rst new file mode 100644 index 0000000000..799e751e59 --- /dev/null +++ b/docs/src/user_manual/section_indexes/userguide.rst @@ -0,0 +1,46 @@ +User Guide: Intro to Iris +========================= + +The User Guide is designed to give an introduction to and a comprehensive +grounding in Iris' data model and functionality. It is presented in a linear +narrative style, with early sections providing a foundation for later sections. + +.. tip:: + + - :doc:`User Guide `: a linear + narrative introduction to Iris' data model and functionality. + - :doc:`/user_manual/index`: a searchable index of **all** user + documentation. + +Much of the content has supplementary links to the reference documentation; +you will not need to follow these links in order to understand the guide but +they may serve as a useful reference for future exploration. + +.. only:: html + + Since later pages depend on earlier ones, try reading this user guide + sequentially using the ``next`` and ``previous`` links at the bottom + of each page. + +.. 
note:: + + There is also useful learning material held in the + https://github.com/scitools-classroom repo, including tutorials, courses + and presentations. + + +.. toctree:: + :maxdepth: 2 + + ../explanation/iris_cubes + ../tutorial/loading_iris_cubes + ../tutorial/saving_iris_cubes + ../how_to/navigating_a_cube + ../tutorial/subsetting_a_cube + ../explanation/real_and_lazy_data + ../tutorial/plotting_a_cube + ../tutorial/interpolation_and_regridding + ../tutorial/merge_and_concat + ../tutorial/cube_statistics + ../tutorial/cube_maths + diff --git a/docs/src/further_topics/controlling_merge.rst b/docs/src/user_manual/tutorial/controlling_merge.rst similarity index 98% rename from docs/src/further_topics/controlling_merge.rst rename to docs/src/user_manual/tutorial/controlling_merge.rst index 8868306d10..de0fe9bfdc 100644 --- a/docs/src/further_topics/controlling_merge.rst +++ b/docs/src/user_manual/tutorial/controlling_merge.rst @@ -1,3 +1,8 @@ +.. tutorial:: Controlling Merge and Concatenate + :tags: topic_slice_combine;topic_customisation;topic_load_save + + A lesson in fine-grain control of combining Iris Cubes. + .. _controlling_merge: ================================= diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/user_manual/tutorial/cube_maths.rst similarity index 98% rename from docs/src/userguide/cube_maths.rst rename to docs/src/user_manual/tutorial/cube_maths.rst index 79c91ca61b..817b496686 100644 --- a/docs/src/userguide/cube_maths.rst +++ b/docs/src/user_manual/tutorial/cube_maths.rst @@ -1,3 +1,8 @@ +.. tutorial:: Cube Maths + :tags: topic_maths_stats + + A lesson in mathematical operations on Iris Cubes. + .. 
_cube maths: ========== @@ -5,7 +10,7 @@ Cube Maths ========== -The section :doc:`navigating_a_cube` highlighted that +The section :doc:`../how_to/navigating_a_cube` highlighted that every cube has a data attribute; this attribute can then be manipulated directly:: diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/user_manual/tutorial/cube_statistics.rst similarity index 99% rename from docs/src/userguide/cube_statistics.rst rename to docs/src/user_manual/tutorial/cube_statistics.rst index efc031aa43..e980fea407 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/user_manual/tutorial/cube_statistics.rst @@ -1,3 +1,8 @@ +.. tutorial:: Cube Statistics + :tags: topic_maths_stats + + A lesson in statistical operations on Iris Cubes. + .. _cube-statistics: =============== diff --git a/docs/src/further_topics/dask_best_practices/dask_bags_and_greed.rst b/docs/src/user_manual/tutorial/dask_bags_and_greed.rst similarity index 98% rename from docs/src/further_topics/dask_best_practices/dask_bags_and_greed.rst rename to docs/src/user_manual/tutorial/dask_bags_and_greed.rst index 272ea6fc08..8d6f073e84 100644 --- a/docs/src/further_topics/dask_best_practices/dask_bags_and_greed.rst +++ b/docs/src/user_manual/tutorial/dask_bags_and_greed.rst @@ -1,3 +1,8 @@ +.. tutorial:: Dask Bags and Greedy Parallelism + :tags: topic_lazy_data + + A real-world demonstration of using Dask Bags with Iris, including hazards to watch out for. + .. _examples_bags_greed: 3. 
Dask Bags and Greedy Parallelism diff --git a/docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst b/docs/src/user_manual/tutorial/dask_parallel_loop.rst similarity index 97% rename from docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst rename to docs/src/user_manual/tutorial/dask_parallel_loop.rst index 2c19196318..977238b52b 100644 --- a/docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst +++ b/docs/src/user_manual/tutorial/dask_parallel_loop.rst @@ -1,3 +1,8 @@ +.. tutorial:: Parallelising a Loop of Multiple Calls to a Third Party Library + :tags: topic_lazy_data + + A real-world demonstration of parallelising function calls with Dask. + .. _examples_parallel_loop: 2. Parallelising a Loop of Multiple Calls to a Third Party Library diff --git a/docs/src/further_topics/dask_best_practices/dask_pp_to_netcdf.rst b/docs/src/user_manual/tutorial/dask_pp_to_netcdf.rst similarity index 94% rename from docs/src/further_topics/dask_best_practices/dask_pp_to_netcdf.rst rename to docs/src/user_manual/tutorial/dask_pp_to_netcdf.rst index 28784154b4..0b8f306026 100644 --- a/docs/src/further_topics/dask_best_practices/dask_pp_to_netcdf.rst +++ b/docs/src/user_manual/tutorial/dask_pp_to_netcdf.rst @@ -1,3 +1,9 @@ +.. tutorial:: Speeding up Converting PP Files to NetCDF + :tags: topic_lazy_data;topic_load_save + + A real-world demonstration of tuning Dask and Iris for better performance + when saving data. + .. _examples_pp_to_ff: 1. 
Speed up Converting PP Files to NetCDF diff --git a/docs/src/userguide/concat.svg b/docs/src/user_manual/tutorial/images/concat.svg similarity index 100% rename from docs/src/userguide/concat.svg rename to docs/src/user_manual/tutorial/images/concat.svg diff --git a/docs/src/further_topics/dask_best_practices/images/grib-bottleneck.png b/docs/src/user_manual/tutorial/images/grib-bottleneck.png similarity index 100% rename from docs/src/further_topics/dask_best_practices/images/grib-bottleneck.png rename to docs/src/user_manual/tutorial/images/grib-bottleneck.png diff --git a/docs/src/further_topics/dask_best_practices/images/loop_third_party_kapture_results.png b/docs/src/user_manual/tutorial/images/loop_third_party_kapture_results.png similarity index 100% rename from docs/src/further_topics/dask_best_practices/images/loop_third_party_kapture_results.png rename to docs/src/user_manual/tutorial/images/loop_third_party_kapture_results.png diff --git a/docs/src/userguide/merge.svg b/docs/src/user_manual/tutorial/images/merge.svg similarity index 100% rename from docs/src/userguide/merge.svg rename to docs/src/user_manual/tutorial/images/merge.svg diff --git a/docs/src/userguide/merge_and_concat.svg b/docs/src/user_manual/tutorial/images/merge_and_concat.svg similarity index 100% rename from docs/src/userguide/merge_and_concat.svg rename to docs/src/user_manual/tutorial/images/merge_and_concat.svg diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/user_manual/tutorial/interpolation_and_regridding.rst similarity index 96% rename from docs/src/userguide/interpolation_and_regridding.rst rename to docs/src/user_manual/tutorial/interpolation_and_regridding.rst index 4a95276ab2..6a888d7549 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/user_manual/tutorial/interpolation_and_regridding.rst @@ -1,3 +1,8 @@ +.. 
tutorial:: Cube Interpolation and Regridding + :tags: topic_regrid + + A lesson in Iris' interpolation and regridding functionality. + .. _interpolation_and_regridding: .. testsetup:: * @@ -32,8 +37,8 @@ The following are the regridding schemes that are currently available in Iris: The linear and nearest-neighbour interpolation schemes, and the linear, nearest-neighbour, and area-weighted regridding schemes support lazy regridding, i.e. if the source cube has lazy data, the resulting cube will also have lazy data. -See :doc:`real_and_lazy_data` for an introduction to lazy data. -See :doc:`../further_topics/which_regridder_to_use` for a more in depth overview of the different regridders. +See :doc:`../explanation/real_and_lazy_data` for an introduction to lazy data. +See :doc:`../explanation/which_regridder_to_use` for a more in depth overview of the different regridders. .. _interpolation: @@ -161,7 +166,7 @@ these sample points: Let's look at the original data, the interpolation line and the new data in a plot. This will help us to see what is going on: -.. plot:: userguide/regridding_plots/interpolate_column.py +.. plot:: user_manual/tutorial/regridding_plots/interpolate_column.py The red diamonds on the extremes of the altitude values show that we have extrapolated data beyond the range of the original data. In some cases this is @@ -222,7 +227,7 @@ Let's load two cubes that have different grids and coordinate systems: We can visually confirm that they are on different grids by plotting the two cubes: -.. plot:: userguide/regridding_plots/regridding_plot.py +.. plot:: user_manual/tutorial/regridding_plots/regridding_plot.py Let's regrid the ``global_air_temp`` cube onto a rotated pole grid using a linear regridding scheme. To achieve this we pass the ``rotated_psl`` @@ -231,7 +236,7 @@ cube onto: >>> rotated_air_temp = global_air_temp.regrid(rotated_psl, iris.analysis.Linear()) -.. plot:: userguide/regridding_plots/regridded_to_rotated.py +.. 
plot:: user_manual/tutorial/regridding_plots/regridded_to_rotated.py We could regrid the pressure values onto the global grid, but this will involve some form of extrapolation. As with interpolation, we can control the extrapolation @@ -260,7 +265,7 @@ pole grid: >>> scheme = iris.analysis.Linear(extrapolation_mode='mask') >>> global_psl = rotated_psl.regrid(global_air_temp, scheme) -.. plot:: userguide/regridding_plots/regridded_to_global.py +.. plot:: user_manual/tutorial/regridding_plots/regridded_to_global.py Notice that although we can still see the approximate shape of the rotated pole grid, the cells have now become rectangular in a plate carrée (equirectangular) projection. @@ -341,7 +346,7 @@ some data will be disregarded if it lies close to masked data. To visualise the above regrid, let's plot the original data, along with 3 distinct ``mdtol`` values to compare the result: -.. plot:: userguide/regridding_plots/regridded_to_global_area_weighted.py +.. plot:: user_manual/tutorial/regridding_plots/regridded_to_global_area_weighted.py .. _caching_a_regridder: diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/user_manual/tutorial/loading_iris_cubes.rst similarity index 98% rename from docs/src/userguide/loading_iris_cubes.rst rename to docs/src/user_manual/tutorial/loading_iris_cubes.rst index ac6b370466..e54dbc9ebd 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/user_manual/tutorial/loading_iris_cubes.rst @@ -1,3 +1,8 @@ +.. tutorial:: Loading Iris Cubes + :tags: topic_load_save + + A lesson in how Iris loading works. + .. _loading_iris_cubes: =================== @@ -114,7 +119,7 @@ Notice that the result of printing a **cube** is a little more verbose than it was when printing a **list of cubes**. In addition to the very short summary which is provided when printing a list of cubes, information is provided on the coordinates which constitute the cube in question. 
-This was the output discussed at the end of the :doc:`iris_cubes` section. +This was the output discussed at the end of the :doc:`../explanation/iris_cubes` section. .. note:: @@ -156,7 +161,7 @@ essential descriptive information or metadata : the bulk of the actual data content will only be loaded later, as it is needed. This is referred to as 'lazy' data. It allows loading to be much quicker, and to occupy less memory. -For more on the benefits, handling and uses of lazy data, see :doc:`Real and Lazy Data `. +For more on the benefits, handling and uses of lazy data, see :doc:`Real and Lazy Data `. .. _constrained-loading: @@ -408,3 +413,4 @@ API documentation for:** :class:`iris.loading.LoadProblems`. warnings.filterwarnings("ignore") helpers.get_names = get_names_original std_names.STD_NAMES["air_temperature"] = air_temperature + iris.FUTURE.date_microseconds = False diff --git a/docs/src/userguide/merge_and_concat.rst b/docs/src/user_manual/tutorial/merge_and_concat.rst similarity index 99% rename from docs/src/userguide/merge_and_concat.rst rename to docs/src/user_manual/tutorial/merge_and_concat.rst index d754e08cc1..3f717f064e 100644 --- a/docs/src/userguide/merge_and_concat.rst +++ b/docs/src/user_manual/tutorial/merge_and_concat.rst @@ -1,3 +1,8 @@ +.. tutorial:: Merge and Concatenate + :tags: topic_slice_combine + + A lesson in the various ways to combine Cubes along different dimensional axes. + .. _merge_and_concat: ===================== @@ -16,7 +21,7 @@ issues from occurring. Both ``merge`` and ``concatenate`` take multiple cubes as input and result in fewer cubes as output. The following diagram illustrates the two processes: -.. image:: merge_and_concat.svg +.. image:: images/merge_and_concat.svg :alt: Pictographic of merge and concatenation. :align: center @@ -128,7 +133,7 @@ make a new ``z`` dimension coordinate: The following diagram illustrates what has taken place in this example: -.. image:: merge.svg +.. 
image:: images/merge.svg :alt: Pictographic of merge. :align: center @@ -294,7 +299,7 @@ cubes to form a new cube with an extended ``t`` coordinate: The following diagram illustrates what has taken place in this example: -.. image:: concat.svg +.. image:: images/concat.svg :alt: Pictographic of concatenate. :align: center diff --git a/docs/src/userguide/plotting_a_cube.rst b/docs/src/user_manual/tutorial/plotting_a_cube.rst similarity index 94% rename from docs/src/userguide/plotting_a_cube.rst rename to docs/src/user_manual/tutorial/plotting_a_cube.rst index f152690835..a32464443f 100644 --- a/docs/src/userguide/plotting_a_cube.rst +++ b/docs/src/user_manual/tutorial/plotting_a_cube.rst @@ -1,3 +1,8 @@ +.. tutorial:: Plotting a Cube + :tags: topic_plotting + + A lesson on visualising Iris Cubes using Cartopy and Matplotlib. + .. _plotting_a_cube: =============== @@ -160,7 +165,7 @@ The syntax is very similar to that which you would provide to Matplotlib's equivalent :py:func:`matplotlib.pyplot.plot` and indeed all of the keyword arguments are equivalent: -.. plot:: userguide/plotting_examples/1d_simple.py +.. plot:: user_manual/tutorial/plotting_examples/1d_simple.py :include-source: For more information on how this example reduced the 2D cube to 1 dimension see @@ -179,7 +184,7 @@ to a plot. For example, the previous plot can be improved quickly by replacing **iris.plot** with **iris.quickplot**: -.. plot:: userguide/plotting_examples/1d_quickplot_simple.py +.. plot:: user_manual/tutorial/plotting_examples/1d_quickplot_simple.py :include-source: @@ -238,7 +243,7 @@ Cube Contour A simple contour plot of a cube can be created with either the :func:`iris.plot.contour` or :func:`iris.quickplot.contour` functions: -.. plot:: userguide/plotting_examples/cube_contour.py +.. 
plot:: user_manual/tutorial/plotting_examples/cube_contour.py :include-source: @@ -247,7 +252,7 @@ Cube Filled Contour Similarly a filled contour plot of a cube can be created with the :func:`iris.plot.contourf` or :func:`iris.quickplot.contourf` functions: -.. plot:: userguide/plotting_examples/cube_contourf.py +.. plot:: user_manual/tutorial/plotting_examples/cube_contourf.py :include-source: @@ -265,7 +270,7 @@ or :func:`iris.quickplot.pcolormesh`. and :func:`iris.quickplot.pcolormesh` will attempt to guess suitable values based on their points (see also :func:`iris.coords.Coord.guess_bounds()`). -.. plot:: userguide/plotting_examples/cube_blockplot.py +.. plot:: user_manual/tutorial/plotting_examples/cube_blockplot.py :include-source: .. _brewer-info: @@ -303,7 +308,7 @@ Available Brewer Schemes The following subset of Brewer palettes found at `colorbrewer2.org `_ are available within Iris. -.. plot:: userguide/plotting_examples/brewer.py +.. plot:: user_manual/tutorial/plotting_examples/brewer.py Plotting With Brewer @@ -313,7 +318,7 @@ To plot a cube using a Brewer colour palette, simply select one of the Iris registered Brewer colour palettes and plot the cube as normal. The Brewer palettes become available once :mod:`iris.plot` or :mod:`iris.quickplot` are imported. -.. plot:: userguide/plotting_examples/cube_brewer_contourf.py +.. plot:: user_manual/tutorial/plotting_examples/cube_brewer_contourf.py :include-source: @@ -327,5 +332,5 @@ Citations can be easily added to a plot using the The recommended text for the Cynthia Brewer citation is provided by :data:`iris.plot.BREWER_CITE`. -.. plot:: userguide/plotting_examples/cube_brewer_cite_contourf.py +.. 
plot:: user_manual/tutorial/plotting_examples/cube_brewer_cite_contourf.py :include-source: diff --git a/docs/src/userguide/plotting_examples/1d_quickplot_simple.py b/docs/src/user_manual/tutorial/plotting_examples/1d_quickplot_simple.py similarity index 100% rename from docs/src/userguide/plotting_examples/1d_quickplot_simple.py rename to docs/src/user_manual/tutorial/plotting_examples/1d_quickplot_simple.py diff --git a/docs/src/userguide/plotting_examples/1d_simple.py b/docs/src/user_manual/tutorial/plotting_examples/1d_simple.py similarity index 100% rename from docs/src/userguide/plotting_examples/1d_simple.py rename to docs/src/user_manual/tutorial/plotting_examples/1d_simple.py diff --git a/docs/src/userguide/plotting_examples/1d_with_legend.py b/docs/src/user_manual/tutorial/plotting_examples/1d_with_legend.py similarity index 100% rename from docs/src/userguide/plotting_examples/1d_with_legend.py rename to docs/src/user_manual/tutorial/plotting_examples/1d_with_legend.py diff --git a/docs/src/userguide/plotting_examples/brewer.py b/docs/src/user_manual/tutorial/plotting_examples/brewer.py similarity index 100% rename from docs/src/userguide/plotting_examples/brewer.py rename to docs/src/user_manual/tutorial/plotting_examples/brewer.py diff --git a/docs/src/userguide/plotting_examples/cube_blockplot.py b/docs/src/user_manual/tutorial/plotting_examples/cube_blockplot.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_blockplot.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_blockplot.py diff --git a/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py b/docs/src/user_manual/tutorial/plotting_examples/cube_brewer_cite_contourf.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_brewer_cite_contourf.py diff --git a/docs/src/userguide/plotting_examples/cube_brewer_contourf.py 
b/docs/src/user_manual/tutorial/plotting_examples/cube_brewer_contourf.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_brewer_contourf.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_brewer_contourf.py diff --git a/docs/src/userguide/plotting_examples/cube_contour.py b/docs/src/user_manual/tutorial/plotting_examples/cube_contour.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_contour.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_contour.py diff --git a/docs/src/userguide/plotting_examples/cube_contourf.py b/docs/src/user_manual/tutorial/plotting_examples/cube_contourf.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_contourf.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_contourf.py diff --git a/docs/src/userguide/plotting_examples/masking_brazil_plot.py b/docs/src/user_manual/tutorial/plotting_examples/masking_brazil_plot.py similarity index 100% rename from docs/src/userguide/plotting_examples/masking_brazil_plot.py rename to docs/src/user_manual/tutorial/plotting_examples/masking_brazil_plot.py diff --git a/docs/src/userguide/plotting_examples/masking_stereographic_plot.py b/docs/src/user_manual/tutorial/plotting_examples/masking_stereographic_plot.py similarity index 100% rename from docs/src/userguide/plotting_examples/masking_stereographic_plot.py rename to docs/src/user_manual/tutorial/plotting_examples/masking_stereographic_plot.py diff --git a/docs/src/userguide/regridding_plots/interpolate_column.py b/docs/src/user_manual/tutorial/regridding_plots/interpolate_column.py similarity index 100% rename from docs/src/userguide/regridding_plots/interpolate_column.py rename to docs/src/user_manual/tutorial/regridding_plots/interpolate_column.py diff --git a/docs/src/userguide/regridding_plots/regridded_to_global.py b/docs/src/user_manual/tutorial/regridding_plots/regridded_to_global.py similarity index 100% rename 
from docs/src/userguide/regridding_plots/regridded_to_global.py rename to docs/src/user_manual/tutorial/regridding_plots/regridded_to_global.py diff --git a/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py b/docs/src/user_manual/tutorial/regridding_plots/regridded_to_global_area_weighted.py similarity index 100% rename from docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py rename to docs/src/user_manual/tutorial/regridding_plots/regridded_to_global_area_weighted.py diff --git a/docs/src/userguide/regridding_plots/regridded_to_rotated.py b/docs/src/user_manual/tutorial/regridding_plots/regridded_to_rotated.py similarity index 100% rename from docs/src/userguide/regridding_plots/regridded_to_rotated.py rename to docs/src/user_manual/tutorial/regridding_plots/regridded_to_rotated.py diff --git a/docs/src/userguide/regridding_plots/regridding_plot.py b/docs/src/user_manual/tutorial/regridding_plots/regridding_plot.py similarity index 100% rename from docs/src/userguide/regridding_plots/regridding_plot.py rename to docs/src/user_manual/tutorial/regridding_plots/regridding_plot.py diff --git a/docs/src/further_topics/s3_io.rst b/docs/src/user_manual/tutorial/s3_io.rst similarity index 98% rename from docs/src/further_topics/s3_io.rst rename to docs/src/user_manual/tutorial/s3_io.rst index 26774113aa..5c0cd89a2a 100644 --- a/docs/src/further_topics/s3_io.rst +++ b/docs/src/user_manual/tutorial/s3_io.rst @@ -1,3 +1,8 @@ +.. tutorial:: Loading From and Saving To S3 Buckets + :tags: topic_load_save + + A lesson on using s3-fuse with Iris to load/save data from/to S3 buckets. + .. 
_s3_io: Loading From and Saving To S3 Buckets diff --git a/docs/src/userguide/saving_iris_cubes.rst b/docs/src/user_manual/tutorial/saving_iris_cubes.rst similarity index 97% rename from docs/src/userguide/saving_iris_cubes.rst rename to docs/src/user_manual/tutorial/saving_iris_cubes.rst index 2ffc8c47d3..50466f8261 100644 --- a/docs/src/userguide/saving_iris_cubes.rst +++ b/docs/src/user_manual/tutorial/saving_iris_cubes.rst @@ -1,3 +1,8 @@ +.. tutorial:: Saving Iris Cubes + :tags: topic_load_save + + A lesson on writing Iris Cubes to file in various formats. + .. _saving_iris_cubes: ================== diff --git a/docs/src/userguide/subsetting_a_cube.rst b/docs/src/user_manual/tutorial/subsetting_a_cube.rst similarity index 98% rename from docs/src/userguide/subsetting_a_cube.rst rename to docs/src/user_manual/tutorial/subsetting_a_cube.rst index cbf3cb4c2e..53fe027243 100644 --- a/docs/src/userguide/subsetting_a_cube.rst +++ b/docs/src/user_manual/tutorial/subsetting_a_cube.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. tutorial:: Subsetting a Cube + :tags: topic_slice_combine + + A lesson on subsetting Iris Cubes by extraction, masking, and iteration. + +.. include:: ../../common_links.inc .. _subsetting_a_cube: @@ -437,7 +442,7 @@ function and this returns a copy of the cube with a :py:class:`numpy.masked_arra as the data payload, where the data outside the shape is hidden by the masked array. -.. plot:: userguide/plotting_examples/masking_brazil_plot.py +.. plot:: user_manual/tutorial/plotting_examples/masking_brazil_plot.py :include-source: We can see that the dimensions of the cube haven't changed - the plot still has @@ -460,7 +465,7 @@ data in a stereographic projection (with projected coordinates with units of metres), and mask it to only show data over the United Kingdom, based on a shapefile of the UK boundary defined in WGS84 lat-lon coordinates. -.. plot:: userguide/plotting_examples/masking_stereographic_plot.py +.. 
plot:: user_manual/tutorial/plotting_examples/masking_stereographic_plot.py :include-source: diff --git a/docs/src/userguide/change_management_goals.txt b/docs/src/userguide/change_management_goals.txt deleted file mode 100644 index afed1ebb98..0000000000 --- a/docs/src/userguide/change_management_goals.txt +++ /dev/null @@ -1,9 +0,0 @@ -To reduce code maintenance problems to an absolute minimum, Iris applies -defined change management procedures to ensure that : - - * you can be confident that your code will still work with a future release - - * you will be aware of future incompatibility problems in advance - - * you can defer making code compatibility changes for some time, until it suits you - diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst deleted file mode 100644 index 2b77129a4e..0000000000 --- a/docs/src/userguide/index.rst +++ /dev/null @@ -1,45 +0,0 @@ -.. _user_guide_index: -.. _user_guide_introduction: - -User Guide -========== - -If you are reading this user guide for the first time it is strongly -recommended that you read the user guide fully before experimenting with your -own data files. - -Much of the content has supplementary links to the reference documentation; -you will not need to follow these links in order to understand the guide but -they may serve as a useful reference for future exploration. - -.. only:: html - - Since later pages depend on earlier ones, try reading this user guide - sequentially using the ``next`` and ``previous`` links at the bottom - of each page. - -.. note:: - - There is also useful learning material held in the - https://github.com/scitools-classroom repo, including tutorials, courses - and presentations. - - -.. 
toctree:: - :maxdepth: 2 - - iris_cubes - loading_iris_cubes - saving_iris_cubes - navigating_a_cube - subsetting_a_cube - real_and_lazy_data - plotting_a_cube - interpolation_and_regridding - merge_and_concat - cube_statistics - cube_maths - citation - iris_philosophy - glossary - ../further_topics/index diff --git a/docs/src/voted_issues.rst b/docs/src/voted_issues.rst index a550a997ba..26eaa78b07 100644 --- a/docs/src/voted_issues.rst +++ b/docs/src/voted_issues.rst @@ -5,6 +5,8 @@ Voted Issues ============ +.. todo:: https://github.com/SciTools/iris/issues/6867 - this page belongs in 'Get Involved'. + You can help us to prioritise development of new features by leaving a 👍 reaction on the header (not subsequent comments) of any issue. diff --git a/docs/src/whatsnew/1.7.rst b/docs/src/whatsnew/1.7.rst index 4c3f3197dc..ed135caf3c 100644 --- a/docs/src/whatsnew/1.7.rst +++ b/docs/src/whatsnew/1.7.rst @@ -312,8 +312,8 @@ Documentation ============= * New sections on :ref:`cube broadcasting ` and - :doc:`regridding and interpolation ` - have been added to the :doc:`user guide `. + :doc:`regridding and interpolation ` + have been added to the :doc:`User Guide `. * An example demonstrating custom log-scale colouring has been added. See :ref:`sphx_glr_generated_gallery_general_plot_anomaly_log_colouring.py`. diff --git a/docs/src/whatsnew/1.8.rst b/docs/src/whatsnew/1.8.rst index dd2ca5e155..51ab612d8c 100644 --- a/docs/src/whatsnew/1.8.rst +++ b/docs/src/whatsnew/1.8.rst @@ -220,12 +220,12 @@ Deprecations Documentation ============= -* A chapter on :doc:`merge and concatenate ` has - been added to the :doc:`user guide `. +* A chapter on :doc:`merge and concatenate ` has + been added to the :doc:`User Guide `. * A section on installing Iris using `conda `_ has - been added to the :doc:`install guide `. + been added to the :doc:`install guide `. * Updates to the chapter on - :doc:`regridding and interpolation ` - have been added to the :doc:`user guide `. 
+ :doc:`regridding and interpolation ` + have been added to the :doc:`User Guide `. diff --git a/docs/src/whatsnew/1.9.rst b/docs/src/whatsnew/1.9.rst index 9829d8ff3b..7f16011ba0 100644 --- a/docs/src/whatsnew/1.9.rst +++ b/docs/src/whatsnew/1.9.rst @@ -64,7 +64,7 @@ Features a fixed surface type of 107. * Added several new helper functions for file-save customisation, - (see also : :doc:`Saving Iris Cubes `): + (see also : :doc:`Saving Iris Cubes `): * :meth:`iris.fileformats.grib.as_pairs` * :meth:`iris.fileformats.grib.as_messages` @@ -193,8 +193,8 @@ Deprecations Documentation ============= -* A chapter on :doc:`saving iris cubes ` has been - added to the :doc:`user guide `. +* A chapter on :doc:`saving iris cubes ` has been + added to the :doc:`User Guide `. * Added script and documentation for building a what's new page from developer-submitted contributions. See diff --git a/docs/src/whatsnew/2.0.rst b/docs/src/whatsnew/2.0.rst index 1ee159c662..a2d8bde77b 100644 --- a/docs/src/whatsnew/2.0.rst +++ b/docs/src/whatsnew/2.0.rst @@ -26,7 +26,7 @@ Features In particular, Dask's *threaded*, *multiprocessing* or *distributed* `schedulers`_ can be used in order to best utilise available compute and memory resource. For further details, see :doc:`Real and Lazy Data - `. + `. * Changes to the :class:`iris.cube.Cube`: @@ -290,8 +290,8 @@ Documentation ============= * A new UserGuide chapter on :doc:`Real and Lazy Data - ` has been added, and referenced from key - points in the :doc:`User Guide ` . + ` has been added, and referenced from key + points in the :doc:`User Guide ` . .. _Biggus: https://biggus.readthedocs.io/en/latest/ diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst index 13629cd9de..fc81f67ba4 100644 --- a/docs/src/whatsnew/3.0.rst +++ b/docs/src/whatsnew/3.0.rst @@ -479,7 +479,7 @@ v3.0.4 (22 July 2021) links are more visible to users. This uses the sphinx-panels_ extension. (:pull:`3884`) -#. 
`@bjlittle`_ created the :ref:`Further topics ` section and +#. `@bjlittle`_ created the 'Further Topics' section and included documentation for :ref:`metadata`, :ref:`lenient metadata`, and :ref:`lenient maths`. (:pull:`3890`) diff --git a/docs/src/whatsnew/3.5.rst b/docs/src/whatsnew/3.5.rst index c6699ee842..686cfdb2ae 100644 --- a/docs/src/whatsnew/3.5.rst +++ b/docs/src/whatsnew/3.5.rst @@ -140,12 +140,12 @@ This document explains the changes made to Iris for this release and removed an ECMWF link in the ``v1.0`` What's New that was failing the linkcheck CI. (:pull:`5109`) -#. `@trexfeathers`_ added a new top-level :doc:`/community/index` section, +#. `@trexfeathers`_ added a new top-level :doc:`/user_manual/section_indexes/community` section, as a one-stop place to find out about getting involved, and how we relate to other projects. (:pull:`5025`) #. The **Iris community**, with help from the **Xarray community**, produced - the :doc:`/community/iris_xarray` page, highlighting the similarities and + the :doc:`../user_manual/explanation/iris_xarray` page, highlighting the similarities and differences between the two packages. (:pull:`5025`) #. `@bjlittle`_ added a new section to the `README.md`_ to show our support diff --git a/docs/src/whatsnew/3.7.rst b/docs/src/whatsnew/3.7.rst index fdadb20412..41ca3c4206 100644 --- a/docs/src/whatsnew/3.7.rst +++ b/docs/src/whatsnew/3.7.rst @@ -130,7 +130,7 @@ v3.7.1 (04 Mar 2024) .. _dask_guide: -#. `@HGWright`_ added a :doc:`/further_topics/dask_best_practices/index` +#. `@HGWright`_ added a :doc:`/user_manual/section_indexes/dask_best_practices` section into the user guide, containing advice and use cases to help users get the best out of Dask with Iris. (:pull:`5190`) diff --git a/docs/src/whatsnew/3.8.rst b/docs/src/whatsnew/3.8.rst index 9fa87a9337..9ec2be4722 100644 --- a/docs/src/whatsnew/3.8.rst +++ b/docs/src/whatsnew/3.8.rst @@ -215,9 +215,9 @@ v3.8.1 (04 Mar 2024) #. 
`@bouweandela`_ updated all hyperlinks to https. (:pull:`5621`) -#. `@ESadek-MO`_ created an index page for :ref:`further_topics_index`, and +#. `@ESadek-MO`_ created an index page for 'Further Topics', and relocated all 'Technical Papers' into - :ref:`further_topics_index`. (:pull:`5602`) + 'Further Topics'. (:pull:`5602`) #. `@trexfeathers`_ made drop-down icons visible to show which pages link to 'sub-pages'. (:pull:`5684`) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index a655b3f5e8..f131f619d6 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -24,7 +24,10 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. N/A +#. We've had a makeover! ✨ All user documentation pages have been reorganised + into a new structure: :doc:`/user_manual/index`. This restructure is to + maximise discoverability of the available pages, as well as embracing the + `Diataxis`_ framework for better engagement with user needs going forward. ✨ Features @@ -99,6 +102,12 @@ This document explains the changes made to Iris for this release #. `@pp-mo`_ added a page on how to access datafiles in S3 buckets. (:issue:`6374`, :pull:`6951`) +#. `@trexfeathers`_, `@stephenworsley`_ and `@tkknight`_ reorganised **all** + user documentation pages into a new structure: :doc:`/user_manual/index`. + This restructure is to maximise discoverability of the available pages, as + well as embracing the `Diataxis`_ framework for better engagement with user + needs going forward. (:issue:`6511`, :pull:`6868`) + 💼 Internal =========== diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index fc83615a40..90f8c1f51a 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -4,10 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """A package for handling multi-dimensional data and associated metadata. +.. 
z_reference:: iris + :tags: topic_load_save;topic_data_model;topic_customisation;topic_slice_combine + + API reference + .. note :: The Iris documentation has further usage information, including - a :ref:`user guide ` which should be the first port of + a :ref:`user manual ` which should be the first port of call for new users. The functions in this module provide the main way to load and/or save diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index ca530cf931..2520889c88 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """A package providing :class:`iris.cube.Cube` analysis support. +.. z_reference:: iris.analysis + :tags: topic_maths_stats;topic_regrid + + API reference + This module defines a suite of :class:`~iris.analysis.Aggregator` instances, which are used to specify the statistical measure to calculate over a :class:`~iris.cube.Cube`, using methods such as @@ -2281,7 +2286,7 @@ def interp_order(length): Notes ------ This function does not maintain laziness when called; it realises data. -See more at :doc:`/userguide/real_and_lazy_data`. +See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ @@ -2599,7 +2604,7 @@ def clear_phenomenon_identity(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ cube.rename(None) diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 3ba406e02a..e54365e248 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -430,7 +430,7 @@ def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwarg Vector magnitudes will always be the same as the inputs. This function does not maintain laziness when called; it realises data. 
- See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ u_out, v_out = (cube.copy() for cube in (u_cube, v_cube)) diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 4f485c0680..fededb029b 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Calculus operations on :class:`iris.cube.Cube` instances. +.. z_reference:: iris.analysis.calculus + :tags: topic_maths_stats + + API reference + See also: :mod:`NumPy `. """ @@ -150,7 +155,7 @@ def cube_delta(cube, coord): .. note:: This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # handle the case where a user passes a coordinate name @@ -261,7 +266,7 @@ def differentiate(cube, coord_to_differentiate): .. note:: This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # Get the delta cube in the required differential direction. @@ -549,7 +554,7 @@ def curl(i_cube, j_cube, k_cube=None): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # Get the vector quantity names. @@ -773,7 +778,7 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index d055266d98..e88711d51f 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Various utilities and numeric transformations relevant to cartography.""" +"""Various utilities and numeric transformations relevant to cartography. + +.. z_reference:: iris.analysis.cartography + :tags: topic_maths_stats + + API reference +""" from collections import namedtuple import copy @@ -75,7 +81,7 @@ def wrap_lons(lons, base, period): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # It is important to use 64bit floating precision when changing a floats # numbers range. @@ -279,7 +285,7 @@ def get_xy_grids(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") @@ -317,7 +323,7 @@ def get_xy_contiguous_bounded_grids(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") @@ -552,7 +558,7 @@ def cosine_latitude_weights(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # Find all latitude coordinates, we want one and only one. 
lat_coords = [coord for coord in cube.coords() if "latitude" in coord.name()] @@ -656,7 +662,7 @@ def project(cube, target_proj, nx=None, ny=None): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. warning:: @@ -1149,7 +1155,7 @@ def rotate_winds(u_cube, v_cube, target_cs): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. warning:: diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index 120b6dfaa6..dc828851c5 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Various utilities related to geometric operations. +.. z_reference:: iris.analysis.geometry + :tags: topic_maths_stats + + API reference + .. note:: This module requires :mod:`shapely`. @@ -162,7 +167,7 @@ def geometry_area_weights(cube, geometry, normalize=False): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. Parameters ---------- diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 24d95153b5..7441cdccfe 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Basic mathematical and statistical operations.""" +"""Basic mathematical and statistical operations. + +.. 
z_reference:: iris.analysis.maths + :tags: topic_maths_stats + + API reference +""" from functools import lru_cache import inspect @@ -107,7 +113,7 @@ def abs(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -156,7 +162,7 @@ def intersection_of_cubes(cube, other_cube): cube1, cube2 = (intersections[0], intersections[1]) This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ wmsg = ( @@ -237,7 +243,7 @@ def add(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -289,7 +295,7 @@ def subtract(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -396,7 +402,7 @@ def multiply(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -480,7 +486,7 @@ def divide(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" _assert_is_cube(cube) @@ -545,7 +551,7 @@ def exponentiate(cube, exponent, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -593,7 +599,7 @@ def exp(cube, in_place=False): Taking an exponential will return a cube with dimensionless units. This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -621,7 +627,7 @@ def log(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -653,7 +659,7 @@ def log2(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -681,7 +687,7 @@ def log10(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -736,7 +742,7 @@ def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place= This function maintains laziness when called; it does not realise data. This is dependent on `ufunc` argument being a numpy operation that is compatible with lazy operation. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" if not isinstance(ufunc, np.ufunc): diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 8df93571f1..478b87ad21 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Statistical operations between cubes.""" +"""Statistical operations between cubes. + +.. z_reference:: iris.analysis.stats + :tags: topic_maths_stats + + API reference +""" import dask.array as da import numpy as np diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 100c6a5de1..43927f9257 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory.""" +"""Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory. + +.. z_reference:: iris.analysis.trajectory + :tags: topic_maths_stats;topic_regrid + + API reference +""" import math @@ -207,7 +213,7 @@ def interpolate(cube, sample_points, method=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.analysis import Linear diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index e322dfeb84..3952c8c924 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of derived coordinates.""" +"""Definitions of derived coordinates. + +.. 
z_reference:: iris.aux_factory + :tags: topic_data_model + + API reference +""" from abc import ABCMeta, abstractmethod import warnings diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py index f9ad2bf207..03fc934a42 100644 --- a/lib/iris/common/__init__.py +++ b/lib/iris/common/__init__.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A package for provisioning common Iris infrastructure.""" +"""A package for provisioning common Iris infrastructure. + +.. z_reference:: iris.common + :tags: topic_data_model;topic_maths_stats + + API reference +""" from .lenient import * from .metadata import * diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index d6fca461d1..22b9dd3d8b 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the infrastructure to support lenient client/service behaviour.""" +"""Provides the infrastructure to support lenient client/service behaviour. + +.. z_reference:: iris.common.lenient + :tags: topic_data_model;topic_maths_stats + + API reference +""" from collections.abc import Iterable from contextlib import contextmanager diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 55ad4e1319..93898a34de 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the infrastructure to support the common metadata API.""" +"""Provides the infrastructure to support the common metadata API. + +.. 
z_reference:: iris.common.metadata + :tags: topic_data_model + + API reference +""" from __future__ import annotations diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index 4cb176dc2f..aab98eb1f7 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides common metadata mixin behaviour.""" +"""Provides common metadata mixin behaviour. + +.. z_reference:: iris.common.mixin + :tags: topic_data_model + + API reference +""" from __future__ import annotations diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 70ec61b957..66d91416ca 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Resolve metadata common between two cubes. +.. z_reference:: iris.common.resolve + :tags: topic_data_model;topic_maths_stats + + API reference + Provides the infrastructure to support the analysis, identification and combination of metadata common between two :class:`~iris.cube.Cube` operands into a single resultant :class:`~iris.cube.Cube`, which will be diff --git a/lib/iris/config.py b/lib/iris/config.py index 9cec602a95..e99beb351d 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provides access to Iris-specific configuration values. +.. z_reference:: iris.config + :tags: topic_customisation + + API reference + The default configuration values can be overridden by creating the file ``iris/etc/site.cfg``. If it exists, this file must conform to the format defined by :mod:`configparser`. 
diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index 460a131a18..a497b1cfd8 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Cube functions for coordinate categorisation. +.. z_reference:: iris.coord_categorisation + :tags: topic_data_model + + API reference + All the functions provided here add a new coordinate to a cube. * The function :func:`add_categorised_coord` performs a generic diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index a8f78b0ebf..3b500f43bb 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of coordinate systems.""" +"""Definitions of coordinate systems. + +.. z_reference:: iris.coord_systems + :tags: topic_data_model + + API reference +""" from abc import ABCMeta, abstractmethod from functools import cached_property diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 1013353759..c1cc35dede 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of coordinates and other dimensional metadata.""" +"""Definitions of coordinates and other dimensional metadata. + +.. z_reference:: iris.coords + :tags: topic_data_model + + API reference +""" from abc import ABCMeta, abstractmethod from collections import namedtuple diff --git a/lib/iris/cube.py b/lib/iris/cube.py index a68e9d7599..5be2f9fe1e 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Classes for representing multi-dimensional data with metadata.""" +"""Classes for representing multi-dimensional data with metadata. + +.. z_reference:: iris.cube + :tags: topic_data_model;topic_lazy_data;topic_maths_stats;topic_regrid;topic_slice_combine + + API reference +""" from __future__ import annotations @@ -1126,7 +1132,7 @@ class Cube(CFVariableMixin): source 'Data from Met Office Unified Model' - See the :doc:`user guide` for more information. + See the :doc:`user manual` for more information. """ diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 56e220faf9..5589e03337 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Exceptions specific to the Iris package.""" +"""Exceptions specific to the Iris package. + +.. z_reference:: iris.exceptions + :tags: topic_troubleshooting + + API reference +""" class IrisError(Exception): diff --git a/lib/iris/experimental/__init__.py b/lib/iris/experimental/__init__.py index eea4259355..aa2ae2aec1 100644 --- a/lib/iris/experimental/__init__.py +++ b/lib/iris/experimental/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Experimental code can be introduced to Iris through this package. +.. z_reference:: iris.experimental + :tags: topic_experimental + + API reference + Changes to experimental code may be more extensive than in the rest of the codebase. The code is expected to graduate, eventually, to "full status". diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index 13c1613802..74300dbbdc 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Wrapper for animating iris cubes using iris or matplotlib plotting functions. 
+.. z_reference:: iris.experimental.animate + :tags: topic_experimental;topic_plotting + + API reference + Notes ----- .. deprecated:: 3.4.0 diff --git a/lib/iris/experimental/geovista.py b/lib/iris/experimental/geovista.py index a8e2c25c5d..a120877dad 100644 --- a/lib/iris/experimental/geovista.py +++ b/lib/iris/experimental/geovista.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Experimental module for using some GeoVista operations with Iris cubes.""" +"""Experimental module for using some GeoVista operations with Iris cubes. + +.. z_reference:: iris.experimental.geovista + :tags: topic_experimental;topic_interoperability;topic_plotting + + API reference +""" from geovista import Transform from geovista.common import VTK_CELL_IDS, VTK_POINT_IDS diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 7fc9c5153c..0b5057136c 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. +.. z_reference:: iris.experimental.raster + :tags: topic_experimental;topic_load_save + + API reference + See also: `GDAL - Geospatial Data Abstraction Library `_. TODO: If this module graduates from experimental the (optional) GDAL diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 9ad93f83b6..253fa5957e 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Regridding functions. +.. z_reference:: iris.experimental.regrid + :tags: topic_experimental;topic_regrid + + API reference + Notes ----- .. 
deprecated:: 3.2.0 diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index 886ba8c97f..ca36612330 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -11,6 +11,11 @@ `iris-esmf-regrid `_ instead. +.. z_reference:: iris.experimental.regrid_conservative + :tags: topic_experimental;topic_regrid + + API reference + """ import functools diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 4beac376ee..12ace6a33e 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of how Iris objects should be represented.""" +"""Definitions of how Iris objects should be represented. + +.. z_reference:: iris.experimental.representation + :tags: topic_experimental;topic_data_model + + API reference +""" from html import escape diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index 50f8c21dcf..3a5f823fe6 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Routines for putting data on new strata (aka. isosurfaces), often in the Z direction.""" +"""Routines for putting data on new strata (aka. isosurfaces), often in the Z direction. + +.. 
z_reference:: iris.experimental.stratify + :tags: topic_experimental;topic_interoperability + + API reference +""" from functools import partial diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 7db26ca26b..96f08e76b3 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -5,6 +5,11 @@ """Legacy import location for mesh support. +.. z_reference:: iris.experimental.ugrid + :tags: topic_experimental;topic_mesh;topic_data_model + + API reference + See :mod:`iris.mesh` for the new, correct import location. Notes diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index 6729141bf6..0af0888b3f 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A package for converting cubes to and from specific file formats.""" +"""A package for converting cubes to and from specific file formats. + +.. z_reference:: iris.fileformats + :tags: topic_load_save + + API reference +""" from iris.io.format_picker import ( DataSourceObjectProtocol, diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 1ac95a42eb..c27da55a0f 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provides ABF (and ABL) file format capabilities. +.. z_reference:: iris.fileformats.abf + :tags: topic_load_save + + API reference + ABF and ABL files are satellite file formats defined by Boston University. Including this module adds ABF and ABL loading to the session's capabilities. 
diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 2b6568c315..308ce381ee 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provide capability to load netCDF files and interpret them. +.. z_reference:: iris.fileformats.cf + :tags: topic_load_save + + API reference + Provides the capability to load netCDF files and interpret them according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 3c37395f6c..b1047bcffe 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`.""" +"""Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`. + +.. z_reference:: iris.fileformats.dot + :tags: topic_load_save + + API reference +""" import os import subprocess diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index bc1bb690c2..7b41d909f7 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides NAME file format loading capabilities.""" +"""Provides NAME file format loading capabilities. + +.. 
z_reference:: iris.fileformats.name + :tags: topic_load_save + + API reference +""" def _get_NAME_loader(filename): diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index fe53308cb0..b13b33ccd3 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""NAME file format loading functions.""" +"""NAME file format loading functions. + +.. z_reference:: iris.fileformats.name_loaders + :tags: topic_load_save + + API reference +""" import collections import datetime diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index 992392b9a1..f1e37f2545 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Support loading and saving NetCDF files using CF conventions for metadata interpretation. +.. z_reference:: iris.fileformats.netcdf + :tags: topic_load_save + + API reference + See : `NetCDF User's Guide `_ and `netCDF4 python module `_. diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 219f681e67..6557f4aebc 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. +.. z_reference:: iris.fileformats.netcdf.loader + :tags: topic_load_save + + API reference + See : `NetCDF User's Guide `_ and `netCDF4 python module `_. 
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 5177749c07..31a685f8ee 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Module to support the saving of Iris cubes to a NetCDF file. +.. z_reference:: iris.fileformats.netcdf.saver + :tags: topic_load_save + + API reference + Module to support the saving of Iris cubes to a NetCDF file, also using the CF conventions for metadata interpretation. diff --git a/lib/iris/fileformats/netcdf/ugrid_load.py b/lib/iris/fileformats/netcdf/ugrid_load.py index 0d4766057c..50147f17ef 100644 --- a/lib/iris/fileformats/netcdf/ugrid_load.py +++ b/lib/iris/fileformats/netcdf/ugrid_load.py @@ -5,6 +5,11 @@ r"""Allow the construction of :class:`~iris.mesh.MeshXY`. +.. z_reference:: iris.fileformats.netcdf.ugrid_load + :tags: topic_load_save;topic_mesh + + API reference + Extension functions for Iris NetCDF loading, to construct :class:`~iris.mesh.MeshXY` from UGRID data in files. diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index d646d71a30..03a44f1ec2 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides NIMROD file format capabilities.""" +"""Provides NIMROD file format capabilities. + +.. z_reference:: iris.fileformats.nimrod + :tags: topic_load_save + + API reference +""" from enum import Enum import glob diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 07b9f2a27e..1bb58d8b59 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -"""Rules for converting NIMROD fields into cubes.""" +"""Rules for converting NIMROD fields into cubes. + +.. z_reference:: iris.fileformats.nimrod_load_rules + :tags: topic_load_save + + API reference +""" from enum import Enum import re diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 211ba1621c..971c0ff7ef 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides UK Met Office Post Process (PP) format specific capabilities.""" +"""Provides UK Met Office Post Process (PP) format specific capabilities. + +.. z_reference:: iris.fileformats.pp + :tags: topic_load_save + + API reference +""" from abc import ABCMeta, abstractmethod import collections diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 59e0f31d17..f28e65a878 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -6,7 +6,13 @@ # Historically this was auto-generated from # SciTools/iris-code-generators:tools/gen_rules.py -"""PP Load Rules.""" +"""PP Load Rules. + +.. z_reference:: iris.fileformats.pp_load_rules + :tags: topic_load_save + + API reference +""" import calendar from functools import wraps diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index a6a72393a7..d2932015c3 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""PP Save Rules.""" +"""PP Save Rules. + +.. 
z_reference:: iris.fileformats.pp_save_rules + :tags: topic_load_save + + API reference +""" import warnings diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index d61389c663..dce36b2554 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Generalised mechanisms for metadata translation and cube construction.""" +"""Generalised mechanisms for metadata translation and cube construction. + +.. z_reference:: iris.fileformats.rules + :tags: topic_load_save + + API reference +""" import collections import threading diff --git a/lib/iris/fileformats/um/__init__.py b/lib/iris/fileformats/um/__init__.py index 3a4bd6c516..863ef6febf 100644 --- a/lib/iris/fileformats/um/__init__.py +++ b/lib/iris/fileformats/um/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provides iris loading support for UM Fieldsfile-like file types, and PP. +.. z_reference:: iris.fileformats.um + :tags: topic_load_save + + API reference + At present, the only UM file types supported are true FieldsFiles and LBCs. Other types of UM file may fail to load correctly (or at all). diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index d2e51a3257..b230e543c8 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -5,6 +5,10 @@ """ Provides UM/CF phenomenon translations. +.. z_reference:: iris.fileformats.um_cf_map + :tags: topic_load_save + + API reference """ from collections import namedtuple diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 4e32ebf20a..0a7cdd9abb 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. 
# See LICENSE in the root of the repository for full licensing details. -"""Provides an interface to manage URI scheme support in iris.""" +"""Provides an interface to manage URI scheme support in iris. + +.. z_reference:: iris.io + :tags: topic_load_save + + API reference +""" import collections from collections import OrderedDict @@ -54,7 +60,7 @@ def run_callback(callback, cube, field, filename): the caller of this function should handle this case. This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.cube import Cube @@ -452,7 +458,7 @@ def save(source, target, saver=None, **kwargs): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.cube import Cube, CubeList diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index c885a55074..3f93b5cfd6 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provide convenient file format identification. +.. z_reference:: iris.io.format_picker + :tags: topic_load_save + + API reference + A module to provide convenient file format identification through a combination of filename extension and file based *magic* numbers. diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index fd2d2ed139..c2d478806c 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Cube functions for iteration in step.""" +"""Cube functions for iteration in step. + +.. 
z_reference:: iris.iterate + :tags: topic_slice_combine + + API reference +""" from collections.abc import Iterator import itertools @@ -56,7 +62,7 @@ def izip(*cubes, **kwargs): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if not cubes: diff --git a/lib/iris/loading.py b/lib/iris/loading.py index b188d5ae9d..68042847c1 100644 --- a/lib/iris/loading.py +++ b/lib/iris/loading.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Iris general file loading mechanism.""" +"""Iris general file loading mechanism. + +.. z_reference:: iris.loading + :tags: topic_load_save + + API reference +""" from contextlib import contextmanager from dataclasses import dataclass @@ -537,6 +543,7 @@ class LoadProblems(threading.local): >>> warnings.filterwarnings("ignore") >>> helpers.get_names = get_names_original >>> std_names.STD_NAMES["air_temperature"] = air_temperature + >>> iris.FUTURE.date_microseconds = False """ diff --git a/lib/iris/mesh/__init__.py b/lib/iris/mesh/__init__.py index ff530a4abd..e872f6f864 100644 --- a/lib/iris/mesh/__init__.py +++ b/lib/iris/mesh/__init__.py @@ -5,6 +5,11 @@ """Infra-structure for unstructured mesh support. +.. z_reference:: iris.mesh + :tags: topic_data_model;topic_mesh + + API reference + Based on CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/. """ diff --git a/lib/iris/mesh/components.py b/lib/iris/mesh/components.py index 2cc10c18c1..3e61d7c0d4 100644 --- a/lib/iris/mesh/components.py +++ b/lib/iris/mesh/components.py @@ -5,6 +5,11 @@ """Iris data model representation of CF UGrid's Mesh and its constituent parts. +.. 
z_reference:: iris.mesh.components + :tags: topic_data_model;topic_mesh + + API reference + Eventual destination: dedicated module in :mod:`iris` root. """ diff --git a/lib/iris/mesh/utils.py b/lib/iris/mesh/utils.py index 3930fa3f1b..eebfcb9ae1 100644 --- a/lib/iris/mesh/utils.py +++ b/lib/iris/mesh/utils.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Utility operations specific to unstructured data.""" +"""Utility operations specific to unstructured data. + +.. z_reference:: iris.mesh.utils + :tags: topic_data_model;topic_mesh;topic_slice_combine + + API reference +""" from collections.abc import Sequence from typing import Union diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 500c203a43..7f8046fbf9 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Color map pallettes management. +.. z_reference:: iris.palette + :tags: topic_plotting + + API reference + Load, configure and register color map palettes and initialise color map meta-data mappings. """ @@ -128,7 +133,7 @@ def cmap_norm(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ args, kwargs = _default_cmap_norm((cube,), {}) diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 6d95143a01..dcdcbd3bb9 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provide conversion to and from Pandas data structures. +.. 
z_reference:: iris.pandas + :tags: topic_interoperability + + API reference + See also: https://pandas.pydata.org/ """ diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 789d7fbf86..82e45aba3a 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` interface. +.. z_reference:: iris.plot + :tags: topic_plotting + + API reference + See also: :ref:`matplotlib `. """ @@ -1092,7 +1097,7 @@ def contour(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ result = _draw_2d_from_points("contour", None, cube, *args, **kwargs) @@ -1119,7 +1124,7 @@ def contourf(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -1193,7 +1198,7 @@ def default_projection(cube): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # XXX logic seems flawed, but it is what map_setup did... @@ -1216,7 +1221,7 @@ def default_projection_extent(cube, mode=iris.coords.POINT_MODE): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" extents = cartography._xy_range(cube, mode) @@ -1258,7 +1263,7 @@ def orography_at_bounds(cube, facecolor="#888888", coords=None, axes=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # XXX Needs contiguous orography corners to work. raise NotImplementedError( @@ -1296,7 +1301,7 @@ def orography_at_points(cube, facecolor="#888888", coords=None, axes=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ style_args = {"facecolor": facecolor} @@ -1341,7 +1346,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ result = _draw_2d_from_bounds( @@ -1385,7 +1390,7 @@ def pcolor(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ kwargs.setdefault("antialiased", True) @@ -1421,7 +1426,7 @@ def pcolormesh(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ result = _draw_2d_from_bounds("pcolormesh", cube, *args, **kwargs) @@ -1449,7 +1454,7 @@ def points(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" @@ -1539,7 +1544,7 @@ def barbs(u_cube, v_cube, *args, **kwargs): # numpydoc ignore=PR08 :class:`cartopy.crs.CRS`. This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # @@ -1589,7 +1594,7 @@ def quiver(u_cube, v_cube, *args, **kwargs): # numpydoc ignore=PR08 :class:`cartopy.crs.CRS`. This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # @@ -1616,7 +1621,7 @@ def plot(*args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. Examples -------- @@ -1673,7 +1678,7 @@ def scatter(x, y, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # here we are more specific about argument types than generic 1d plotting @@ -1707,7 +1712,7 @@ def fill_between(x, y1, y2, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # here we are more specific about argument types than generic 1d plotting @@ -1741,7 +1746,7 @@ def hist(x, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" if isinstance(x, iris.cube.Cube): @@ -1783,7 +1788,7 @@ def symbols(x, y, symbols, size, axes=None, units="inches"): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if axes is None: @@ -1902,7 +1907,7 @@ def animate(cube_iterator, plot_func, fig=None, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ kwargs.setdefault("interval", 100) diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 107945677f..240ae56f02 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """High-level plotting extensions to :mod:`iris.plot`. +.. z_reference:: iris.quickplot + :tags: topic_plotting + + API reference + These routines work much like their :mod:`iris.plot` counterparts, but they automatically add a plot title, axis titles, and a colour bar when appropriate. @@ -190,7 +195,7 @@ def contour(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -228,7 +233,7 @@ def contourf(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -260,7 +265,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None, footer=None Notes ----- This function does not maintain laziness when called; it realises data. 
- See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ result = iplt.outline( @@ -285,7 +290,7 @@ def pcolor(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -310,7 +315,7 @@ def pcolormesh(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -331,7 +336,7 @@ def points(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -352,7 +357,7 @@ def plot(*args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ axes = kwargs.get("axes") @@ -372,7 +377,7 @@ def scatter(x, y, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ axes = kwargs.get("axes") @@ -392,7 +397,7 @@ def fill_between(x, y1, y2, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" axes = kwargs.get("axes") @@ -412,7 +417,7 @@ def hist(x, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ axes = kwargs.get("axes") diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index e52266b2fe..ad91bdfab5 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Contains symbol definitions for use with :func:`iris.plot.symbols`.""" +"""Contains symbol definitions for use with :func:`iris.plot.symbols`. + +.. z_reference:: iris.symbols + :tags: topic_plotting + + API reference +""" import itertools import math diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 59c1b40baf..58f5ef05fe 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -247,8 +247,8 @@ def test_license_headers(self): "dist/*", "docs/gallery_code/*/*.py", "docs/src/developers_guide/documenting/*.py", - "docs/src/userguide/plotting_examples/*.py", - "docs/src/userguide/regridding_plots/*.py", + "docs/src/user_manual/tutorial/plotting_examples/*.py", + "docs/src/user_manual/tutorial/regridding_plots/*.py", "docs/src/_build/*", "lib/iris/analysis/_scipy_interpolate.py", ) diff --git a/lib/iris/time.py b/lib/iris/time.py index f2bc4a08ce..6a9629b9ce 100644 --- a/lib/iris/time.py +++ b/lib/iris/time.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Time handling.""" +"""Time handling. + +.. 
z_reference:: iris.time + :tags: topic_data_model + + API reference +""" import functools diff --git a/lib/iris/util.py b/lib/iris/util.py index 2c413d2822..551b5aeb68 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Miscellaneous utility functions.""" +"""Miscellaneous utility functions. + +.. z_reference:: iris.util + :tags: topic_data_model;topic_slice_combine + + API reference +""" from __future__ import annotations @@ -92,7 +98,7 @@ def broadcast_to_shape(array, shape, dim_map, chunks=None): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if isinstance(array, da.Array): @@ -175,7 +181,7 @@ def delta(ndarray, dimension, circular=False): .. note:: This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if circular is not False: @@ -217,7 +223,7 @@ def describe_diff(cube_a, cube_b, output_file=None): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. note:: @@ -294,7 +300,7 @@ def guess_coord_axis(coord) -> Axis | None: Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. The ``guess_coord_axis`` behaviour can be skipped by setting the :attr:`~iris.coords.Coord.ignore_axis` property on `coord` to ``False``. 
@@ -371,7 +377,7 @@ def rolling_window( Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if window < 1: @@ -491,7 +497,7 @@ def array_equal(array1, array2, withnans: bool = False) -> bool: additional support for arrays of strings and NaN-tolerant operation. This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ def normalise_array(array): @@ -540,7 +546,7 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. deprecated:: 3.2.0 @@ -600,7 +606,7 @@ def between(lh, rh, lh_inclusive=True, rh_inclusive=True): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if lh_inclusive and rh_inclusive: @@ -660,7 +666,7 @@ def reverse(cube_or_array, coords_or_dims): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.cube import Cube @@ -732,7 +738,7 @@ def monotonic(array, strict=False, return_direction=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" if array.ndim != 1 or len(array) <= 1: @@ -789,7 +795,7 @@ def column_slices_generator(full_slice, ndims): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ list_of_slices = [] @@ -1193,7 +1199,7 @@ def clip_string(the_str, clip_length=70, rider="..."): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if clip_length >= len(the_str) or clip_length <= 0: @@ -1228,7 +1234,7 @@ def format_array(arr, edgeitems=3): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ max_line_len = 50 @@ -1282,7 +1288,7 @@ def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ @@ -1401,7 +1407,7 @@ def squeeze(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ slices = [0 if cube.shape[dim] == 1 else slice(None) for dim in range(cube.ndim)] @@ -1478,7 +1484,7 @@ def is_regular(coord): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" try: regular_step(coord) @@ -1495,7 +1501,7 @@ def regular_step(coord): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if coord.ndim != 1: @@ -1529,7 +1535,7 @@ def regular_points(zeroth, step, count): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ def make_steps(dtype: np.dtype): @@ -1561,7 +1567,7 @@ def points_step(points): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # Calculations only make sense with multiple points points = np.asanyarray(points) @@ -1596,7 +1602,7 @@ def unify_time_units(cubes): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ epochs = {} @@ -1738,7 +1744,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.coords import Coord, DimCoord @@ -1857,7 +1863,7 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" from iris.coords import Coord @@ -1951,7 +1957,7 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ lats_and_lons = [ @@ -2085,7 +2091,7 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): If either ``cube`` or ``points_to_mask`` is lazy, the result will be lazy. This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ @@ -2136,7 +2142,7 @@ def equalise_attributes(cubes): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # deferred import to avoid circularity problem @@ -2212,7 +2218,7 @@ def is_masked(array): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ diff --git a/lib/iris/warnings.py b/lib/iris/warnings.py index 1a885f60a3..d59ecf7885 100644 --- a/lib/iris/warnings.py +++ b/lib/iris/warnings.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Warnings specific to the :mod:`iris` package. +.. z_reference:: iris.warnings + :tags: topic_troubleshooting + + API reference + PLEASE NAMESPACE ALL WARNING CLASSES (i.e. prefix with Iris...). 
""" diff --git a/pyproject.toml b/pyproject.toml index 643fc661d7..88ae1c3bcc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,7 @@ Issues = "https://github.com/SciTools/iris/issues" extend-exclude = [ "_ff_cross_references.py", "um_cf_map.py", - "docs/src/sphinxext", + "docs/src/sphinxext/api_rst_formatting.py", "tools", ] line-length = 88 diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 56a43b85f7..e8d6e710a1 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: f6f5de785dfa266ec64d091b66f6ab12432b446820ea95baba9f63fee66c3ce4 +# input_hash: bba21896c1c2a030e57b979600273f03e99292131aca99b47994f07c0008f538 @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -11,7 +11,7 @@ https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1. 
https://conda.anaconda.org/conda-forge/noarch/python_abi-3.12-8_cp312.conda#c3efd25ac4d74b1584d2f7a57195ddf1 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#99884244028fe76046e3914f90d4ad05 -https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 +https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda#4492fd26db29495f0ba23f146cd5638d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 @@ -103,7 +103,7 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_0.conda#b7113551db5a3e2403cdd052c66e9999 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 @@ -129,7 +129,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.con https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_0.conda#70a09b6817c7ad694ef4543204c59c25 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.conda#b52b769cd13f7adaa6ccdc68ef801709 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -143,7 +143,7 @@ https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda# https://conda.anaconda.org/conda-forge/linux-64/python-3.12.12-hd63d673_2_cpython.conda#c4540d3de3fa228d9fa95e31f8e97f89 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda#b56e0c8432b56decafae7e78c5f29ba5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e @@ -153,13 +153,14 @@ https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/attrs-25.4.0-pyhcf101f3_1.conda#537296d57ea995666c68c821b00e360b https://conda.anaconda.org/conda-forge/linux-64/backports.zstd-1.3.0-py312h90b7ffd_0.conda#5b8c55fed2e576dde4b0b33693a4fdb1 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.2.0-py312hdb49522_1.conda#64088dffd7413a2dd557ce837b4cbbdb -https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda#eacc711330cd46939f66cd401ff9c44b +https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda#765c4d97e877cdbbb88ff33152b86125 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda#381bd45fb7aa032691f3063aff47e3a1 https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.4-pyhd8ed1ab_0.conda#a22d1fd9bf98827e280a02875d9a007a https://conda.anaconda.org/conda-forge/noarch/click-8.3.1-pyh8f84b5b_1.conda#ea8a6c3256897cc31263de9f455e25d9 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda#61b8078a0905b12529abc622406cb62c https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a +https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.12-py312hd8ed1ab_2.conda#ef3e093ecfd4533eee992cdaa155b47e https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py312h68e6be4_0.conda#14f638dad5953c83443a2c4f011f1c9e https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 @@ -178,6 +179,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda#9614359868482abba1bd15ce465e3c42 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.2-pyhd8ed1ab_0.conda#895f6625dd8a246fece9279fcc12c1de +https://conda.anaconda.org/conda-forge/linux-64/jsonschema-rs-0.37.4-py312h121d7ae_0.conda#1a2a2624770f712a536b8c5758c3387d https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_2.conda#3a3004fddd39e3bb1a631b08d7045156 https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.8.5-gpl_hc2c16d8_100.conda#5fdaa8b856683a5598459dead3976578 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-5_h0358290_openblas.conda#6636a2b6f1a87572df2970d3ebc87cc0 @@ -187,6 +189,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.12.2-default_hafda6a7 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 https://conda.anaconda.org/conda-forge/linux-64/libllvm20-20.1.8-hf7376ad_1.conda#eafa8fd1dfc9a107fe62f7f12cabbc9c https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda#1a2708a460884d6861425b7f9a7bef99 +https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.0-hf7376ad_0.conda#213f51bbcce2964ff2ec00d0fdd38541 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -245,7 +248,7 @@ https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1 
https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py312h8a5da7c_0.conda#a8df7f0812ac4fa6bbc7135556d3e2c4 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_1.conda#693cda60b9223f55d0836c885621611b +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_2.conda#29fd0bdf551881ab3d2801f7deaba528 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.61.1-py312h8a5da7c_0.conda#3bf8fb959dc598c67dac0430b4aff57a https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea943ca4f0d01d6eec6a60d24415dc5 @@ -253,7 +256,7 @@ https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc4 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 -https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c +https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.0-default_h746c552_0.conda#140459a7413d8f6884eb68205ce39a0d https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 @@ 
-267,11 +270,14 @@ https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py312h33ff503_1.cond https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py312h9b6a7d9_2.conda#573b9a879a3a42990f9c51d7376dce6b https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 +https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.1.0-pyhcf101f3_0.conda#ee4e2cad073411bdfa8598f599537498 +https://conda.anaconda.org/conda-forge/noarch/python-gil-3.12.12-hd8ed1ab_2.conda#d41b6b394546ee6e1c423e28a581fc71 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda#bdbd7385b4a67025ac2dba4ef8cb6a8f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py312h8a5da7c_0.conda#6a3fd177315aaafd4366930d440e4430 +https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda#aaa2a381ccc56eac91d63b6c1240312f https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py312h5d8c7f2_0.conda#7ee12bbdb2e989618c080c7c611048db https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 @@ -292,13 +298,13 @@ https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.c https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 
https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py312h4f23490_2.conda#ab856c36638ab1acf90e70349c525cf9 https://conda.anaconda.org/conda-forge/noarch/rich-14.3.3-pyhcf101f3_0.conda#7a6289c50631d620652f5045a63eb573 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py312h54fa4ab_1.conda#828eb07c4c87c38ed8c6560c25893280 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.1-py312h54fa4ab_0.conda#3e38daeb1fb05a95656ff5af089d2e4c https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py312h383787d_2.conda#69e400d3deca12ee7afd4b73a5596905 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py312hd9148b4_0.conda#55fd03988b1b1bc6faabbfb5b481ecd7 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.38.0-pyhcf101f3_0.conda#eaf8f08a04ab2d8612e45aa4f4c33357 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.0.0-pyhcf101f3_0.conda#0d574419484a88ee7e753cc0c80ee2c1 https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py312h4f23490_0.conda#6aef45ba3c0123547eb7b0f15852cac9 @@ -309,6 +315,7 @@ https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d 
https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py312he3d6523_0.conda#b8dc157bbbb69c1407478feede8b7b42 +https://conda.anaconda.org/conda-forge/linux-64/minijinja-2.16.0-py310h6de7dc8_0.conda#ff98a47d5488a9f504866f46ccae8e7c https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py312h25f8dc5_102.conda#99217b58c029977345b72bb36a1f6596 https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 @@ -330,19 +337,25 @@ https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda# https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 +https://conda.anaconda.org/conda-forge/noarch/requests-file-2.1.0-pyhd8ed1ab_1.conda#69be0472744969b97324d5d3b21845a9 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py312h6fba518_7.conda#2edca3790f2a372db44ff1aa159769fc https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.0-pyhd8ed1ab_0.conda#dd7ebc742e6a766322ed77931123631e +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.1-pyhd8ed1ab_0.conda#470eec436327b4ba57068baf83d57ed4 https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 
https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 +https://conda.anaconda.org/conda-forge/noarch/sphinx-data-viewer-0.1.5-pyhd8ed1ab_1.conda#fbe6437c6899c54e52f9258518035aca https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd +https://conda.anaconda.org/conda-forge/noarch/sphinx-reredirects-1.1.0-pyhd8ed1ab_0.conda#8392493ce6fbb8ab72a101af8e8a5d03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda#403185829255321ea427333f7773dd1f +https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.conda#f1b94f4ad598a4548fd08dbd46a73480 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 + diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index 182c932917..7261d16b7a 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: b214c8fa287dfc2dc1dce58414771bc08704b4ae09ebb02359517434384613bb +# input_hash: 9ab5bed7b6bc430379d6fc5b3517135bdf315a73bacb5020d41a674b2c708a4e @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -11,7 +11,7 @@ https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1. https://conda.anaconda.org/conda-forge/noarch/python_abi-3.13-8_cp313.conda#94305520c52a4aa3f6c2b1ff6008d9f8 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#99884244028fe76046e3914f90d4ad05 -https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 +https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda#4492fd26db29495f0ba23f146cd5638d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 @@ -104,7 +104,7 @@ https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a 
-https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_0.conda#b7113551db5a3e2403cdd052c66e9999 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 @@ -130,7 +130,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.con https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_0.conda#70a09b6817c7ad694ef4543204c59c25 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.conda#b52b769cd13f7adaa6ccdc68ef801709 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -144,7 +144,7 @@ https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda# https://conda.anaconda.org/conda-forge/linux-64/python-3.13.12-hc97d973_100_cp313.conda#4c875ed0e78c2d407ec55eadffb8cf3d https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda#b56e0c8432b56decafae7e78c5f29ba5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e @@ -154,13 +154,14 @@ https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/attrs-25.4.0-pyhcf101f3_1.conda#537296d57ea995666c68c821b00e360b https://conda.anaconda.org/conda-forge/linux-64/backports.zstd-1.3.0-py313h18e8e13_0.conda#d9e90792551a527200637e23a915dd79 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.2.0-py313hf159716_1.conda#6c4d3597cf43f3439a51b2b13e29a4ba -https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda#eacc711330cd46939f66cd401ff9c44b +https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda#765c4d97e877cdbbb88ff33152b86125 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda#381bd45fb7aa032691f3063aff47e3a1 https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.4-pyhd8ed1ab_0.conda#a22d1fd9bf98827e280a02875d9a007a https://conda.anaconda.org/conda-forge/noarch/click-8.3.1-pyh8f84b5b_1.conda#ea8a6c3256897cc31263de9f455e25d9 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda#61b8078a0905b12529abc622406cb62c https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 
https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a +https://conda.anaconda.org/conda-forge/noarch/cpython-3.13.12-py313hd8ed1ab_100.conda#9a4b8a37303b933b847c14a310f0557b https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py313hc80a56d_0.conda#4a08e7dd57fdc0a13dc699c4c6d76c3a https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 @@ -179,6 +180,7 @@ https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda#9614359868482abba1bd15ce465e3c42 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.2-pyhd8ed1ab_0.conda#895f6625dd8a246fece9279fcc12c1de +https://conda.anaconda.org/conda-forge/linux-64/jsonschema-rs-0.37.4-py313hdeb11d6_0.conda#e736b02c4a0905ad7636d798d48f3bed https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py313hc8edb43_2.conda#3e0e65595330e26515e31b7fc6d933c7 https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.8.5-gpl_hc2c16d8_100.conda#5fdaa8b856683a5598459dead3976578 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-5_h0358290_openblas.conda#6636a2b6f1a87572df2970d3ebc87cc0 @@ -188,6 +190,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.12.2-default_hafda6a7 https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 https://conda.anaconda.org/conda-forge/linux-64/libllvm20-20.1.8-hf7376ad_1.conda#eafa8fd1dfc9a107fe62f7f12cabbc9c https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda#1a2708a460884d6861425b7f9a7bef99 
+https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.0-hf7376ad_0.conda#213f51bbcce2964ff2ec00d0fdd38541 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 @@ -245,7 +248,7 @@ https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1 https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py313h3dea7bd_0.conda#77e1fc7133e03ccd62070f2405c82ea9 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_1.conda#bcca9afd203fe05d9582249ac12762da +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_2.conda#7e7e3c5a8a28c6b8eb430183e0554adf https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.61.1-py313h3dea7bd_0.conda#c0f36dfbb130da4f6ce2df31f6b25ea8 https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea943ca4f0d01d6eec6a60d24415dc5 @@ -253,7 +256,7 @@ https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc4 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 
-https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_3.conda#b4277f5a09d458a0306db3147bd0171c +https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.0-default_h746c552_0.conda#140459a7413d8f6884eb68205ce39a0d https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 @@ -268,10 +271,13 @@ https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0ba https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py313h77f6078_2.conda#42d11c7d1ac21ae2085f58353641e71c https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 +https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.1.0-pyhcf101f3_0.conda#ee4e2cad073411bdfa8598f599537498 +https://conda.anaconda.org/conda-forge/noarch/python-gil-3.13.12-h4df99d1_100.conda#3d92938d5b83c49162ade038aab58a59 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py313h3dea7bd_0.conda#e9415b0f7b43d2e32a3f24fd889c9e70 +https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda#aaa2a381ccc56eac91d63b6c1240312f https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py313hd6074c6_0.conda#684fb9c78db5024b939a1ed0a107f464 
https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 @@ -291,14 +297,14 @@ https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py313h29aa505_2.c https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py313h29aa505_2.conda#60f5d1c039da10fe89a530cc93ea50ac https://conda.anaconda.org/conda-forge/noarch/rich-14.3.3-pyhcf101f3_0.conda#7a6289c50631d620652f5045a63eb573 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py313h4b8bb8b_1.conda#2b18fe5b4b2d1611ddf8c2f080a46563 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.1-py313h4b8bb8b_0.conda#ec81bc03787968decae6765c7f61b7cf https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py313had47c43_2.conda#6e550dd748e9ac9b2925411684e076a1 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py313h7037e92_0.conda#cb423e0853b3dde2b3738db4dedf5ba2 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.38.0-pyhcf101f3_0.conda#eaf8f08a04ab2d8612e45aa4f4c33357 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.0.0-pyhcf101f3_0.conda#0d574419484a88ee7e753cc0c80ee2c1 https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e 
https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py313h29aa505_0.conda#3942b6a86fe92d0888b3373f2c1e1676 @@ -309,6 +315,7 @@ https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py313h683a580_0.conda#ffe67570e1a9192d2f4c189b27f75f89 +https://conda.anaconda.org/conda-forge/linux-64/minijinja-2.16.0-py310h6de7dc8_0.conda#ff98a47d5488a9f504866f46ccae8e7c https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py313h16051e2_102.conda#20ae46c5e9c7106bdb2cac6b44b7d845 https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 @@ -329,18 +336,23 @@ https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda# https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 +https://conda.anaconda.org/conda-forge/noarch/requests-file-2.1.0-pyhd8ed1ab_1.conda#69be0472744969b97324d5d3b21845a9 https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py313hbb97348_7.conda#03c6ddd039b6877278b5c4df20b61f29 https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e 
-https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.0-pyhd8ed1ab_0.conda#dd7ebc742e6a766322ed77931123631e +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.1-pyhd8ed1ab_0.conda#470eec436327b4ba57068baf83d57ed4 https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 +https://conda.anaconda.org/conda-forge/noarch/sphinx-data-viewer-0.1.5-pyhd8ed1ab_1.conda#fbe6437c6899c54e52f9258518035aca https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd +https://conda.anaconda.org/conda-forge/noarch/sphinx-reredirects-1.1.0-pyhd8ed1ab_0.conda#8392493ce6fbb8ab72a101af8e8a5d03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda#403185829255321ea427333f7773dd1f +https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.conda#f1b94f4ad598a4548fd08dbd46a73480 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 
https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock index 420e625a9c..d9b07c61b0 100644 --- a/requirements/locks/py314-linux-64.lock +++ b/requirements/locks/py314-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 5140d73d0e22ca24039e1c88bb70517169c79003d4d42a5bb2c726d5e0867d54 +# input_hash: 222dc40ff464e3baba18164bb7c1c488a585b0ac86accd3a8b2b8bc60c5cde07 @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -9,7 +9,7 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.co https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_3.conda#129e404c5b001f3ef5581316971e3ea0 https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda#0539938c55b6b1a59b560e843ad864a4 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 -https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 +https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda#4492fd26db29495f0ba23f146cd5638d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 @@ -86,7 +86,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_0.conda#b7113551db5a3e2403cdd052c66e9999 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 @@ -106,7 +106,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8c https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_0.conda#70a09b6817c7ad694ef4543204c59c25 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.conda#b52b769cd13f7adaa6ccdc68ef801709 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 
https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 @@ -119,7 +119,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda# https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd https://conda.anaconda.org/conda-forge/linux-64/python-3.14.3-h32b2ec7_101_cp314.conda#c014ad06e60441661737121d3eae8a60 https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda#b56e0c8432b56decafae7e78c5f29ba5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e @@ -128,12 +128,13 @@ https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/attrs-25.4.0-pyhcf101f3_1.conda#537296d57ea995666c68c821b00e360b https://conda.anaconda.org/conda-forge/noarch/backports.zstd-1.3.0-py314h680f03e_0.conda#a2ac7763a9ac75055b68f325d3255265 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.2.0-py314h3de4e8d_1.conda#8910d2c46f7e7b519129f486e0fe927a -https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda#eacc711330cd46939f66cd401ff9c44b +https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda#765c4d97e877cdbbb88ff33152b86125 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda#381bd45fb7aa032691f3063aff47e3a1 
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.4-pyhd8ed1ab_0.conda#a22d1fd9bf98827e280a02875d9a007a https://conda.anaconda.org/conda-forge/noarch/click-8.3.1-pyh8f84b5b_1.conda#ea8a6c3256897cc31263de9f455e25d9 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda#61b8078a0905b12529abc622406cb62c https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 +https://conda.anaconda.org/conda-forge/noarch/cpython-3.14.3-py314hd8ed1ab_101.conda#3bb89e4f795e5414addaa531d6b1500a https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py314h1807b08_0.conda#866fd3d25b767bccb4adc8476f4035cd https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 @@ -150,6 +151,7 @@ https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda#9614359868482abba1bd15ce465e3c42 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.2-pyhd8ed1ab_0.conda#895f6625dd8a246fece9279fcc12c1de +https://conda.anaconda.org/conda-forge/linux-64/jsonschema-rs-0.37.4-py314h5059d10_0.conda#bbdf9e1de411fd55c05751bea6e2361c https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py314h97ea11e_2.conda#57f1ce4f7ba6bcd460be8f83c8f04c69 https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.8.5-gpl_hc2c16d8_100.conda#5fdaa8b856683a5598459dead3976578 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-5_h0358290_openblas.conda#6636a2b6f1a87572df2970d3ebc87cc0 @@ -205,7 +207,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py314h4a8dc5f_1.conda 
https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py314h67df5f8_0.conda#6c7efc167cee337d9c41200506d022b8 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_1.conda#51b0391b0ce96be49b1174e9a3e4a279 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_2.conda#a6a32cab83d59c7812ddbb03220057e3 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab https://conda.anaconda.org/conda-forge/noarch/fonttools-4.61.1-pyh7db6752_0.conda#d5da976e963e70364b9e3ff270842b9f https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 @@ -221,8 +223,11 @@ https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0ba https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py314h24aeaa0_2.conda#b46a7e6a2b8c064488576c3e42d85df0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 +https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.1.0-pyhcf101f3_0.conda#ee4e2cad073411bdfa8598f599537498 +https://conda.anaconda.org/conda-forge/noarch/python-gil-3.14.3-h4df99d1_101.conda#235765e4ea0d0301c75965985163b5a1 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f +https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda#aaa2a381ccc56eac91d63b6c1240312f 
https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e @@ -237,14 +242,14 @@ https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de https://conda.anaconda.org/conda-forge/linux-64/pandas-3.0.1-py314hb4ffadd_0.conda#23fc526360815090f6bfcd7c6c8e4954 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py314hc02f841_2.conda#5be92985870940eac3f3b8cda57002cc -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.0-py314hf07bd8e_1.conda#c7df812186fb1290bc00d9b7b5a50b18 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.1-py314hf07bd8e_0.conda#d0510124f87c75403090e220db1e9d41 https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py314hbe3edd8_2.conda#5963e6ee81772d450a35e6bc95522761 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py314h9891dd4_0.conda#5d3c008e54c7f49592fca9c32896a76f https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.38.0-pyhcf101f3_0.conda#eaf8f08a04ab2d8612e45aa4f4c33357 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.0.0-pyhcf101f3_0.conda#0d574419484a88ee7e753cc0c80ee2c1 
https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py314hc02f841_0.conda#de50a60eab348de04809a33e180b4b01 https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_1.conda#3c155e2914169b807ebb4027a8c0999c @@ -253,6 +258,7 @@ https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.cond https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py314h1194b4b_0.conda#b8683e6068099b69c10dbfcf7204203f +https://conda.anaconda.org/conda-forge/linux-64/minijinja-2.16.0-py310h6de7dc8_0.conda#ff98a47d5488a9f504866f46ccae8e7c https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py314h4ae7121_102.conda#cf495d9fc5e01a2ee10e0867ce957a44 https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 @@ -267,15 +273,20 @@ https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-ha5ea40c_7.conda#f6 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda#d62da3d560992bfa2feb611d7be813b8 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 +https://conda.anaconda.org/conda-forge/noarch/requests-file-2.1.0-pyhd8ed1ab_1.conda#69be0472744969b97324d5d3b21845a9 https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e 
https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 +https://conda.anaconda.org/conda-forge/noarch/sphinx-data-viewer-0.1.5-pyhd8ed1ab_1.conda#fbe6437c6899c54e52f9258518035aca https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd +https://conda.anaconda.org/conda-forge/noarch/sphinx-reredirects-1.1.0-pyhd8ed1ab_0.conda#8392493ce6fbb8ab72a101af8e8a5d03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda#403185829255321ea427333f7773dd1f +https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.conda#f1b94f4ad598a4548fd08dbd46a73480 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/py312.yml b/requirements/py312.yml index 454bfd7e79..a9a6bcdc3d 100644 --- a/requirements/py312.yml +++ b/requirements/py312.yml @@ 
-58,6 +58,8 @@ dependencies: - sphinx-design # Pinned reason: https://github.com/SciTools/iris/issues/6885 - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 + - sphinx-needs + - sphinx-reredirects # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/py313.yml b/requirements/py313.yml index 32b606c02a..ab771263e1 100644 --- a/requirements/py313.yml +++ b/requirements/py313.yml @@ -58,6 +58,8 @@ dependencies: - sphinx-design # Pinned reason: https://github.com/SciTools/iris/issues/6885 - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 + - sphinx-needs + - sphinx-reredirects # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/py314.yml b/requirements/py314.yml index 706f697e20..fa6d8b275b 100644 --- a/requirements/py314.yml +++ b/requirements/py314.yml @@ -58,6 +58,8 @@ dependencies: - sphinx-design # Pinned reason: https://github.com/SciTools/iris/issues/6885 - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 + - sphinx-needs + - sphinx-reredirects # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 85372b7cc7..1e491ad5f7 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -33,6 +33,11 @@ to another dictionary of other standard name attributes. Currently only the `canonical_unit` exists in these attribute dictionaries. +.. z_reference:: iris.std_names + :tags: topic_load_save;topic_data_model + + API reference + This file is automatically generated. Do not edit this file by hand. 
Generated from CF standard-name table version : {table_version} From 777a7a7e6ea00c799f059353e5cb4f4ad3949620 Mon Sep 17 00:00:00 2001 From: "scitools-ci[bot]" <107775138+scitools-ci[bot]@users.noreply.github.com> Date: Mon, 2 Mar 2026 14:57:24 +0000 Subject: [PATCH 29/77] Updated environment lockfiles (#6960) Co-authored-by: Lockfile bot --- requirements/locks/py312-linux-64.lock | 25 +++++++++++-------------- requirements/locks/py313-linux-64.lock | 22 ++++++++++------------ requirements/locks/py314-linux-64.lock | 3 +-- 3 files changed, 22 insertions(+), 28 deletions(-) diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index e8d6e710a1..61922cfa50 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: bba21896c1c2a030e57b979600273f03e99292131aca99b47994f07c0008f538 +# input_hash: ca8b16b5beee5e7904ebf63d5ce008b9de4a9434ad4f7a3cfe3ae09a5479ef0c @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -55,7 +55,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxshmfence-1.3.3-hb9d3cd8 https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2025.1-hb03c661_0.conda#aa8d21be4b461ce612d8f5fb791decae https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.3-hb47aa4a_0.conda#607e13a8caac17f9a664bcab5302ce06 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda#a77f85f77be52ff59391544bfe73390a -https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.0-h54a6638_0.conda#ddf9fed4661bace13f33f08fe38a5f45 +https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.2-h54a6638_0.conda#83dae3dfadcfec9b37a9fbff6f7f7378 
https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.4.0-hecca717_0.conda#dbe3ec0f120af456b3477743ffd99b74 https://conda.anaconda.org/conda-forge/linux-64/fmt-12.1.0-hff5e90c_0.conda#f7d7a4104082b39e3b3473fbd4a38229 https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda#4d4efd0645cd556fab54617c4ad477ef @@ -100,7 +100,7 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da @@ -125,7 +125,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda# https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 -https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 
+https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hac629b4_1.conda#af491aae930edc096b58466c51c4126c https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 @@ -133,8 +133,8 @@ https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.con https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h7a8fb5f_6.conda#49c553b47ff679a6a1e9fc80b9c5a2d4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-hcf29cc6_1.conda#1707cdd636af2ff697b53186572c9f77 https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.1-ha770c72_0.conda#f4084e4e6577797150f9b04a4560ceb0 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca @@ -188,7 +188,6 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-devel-1.7.0-ha4b6fd6_2.co https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.12.2-default_hafda6a7_1000.conda#0ed3aa3e3e6bc85050d38881673a692f 
https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 https://conda.anaconda.org/conda-forge/linux-64/libllvm20-20.1.8-hf7376ad_1.conda#eafa8fd1dfc9a107fe62f7f12cabbc9c -https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda#1a2708a460884d6861425b7f9a7bef99 https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.0-hf7376ad_0.conda#213f51bbcce2964ff2ec00d0fdd38541 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 @@ -199,7 +198,7 @@ https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py312hd9148b4_1.conda#2e489969e38f0b428c39492619b5e6e5 https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.1-py312h8a5da7c_0.conda#17c77acc59407701b54404cfd3639cac https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-hbde042b_1.conda#3c40a106eadf7c14c6236ceddb267893 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py312h50c33e8_0.conda#c5eff3ada1a829f0bdb780dc4b62bbae https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.2-pyhcf101f3_0.conda#4fefefb892ce9cc1539405bec2f1a6cd @@ -212,7 +211,6 @@ https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 
https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py312h0d868a3_1.conda#1cfb9b04c827219597def32c22fb9ca2 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py312h8a5da7c_1.conda#15878599a87992e44c059731771591cb @@ -255,12 +253,12 @@ https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea9 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp22.1-22.1.0-default_h99862b1_0.conda#d966a23335e090a5410cc4f0dec8d00a https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.0-default_h746c552_0.conda#140459a7413d8f6884eb68205ce39a0d https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 -https://conda.anaconda.org/conda-forge/linux-64/libpq-18.2-hb80d175_0.conda#fa63c385ddb50957d93bdb394e355be8 
+https://conda.anaconda.org/conda-forge/linux-64/libpq-18.3-h9abb657_0.conda#405ec206d230d9d37ad7c2636114cbf4 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda#5b5203189eb668f042ac2b0826244964 @@ -292,7 +290,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.co https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h4f23490_2.conda#cec5bc5f7d374f8f8095f8e28e31f6cb https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py312hf79963d_1.conda#e597b3e812d9613f659b7d87ad252d18 +https://conda.anaconda.org/conda-forge/linux-64/pandas-3.0.1-py312h8ecdadd_0.conda#c15e7f8dd2e407188a8b7c0790211206 https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda#67bdec43082fd8a9cffb9484420b39a2 https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.conda#1af24b0eb99b1158c0d8c64a4d6c5d79 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 @@ -323,7 +321,7 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.cond https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py312h4c3975b_3.conda#b0610b4174af97290f5f466a72583071 
-https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-h17e89b9_5.conda#6c4f73c9a7e9b51f3a8e321c3e867bb6 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py312hcedc861_0.conda#f0d110978a87b200a06412b56b26407c https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd @@ -358,4 +356,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.co https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 - diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index 7261d16b7a..023aa62b41 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: 9ab5bed7b6bc430379d6fc5b3517135bdf315a73bacb5020d41a674b2c708a4e +# input_hash: 166090584da1a695f5de5cca145a0d4f98f482456d21a57920f19d9a23cd9bba @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -56,7 +56,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxshmfence-1.3.3-hb9d3cd8 https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2025.1-hb03c661_0.conda#aa8d21be4b461ce612d8f5fb791decae https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.3-hb47aa4a_0.conda#607e13a8caac17f9a664bcab5302ce06 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda#a77f85f77be52ff59391544bfe73390a -https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.0-h54a6638_0.conda#ddf9fed4661bace13f33f08fe38a5f45 +https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.2-h54a6638_0.conda#83dae3dfadcfec9b37a9fbff6f7f7378 https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.4.0-hecca717_0.conda#dbe3ec0f120af456b3477743ffd99b74 https://conda.anaconda.org/conda-forge/linux-64/fmt-12.1.0-hff5e90c_0.conda#f7d7a4104082b39e3b3473fbd4a38229 https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda#4d4efd0645cd556fab54617c4ad477ef @@ -101,7 +101,7 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 
+https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da @@ -126,7 +126,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda# https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 -https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 +https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hac629b4_1.conda#af491aae930edc096b58466c51c4126c https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 @@ -134,8 +134,8 @@ https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.con https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 
-https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h7a8fb5f_6.conda#49c553b47ff679a6a1e9fc80b9c5a2d4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-hcf29cc6_1.conda#1707cdd636af2ff697b53186572c9f77 https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.1-ha770c72_0.conda#f4084e4e6577797150f9b04a4560ceb0 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca @@ -189,7 +189,6 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-devel-1.7.0-ha4b6fd6_2.co https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.12.2-default_hafda6a7_1000.conda#0ed3aa3e3e6bc85050d38881673a692f https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 https://conda.anaconda.org/conda-forge/linux-64/libllvm20-20.1.8-hf7376ad_1.conda#eafa8fd1dfc9a107fe62f7f12cabbc9c -https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda#1a2708a460884d6861425b7f9a7bef99 https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.0-hf7376ad_0.conda#213f51bbcce2964ff2ec00d0fdd38541 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 @@ -200,7 +199,7 @@ https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py313h7037e92_1.conda#cd1cfde0ea3bca6c805c73ffa988b12a 
https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.1-py313h3dea7bd_0.conda#4f3e7bf5a9fc60a7d39047ba9e84c84c https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-hbde042b_1.conda#3c40a106eadf7c14c6236ceddb267893 https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py313h80991f8_0.conda#2d5ee4938cdde91a8967f3eea686c546 https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f @@ -255,12 +254,12 @@ https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea9 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_3.conda#24a2802074d26aecfdbc9b3f1d8168d1 +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp22.1-22.1.0-default_h99862b1_0.conda#d966a23335e090a5410cc4f0dec8d00a https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.0-default_h746c552_0.conda#140459a7413d8f6884eb68205ce39a0d https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 
-https://conda.anaconda.org/conda-forge/linux-64/libpq-18.2-hb80d175_0.conda#fa63c385ddb50957d93bdb394e355be8 +https://conda.anaconda.org/conda-forge/linux-64/libpq-18.3-h9abb657_0.conda#405ec206d230d9d37ad7c2636114cbf4 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda#5b5203189eb668f042ac2b0826244964 @@ -322,7 +321,7 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.cond https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py313h07c4f96_3.conda#b7810803a3481e22968022a94107ed93 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-hb82b983_4.conda#9861c7820fdb45bc50a2ea60f4ff7952 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-h17e89b9_5.conda#6c4f73c9a7e9b51f3a8e321c3e867bb6 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py313h2005660_0.conda#d551bd1d2fcfac36674dbe2be4b0a410 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd @@ -356,4 +355,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.co https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac 
https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 - diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock index d9b07c61b0..faef16acc7 100644 --- a/requirements/locks/py314-linux-64.lock +++ b/requirements/locks/py314-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 222dc40ff464e3baba18164bb7c1c488a585b0ac86accd3a8b2b8bc60c5cde07 +# input_hash: cb29f99d14bcb9e02d4cad9b06609f6e0bd28b089ec3613bf13282c0636a7c94 @EXPLICIT https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 @@ -290,4 +290,3 @@ https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.co https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 - From 955e247ec0aea752cdbbdca2475471bc9083e57a Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Mon, 2 Mar 2026 15:58:58 +0000 Subject: [PATCH 30/77] Credit @ESadek-MO for critical Diataxis buddy work. (#6959) --- docs/src/whatsnew/latest.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index f131f619d6..55399c92df 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -102,7 +102,7 @@ This document explains the changes made to Iris for this release #. `@pp-mo`_ added a page on how to access datafiles in S3 buckets. 
(:issue:`6374`, :pull:`6951`) -#. `@trexfeathers`_, `@stephenworsley`_ and `@tkknight`_ reorganised **all** +#. `@trexfeathers`_, `@stephenworsley`_, `@ESadek-MO`_ and `@tkknight`_ reorganised **all** user documentation pages into a new structure: :doc:`/user_manual/index`. This restructure is to maximise discoverability of the available pages, as well as embracing the `Diataxis`_ framework for better engagement with user From bf992a867398a29519a50a41d37412b4ff231f11 Mon Sep 17 00:00:00 2001 From: Chris Bunney <48915820+ukmo-ccbunney@users.noreply.github.com> Date: Wed, 4 Mar 2026 14:07:39 +0000 Subject: [PATCH 31/77] `str` representation of numpy scalars in cube summary (#6966) * Use `str` representation for numpy scalars in cube/coord summary * Added Whatsnew --- docs/src/whatsnew/latest.rst | 4 ++++ lib/iris/_representation/cube_summary.py | 3 ++- .../representation/cube_summary/test_CubeSummary.py | 11 ++++++++++- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 55399c92df..34e5282c7d 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -140,6 +140,10 @@ This document explains the changes made to Iris for this release #. `@hsteptoe`_ and `@ESadek-MO`_ (reviewer) added static type hinting to :mod:`iris.pandas`. (:pull:`6948`) +#. `@ukmo-ccbunney`_ changed formatting of numpy scalars attributes when generating a + Cube/Coord summary to use ``str`` representation instead of ``repr``. + (:pull:`6966`, :issue:`6692`) + .. comment Whatsnew author names (@github name) in alphabetical order. 
Note that, core dev names are automatically included by the common_links.inc: diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 20d93f1acf..ae0318d728 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -81,7 +81,8 @@ def array_repr(arr): def value_repr(value, quote_strings=False, clip_strings=False): """Produce a single-line printable version of an attribute or scalar value.""" - if hasattr(value, "dtype"): + if hasattr(value, "dtype") and hasattr(value, "shape") and len(value.shape) > 0: + # Only format as array if value is not a scalar. value = array_repr(value) elif isinstance(value, str): value = string_repr( diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index ec568ed13d..4323755606 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -220,15 +220,24 @@ def test_ancillary_variable(self): def test_attributes(self): cube = self.cube - cube.attributes = {"a": 1, "b": "two", "c": " this \n that\tand."} + cube.attributes = { + "a": 1, + "b": "two", + "c": " this \n that\tand.", + "d": np.array([1, 2]), + "e": np.float32(123.456), + } rep = CubeSummary(cube) attribute_section = rep.scalar_sections["Attributes:"] attribute_contents = attribute_section.contents + print(attribute_contents) expected_contents = [ "a: 1", "b: 'two'", "c: ' this \\n that\\tand.'", + "d: array([1, 2])", + "e: 123.456", ] # Note: a string with \n or \t in it gets "repr-d". # Other strings don't (though in coord 'extra' lines, they do.) 
From c6dbfc145c9a62281d7a72df87de43839d0e1bf3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 4 Mar 2026 14:55:32 +0000 Subject: [PATCH 32/77] chore: update pre-commit hooks (#6963) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update pre-commit hooks updates: - [github.com/astral-sh/ruff-pre-commit: v0.14.14 → v0.15.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.14.14...v0.15.4) * style: pre-commit fixes * Ruff FURB171, test_CubeMetadata: replace in with equality check. * Ruff FURB171, test__grid_mappings: replace if with match, to maintain syntax consistency. * Revert "Ruff FURB171, test__grid_mappings: replace if with match, to maintain syntax consistency." This reverts commit 75867abf44b325eda0c5dad23218f2d19829ea69. * Ruff FURB171, test__grid_mappings: replace in with equality check. --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Martin Yeo --- .pre-commit-config.yaml | 2 +- lib/iris/common/metadata.py | 16 +++++++++------- lib/iris/coords.py | 4 ++-- lib/iris/cube.py | 6 ++++-- lib/iris/plot.py | 16 ++++++++-------- .../unit/common/metadata/test_CubeMetadata.py | 2 +- .../unit/common/metadata/test_MeshMetadata.py | 5 +++-- .../nc_load_rules/actions/test__grid_mappings.py | 6 +++--- .../tests/unit/fileformats/rules/test_rules.py | 4 ++-- 9 files changed, 33 insertions(+), 28 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f3ef97b424..e4ac43f2f3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,7 +35,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.14.14" + rev: "v0.15.4" hooks: - id: ruff types: [file, python] diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 93898a34de..f4af7584b4 100644 --- a/lib/iris/common/metadata.py 
+++ b/lib/iris/common/metadata.py @@ -210,13 +210,15 @@ def func(field): # Certain members never participate in strict equivalence, so # are filtered out. fields = filter( - lambda field: field - not in ( - "circular", - "location_axis", - "node_dimension", - "edge_dimension", - "face_dimension", + lambda field: ( + field + not in ( + "circular", + "location_axis", + "node_dimension", + "edge_dimension", + "face_dimension", + ) ), self._fields, ) diff --git a/lib/iris/coords.py b/lib/iris/coords.py index c1cc35dede..044dc21f0f 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -3155,8 +3155,8 @@ def __init__(self, method, coords=None, intervals=None, comments=None): elif isinstance(coords, str): _coords.append(BaseMetadata.token(coords) or default_name) else: - normalise = ( - lambda coord: coord.name(token=True) + normalise = lambda coord: ( + coord.name(token=True) if isinstance(coord, Coord) else BaseMetadata.token(coord) or default_name ) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index 5be2f9fe1e..44be3a63d7 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -4929,8 +4929,10 @@ def aggregated_by( # coordinate dimension. shared_coords = list( filter( - lambda coord_: coord_ not in groupby_coords - and dimension_to_groupby in self.coord_dims(coord_), + lambda coord_: ( + coord_ not in groupby_coords + and dimension_to_groupby in self.coord_dims(coord_) + ), self.dim_coords + self.aux_coords, ) ) diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 82e45aba3a..41d6fc73aa 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -261,7 +261,7 @@ def _broadcast_2d(u, v): def _string_coord_axis_tick_labels(string_axes, axes=None): """Apply tick labels for string coordinates.""" - ax = axes if axes else plt.gca() + ax = axes or plt.gca() for axis, ticks in string_axes.items(): # Define a tick formatter. 
This will assign a label to all ticks # located precisely on an integer in range(len(ticks)) and assign @@ -295,7 +295,7 @@ def _invert_yaxis(v_coord, axes=None): axes : optional """ - axes = axes if axes else plt.gca() + axes = axes or plt.gca() yaxis_is_inverted = axes.yaxis_inverted() if not yaxis_is_inverted and isinstance(v_coord, iris.coords.Coord): attr_pve = v_coord.attributes.get("positive") @@ -480,7 +480,7 @@ def _draw_2d_from_bounds(draw_method_name, cube, *args, **kwargs): u, v = _broadcast_2d(u, v) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) result = draw_method(u, v, data, *args, **kwargs) # Apply tick labels for string coordinates. @@ -571,7 +571,7 @@ def _draw_2d_from_points(draw_method_name, arg_func, cube, *args, **kwargs): u, v = _broadcast_2d(u, v) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) if arg_func is not None: args, kwargs = arg_func(u, v, data, *args, **kwargs) result = draw_method(*args, **kwargs) @@ -827,7 +827,7 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs): u = _shift_plot_sections(u_object, u, v) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) if arg_func is not None: args, kwargs = arg_func(u, v, *args, **kwargs) result = draw_method(*args, **kwargs) @@ -875,7 +875,7 @@ def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): kwargs = _ensure_cartopy_axes_and_determine_kwargs(u_object, v_object1, kwargs) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) if arg_func is not None: args, kwargs = arg_func(u, v1, v2, *args, **kwargs) result = draw_method(*args, **kwargs) @@ 
-1072,7 +1072,7 @@ def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwar # Draw the contour lines/filled contours. axes = kwargs.pop("axes", None) - plotfn = getattr(axes if axes else plt, draw_method_name) + plotfn = getattr(axes or plt, draw_method_name) return plotfn(*new_args, **kwargs) @@ -1850,7 +1850,7 @@ def citation(text, figure=None, axes=None): figure = plt.gcf() anchor = AnchoredText(text, prop=dict(size=6), frameon=True, loc=4) anchor.patch.set_boxstyle("round, pad=0, rounding_size=0.2") - axes = axes if axes else figure.gca() + axes = axes or figure.gca() axes.add_artist(anchor) diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 0b4725da42..a83f13e1df 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -814,7 +814,7 @@ def test_op_same(self, leniency, mocker): def test_op_different__none(self, fieldname, leniency, mocker): # One side has field=value, and the other field=None, both strict + lenient. - if fieldname in ("attributes",): + if fieldname == "attributes": # These cannot properly be set to 'None'. Tested elsewhere. pytest.skip() diff --git a/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py index 5e9c06cc8e..8811cc15b1 100644 --- a/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py @@ -94,8 +94,9 @@ def _setup(self, mocker): # The "node_dimension", "edge_dimension" and "face_dimension" members # are stateful only; they do not participate in lenient/strict equivalence. 
self.members_dim_names = filter( - lambda member: member - in ("node_dimension", "edge_dimension", "face_dimension"), + lambda member: ( + member in ("node_dimension", "edge_dimension", "face_dimension") + ), self.cls._members, ) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index f0eca80d32..41d5c33623 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -177,7 +177,7 @@ def _make_testcase_cdl( {g_varname}:{lonpo_name} = 0.0 ; """ # Those which require 'longitude of central meridian' - if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + if mapping_type_name == hh.CF_GRID_MAPPING_TRANSVERSE: latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN g_string += f""" {g_varname}:{latcm_name} = 0.0 ; @@ -192,7 +192,7 @@ def _make_testcase_cdl( {g_varname}:{pph_name} = 600000.0 ; """ # Those which require 'sweep angle axis' - if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + if mapping_type_name == hh.CF_GRID_MAPPING_GEOSTATIONARY: saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS g_string += f""" {g_varname}:{saa_name} = "y" ; @@ -200,7 +200,7 @@ def _make_testcase_cdl( # Polar stereo needs a special 'latitude of projection origin', a # 'straight_vertical_longitude_from_pole' and a `standard_parallel` or # `scale_factor_at_projection_origin` so treat it specially - if mapping_type_name in (hh.CF_GRID_MAPPING_POLAR,): + if mapping_type_name == hh.CF_GRID_MAPPING_POLAR: latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN g_string += f""" {g_varname}:{latpo_name} = 90.0 ; diff --git a/lib/iris/tests/unit/fileformats/rules/test_rules.py b/lib/iris/tests/unit/fileformats/rules/test_rules.py index df3c769a70..423725221f 100644 --- a/lib/iris/tests/unit/fileformats/rules/test_rules.py +++ 
b/lib/iris/tests/unit/fileformats/rules/test_rules.py @@ -110,8 +110,8 @@ def field_generator(filename): aux_factory = mock.Mock() factory = mock.Mock() factory.args = [{"name": "foo"}] - factory.factory_class = ( - lambda *args: setattr(aux_factory, "fake_args", args) or aux_factory + factory.factory_class = lambda *args: ( + setattr(aux_factory, "fake_args", args) or aux_factory ) def converter(field): From f2830d8fc79bae44f9914eaf48ab6fb7b47bde13 Mon Sep 17 00:00:00 2001 From: Martin Yeo <40734014+trexfeathers@users.noreply.github.com> Date: Wed, 4 Mar 2026 17:02:39 +0000 Subject: [PATCH 33/77] Correction for SciTools/iris#6816 What's New entry (#6969) * Correction for SciTools/iris#6816 What's New entry. * Fix indents for SciTools/iris#6966 entry. --- docs/src/whatsnew/latest.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 34e5282c7d..f1d8547c75 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -126,7 +126,7 @@ This document explains the changes made to Iris for this release #. `@trexfeathers`_ and `@ukmo-ccbunney`_ updated CI to support Python 3.14 inline with `SPEC0 Minimum Supported Dependencies`_. Note: `pyvista` (and hence `geovista`) is not yet compatible with Python 3.14, so - `:module:~iris.experimental.geovista` is currently only available for + :mod:`iris.experimental.geovista` is currently only available for Python \<3.14. (:pull:`6816`, :issue:`6775`) #. `@ESadek-MO`_, `@trexfeathers`_, `@bjlittle`_, `@HGWright`_, `@pp-mo`_, @@ -141,8 +141,8 @@ This document explains the changes made to Iris for this release #. `@hsteptoe`_ and `@ESadek-MO`_ (reviewer) added static type hinting to :mod:`iris.pandas`. (:pull:`6948`) #. `@ukmo-ccbunney`_ changed formatting of numpy scalars attributes when generating a - Cube/Coord summary to use ``str`` representation instead of ``repr``. 
- (:pull:`6966`, :issue:`6692`) + Cube/Coord summary to use ``str`` representation instead of ``repr``. + (:pull:`6966`, :issue:`6692`) .. comment Whatsnew author names (@github name) in alphabetical order. Note that, From 043b0bc5a040f8e37cad9a91d8928c647dd0dfd7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Mar 2026 09:46:17 +0000 Subject: [PATCH 34/77] Bump the gha group across 1 directory with 5 updates (#6970) Bumps the gha group with 5 updates in the / directory: | Package | From | To | | --- | --- | --- | | [actions/upload-artifact](https://github.com/actions/upload-artifact) | `6` | `7` | | [actions/download-artifact](https://github.com/actions/download-artifact) | `7` | `8` | | [scitools/workflows/.github/workflows/ci-manifest.yml](https://github.com/scitools/workflows) | `2026.02.0` | `2026.03.2` | | [scitools/workflows/.github/workflows/ci-template-check.yml](https://github.com/scitools/workflows) | `2026.02.0` | `2026.03.2` | | [scitools/workflows/.github/workflows/refresh-lockfiles.yml](https://github.com/scitools/workflows) | `2026.02.0` | `2026.03.2` | Updates `actions/upload-artifact` from 6 to 7 - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v6...v7) Updates `actions/download-artifact` from 7 to 8 - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v7...v8) Updates `scitools/workflows/.github/workflows/ci-manifest.yml` from 2026.02.0 to 2026.03.2 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.02.0...2026.03.2) Updates `scitools/workflows/.github/workflows/ci-template-check.yml` from 2026.02.0 to 2026.03.2 - [Release notes](https://github.com/scitools/workflows/releases) - 
[Commits](https://github.com/scitools/workflows/compare/2026.02.0...2026.03.2) Updates `scitools/workflows/.github/workflows/refresh-lockfiles.yml` from 2026.02.0 to 2026.03.2 - [Release notes](https://github.com/scitools/workflows/releases) - [Commits](https://github.com/scitools/workflows/compare/2026.02.0...2026.03.2) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-version: '7' dependency-type: direct:production update-type: version-update:semver-major dependency-group: gha - dependency-name: actions/download-artifact dependency-version: '8' dependency-type: direct:production update-type: version-update:semver-major dependency-group: gha - dependency-name: scitools/workflows/.github/workflows/ci-manifest.yml dependency-version: 2026.03.2 dependency-type: direct:production dependency-group: gha - dependency-name: scitools/workflows/.github/workflows/ci-template-check.yml dependency-version: 2026.03.2 dependency-type: direct:production dependency-group: gha - dependency-name: scitools/workflows/.github/workflows/refresh-lockfiles.yml dependency-version: 2026.03.2 dependency-type: direct:production dependency-group: gha ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/benchmarks_report.yml | 4 ++-- .github/workflows/benchmarks_run.yml | 4 ++-- .github/workflows/ci-manifest.yml | 2 +- .github/workflows/ci-template-check.yml | 2 +- .github/workflows/ci-wheels.yml | 10 +++++----- .github/workflows/refresh-lockfiles.yml | 2 +- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml index 8d7dfb6b0b..8caf063e74 100644 --- a/.github/workflows/benchmarks_report.yml +++ b/.github/workflows/benchmarks_report.yml @@ -54,7 +54,7 @@ jobs: echo "reports_exist=$reports_exist" >> "$GITHUB_OUTPUT" - name: Store artifact - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: benchmark_reports path: benchmark_reports @@ -68,7 +68,7 @@ jobs: uses: actions/checkout@v6 - name: Download artifact - uses: actions/download-artifact@v7 + uses: actions/download-artifact@v8 with: name: benchmark_reports path: .github/workflows/benchmark_reports diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index 9b30d6a848..bfbf5a81f0 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -157,7 +157,7 @@ jobs: - name: Upload any benchmark reports # Uploading enables more downstream processing e.g. posting a PR comment. if: success() || steps.overnight.outcome == 'failure' - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: benchmark_reports path: .github/workflows/benchmark_reports @@ -165,7 +165,7 @@ jobs: - name: Archive asv results # Store the raw ASV database(s) to help manual investigations. 
if: ${{ always() }} - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: asv-raw-results path: benchmarks/.asv/results diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 4406e1c01d..46f8fda4c8 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.02.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.03.2 diff --git a/.github/workflows/ci-template-check.yml b/.github/workflows/ci-template-check.yml index 32f74f9043..3d0bb0d12d 100644 --- a/.github/workflows/ci-template-check.yml +++ b/.github/workflows/ci-template-check.yml @@ -10,7 +10,7 @@ on: jobs: prompt-share: - uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.02.0 + uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.03.2 secrets: inherit with: pr_number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index e17e2ef983..152778ff26 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -37,7 +37,7 @@ jobs: run: | pipx run build - - uses: actions/upload-artifact@v6 + - uses: actions/upload-artifact@v7 with: name: pypi-artifacts path: ${{ github.workspace }}/dist/* @@ -61,7 +61,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -116,7 +116,7 @@ jobs: name: "show artifacts" runs-on: ubuntu-latest steps: - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -137,7 +137,7 @@ jobs: # and check for the SciTools repo if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' && github.repository_owner == 
'SciTools' steps: - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -159,7 +159,7 @@ jobs: # upload to PyPI for every tag starting with 'v' if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') && github.repository_owner == 'SciTools' steps: - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index b78a6523c3..69eb9a52c3 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.02.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.03.2 secrets: inherit From a36ad3d50f9921e79609ea241a3f8001ee44eb29 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sat, 25 Oct 2025 00:18:03 +0100 Subject: [PATCH 35/77] Initial tests. --- lib/iris/fileformats/cf.py | 2 +- .../integration/netcdf/test_chararrays.py | 112 ++++++++++++++++++ 2 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 lib/iris/tests/integration/netcdf/test_chararrays.py diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 308ce381ee..a11ab2ddd3 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -807,7 +807,7 @@ def cf_label_data(self, cf_data_var): label_data = self[:] if ma.isMaskedArray(label_data): - label_data = label_data.filled() + label_data = label_data.filled(b"\0") # Determine whether we have a string-valued scalar label # i.e. a character variable that only has one dimension (the length of the string). 
diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py new file mode 100644 index 0000000000..feb93047dd --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -0,0 +1,112 @@ +import netCDF4 as nc +import numpy as np +import pytest + +import iris + +NX, N_STRLEN = 3, 64 +TEST_STRINGS = ["Münster", "London", "Amsterdam"] +TEST_COORD_VALS = ["bun", "éclair", "sandwich"] + + +def convert_chararray(string_array_1d, maxlen, encoding="utf-8"): + bbytes = [text.encode(encoding) for text in string_array_1d] + pad = b"\0" * maxlen + bbytes = [(x + pad)[:maxlen] for x in bbytes] + chararray = np.array([[bb[i : i + 1] for i in range(maxlen)] for bb in bbytes]) + return chararray + + +INCLUDE_COORD = True +# INCLUDE_COORD = False + + +def make_testfile(filepath, chararray, coordarray, encoding_str=None): + with nc.Dataset(filepath, "w") as ds: + ds.createDimension("x", NX) + ds.createDimension("nstr", N_STRLEN) + vx = ds.createVariable("x", int, dimensions=("x")) + vx[:] = np.arange(NX) + if INCLUDE_COORD: + ds.createDimension("nstr2", N_STRLEN) + v_co = ds.createVariable( + "v_co", + "S1", + dimensions=( + "x", + "nstr2", + ), + ) + v_co[:] = coordarray + if encoding_str is not None: + v_co._Encoding = encoding_str + v = ds.createVariable( + "v", + "S1", + dimensions=( + "x", + "nstr", + ), + ) + v[:] = chararray + if encoding_str is not None: + v._Encoding = encoding_str + if INCLUDE_COORD: + v.coordinates = "v_co" + + +def show_result(filepath): + from pp_utils import ncdump + + print(f"File {filepath}") + print("NCDUMP:") + ncdump(filepath, "") + # with nc.Dataset(filepath, "r") as ds: + # v = ds.variables["v"] + # print("\n----\nNetcdf data readback (basic)") + # try: + # print(repr(v[:])) + # except UnicodeDecodeError as err: + # print(repr(err)) + # print("..raw:") + # v.set_auto_chartostring(False) + # print(repr(v[:])) + print("\nAs iris cube..") + try: + cube = 
iris.load_cube(filepath) + print(cube) + if iris.loading.LOAD_PROBLEMS._problems: + print(iris.loading.LOAD_PROBLEMS) + print( + "\n".join(iris.loading.LOAD_PROBLEMS._problems[0].stack_trace.format()) + ) + print("-data-") + print(repr(cube.data)) + if INCLUDE_COORD: + print("-coord data-") + try: + print(repr(cube.coord("v_co").points)) + except Exception as err2: + print(repr(err2)) + except UnicodeDecodeError as err: + print(repr(err)) + + +# tsts = (None, "ascii", "utf-8", "utf-32",) +# tsts = ("utf-8",) +# tsts = ("utf-8", "utf-32",) +# tsts = ("utf-32",) +tsts = ("utf-8", "ascii", "utf-8") + + +@pytest.mark.parametrize("encoding", tsts) +def test_encodings(encoding): + print(f"\n=========\nTesting encoding: {encoding}") + filepath = f"tmp_{str(encoding)}.nc" + do_as = encoding + if encoding != "utf-32": + do_as = "utf-8" + TEST_CHARARRAY = convert_chararray(TEST_STRINGS, N_STRLEN, encoding=do_as) + TEST_COORDARRAY = convert_chararray(TEST_COORD_VALS, N_STRLEN, encoding=do_as) + make_testfile(filepath, TEST_CHARARRAY, TEST_COORDARRAY, encoding_str=encoding) + show_result(filepath) From 15e0e8e4516a9c787d04b1d56976275de36ad507 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sat, 25 Oct 2025 01:22:30 +0100 Subject: [PATCH 36/77] Get 'create_cf_data_variable' to call 'create_generic_cf_array_var': Mostly working? Get 'create_cf_data_variable' to call 'create_generic_cf_array_var': Mostly working? 
--- .../fileformats/_nc_load_rules/helpers.py | 8 +- lib/iris/fileformats/netcdf/saver.py | 158 +++++++++--------- .../integration/netcdf/test_chararrays.py | 1 + 3 files changed, 85 insertions(+), 82 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 35c2e96924..50e282db5f 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -708,13 +708,13 @@ def build_and_add_global_attributes(engine: Engine): ), ) if problem is not None: - stack_notes = problem.stack_trace.__notes__ + stack_notes = problem.stack_trace.__notes__ # type: ignore[attr-defined] if stack_notes is None: stack_notes = [] stack_notes.append( f"Skipping disallowed global attribute '{attr_name}' (see above error)" ) - problem.stack_trace.__notes__ = stack_notes + problem.stack_trace.__notes__ = stack_notes # type: ignore[attr-defined] ################################################################################ @@ -1536,14 +1536,14 @@ def build_and_add_dimension_coordinate( ) if problem is not None: coord_var_name = str(cf_coord_var.cf_name) - stack_notes = problem.stack_trace.__notes__ + stack_notes = problem.stack_trace.__notes__ # type: ignore[attr-defined] if stack_notes is None: stack_notes = [] stack_notes.append( f"Failed to create {coord_var_name} dimension coordinate:\n" f"Gracefully creating {coord_var_name!r} auxiliary coordinate instead." 
) - problem.stack_trace.__notes__ = stack_notes + problem.stack_trace.__notes__ = stack_notes # type: ignore[attr-defined] problem.handled = True _ = _add_or_capture( diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 31a685f8ee..b2617feef5 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -764,7 +764,7 @@ def _create_cf_dimensions(self, cube, dimension_names, unlimited_dimensions=None # used for a different one pass else: - dim_name = self._get_coord_variable_name(cube, coord) + dim_name = self._get_element_variable_name(cube, coord) unlimited_dim_names.append(dim_name) for dim_name in dimension_names: @@ -995,12 +995,12 @@ def _add_aux_coords( ] # Include any relevant mesh location coordinates. - mesh: MeshXY | None = getattr(cube, "mesh") - mesh_location: str | None = getattr(cube, "location") + mesh: MeshXY | None = getattr(cube, "mesh") # type: ignore[annotation-unchecked] + mesh_location: str | None = getattr(cube, "location") # type: ignore[annotation-unchecked] if mesh and mesh_location: location_coords: MeshNodeCoords | MeshEdgeCoords | MeshFaceCoords = getattr( mesh, f"{mesh_location}_coords" - ) + ) # type: ignore[annotation-unchecked] coords_to_add.extend(list(location_coords)) return self._add_inner_related_vars( @@ -1370,7 +1370,7 @@ def record_dimension(names_list, dim_name, length, matching_coords=None): if dim_name is None: # Not already present : create a unique dimension name # from the coord. - dim_name = self._get_coord_variable_name(cube, coord) + dim_name = self._get_element_variable_name(cube, coord) # Disambiguate if it has the same name as an # existing dimension. # OR if it matches an existing file variable name. 
@@ -1546,38 +1546,14 @@ def _create_cf_bounds(self, coord, cf_var, cf_name, /, *, compression_kwargs=Non ) self._lazy_stream_data(data=bounds, cf_var=cf_var_bounds) - def _get_cube_variable_name(self, cube): - """Return a CF-netCDF variable name for the given cube. - - Parameters - ---------- - cube : :class:`iris.cube.Cube` - An instance of a cube for which a CF-netCDF variable - name is required. - - Returns - ------- - str - A CF-netCDF variable name as a string. - - """ - if cube.var_name is not None: - cf_name = cube.var_name - else: - # Convert to lower case and replace whitespace by underscores. - cf_name = "_".join(cube.name().lower().split()) - - cf_name = self.cf_valid_var_name(cf_name) - return cf_name - - def _get_coord_variable_name(self, cube_or_mesh, coord): - """Return a CF-netCDF variable name for a given coordinate-like element. + def _get_element_variable_name(self, cube_or_mesh, element): + """Return a CF-netCDF variable name for a given coordinate-like element, or cube. Parameters ---------- cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.mesh.MeshXY` The Cube or Mesh being saved to the netCDF file. - coord : :class:`iris.coords._DimensionalMetadata` + element : :class:`iris.coords._DimensionalMetadata` | :class:``iris.cube.Cube`` An instance of a coordinate (or similar), for which a CF-netCDF variable name is required. @@ -1597,17 +1573,21 @@ def _get_coord_variable_name(self, cube_or_mesh, coord): cube = None mesh = cube_or_mesh - if coord.var_name is not None: - cf_name = coord.var_name + if element.var_name is not None: + cf_name = element.var_name + elif isinstance(element, Cube): + # Make name for a Cube without a var_name. 
+ cf_name = "_".join(element.name().lower().split()) else: - name = coord.standard_name or coord.long_name + # Make name for a Coord-like element without a var_name + name = element.standard_name or element.long_name if not name or set(name).intersection(string.whitespace): # We need to invent a name, based on its associated dimensions. - if cube is not None and cube.coords(coord): - # It is a regular cube coordinate. + if cube is not None and cube.elements(element): + # It is a regular cube elementinate. # Auto-generate a name based on the dims. name = "" - for dim in cube.coord_dims(coord): + for dim in cube.coord_dims(element): name += f"dim{dim}" # Handle scalar coordinate (dims == ()). if not name: @@ -1621,8 +1601,8 @@ def _get_coord_variable_name(self, cube_or_mesh, coord): # At present, a location-coord cannot be nameless, as the # MeshXY code relies on guess_coord_axis. - assert isinstance(coord, Connectivity) - location = coord.cf_role.split("_")[0] + assert isinstance(element, Connectivity) + location = element.cf_role.split("_")[0] location_dim_attr = f"{location}_dimension" name = getattr(mesh, location_dim_attr) @@ -1698,6 +1678,8 @@ def _create_mesh(self, mesh): return cf_mesh_name def _set_cf_var_attributes(self, cf_var, element): + from iris.cube import Cube + # Deal with CF-netCDF units, and add the name+units properties. if isinstance(element, iris.coords.Coord): # Fix "degree" units if needed. @@ -1720,19 +1702,21 @@ def _set_cf_var_attributes(self, cf_var, element): if element.units.calendar: _setncattr(cf_var, "calendar", str(element.units.calendar)) - # Add any other custom coordinate attributes. - for name in sorted(element.attributes): - value = element.attributes[name] + if not isinstance(element, Cube): + # Add any other custom coordinate attributes. + # N.B. 
not Cube, which has specific handling in _create_cf_data_variable + for name in sorted(element.attributes): + value = element.attributes[name] - if name == "STASH": - # Adopting provisional Metadata Conventions for representing MO - # Scientific Data encoded in NetCDF Format. - name = "um_stash_source" - value = str(value) + if name == "STASH": + # Adopting provisional Metadata Conventions for representing MO + # Scientific Data encoded in NetCDF Format. + name = "um_stash_source" + value = str(value) - # Don't clobber existing attributes. - if not hasattr(cf_var, name): - _setncattr(cf_var, name, value) + # Don't clobber existing attributes. + if not hasattr(cf_var, name): + _setncattr(cf_var, name, value) def _create_generic_cf_array_var( self, @@ -1744,6 +1728,7 @@ def _create_generic_cf_array_var( element_dims=None, fill_value=None, compression_kwargs=None, + is_dataless=False, ): """Create theCF-netCDF variable given dimensional_metadata. @@ -1796,7 +1781,7 @@ def _create_generic_cf_array_var( # Work out the var-name to use. # N.B. the only part of this routine that may use a mesh _or_ a cube. - cf_name = self._get_coord_variable_name(cube_or_mesh, element) + cf_name = self._get_element_variable_name(cube_or_mesh, element) while cf_name in self._dataset.variables: cf_name = self._increment_name(cf_name) @@ -1809,10 +1794,13 @@ def _create_generic_cf_array_var( # Get the data values, in a way which works for any element type, as # all are subclasses of _DimensionalMetadata. # (e.g. =points if a coord, =data if an ancillary, etc) - data = element._core_values() + if isinstance(element, Cube): + data = element.core_data() + else: + data = element._core_values() # This compression contract is *not* applicable to a mesh. 
- if cube and cube.shape != data.shape: + if cube is not None and data is not None and cube.shape != data.shape: compression_kwargs = {} if np.issubdtype(data.dtype, np.str_): @@ -1842,11 +1830,13 @@ def _create_generic_cf_array_var( # Convert data from an array of strings into a character array # with an extra string-length dimension. if len(element_dims) == 1: + # Scalar variable (only has string dimension). data_first = data[0] if is_lazy_data(data_first): data_first = dask.compute(data_first) data = list("%- *s" % (string_dimension_depth, data_first)) else: + # NOTE: at present, can't do this lazily?? orig_shape = data.shape new_shape = orig_shape + (string_dimension_depth,) new_data = np.zeros(new_shape, cf_var.dtype) @@ -1855,7 +1845,7 @@ def _create_generic_cf_array_var( new_data[index_slice] = list( "%- *s" % (string_dimension_depth, data[index]) ) - data = new_data + data = new_data else: # A normal (numeric) variable. # ensure a valid datatype for the file format. @@ -1892,7 +1882,8 @@ def _create_generic_cf_array_var( ) # Add the data to the CF-netCDF variable. 
- self._lazy_stream_data(data=data, cf_var=cf_var) + if not is_dataless: + self._lazy_stream_data(data=data, cf_var=cf_var) # Add names + units self._set_cf_var_attributes(cf_var, element) @@ -2243,9 +2234,9 @@ def _create_cf_grid_mapping(self, cube, cf_var_cube): cfvar = self._name_coord_map.name(coord) if not cfvar: # not found - create and store it: - cfvar = self._get_coord_variable_name(cube, coord) + cfvar = self._get_element_variable_name(cube, coord) self._name_coord_map.append( - cfvar, self._get_coord_variable_name(cube, coord) + cfvar, self._get_element_variable_name(cube, coord) ) cfvar_names.append(cfvar) @@ -2388,32 +2379,43 @@ def set_packing_ncattrs(cfvar): if add_offset: _setncattr(cfvar, "add_offset", add_offset) - cf_name = self._get_cube_variable_name(cube) - while cf_name in self._dataset.variables: - cf_name = self._increment_name(cf_name) - + # cf_name = self._get_element_variable_name(cube_or_mesh=None, element=cube) + # while cf_name in self._dataset.variables: + # cf_name = self._increment_name(cf_name) + # + # cf_var = self._dataset.createVariable( + # cf_name, dtype, dimension_names, fill_value=fill_value, **kwargs + # ) # Create the cube CF-netCDF data variable with data payload. - cf_var = self._dataset.createVariable( - cf_name, dtype, dimension_names, fill_value=fill_value, **kwargs + cf_name = self._create_generic_cf_array_var( + cube, + dimension_names, + cube, + element_dims=dimension_names, + fill_value=fill_value, + compression_kwargs=kwargs, + is_dataless=is_dataless, ) + cf_var = self._dataset.variables[cf_name] if not is_dataless: set_packing_ncattrs(cf_var) - self._lazy_stream_data(data=data, cf_var=cf_var) - - if cube.standard_name: - _setncattr(cf_var, "standard_name", cube.standard_name) - - if cube.long_name: - _setncattr(cf_var, "long_name", cube.long_name) - - if cube.units.is_udunits(): - _setncattr(cf_var, "units", str(cube.units)) - - # Add the CF-netCDF calendar attribute. 
- if cube.units.calendar: - _setncattr(cf_var, "calendar", cube.units.calendar) + # if cube.standard_name: + # _setncattr(cf_var, "standard_name", cube.standard_name) + # + # if cube.long_name: + # _setncattr(cf_var, "long_name", cube.long_name) + # + # if cube.units.is_udunits(): + # _setncattr(cf_var, "units", str(cube.units)) + # + # # Add the CF-netCDF calendar attribute. + # if cube.units.calendar: + # _setncattr(cf_var, "calendar", cube.units.calendar) + + # Set attributes: NB this part is cube-specific (not the same for components) + # - therefore 'set_cf_var_attributes' doesn't set attributes if element is a Cube if iris.FUTURE.save_split_attrs: attr_names = cube.attributes.locals.keys() else: diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index feb93047dd..a3ce9f9128 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -101,6 +101,7 @@ def show_result(filepath): @pytest.mark.parametrize("encoding", tsts) def test_encodings(encoding): + # small change print(f"\n=========\nTesting encoding: {encoding}") filepath = f"tmp_{str(encoding)}.nc" do_as = encoding From e9cddba242380b4eee2000e7090c840211059b7f Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 28 Oct 2025 21:11:15 +0000 Subject: [PATCH 37/77] Reinstate decode on load, now in-Iris coded. 
--- .../fileformats/_nc_load_rules/helpers.py | 10 ++- lib/iris/fileformats/cf.py | 18 +++++- .../fileformats/netcdf/_thread_safe_nc.py | 45 +++++++++++-- lib/iris/fileformats/netcdf/loader.py | 38 ++++++++++- lib/iris/fileformats/netcdf/saver.py | 4 +- .../integration/netcdf/test_chararrays.py | 64 ++++++++++++++++++- lib/iris/util.py | 21 ++++++ 7 files changed, 184 insertions(+), 16 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 50e282db5f..fa63002f09 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -1643,9 +1643,13 @@ def _add_auxiliary_coordinate( # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the coordinate being built. - common_dims = [ - dim for dim in cf_coord_var.dimensions if dim in engine.cf_var.dimensions - ] + coord_dims = cf_coord_var.dimensions + if cf._is_str_dtype(cf_coord_var): + coord_dims = coord_dims[:-1] + datavar_dims = engine.cf_var.dimensions + if cf._is_str_dtype(engine.cf_var): + datavar_dims = datavar_dims[:-1] + common_dims = [dim for dim in coord_dims if dim in datavar_dims] data_dims = None if common_dims: # Calculate the offset of each common dimension. diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index a11ab2ddd3..b4dc6a259d 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -795,15 +795,27 @@ def cf_label_data(self, cf_data_var): # Determine the name of the label string (or length) dimension by # finding the dimension name that doesn't exist within the data dimensions. - str_dim_name = list(set(self.dimensions) - set(cf_data_var.dimensions)) + str_dim_names = list(set(self.dimensions) - set(cf_data_var.dimensions)) + n_nondata_dims = len(str_dim_names) + + if n_nondata_dims == 0: + # *All* dims are shared with the data-variable. + # This is only ok if the data-var is *also* a string type. 
+ dim_ok = _is_str_dtype(cf_data_var) + # In this case, we must just *assume* that the last dimension is "the" + # string dimension + str_dim_name = self.dimensions[-1] + else: + # If there is exactly one non-data dim, that is the one we want + dim_ok = len(str_dim_names) == 1 + (str_dim_name,) = str_dim_names - if len(str_dim_name) != 1: + if not dim_ok: raise ValueError( "Invalid string dimensions for CF-netCDF label variable %r" % self.cf_name ) - str_dim_name = str_dim_name[0] label_data = self[:] if ma.isMaskedArray(label_data): diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 33183ef0fa..4b3dc10620 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -311,14 +311,39 @@ def fromcdl(cls, *args, **kwargs): class NetCDFDataProxy: """A reference to the data payload of a single NetCDF file variable.""" - __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") - - def __init__(self, shape, dtype, path, variable_name, fill_value): + __slots__ = ( + "shape", + "dtype", + "path", + "variable_name", + "fill_value", + "is_bytes", + "encoding", + "string_length", + ) + + def __init__( + self, + shape, + dtype, + path, + variable_name, + fill_value, + encoding: str | None = None, + string_length: int = 0, + ): self.shape = shape self.dtype = dtype self.path = path self.variable_name = variable_name self.fill_value = fill_value + self.is_bytes = dtype.kind == "S" and dtype.itemsize == 1 + if self.is_bytes: + # We will be returning a different shape : the last dim is the byte-length + self.shape = self.shape[:-1] + self.dtype = np.dtype(f"U{string_length}") + self.encoding = encoding + self.string_length = string_length @property def ndim(self): @@ -338,10 +363,20 @@ def __getitem__(self, keys): try: variable = dataset.variables[self.variable_name] # Get the NetCDF variable data and slice. 
- var = variable[keys] + data = variable[keys] + + # If bytes, decode to strings + if self.is_bytes: + from iris.util import convert_bytesarray_to_strings + + data = convert_bytesarray_to_strings( + data, + encoding=self.encoding, + string_length=self.string_length, + ) finally: dataset.close() - return np.asanyarray(var) + return np.asanyarray(data) def __repr__(self): fmt = ( diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 6557f4aebc..e749d26d9b 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -16,6 +16,7 @@ """ +import codecs from collections.abc import Iterable, Iterator, Mapping from contextlib import contextmanager from copy import deepcopy @@ -274,10 +275,36 @@ def _get_cf_var_data(cf_var): # Normal NCVariable type: total_bytes = cf_var.size * cf_var.dtype.itemsize + default_encoding = "utf-8" + encoding = getattr(cf_var, "_Encoding", None) + if encoding is None: + # utf-8 is a reasonable "safe" default, equivalent to 'ascii' for ascii data + encoding = default_encoding + else: + try: + # Accept + normalise naming of encodings + encoding = codecs.lookup(encoding).name + # NOTE: if encoding does not suit data, errors can occur. + # For example, _Encoding = "ascii", with non-ascii content. + except LookupError: + # Replace some invalid setting with "safe"(ish) fallback. + encoding = default_encoding + + string_length = getattr(cf_var, "iris_string_length", None) + if total_bytes < _LAZYVAR_MIN_BYTES: # Don't make a lazy array, as it will cost more memory AND more time to access. result = cf_var[:] + if result.dtype.kind == "S": + from iris.util import convert_bytesarray_to_strings + + result = convert_bytesarray_to_strings( + result, + encoding=encoding, + string_length=string_length, + ) + # Special handling of masked scalar value; this will be returned as # an `np.ma.masked` instance which will lose the original dtype. 
# Workaround for this it return a 1-element masked array of the @@ -300,8 +327,17 @@ def _get_cf_var_data(cf_var): "_FillValue", _thread_safe_nc.default_fillvals[fill_dtype], ) + + # NOTE: if the data is bytes which need to be converted to strings on read, + # the data-proxy will do that (and it modifies its shape + dtype). proxy = NetCDFDataProxy( - cf_var.shape, dtype, cf_var.filename, cf_var.cf_name, fill_value + cf_var.shape, + dtype, + cf_var.filename, + cf_var.cf_name, + fill_value, + encoding=encoding, + string_length=string_length, ) # Get the chunking specified for the variable : this is either a shape, or # maybe the string "contiguous". diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index b2617feef5..158d431f9a 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1583,8 +1583,8 @@ def _get_element_variable_name(self, cube_or_mesh, element): name = element.standard_name or element.long_name if not name or set(name).intersection(string.whitespace): # We need to invent a name, based on its associated dimensions. - if cube is not None and cube.elements(element): - # It is a regular cube elementinate. + if cube is not None and cube.coords(element): + # It is a regular cube coordinate. # Auto-generate a name based on the dims. 
name = "" for dim in cube.coord_dims(element): diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index a3ce9f9128..8f29fcdcd5 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -4,10 +4,18 @@ import iris +iris.FUTURE.save_split_attrs = True + + NX, N_STRLEN = 3, 64 TEST_STRINGS = ["Münster", "London", "Amsterdam"] TEST_COORD_VALS = ["bun", "éclair", "sandwich"] +# VARS_COORDS_SHARE_STRING_DIM = True +VARS_COORDS_SHARE_STRING_DIM = False +if VARS_COORDS_SHARE_STRING_DIM: + TEST_COORD_VALS[-1] = "Xsandwich" # makes the max coord strlen same as data one + def convert_chararray(string_array_1d, maxlen, encoding="utf-8"): bbytes = [text.encode(encoding) for text in string_array_1d] @@ -17,9 +25,33 @@ def convert_chararray(string_array_1d, maxlen, encoding="utf-8"): return chararray +def convert_bytesarray_to_strings( + byte_array, encoding="utf-8", string_length: int | None = None +): + """Convert bytes to strings. + + N.B. for now at least, we assume the string dim is **always the last one**. 
+ """ + bytes_shape = byte_array.shape + var_shape = bytes_shape[:-1] + if string_length is None: + string_length = bytes_shape[-1] + string_dtype = f"U{string_length}" + result = np.empty(var_shape, dtype=string_dtype) + for ndindex in np.ndindex(var_shape): + element_bytes = byte_array[ndindex] + bytes = b"".join([b if b else b"\0" for b in element_bytes]) + string = bytes.decode(encoding) + result[ndindex] = string + return result + + INCLUDE_COORD = True # INCLUDE_COORD = False +INCLUDE_NUMERIC_AUXCOORD = True +# INCLUDE_NUMERIC_AUXCOORD = False + def make_testfile(filepath, chararray, coordarray, encoding_str=None): with nc.Dataset(filepath, "w") as ds: @@ -40,6 +72,13 @@ def make_testfile(filepath, chararray, coordarray, encoding_str=None): v_co[:] = coordarray if encoding_str is not None: v_co._Encoding = encoding_str + if INCLUDE_NUMERIC_AUXCOORD: + v_num = ds.createVariable( + "v_num", + float, + dimensions=("x",), + ) + v_num[:] = np.arange(NX) v = ds.createVariable( "v", "S1", @@ -52,7 +91,10 @@ def make_testfile(filepath, chararray, coordarray, encoding_str=None): if encoding_str is not None: v._Encoding = encoding_str if INCLUDE_COORD: - v.coordinates = "v_co" + coords_str = "v_co" + if INCLUDE_NUMERIC_AUXCOORD: + coords_str += " v_num" + v.coordinates = coords_str def show_result(filepath): @@ -82,8 +124,10 @@ def show_result(filepath): ) print("-data-") print(repr(cube.data)) + print("-numeric auxcoord data-") + print(repr(cube.coord("x").points)) if INCLUDE_COORD: - print("-coord data-") + print("-string auxcoord data-") try: print(repr(cube.coord("v_co").points)) except Exception as err2: @@ -111,3 +155,19 @@ def test_encodings(encoding): TEST_COORDARRAY = convert_chararray(TEST_COORD_VALS, N_STRLEN, encoding=do_as) make_testfile(filepath, TEST_CHARARRAY, TEST_COORDARRAY, encoding_str=encoding) show_result(filepath) + + +# @pytest.mark.parametrize("ndim", [1, 2]) +# def test_convert_bytes_to_strings(ndim: int): +# if ndim == 1: +# source = 
convert_strings_to_chararray(TEST_STRINGS, 16) +# elif ndim == 2: +# source = np.stack([ +# convert_strings_to_chararray(TEST_STRINGS, 16), +# convert_strings_to_chararray(TEST_COORD_VALS, 16), +# ]) +# else: +# raise ValueError(f"Unexpected param ndim={ndim}.") +# # convert the strings to bytes +# result = convert_bytesarray_to_strings(source) +# print(result) diff --git a/lib/iris/util.py b/lib/iris/util.py index 551b5aeb68..edcd8d52a9 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -3189,3 +3189,24 @@ def set( # Global CML settings object for use as context manager CML_SETTINGS: CMLSettings = CMLSettings() + + +def convert_bytesarray_to_strings( + byte_array, encoding="utf-8", string_length: int | None = None +): + """Convert bytes to strings. + + N.B. for now at least, we assume the string dim is **always the last one**. + """ + bytes_shape = byte_array.shape + var_shape = bytes_shape[:-1] + if string_length is None: + string_length = bytes_shape[-1] + string_dtype = f"U{string_length}" + result = np.empty(var_shape, dtype=string_dtype) + for ndindex in np.ndindex(var_shape): + element_bytes = byte_array[ndindex] + bytes = b"".join([b if b else b"\0" for b in element_bytes]) + string = bytes.decode(encoding) + result[ndindex] = string + return result From 5e4bb1398336ef708b910c990a41a50bc188cf3b Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 7 Dec 2025 00:34:53 +0000 Subject: [PATCH 38/77] Revert and amend. 
--- .../fileformats/netcdf/_thread_safe_nc.py | 45 +++---------------- lib/iris/fileformats/netcdf/loader.py | 38 +--------------- lib/iris/fileformats/netcdf/saver.py | 4 +- lib/iris/util.py | 21 --------- 4 files changed, 8 insertions(+), 100 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 4b3dc10620..33183ef0fa 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -311,39 +311,14 @@ def fromcdl(cls, *args, **kwargs): class NetCDFDataProxy: """A reference to the data payload of a single NetCDF file variable.""" - __slots__ = ( - "shape", - "dtype", - "path", - "variable_name", - "fill_value", - "is_bytes", - "encoding", - "string_length", - ) - - def __init__( - self, - shape, - dtype, - path, - variable_name, - fill_value, - encoding: str | None = None, - string_length: int = 0, - ): + __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") + + def __init__(self, shape, dtype, path, variable_name, fill_value): self.shape = shape self.dtype = dtype self.path = path self.variable_name = variable_name self.fill_value = fill_value - self.is_bytes = dtype.kind == "S" and dtype.itemsize == 1 - if self.is_bytes: - # We will be returning a different shape : the last dim is the byte-length - self.shape = self.shape[:-1] - self.dtype = np.dtype(f"U{string_length}") - self.encoding = encoding - self.string_length = string_length @property def ndim(self): @@ -363,20 +338,10 @@ def __getitem__(self, keys): try: variable = dataset.variables[self.variable_name] # Get the NetCDF variable data and slice. 
- data = variable[keys] - - # If bytes, decode to strings - if self.is_bytes: - from iris.util import convert_bytesarray_to_strings - - data = convert_bytesarray_to_strings( - data, - encoding=self.encoding, - string_length=self.string_length, - ) + var = variable[keys] finally: dataset.close() - return np.asanyarray(data) + return np.asanyarray(var) def __repr__(self): fmt = ( diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index e749d26d9b..6557f4aebc 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -16,7 +16,6 @@ """ -import codecs from collections.abc import Iterable, Iterator, Mapping from contextlib import contextmanager from copy import deepcopy @@ -275,36 +274,10 @@ def _get_cf_var_data(cf_var): # Normal NCVariable type: total_bytes = cf_var.size * cf_var.dtype.itemsize - default_encoding = "utf-8" - encoding = getattr(cf_var, "_Encoding", None) - if encoding is None: - # utf-8 is a reasonable "safe" default, equivalent to 'ascii' for ascii data - encoding = default_encoding - else: - try: - # Accept + normalise naming of encodings - encoding = codecs.lookup(encoding).name - # NOTE: if encoding does not suit data, errors can occur. - # For example, _Encoding = "ascii", with non-ascii content. - except LookupError: - # Replace some invalid setting with "safe"(ish) fallback. - encoding = default_encoding - - string_length = getattr(cf_var, "iris_string_length", None) - if total_bytes < _LAZYVAR_MIN_BYTES: # Don't make a lazy array, as it will cost more memory AND more time to access. result = cf_var[:] - if result.dtype.kind == "S": - from iris.util import convert_bytesarray_to_strings - - result = convert_bytesarray_to_strings( - result, - encoding=encoding, - string_length=string_length, - ) - # Special handling of masked scalar value; this will be returned as # an `np.ma.masked` instance which will lose the original dtype. 
# Workaround for this it return a 1-element masked array of the @@ -327,17 +300,8 @@ def _get_cf_var_data(cf_var): "_FillValue", _thread_safe_nc.default_fillvals[fill_dtype], ) - - # NOTE: if the data is bytes which need to be converted to strings on read, - # the data-proxy will do that (and it modifies its shape + dtype). proxy = NetCDFDataProxy( - cf_var.shape, - dtype, - cf_var.filename, - cf_var.cf_name, - fill_value, - encoding=encoding, - string_length=string_length, + cf_var.shape, dtype, cf_var.filename, cf_var.cf_name, fill_value ) # Get the chunking specified for the variable : this is either a shape, or # maybe the string "contiguous". diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 158d431f9a..b2617feef5 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1583,8 +1583,8 @@ def _get_element_variable_name(self, cube_or_mesh, element): name = element.standard_name or element.long_name if not name or set(name).intersection(string.whitespace): # We need to invent a name, based on its associated dimensions. - if cube is not None and cube.coords(element): - # It is a regular cube coordinate. + if cube is not None and cube.elements(element): + # It is a regular cube elementinate. # Auto-generate a name based on the dims. name = "" for dim in cube.coord_dims(element): diff --git a/lib/iris/util.py b/lib/iris/util.py index edcd8d52a9..551b5aeb68 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -3189,24 +3189,3 @@ def set( # Global CML settings object for use as context manager CML_SETTINGS: CMLSettings = CMLSettings() - - -def convert_bytesarray_to_strings( - byte_array, encoding="utf-8", string_length: int | None = None -): - """Convert bytes to strings. - - N.B. for now at least, we assume the string dim is **always the last one**. 
- """ - bytes_shape = byte_array.shape - var_shape = bytes_shape[:-1] - if string_length is None: - string_length = bytes_shape[-1] - string_dtype = f"U{string_length}" - result = np.empty(var_shape, dtype=string_dtype) - for ndindex in np.ndindex(var_shape): - element_bytes = byte_array[ndindex] - bytes = b"".join([b if b else b"\0" for b in element_bytes]) - string = bytes.decode(encoding) - result[ndindex] = string - return result From b92f88da28df8de0591596b4e9d96f60c8ceea6c Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 29 Oct 2025 12:23:07 +0000 Subject: [PATCH 39/77] Hack to preserve the existing order of attributes on saved Coords and Cubes. --- lib/iris/fileformats/netcdf/saver.py | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index b2617feef5..96911991d3 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1687,12 +1687,26 @@ def _set_cf_var_attributes(self, cf_var, element): else: units_str = str(element.units) - if cf_units.as_unit(units_str).is_udunits(): - _setncattr(cf_var, "units", units_str) + # NB this bit is a nasty hack to preserve existing behaviour through a refactor: + # The attributes for Coords are created in the order units, standard_name, + # whereas for data-variables (aka Cubes) it is the other way around. + # Needed now that this routine is also called from _create_cf_data_variable. + # TODO: when we can break things, rationalise these to be the same. 
+ def add_units(): + if cf_units.as_unit(units_str).is_udunits(): + _setncattr(cf_var, "units", units_str) + + def add_stdname(): + standard_name = element.standard_name + if standard_name is not None: + _setncattr(cf_var, "standard_name", standard_name) - standard_name = element.standard_name - if standard_name is not None: - _setncattr(cf_var, "standard_name", standard_name) + if isinstance(element, Cube): + add_stdname() + add_units() + else: + add_units() + add_stdname() long_name = element.long_name if long_name is not None: From bed165a465a2528497a1b2dab023996e92c4bcf4 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 29 Oct 2025 14:54:33 +0000 Subject: [PATCH 40/77] Fix for dataless; avoid FUTURE global state change from temporary tests. --- lib/iris/fileformats/netcdf/saver.py | 30 ++++---- .../integration/netcdf/test_chararrays.py | 72 ++++++++++++++++--- 2 files changed, 75 insertions(+), 27 deletions(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 96911991d3..2bf94cb896 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1817,7 +1817,7 @@ def _create_generic_cf_array_var( if cube is not None and data is not None and cube.shape != data.shape: compression_kwargs = {} - if np.issubdtype(data.dtype, np.str_): + if not is_dataless and np.issubdtype(data.dtype, np.str_): # Deal with string-type variables. # Typically CF label variables, but also possibly ancil-vars ? string_dimension_depth = data.dtype.itemsize @@ -1863,8 +1863,13 @@ def _create_generic_cf_array_var( else: # A normal (numeric) variable. # ensure a valid datatype for the file format. 
- element_type = type(element).__name__ - data = self._ensure_valid_dtype(data, element_type, element) + if is_dataless: + dtype = self._DATALESS_DTYPE + fill_value = self._DATALESS_FILLVALUE + else: + element_type = type(element).__name__ + data = self._ensure_valid_dtype(data, element_type, element) + dtype = data.dtype.newbyteorder("=") # Check if this is a dim-coord. is_dimcoord = cube is not None and element in cube.dim_coords @@ -1878,7 +1883,7 @@ def _create_generic_cf_array_var( # Create the CF-netCDF variable. cf_var = self._dataset.createVariable( cf_name, - data.dtype.newbyteorder("="), + dtype, element_dims, fill_value=fill_value, **compression_kwargs, @@ -2330,19 +2335,12 @@ def _create_cf_data_variable( # be removed. # Get the values in a form which is valid for the file format. is_dataless = cube.is_dataless() - if is_dataless: - data = None - else: - data = self._ensure_valid_dtype(cube.core_data(), "cube", cube) - if is_dataless: - # The variable must have *some* dtype, and it must be maskable - dtype = self._DATALESS_DTYPE - fill_value = self._DATALESS_FILLVALUE - elif not packing: - dtype = data.dtype.newbyteorder("=") - else: - if isinstance(packing, dict): + if not is_dataless: + data = self._ensure_valid_dtype(cube.core_data(), "cube", cube) + if not packing: + dtype = data.dtype.newbyteorder("=") + elif isinstance(packing, dict): if "dtype" not in packing: msg = "The dtype attribute is required for packing." 
raise ValueError(msg) diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index 8f29fcdcd5..c8bba94671 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -3,9 +3,8 @@ import pytest import iris - -iris.FUTURE.save_split_attrs = True - +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube NX, N_STRLEN = 3, 64 TEST_STRINGS = ["Münster", "London", "Amsterdam"] @@ -17,7 +16,13 @@ TEST_COORD_VALS[-1] = "Xsandwich" # makes the max coord strlen same as data one -def convert_chararray(string_array_1d, maxlen, encoding="utf-8"): +@pytest.fixture(scope="module", autouse=True) +def enable_split_attrs(): + with iris.FUTURE.context(save_split_attrs=True): + yield + + +def convert_strings_to_chararray(string_array_1d, maxlen, encoding="utf-8"): bbytes = [text.encode(encoding) for text in string_array_1d] pad = b"\0" * maxlen bbytes = [(x + pad)[:maxlen] for x in bbytes] @@ -97,6 +102,23 @@ def make_testfile(filepath, chararray, coordarray, encoding_str=None): v.coordinates = coords_str +def make_testcube( + dataarray, + coordarray, # for now, these are always *string* arrays + encoding_str: str | None = None, +): + cube = Cube(dataarray, var_name="v") + cube.add_dim_coord(DimCoord(np.arange(NX), var_name="x"), 0) + if encoding_str is not None: + cube.attributes["_Encoding"] = encoding_str + if INCLUDE_COORD: + co_x = AuxCoord(coordarray, var_name="v_co") + if encoding_str is not None: + co_x.attributes["_Encoding"] = encoding_str + cube.add_aux_coord(co_x, 0) + return cube + + def show_result(filepath): from pp_utils import ncdump @@ -115,12 +137,13 @@ def show_result(filepath): # print(repr(v[:])) print("\nAs iris cube..") try: + iris.loading.LOAD_PROBLEMS.reset() cube = iris.load_cube(filepath) print(cube) - if iris.loading.LOAD_PROBLEMS._problems: + if iris.loading.LOAD_PROBLEMS.problems: 
print(iris.loading.LOAD_PROBLEMS) print( - "\n".join(iris.loading.LOAD_PROBLEMS._problems[0].stack_trace.format()) + "\n".join(iris.loading.LOAD_PROBLEMS.problems[0].stack_trace.format()) ) print("-data-") print(repr(cube.data)) @@ -136,27 +159,54 @@ def show_result(filepath): print(repr(err)) -# tsts = (None, "ascii", "utf-8", "utf-32",) +tsts = ( + None, + "ascii", + "utf-8", + "utf-32", +) # tsts = ("utf-8",) # tsts = ("utf-8", "utf-32",) # tsts = ("utf-32",) -tsts = ("utf-8", "ascii", "utf-8") +# tsts = ("utf-8", "ascii", "utf-8") @pytest.mark.parametrize("encoding", tsts) -def test_encodings(encoding): +def test_load_encodings(encoding): # small change print(f"\n=========\nTesting encoding: {encoding}") filepath = f"tmp_{str(encoding)}.nc" do_as = encoding if encoding != "utf-32": do_as = "utf-8" - TEST_CHARARRAY = convert_chararray(TEST_STRINGS, N_STRLEN, encoding=do_as) - TEST_COORDARRAY = convert_chararray(TEST_COORD_VALS, N_STRLEN, encoding=do_as) + TEST_CHARARRAY = convert_strings_to_chararray( + TEST_STRINGS, N_STRLEN, encoding=do_as + ) + TEST_COORDARRAY = convert_strings_to_chararray( + TEST_COORD_VALS, N_STRLEN, encoding=do_as + ) make_testfile(filepath, TEST_CHARARRAY, TEST_COORDARRAY, encoding_str=encoding) show_result(filepath) +@pytest.mark.parametrize("encoding", tsts) +def test_save_encodings(encoding): + cube = make_testcube( + dataarray=TEST_STRINGS, coordarray=TEST_COORD_VALS, encoding_str=encoding + ) + print(cube) + filepath = f"tmp_save_{str(encoding)}.nc" + if encoding == "ascii": + with pytest.raises( + UnicodeEncodeError, + match="'ascii' codec can't encode character.*not in range", + ): + iris.save(cube, filepath) + else: + iris.save(cube, filepath) + show_result(filepath) + + # @pytest.mark.parametrize("ndim", [1, 2]) # def test_convert_bytes_to_strings(ndim: int): # if ndim == 1: From 68d84465bc45ed3f9702e216a2834332b16d9480 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 29 Oct 2025 15:21:31 +0000 Subject: [PATCH 41/77] 
Further fix to attribute ordering. --- lib/iris/fileformats/netcdf/saver.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 2bf94cb896..1298ce1a9e 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1692,25 +1692,25 @@ def _set_cf_var_attributes(self, cf_var, element): # whereas for data-variables (aka Cubes) it is the other way around. # Needed now that this routine is also called from _create_cf_data_variable. # TODO: when we can break things, rationalise these to be the same. - def add_units(): + def add_units_attr(): if cf_units.as_unit(units_str).is_udunits(): _setncattr(cf_var, "units", units_str) - def add_stdname(): + def add_names_attrs(): standard_name = element.standard_name if standard_name is not None: _setncattr(cf_var, "standard_name", standard_name) + long_name = element.long_name + if long_name is not None: + _setncattr(cf_var, "long_name", long_name) + if isinstance(element, Cube): - add_stdname() - add_units() + add_names_attrs() + add_units_attr() else: - add_units() - add_stdname() - - long_name = element.long_name - if long_name is not None: - _setncattr(cf_var, "long_name", long_name) + add_units_attr() + add_names_attrs() # Add the CF-netCDF calendar attribute. if element.units.calendar: From 4054a74742f4ad4576a89d3430972a0ab2afeca8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 29 Oct 2025 18:08:35 +0000 Subject: [PATCH 42/77] Fixes for data packing. 
--- lib/iris/fileformats/netcdf/saver.py | 64 ++++++++++------------------ 1 file changed, 22 insertions(+), 42 deletions(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 1298ce1a9e..e9eec20649 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1742,6 +1742,7 @@ def _create_generic_cf_array_var( element_dims=None, fill_value=None, compression_kwargs=None, + packing_controls: dict | None = None, is_dataless=False, ): """Create theCF-netCDF variable given dimensional_metadata. @@ -1869,7 +1870,10 @@ def _create_generic_cf_array_var( else: element_type = type(element).__name__ data = self._ensure_valid_dtype(data, element_type, element) - dtype = data.dtype.newbyteorder("=") + if not packing_controls: + dtype = data.dtype.newbyteorder("=") + else: + dtype = packing_controls["dtype"] # Check if this is a dim-coord. is_dimcoord = cube is not None and element in cube.dim_coords @@ -1902,6 +1906,10 @@ def _create_generic_cf_array_var( # Add the data to the CF-netCDF variable. if not is_dataless: + if packing_controls: + # We must set packing attributes (if any), before assigning values. + for key, value in packing_controls["attributes"]: + _setncattr(cf_var, key, value) self._lazy_stream_data(data=data, cf_var=cf_var) # Add names + units @@ -2336,11 +2344,10 @@ def _create_cf_data_variable( # Get the values in a form which is valid for the file format. is_dataless = cube.is_dataless() - if not is_dataless: + packing_controls = None + if packing and not is_dataless: data = self._ensure_valid_dtype(cube.core_data(), "cube", cube) - if not packing: - dtype = data.dtype.newbyteorder("=") - elif isinstance(packing, dict): + if isinstance(packing, dict): if "dtype" not in packing: msg = "The dtype attribute is required for packing." 
raise ValueError(msg) @@ -2378,26 +2385,14 @@ def _create_cf_data_variable( else: add_offset = cmin + 2 ** (n - 1) * scale_factor - def set_packing_ncattrs(cfvar): - """Set netCDF packing attributes. - - NOTE: cfvar needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. + packing_controls = { + "dtype": dtype, + "attributes": [ + ("scale_factor", scale_factor), + ("add_offset", add_offset), + ], + } - """ - assert hasattr(cfvar, "THREAD_SAFE_FLAG") - if packing: - if scale_factor: - _setncattr(cfvar, "scale_factor", scale_factor) - if add_offset: - _setncattr(cfvar, "add_offset", add_offset) - - # cf_name = self._get_element_variable_name(cube_or_mesh=None, element=cube) - # while cf_name in self._dataset.variables: - # cf_name = self._increment_name(cf_name) - # - # cf_var = self._dataset.createVariable( - # cf_name, dtype, dimension_names, fill_value=fill_value, **kwargs - # ) # Create the cube CF-netCDF data variable with data payload. cf_name = self._create_generic_cf_array_var( cube, @@ -2406,28 +2401,13 @@ def set_packing_ncattrs(cfvar): element_dims=dimension_names, fill_value=fill_value, compression_kwargs=kwargs, + packing_controls=packing_controls, is_dataless=is_dataless, ) cf_var = self._dataset.variables[cf_name] - if not is_dataless: - set_packing_ncattrs(cf_var) - - # if cube.standard_name: - # _setncattr(cf_var, "standard_name", cube.standard_name) - # - # if cube.long_name: - # _setncattr(cf_var, "long_name", cube.long_name) - # - # if cube.units.is_udunits(): - # _setncattr(cf_var, "units", str(cube.units)) - # - # # Add the CF-netCDF calendar attribute. 
- # if cube.units.calendar: - # _setncattr(cf_var, "calendar", cube.units.calendar) - - # Set attributes: NB this part is cube-specific (not the same for components) - # - therefore 'set_cf_var_attributes' doesn't set attributes if element is a Cube + # Set general attrs: NB this part is cube-specific (not the same for components) + # - so 'set_cf_var_attributes' *doesn't* set these, if element is a Cube if iris.FUTURE.save_split_attrs: attr_names = cube.attributes.locals.keys() else: From 3a749df5324663ca6ed4dccd17d5a60ae04e8ab8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 7 Dec 2025 00:42:34 +0000 Subject: [PATCH 43/77] Latest test-chararrays. --- .../integration/netcdf/test_chararrays.py | 61 +++++++++++-------- 1 file changed, 36 insertions(+), 25 deletions(-) diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index c8bba94671..0eb211c8b0 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -1,10 +1,19 @@ -import netCDF4 as nc +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Integration tests for string data handling.""" + +import subprocess + import numpy as np import pytest import iris from iris.coords import AuxCoord, DimCoord from iris.cube import Cube +from iris.fileformats.netcdf import _thread_safe_nc +from iris.tests import env_bin_path NX, N_STRLEN = 3, 64 TEST_STRINGS = ["Münster", "London", "Amsterdam"] @@ -16,6 +25,7 @@ TEST_COORD_VALS[-1] = "Xsandwich" # makes the max coord strlen same as data one +# Ensure all tests run with "split attrs" turned on. 
@pytest.fixture(scope="module", autouse=True) def enable_split_attrs(): with iris.FUTURE.context(save_split_attrs=True): @@ -59,7 +69,8 @@ def convert_bytesarray_to_strings( def make_testfile(filepath, chararray, coordarray, encoding_str=None): - with nc.Dataset(filepath, "w") as ds: + ds = _thread_safe_nc.DatasetWrapper(filepath, "w") + try: ds.createDimension("x", NX) ds.createDimension("nstr", N_STRLEN) vx = ds.createVariable("x", int, dimensions=("x")) @@ -100,6 +111,8 @@ def make_testfile(filepath, chararray, coordarray, encoding_str=None): if INCLUDE_NUMERIC_AUXCOORD: coords_str += " v_num" v.coordinates = coords_str + finally: + ds.close() def make_testcube( @@ -119,12 +132,19 @@ def make_testcube( return cube -def show_result(filepath): - from pp_utils import ncdump +NCDUMP_PATHSTR = str(env_bin_path("ncdump")) + + +def ncdump(nc_path: str, *args): + """Call ncdump to print a dump of a file.""" + call_args = [NCDUMP_PATHSTR, nc_path] + list(*args) + subprocess.run(call_args, check=True) + +def show_result(filepath): print(f"File {filepath}") print("NCDUMP:") - ncdump(filepath, "") + ncdump(filepath) # with nc.Dataset(filepath, "r") as ds: # v = ds.variables["v"] # print("\n----\nNetcdf data readback (basic)") @@ -159,6 +179,13 @@ def show_result(filepath): print(repr(err)) +@pytest.fixture(scope="session") +def save_dir(tmp_path_factory): + return tmp_path_factory.mktemp("save_files") + + +# TODO: the tests don't test things properly yet, they just exercise the code and print +# things for manual debugging. 
tsts = ( None, "ascii", @@ -172,10 +199,10 @@ def show_result(filepath): @pytest.mark.parametrize("encoding", tsts) -def test_load_encodings(encoding): +def test_load_encodings(encoding, save_dir): # small change print(f"\n=========\nTesting encoding: {encoding}") - filepath = f"tmp_{str(encoding)}.nc" + filepath = save_dir / f"tmp_load_{str(encoding)}.nc" do_as = encoding if encoding != "utf-32": do_as = "utf-8" @@ -190,12 +217,12 @@ def test_load_encodings(encoding): @pytest.mark.parametrize("encoding", tsts) -def test_save_encodings(encoding): +def test_save_encodings(encoding, save_dir): cube = make_testcube( dataarray=TEST_STRINGS, coordarray=TEST_COORD_VALS, encoding_str=encoding ) print(cube) - filepath = f"tmp_save_{str(encoding)}.nc" + filepath = save_dir / f"tmp_save_{str(encoding)}.nc" if encoding == "ascii": with pytest.raises( UnicodeEncodeError, @@ -205,19 +232,3 @@ def test_save_encodings(encoding): else: iris.save(cube, filepath) show_result(filepath) - - -# @pytest.mark.parametrize("ndim", [1, 2]) -# def test_convert_bytes_to_strings(ndim: int): -# if ndim == 1: -# source = convert_strings_to_chararray(TEST_STRINGS, 16) -# elif ndim == 2: -# source = np.stack([ -# convert_strings_to_chararray(TEST_STRINGS, 16), -# convert_strings_to_chararray(TEST_COORD_VALS, 16), -# ]) -# else: -# raise ValueError(f"Unexpected param ndim={ndim}.") -# # convert the strings to bytes -# result = convert_bytesarray_to_strings(source) -# print(result) From 6180d94fb0a8dd9d6898c5ec49f668d2f0021eb7 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 7 Dec 2025 10:43:18 +0000 Subject: [PATCH 44/77] Fix search+replace error. 
--- lib/iris/fileformats/netcdf/saver.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index e9eec20649..5dceff7e68 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1583,8 +1583,8 @@ def _get_element_variable_name(self, cube_or_mesh, element): name = element.standard_name or element.long_name if not name or set(name).intersection(string.whitespace): # We need to invent a name, based on its associated dimensions. - if cube is not None and cube.elements(element): - # It is a regular cube elementinate. + if cube is not None and cube.coords(element): + # It is a regular cube coordinate. # Auto-generate a name based on the dims. name = "" for dim in cube.coord_dims(element): From d689ae74a47cfea827fbb56466a9eed22260b452 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 14 Jan 2026 13:18:07 +0000 Subject: [PATCH 45/77] Tiny fix in crucial place! (merge error?). --- lib/iris/fileformats/netcdf/saver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 5dceff7e68..b2e46c6188 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1860,7 +1860,7 @@ def _create_generic_cf_array_var( new_data[index_slice] = list( "%- *s" % (string_dimension_depth, data[index]) ) - data = new_data + data = new_data else: # A normal (numeric) variable. # ensure a valid datatype for the file format. From 04aae9060e855e977dc65d45d0279bf766601487 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 14 Jan 2026 15:33:04 +0000 Subject: [PATCH 46/77] Extra mock property prevents weird test crashes. 
--- .../helpers/test_build_and_add_auxiliary_coordinate.py | 9 +++++++-- .../helpers/test_build_and_add_dimension_coordinate.py | 1 + 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py index a6d613eb9a..766f23fe43 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py @@ -41,7 +41,11 @@ def _setup(self, mocker): self.engine = mocker.Mock( cube=mocker.Mock(), - cf_var=mocker.Mock(dimensions=("foo", "bar"), cf_data=cf_data), + cf_var=mocker.Mock( + dimensions=("foo", "bar"), + cf_data=cf_data, + dtype=np.int32, + ), filename="DUMMY", cube_parts=dict(coordinates=[]), ) @@ -172,7 +176,7 @@ def _setup(self, mocker): self.engine = mocker.Mock( cube=mocker.Mock(), - cf_var=mocker.Mock(dimensions=("foo", "bar")), + cf_var=mocker.Mock(dimensions=("foo", "bar"), dtype=np.int32), filename="DUMMY", cube_parts=dict(coordinates=[]), ) @@ -243,6 +247,7 @@ def _setup(self, mocker): self.engine = mocker.Mock( cube=mocker.Mock(), cf_var=mocker.Mock(dimensions=("foo", "bar")), + dtype=np.float32, filename="DUMMY", cube_parts=dict(coordinates=[]), ) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py index 9cf983d0a3..f52d18a076 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py @@ -25,6 +25,7 @@ def _mixin_setup(self, mocker): self.engine = mocker.Mock( cube=mocker.Mock(), cf_var=mocker.Mock(dimensions=("foo", 
"bar")), + dtype=np.int32, filename="DUMMY", cube_parts=dict(coordinates=[]), ) From 6a6d9782290a6e9d6ff91b3dbc89e52302ff57af Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 14 Jan 2026 18:00:24 +0000 Subject: [PATCH 47/77] Fix another mock problem. --- .../fileformats/netcdf/saver/test_Saver.py | 50 +++++++++++-------- 1 file changed, 29 insertions(+), 21 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index a2d2ff71ee..5231632252 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -257,9 +257,6 @@ def test_compression(self, mocker, tmp_path): ) cube.add_ancillary_variable(anc_coord, data_dims=data_dims) - patch = mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" - ) compression_kwargs = { "complevel": 9, "fletcher32": True, @@ -267,9 +264,15 @@ def test_compression(self, mocker, tmp_path): "zlib": True, } - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4", compute=False) as saver: - saver.write(cube, **compression_kwargs) + with self.temp_filename(suffix=".nc") as nc_path: + with Saver(nc_path, "NETCDF4", compute=False) as saver: + patch = mocker.patch( + "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + # Use 'wraps' to allow the patched methods to function as normal + # - the patch object just acts as a 'spy' on its calls. 
+ wraps=saver._dataset.createVariable, + ) + saver.write(cube, **compression_kwargs) assert 5 == patch.call_count result = self._filter_compression_calls(patch, compression_kwargs) @@ -290,9 +293,6 @@ def test_non_compression__shape(self, mocker, tmp_path): ) cube.add_ancillary_variable(anc_coord, data_dims=data_dims[1]) - patch = mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" - ) compression_kwargs = { "complevel": 9, "fletcher32": True, @@ -300,13 +300,19 @@ def test_non_compression__shape(self, mocker, tmp_path): "zlib": True, } - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4", compute=False) as saver: - saver.write(cube, **compression_kwargs) + with self.temp_filename(suffix=".nc") as nc_path: + with Saver(nc_path, "NETCDF4", compute=False) as saver: + patch = mocker.patch( + "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + # Use 'wraps' to allow the patched methods to function as normal + # - the patch object just acts as a 'spy' on its calls. 
+ wraps=saver._dataset.createVariable, + ) + saver.write(cube, **compression_kwargs) assert 5 == patch.call_count result = self._filter_compression_calls( - patch, compression_kwargs, mismatch=True + createvar_spy, compression_kwargs, mismatch=True ) assert 4 == len(result) # the aux coord and ancil variable are not compressed due to shape, and @@ -323,10 +329,6 @@ def test_non_compression__dtype(self, mocker, tmp_path): aux_coord = AuxCoord(data, var_name="non_compress_aux", units="1") cube.add_aux_coord(aux_coord, data_dims=data_dims) - patch = mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" - ) - patch.return_value = mocker.MagicMock(dtype=np.dtype("S1")) compression_kwargs = { "complevel": 9, "fletcher32": True, @@ -334,13 +336,19 @@ def test_non_compression__dtype(self, mocker, tmp_path): "zlib": True, } - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4", compute=False) as saver: - saver.write(cube, **compression_kwargs) + with self.temp_filename(suffix=".nc") as nc_path: + with Saver(nc_path, "NETCDF4", compute=False) as saver: + patch = self.patch( + "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + # Use 'wraps' to allow the patched methods to function as normal + # - the patch object just acts as a 'spy' on its calls. + wraps=saver._dataset.createVariable, + ) + saver.write(cube, **compression_kwargs) assert 4 == patch.call_count result = self._filter_compression_calls( - patch, compression_kwargs, mismatch=True + createvar_spy, compression_kwargs, mismatch=True ) assert 3 == len(result) # the aux coord is not compressed due to its string dtype, and From 1e2cbbeab000b0d312b69bd1133b4a2c871dd5e3 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 31 Oct 2025 15:38:04 +0000 Subject: [PATCH 48/77] Initial dataset wrappers. Rename; addin parts of old investigation; add temporary notes. 
--- .../netcdf/_bytecoding_datasets.py | 182 ++++++++++++++++++ .../fileformats/netcdf/_thread_safe_nc.py | 15 +- .../fileformats/netcdf/encoding_tests.txt | 18 ++ .../netcdf/test_bytecoding_datasets.py | 14 ++ 4 files changed, 223 insertions(+), 6 deletions(-) create mode 100644 lib/iris/fileformats/netcdf/_bytecoding_datasets.py create mode 100644 lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt create mode 100644 lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py new file mode 100644 index 0000000000..41e801d103 --- /dev/null +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -0,0 +1,182 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Module providing to netcdf datasets with automatic character encoding. + +The requirement is to convert numpy fixed-width unicode arrays on writing to a variable +which is declared as a byte (character) array with a fixed-length string dimension. + +Numpy unicode string arrays are ones with dtypes of the form "U". +Numpy character variables have the dtype "S1", and map to a fixed-length "string +dimension". + +In principle, netCDF4 already performs these translations, but in practice current +releases are not functional for anything other than "ascii" encoding -- including UTF-8, +which is the most obvious and desirable "general" solution. + +There is also the question of whether we should like to implement UTF-8 as our default. +Current discussions on this are inconclusive and neither CF conventions nor the NetCDF +User Guide are definite on what possible values of "_Encoding" are, or what the effective +default is, even though they do both mention the "_Encoding" attribute as a potential +way to handle the issue. 
+ +Because of this, we interpret as follows: + * when reading bytes : in the absence of an "_Encoding" attribute, we will attempt to + decode bytes as UTF-8 + * when writing strings : in the absence of an "_Encoding" attribute (on the Iris + cube or coord object), we will attempt to encode data with "ascii" : If this fails, + it raise an error prompting the user to supply an "_Encoding" attribute. + +Where an "_Encoding" attribute is provided to Iris, we will honour it where possible, +identifying with "codecs.lookup" : This means we support the encodings in the Python +Standard Library, and the name aliases which it recognises. + +See: + +* known problems https://github.com/Unidata/netcdf4-python/issues/1440 +* suggestions for how this "ought" to work, discussed in the netcdf-c library + * https://github.com/Unidata/netcdf-c/issues/402 + +""" + +import codecs +import warnings + +import numpy as np + +from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper, VariableWrapper + + +def decode_bytesarray_to_stringarray( + byte_array: np.ndarray, encoding="utf-8", string_width: int | None = None +) -> np.ndarray: + """Convert an array of bytes to an array of strings, with one less dimension. + + N.B. for now at least, we assume the string dim is **always the last one**. + If 'string_width' is not given, it is set to the final dimension of 'byte_array'. + """ + bytes_shape = byte_array.shape + var_shape = bytes_shape[:-1] + if string_width is None: + string_width = bytes_shape[-1] + string_dtype = f"U{string_width}" + result = np.empty(var_shape, dtype=string_dtype) + for ndindex in np.ndindex(var_shape): + element_bytes = byte_array[ndindex] + bytes = b"".join([b if b else b"\0" for b in element_bytes]) + string = bytes.decode(encoding) + result[ndindex] = string + return result + + +def encode_stringarray_as_bytearray( + data: np.ndarray, encoding=None, string_dimension_length: int | None = None +) -> np.ndarray: + """Encode strings as bytearray. 
+ + Note: if 'string_dimension_length' is not given (None), it is set to the longest + encoded bytes element. If 'string_dimension_length' is specified, the last array + dimension is set to this and content strings are truncated or extended as required. + """ + element_shape = data.shape + max_length = 1 # this is a MINIMUM - i.e. not zero! + data_elements = np.zeros(element_shape, dtype=object) + for index in np.ndindex(element_shape): + data_element = data[index].encode(encoding=encoding) + element_length = len(data_element) + data_elements[index] = data_element + if element_length > max_length: + max_length = element_length + + if string_dimension_length is None: + string_dimension_length = max_length + + # We already encoded all the strings, but stored them in an object-array as + # we didn't yet know the fixed byte-length to convert to. + # Now convert to a fixed-width byte array with an extra string-length dimension + result = np.zeros(element_shape + (string_dimension_length,), dtype="S1") + right_pad = b"\0" * string_dimension_length + for index in np.ndindex(element_shape): + bytes = data_elements[index] + bytes = (bytes + right_pad)[:string_dimension_length] + result[index] = [bytes[i : i + 1] for i in range(string_dimension_length)] + + return result + + +DEFAULT_ENCODING = "utf-8" + + +class EncodedVariable(VariableWrapper): + """A variable wrapper that translates variable data according to byte encodings.""" + + def __getitem__(self, keys): + if self.is_chardata(): + super().set_auto_chartostring(False) + + data = super().__getitem__(keys) + + if self.is_chardata(): + encoding = self.get_byte_encoding() + strlen = self.get_string_length() + data = decode_bytesarray_to_stringarray(data, encoding, strlen) + + return data + + def __setitem__(self, keys, data): + if self.is_chardata(): + encoding = self.get_byte_encoding() + strlen = self.get_string_length() + if encoding is not None: + data = encode_stringarray_as_bytearray(data, encoding, strlen) + else: 
+ try: + # Check if all characters are valid ascii + data = encode_stringarray_as_bytearray(data, "ascii", strlen) + except UnicodeEncodeError: + data = encode_stringarray_as_bytearray( + data, DEFAULT_ENCODING, strlen + ) + # As this was necessary, record the new encoding on the variable + self.set_ncattr("_Encoding", DEFAULT_ENCODING) + msg = ( + f"Non-ascii data written to label variable {self.name}. " + f"Applied {DEFAULT_ENCODING!r} encoding, " + f"and set attribute _Encoding={DEFAULT_ENCODING!r}." + ) + warnings.warn(msg, UserWarning) + + super().set_auto_chartostring(False) + + super().__setitem__(keys, data) + + def is_chardata(self): + return np.issubdtype(self.dtype, np.bytes_) + + def get_encoding(self) -> str | None: + """Get the effective byte encoding to be used for this variable.""" + # utf-8 is a reasonable "safe" default, equivalent to 'ascii' for ascii data + result = getattr(self, "_Encoding", None) + if result is not None: + try: + # Accept + normalise naming of encodings + result = codecs.lookup(result).name + # NOTE: if encoding does not suit data, errors can occur. + # For example, _Encoding = "ascii", with non-ascii content. + except LookupError: + # Replace some invalid setting with "safe"(ish) fallback. 
+ msg = f"Unknown encoding for variable {self.name!r}: {result!r}" + warnings.warn(msg, UserWarning) + + return result + + def get_string_length(self): + """Return the string-length defined for this variable (or None).""" + return getattr(self, "iris_string_length", None) + + +class EncodedDataset(DatasetWrapper): + """A specialised DatasetWrapper whose variables perform byte encoding.""" + + VAR_WRAPPER_CLS = EncodedVariable diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 33183ef0fa..46b8609bb7 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -159,6 +159,9 @@ class GroupWrapper(_ThreadSafeWrapper): CONTAINED_CLASS = netCDF4.Group # Note: will also accept a whole Dataset object, but that is OK. _DUCKTYPE_CHECK_PROPERTIES = ["createVariable"] + # Class to use when creating variable wrappers (default=VariableWrapper). + # - needed to support _byte_encoded_data.EncodedDataset. + VAR_WRAPPER_CLS = VariableWrapper # All Group API that returns Dimension(s) is wrapped to instead return # DimensionWrapper(s). @@ -203,7 +206,7 @@ def variables(self) -> typing.Dict[str, VariableWrapper]: """ with _GLOBAL_NETCDF4_LOCK: variables_ = self._contained_instance.variables - return {k: VariableWrapper.from_existing(v) for k, v in variables_.items()} + return {k: self.VAR_WRAPPER_CLS.from_existing(v) for k, v in variables_.items()} def createVariable(self, *args, **kwargs) -> VariableWrapper: """Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. 
@@ -216,7 +219,7 @@ def createVariable(self, *args, **kwargs) -> VariableWrapper: """ with _GLOBAL_NETCDF4_LOCK: new_variable = self._contained_instance.createVariable(*args, **kwargs) - return VariableWrapper.from_existing(new_variable) + return self.VAR_WRAPPER_CLS.from_existing(new_variable) def get_variables_by_attributes( self, *args, **kwargs @@ -234,7 +237,7 @@ def get_variables_by_attributes( variables_ = list( self._contained_instance.get_variables_by_attributes(*args, **kwargs) ) - return [VariableWrapper.from_existing(v) for v in variables_] + return [self.VAR_WRAPPER_CLS.from_existing(v) for v in variables_] # All Group API that returns Group(s) is wrapped to instead return # GroupWrapper(s). @@ -252,7 +255,7 @@ def groups(self): """ with _GLOBAL_NETCDF4_LOCK: groups_ = self._contained_instance.groups - return {k: GroupWrapper.from_existing(v) for k, v in groups_.items()} + return {k: self.__class__.from_existing(v) for k, v in groups_.items()} @property def parent(self): @@ -268,7 +271,7 @@ def parent(self): """ with _GLOBAL_NETCDF4_LOCK: parent_ = self._contained_instance.parent - return GroupWrapper.from_existing(parent_) + return self.__class__.from_existing(parent_) def createGroup(self, *args, **kwargs): """Call createGroup() from netCDF4.Group/Dataset. 
@@ -281,7 +284,7 @@ def createGroup(self, *args, **kwargs): """ with _GLOBAL_NETCDF4_LOCK: new_group = self._contained_instance.createGroup(*args, **kwargs) - return GroupWrapper.from_existing(new_group) + return self.__class__.from_existing(new_group) class DatasetWrapper(GroupWrapper): diff --git a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt new file mode 100644 index 0000000000..bab04aa0c4 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt @@ -0,0 +1,18 @@ + +forms in files: + * char chardata(dim1, dim2, strlen_xx); # char data + * string data(dim1, dim2); + +forms in numpy: + * np.ndarray(dtype="S1") # char data + * np.ndarray(dtype="Snn") # char data + * np.ndarray(dtype="Unn") # strings + * np.ndarray(dtype="") + +possibilities in createVariable: +""" + The datatype can be a numpy datatype object, or a string that describes a numpy dtype object ... + datatype can also be a CompoundType instance (for a structured, or compound array), a VLType instance (for a variable-length array), +** or the python str builtin (for a variable-length string array). +** Numpy string and unicode datatypes with length greater than one are aliases for str. +""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py new file mode 100644 index 0000000000..8b449c5912 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -0,0 +1,14 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Unit tests for :class:`iris.fileformats.netcdf._bytecoding_datasets` module.""" + +# import numpy as np +# import pytest +# +# from iris.fileformats.netcdf._bytecoding_datasets import EncodedDataset + + +class TestEncodedDataset: + """Test how GRIB_PARAM attributes convert to strings for storage in netcdf files.""" From e8f0d39f2da14b18c887cf596673c396277cbff0 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 3 Dec 2025 18:59:43 +0000 Subject: [PATCH 49/77] Various notes, choices + changes: Beginnings of encoded-dataset testing. --- .../netcdf/_bytecoding_datasets.py | 155 ++++++++---- .../integration/netcdf/test_chararrays.py | 7 +- .../fileformats/netcdf/encoding_tests.txt | 164 +++++++++++++ .../netcdf/test_bytecoding_datasets.py | 223 +++++++++++++++++- .../unit/fileformats/netcdf/test_nc_dtypes.py | 96 ++++++++ 5 files changed, 595 insertions(+), 50 deletions(-) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/test_nc_dtypes.py diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 41e801d103..353f14d538 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -41,6 +41,8 @@ """ import codecs +import contextlib +import threading import warnings import numpy as np @@ -49,17 +51,18 @@ def decode_bytesarray_to_stringarray( - byte_array: np.ndarray, encoding="utf-8", string_width: int | None = None + byte_array: np.ndarray, encoding: str, string_width: int ) -> np.ndarray: """Convert an array of bytes to an array of strings, with one less dimension. N.B. for now at least, we assume the string dim is **always the last one**. If 'string_width' is not given, it is set to the final dimension of 'byte_array'. 
""" + if np.ma.isMaskedArray(byte_array): + # netCDF4-python sees zeros as "missing" -- we don't need or want that + byte_array = byte_array.data bytes_shape = byte_array.shape var_shape = bytes_shape[:-1] - if string_width is None: - string_width = bytes_shape[-1] string_dtype = f"U{string_width}" result = np.empty(var_shape, dtype=string_dtype) for ndindex in np.ndindex(var_shape): @@ -70,16 +73,25 @@ def decode_bytesarray_to_stringarray( return result -def encode_stringarray_as_bytearray( +# +# TODO: remove? +# this older version is "overly flexible", less efficient and not needed here. +# +def flexi_encode_stringarray_as_bytearray( data: np.ndarray, encoding=None, string_dimension_length: int | None = None ) -> np.ndarray: """Encode strings as bytearray. Note: if 'string_dimension_length' is not given (None), it is set to the longest - encoded bytes element. If 'string_dimension_length' is specified, the last array + encoded bytes element, **OR** the dtype size, if that is greater. + If 'string_dimension_length' is specified, the last array dimension is set to this and content strings are truncated or extended as required. """ + if np.ma.isMaskedArray(data): + # netCDF4-python sees zeros as "missing" -- we don't need or want that + data = data.data element_shape = data.shape + # Encode all the strings + see which is longest max_length = 1 # this is a MINIMUM - i.e. not zero! data_elements = np.zeros(element_shape, dtype=object) for index in np.ndindex(element_shape): @@ -90,10 +102,15 @@ def encode_stringarray_as_bytearray( max_length = element_length if string_dimension_length is None: + # If the string length was not specified, it is the maximum encoded length + # (n-bytes), **or** the dtype string-length, if greater. string_dimension_length = max_length + array_string_length = int(str(data.dtype)[2:]) # Yuck. No better public way? 
+ if array_string_length > string_dimension_length: + string_dimension_length = array_string_length - # We already encoded all the strings, but stored them in an object-array as - # we didn't yet know the fixed byte-length to convert to. + # We maybe *already* encoded all the strings above, but stored them in an + # object-array as we didn't yet know the fixed byte-length to convert to. # Now convert to a fixed-width byte array with an extra string-length dimension result = np.zeros(element_shape + (string_dimension_length,), dtype="S1") right_pad = b"\0" * string_dimension_length @@ -105,58 +122,98 @@ def encode_stringarray_as_bytearray( return result -DEFAULT_ENCODING = "utf-8" +def encode_stringarray_as_bytearray( + data: np.ndarray, encoding: str, string_dimension_length: int +) -> np.ndarray: + """Encode strings as a bytes array.""" + element_shape = data.shape + result = np.zeros(element_shape + (string_dimension_length,), dtype="S1") + right_pad = b"\0" * string_dimension_length + for index in np.ndindex(element_shape): + bytes = data[index].encode(encoding=encoding) + # It's all a bit nasty ... 
+ bytes = (bytes + right_pad)[:string_dimension_length] + result[index] = [bytes[i : i + 1] for i in range(string_dimension_length)] + + return result + + +class NetcdfStringDecodeSetting(threading.local): + def __init__(self, perform_encoding: bool = True): + self.set(perform_encoding) + + def set(self, perform_encoding: bool): + self.perform_encoding = perform_encoding + + def __bool__(self): + return self.perform_encoding + + @contextlib.contextmanager + def context(self, perform_encoding: bool): + old_setting = self.perform_encoding + self.perform_encoding = perform_encoding + yield + self.perform_encoding = old_setting + + +DECODE_TO_STRINGS_ON_READ = NetcdfStringDecodeSetting() +DEFAULT_READ_ENCODING = "utf-8" +DEFAULT_WRITE_ENCODING = "ascii" class EncodedVariable(VariableWrapper): """A variable wrapper that translates variable data according to byte encodings.""" def __getitem__(self, keys): - if self.is_chardata(): - super().set_auto_chartostring(False) + if self._is_chardata(): + # N.B. we never need to UNset this, as we totally control it + self._contained_instance.set_auto_chartostring(False) data = super().__getitem__(keys) - if self.is_chardata(): - encoding = self.get_byte_encoding() - strlen = self.get_string_length() - data = decode_bytesarray_to_stringarray(data, encoding, strlen) + if DECODE_TO_STRINGS_ON_READ and self._is_chardata(): + encoding = self._get_encoding() or DEFAULT_READ_ENCODING + # N.B. typically, read encoding default is UTF-8 --> a "usually safe" choice + strlen = self._get_string_length() + try: + data = decode_bytesarray_to_stringarray(data, encoding, strlen) + except UnicodeDecodeError as err: + msg = ( + f"Character data in variable {self.name!r} could not be decoded" + f"with the {encoding!r} encoding. This can be fixed by setting the " + "variable '_Encoding' attribute to suit the content." 
+ ) + raise ValueError(msg) from err return data def __setitem__(self, keys, data): - if self.is_chardata(): - encoding = self.get_byte_encoding() - strlen = self.get_string_length() - if encoding is not None: - data = encode_stringarray_as_bytearray(data, encoding, strlen) - else: + if self._is_chardata(): + # N.B. we never need to UNset this, as we totally control it + self._contained_instance.set_auto_chartostring(False) + + encoding = self._get_encoding() or DEFAULT_WRITE_ENCODING + # N.B. typically, write encoding default is "ascii" --> fails bad content + if data.dtype.kind == "U": try: - # Check if all characters are valid ascii - data = encode_stringarray_as_bytearray(data, "ascii", strlen) - except UnicodeEncodeError: - data = encode_stringarray_as_bytearray( - data, DEFAULT_ENCODING, strlen - ) - # As this was necessary, record the new encoding on the variable - self.set_ncattr("_Encoding", DEFAULT_ENCODING) + strlen = self._get_string_length() + data = encode_stringarray_as_bytearray(data, encoding, strlen) + except UnicodeEncodeError as err: msg = ( - f"Non-ascii data written to label variable {self.name}. " - f"Applied {DEFAULT_ENCODING!r} encoding, " - f"and set attribute _Encoding={DEFAULT_ENCODING!r}." + f"String data written to netcdf character variable {self.name!r} " + f"could not be represented in encoding {encoding!r}. This can be " + "fixed by setting a suitable variable '_Encoding' attribute, " + 'e.g. ._Encoding="UTF-8".' 
) - warnings.warn(msg, UserWarning) - - super().set_auto_chartostring(False) + raise ValueError(msg) from err super().__setitem__(keys, data) - def is_chardata(self): + def _is_chardata(self): return np.issubdtype(self.dtype, np.bytes_) - def get_encoding(self) -> str | None: - """Get the effective byte encoding to be used for this variable.""" - # utf-8 is a reasonable "safe" default, equivalent to 'ascii' for ascii data + def _get_encoding(self) -> str | None: + """Get the byte encoding defined for this variable (or None).""" result = getattr(self, "_Encoding", None) if result is not None: try: @@ -165,18 +222,32 @@ def get_encoding(self) -> str | None: # NOTE: if encoding does not suit data, errors can occur. # For example, _Encoding = "ascii", with non-ascii content. except LookupError: - # Replace some invalid setting with "safe"(ish) fallback. + # Unrecognised encoding name : handle this as just a warning msg = f"Unknown encoding for variable {self.name!r}: {result!r}" warnings.warn(msg, UserWarning) return result - def get_string_length(self): - """Return the string-length defined for this variable (or None).""" - return getattr(self, "iris_string_length", None) + def _get_string_length(self): + """Return the string-length defined for this variable.""" + if not hasattr(self, "_strlen"): + # Work out the string length from the parent dataset dimensions. + strlen = self.group().dimensions[self.dimensions[-1]].size + # Cache this on the variable -- but not as a netcdf attribute (!) + self.__dict__["_strlen"] = strlen + + return self._strlen + + def set_auto_chartostring(self, onoff: bool): + msg = "auto_chartostring is not supported by Iris 'EncodedVariable' type." + raise TypeError(msg) class EncodedDataset(DatasetWrapper): """A specialised DatasetWrapper whose variables perform byte encoding.""" VAR_WRAPPER_CLS = EncodedVariable + + def set_auto_chartostring(self, onoff: bool): + msg = "auto_chartostring is not supported by Iris 'EncodedDataset' type." 
+ raise TypeError(msg) diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index 0eb211c8b0..4414444733 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -137,8 +137,11 @@ def make_testcube( def ncdump(nc_path: str, *args): """Call ncdump to print a dump of a file.""" - call_args = [NCDUMP_PATHSTR, nc_path] + list(*args) - subprocess.run(call_args, check=True) + call_args = [NCDUMP_PATHSTR, nc_path] + list(args) + bytes = subprocess.check_output(call_args) + text = bytes.decode("utf-8") + print(text) + return text def show_result(filepath): diff --git a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt index bab04aa0c4..e77427cd63 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt +++ b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt @@ -1,8 +1,95 @@ +=========== +Outstanding Qs +* What would we like to do with all this IN IRIS?? + - generally present as string arrays (Uxx) + - existing scheme of naming dims for length + re-using is quite cunning! + - choice of seeing actual character arrays as alternative to string conversions? + +* string length handling for load/save/roundtrip + - on SAVE, we need some control so we can create files which are compatible, + irrespective of the data (which currently we are not doing) + - ALSO this is wanted to ensure that multiple vars (e.g. string cubes or string coords) + will share the string dim -- instead of creating arbitrary different ones + - presumably, if encoding blows the max-len, we must get a warning/error + + - on LOAD, we may want to *capture* the actual original string dim length, so it can be + re-created on save (by some scheme, as per previous) -- i.e. enable roundtripping. 
+ I don't really want to preserve the name of the string dim, but this could be a + slightly tender point. To consider also : the impact of this on the non-equivalence + of loaded cubes, if we use actual *attributes* to carry this info (see below). + - **if not** : just load data + convert to string arrays as seems best + - this will also lead to incompatible cubes. + + - on SAVE, in the absence of strlen-controls, what is a reasonable default choice? + - take longest encoded + - set nbytes = NEXPAND(encoding) * nchars + - sensible values would depend on the encoding... + : ascii -> 1 + : utf-8 -> 1 or 4 ??? + : utf-16 -> 2 or 4 ??? + : utf-32 -> 4 + + - on LOAD, in absence of strlen controls, how do we choose the result DTYPE (i.e. character length)? + - again, may depend on the encoding: + : ascii = "U" + : UTF-8 = "U" + : UTF-16 = "U" + : UTF-32 = "U" + - N.B. these are ll at least "safe" - i.e. won't lose characters + + +separately from these, there is the question of how the controls affect "normal" +cube operations. + - the easiest approach is to define a "special" attribute, + which can be set on any cube/component + - using the dtype-length of the data would be *possible*, in conjunction with the + above-proposed "default rules" for choosing strlen from the dtype. + But this might not round-trip in all cases. + +within the actual data arrays + - we can't really expect any different to what numpy does + - that is, the dtype-length of any element <= that of the array (and not ==) + this may be tricky, but we can't easily prevent it. + >>> a = np.array(['', 'a', 'bb']) + >>> a + array(['', 'a', 'bb'], dtype='>> a[0].dtype + dtype('>> a[1].dtype + dtype('>> a[2].dtype + dtype('>> a.dtype + dtype('>> + - likewise, we can't assign without possible truncation. + If you **want** to expand the supported width, can use ".astype()" first ? 
+ + +======================== +========================= forms in files: * char chardata(dim1, dim2, strlen_xx); # char data * string data(dim1, dim2); +netcdf types: +(netcdf docs terms) + NC_BYTE 8-bit signed integer + NC_UBYTE 8-bit unsigned integer + NC_CHAR 8-bit character + NC_STRING variable length character string + +***NOTE*** there is no NC_UCHAR or "unsigned char" type + + +relevant numpy base types (scalar dtypes): + * "S" bytes : np.bytes_ == np.int8 + * "B" unsigned bytes : np.ubyte == np.uint8 + * 'i' ints : np.int_ + * 'u' unsigned ints : np.int_ + * "U" unicode string : np.str_ + forms in numpy: * np.ndarray(dtype="S1") # char data * np.ndarray(dtype="Snn") # char data @@ -16,3 +103,80 @@ possibilities in createVariable: ** or the python str builtin (for a variable-length string array). ** Numpy string and unicode datatypes with length greater than one are aliases for str. """ + +test types: + "i1" : np.int8 + "u1" : np.uint8 + "S1" : np.byte_ + "U1" : np.str_ + "S" : + "U" : with/without non-ascii content + +save all these to files... +outputs from "test_nc_dtypes.py" test run: + SPEC:i1 SAVED-AS:int8 byte RELOAD-AS:int8 + SPEC:u1 SAVED-AS:uint8 ubyte RELOAD-AS:uint8 + SPEC:S1 SAVED-AS:|S1 char RELOAD-AS: () + SPEC:U1 SAVED-AS: EncodedDataset: + """Create a test EncodedDataset linked to an actual file. + + * strlen becomes the string dimension (i.e. a number of *bytes*) + * a variable "vxs" is created + * If 'encoding' is given, the "vxs::_Encoding" attribute is created with this value + """ + ds = EncodedDataset(path, "w") + ds.createDimension("x", 3) + ds.createDimension("strlen", strlen) + v = ds.createVariable("vxs", "S1", ("x", "strlen")) + if encoding is not None: + v.setncattr("_Encoding", encoding) + return ds + + +def fetch_undecoded_var(path, varname): + # Open a path as a "normal" dataset, and return a given variable. 
+ ds_normal = DatasetWrapper(path) + ds_normal._contained_instance.set_auto_chartostring(False) + v = ds_normal.variables[varname] + # Return a variable, rather than its data, so we can check attributes etc. + return v + + +class TestWriteStrings: + """Test how string data is saved to a file.""" + + def test_write_strings(self, encoding, tempdir): + # Create a dataset with the variable + path = tempdir / f"test_writestrings_encoding_{encoding!s}.nc" + + if encoding in [None, "ascii"]: + writedata = samples_3_ascii + write_encoding = "ascii" + else: + writedata = samples_3_nonascii + write_encoding = encoding + + writedata = writedata.copy() # just for safety? + strlen = strings_maxbytes(writedata, write_encoding) + + ds_encoded = make_encoded_dataset(path, strlen, encoding) + v = ds_encoded.variables["vxs"] + + # Effectively, checks that we *can* write strings + v[:] = writedata + + # Close, re-open as an "ordinary" dataset, and check the raw content. + ds_encoded.close() + v = fetch_undecoded_var(path, "vxs") + + # Check that the raw result is as expected + bytes_result = v[:] + expected = encode_stringarray_as_bytearray(writedata, write_encoding, strlen) + assert ( + bytes_result.shape == expected.shape + and bytes_result.dtype == expected.dtype + and np.all(bytes_result == expected) + ) + + # Check that the "_Encoding" property is also as expected + result_attr = v.getncattr("_Encoding") if "_Encoding" in v.ncattrs() else None + assert result_attr == encoding + + def test_scalar(self, tempdir): + # Like 'test_write_strings', but the variable has *only* the string dimension. + path = tempdir / "test_writestrings_scalar.nc" + + ds_encoded = make_encoded_dataset(path, strlen=5) + v = ds_encoded.createVariable("v0_scalar", "S1", ("strlen",)) + + # Checks that we *can* write a string + v[:] = np.array("stuff", dtype=str) + + # Close, re-open as an "ordinary" dataset, and check the raw content. 
+ ds_encoded.close() + v = fetch_undecoded_var(path, "v0_scalar") + result = v[:] + + # Check that the raw result is as expected + assert ( + result.shape == (5,) + and result.dtype == " Date: Fri, 5 Dec 2025 12:51:04 +0000 Subject: [PATCH 50/77] Replace use of encoding functions with test-specific function: Test for overlength writes. --- .../netcdf/_bytecoding_datasets.py | 6 +- .../fileformats/netcdf/encoding_tests.txt | 15 +- .../netcdf/test_bytecoding_datasets.py | 194 ++++++++++++------ 3 files changed, 147 insertions(+), 68 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 353f14d538..62e1dd2ab7 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -123,9 +123,10 @@ def flexi_encode_stringarray_as_bytearray( def encode_stringarray_as_bytearray( - data: np.ndarray, encoding: str, string_dimension_length: int + data: np.typing.ArrayLike, encoding: str, string_dimension_length: int ) -> np.ndarray: """Encode strings as a bytes array.""" + data = np.asanyarray(data) element_shape = data.shape result = np.zeros(element_shape + (string_dimension_length,), dtype="S1") right_pad = b"\0" * string_dimension_length @@ -179,7 +180,7 @@ def __getitem__(self, keys): data = decode_bytesarray_to_stringarray(data, encoding, strlen) except UnicodeDecodeError as err: msg = ( - f"Character data in variable {self.name!r} could not be decoded" + f"Character data in variable {self.name!r} could not be decoded " f"with the {encoding!r} encoding. This can be fixed by setting the " "variable '_Encoding' attribute to suit the content." ) @@ -188,6 +189,7 @@ def __getitem__(self, keys): return data def __setitem__(self, keys, data): + data = np.asanyarray(data) if self._is_chardata(): # N.B. 
we never need to UNset this, as we totally control it self._contained_instance.set_auto_chartostring(False) diff --git a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt index e77427cd63..5fa021ccdd 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt +++ b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt @@ -146,12 +146,17 @@ Then, as regards the _Encoding .. TO TEST... ========== -create a dataset + write char data - - X assign different encodings: makes no difference +NOTE on length control: + - not an API thing, it's implicit from when you create a variable + - this also applies to how it loads back + - BUT here there may be scope for a control attribute : -create a dataset + write STRING data - - X encoding=(ascii, utf-8, utf-32, None) - - X withnonascii=(T, F) ++++ create a dataset + write char data ++++ - X assign different encodings: makes no difference + ++++ create a dataset + write STRING data ++++ - X encoding=(ascii, utf-8, utf-32, None) ++++ - X withnonascii=(T, F) - X length=(long, short, none) read string data diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py index 092da19a00..411212b973 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -9,11 +9,7 @@ import numpy as np import pytest -from iris.fileformats.netcdf._bytecoding_datasets import ( - EncodedDataset, - encode_stringarray_as_bytearray, - flexi_encode_stringarray_as_bytearray, -) +from iris.fileformats.netcdf._bytecoding_datasets import EncodedDataset from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper encoding_options = [None, "ascii", "utf-8", "utf-32"] @@ -66,8 +62,92 @@ def fetch_undecoded_var(path, varname): return v +def check_raw_content(path, varname, expected_byte_array): + 
v = fetch_undecoded_var(path, varname) + bytes_result = v[:] + assert ( + bytes_result.shape == expected_byte_array.shape + and bytes_result.dtype == expected_byte_array.dtype + and np.all(bytes_result == expected_byte_array) + ) + + +def _make_bytearray_inner(data, encoding): + # Convert to a (list of [lists of..]) strings or bytes to a + # (list of [lists of..]) length-1 bytes with an extra dimension. + if isinstance(data, str): + # Convert input strings to bytes + data = data.encode(encoding) + if isinstance(data, bytes): + # iterate over bytes to get a sequence of length-1 bytes (what np.array wants) + result = [data[i : i + 1] for i in range(len(data))] + else: + # If not string/bytes, expect the input to be a list. + # N.B. the recursion is inefficient, but we don't care about that here + result = [_make_bytearray_inner(part, encoding) for part in data] + return result + + +def make_bytearray(data, encoding="ascii"): + """Convert bytes or lists of bytes into a numpy byte array. + + This is largely to avoid using "encode_stringarray_as_bytearray", since we don't + want to depend on that when we should be testing it. + So, it mostly replicates the function of that, but it does also support bytes in the + input, and it automatically finds + applies the maximum bytes-lengths in the input. + """ + # First, Convert to a (list of [lists of]..) length-1 bytes objects + data = _make_bytearray_inner(data, encoding) + + # Numbers of bytes in the inner dimension are the lengths of bytes/strings input, + # so they aren't all the same. + # To enable array conversion, we fix that by expanding all to the max length + + def get_maxlen(data): + # Find the maximum number of bytes in the inner dimension. + if not isinstance(data, list): + # Inner bytes object + assert isinstance(data, bytes) + longest = len(data) + else: + # We have a list: either a list of bytes, or a list of lists. 
+ if len(data) == 0 or not isinstance(data[0], list): + # inner-most list, should contain bytes if anything + assert len(data) == 0 or isinstance(data[0], bytes) + # return n-bytes + longest = len(data) + else: + # list of lists: return max length of sub-lists + longest = max(get_maxlen(part) for part in data) + return longest + + maxlen = get_maxlen(data) + + def extend_all_to_maxlen(data, length, filler=b"\0"): + # Extend each "innermost" list (of single bytes) to the required length + if isinstance(data, list): + if len(data) == 0 or not isinstance(data[0], list): + # Pad all the inner-most lists to the required number of elements + n_extra = length - len(data) + if n_extra > 0: + data = data + [filler] * n_extra + else: + data = [extend_all_to_maxlen(part, length, filler) for part in data] + return data + + data = extend_all_to_maxlen(data, maxlen) + # We should now be able to create an array of single bytes. + result = np.array(data) + assert result.dtype == " Date: Fri, 5 Dec 2025 14:47:54 +0000 Subject: [PATCH 51/77] Radically simplify 'make_bytesarray', by using a known specified bytewidth. --- .../netcdf/test_bytecoding_datasets.py | 76 ++++++------------- 1 file changed, 22 insertions(+), 54 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py index 411212b973..9ef354f850 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -72,7 +72,7 @@ def check_raw_content(path, varname, expected_byte_array): ) -def _make_bytearray_inner(data, encoding): +def _make_bytearray_inner(data, bytewidth, encoding): # Convert to a (list of [lists of..]) strings or bytes to a # (list of [lists of..]) length-1 bytes with an extra dimension. 
if isinstance(data, str): @@ -81,61 +81,25 @@ def _make_bytearray_inner(data, encoding): if isinstance(data, bytes): # iterate over bytes to get a sequence of length-1 bytes (what np.array wants) result = [data[i : i + 1] for i in range(len(data))] + # pad or truncate everything to the required bytewidth + result = (result + [b"\0"] * bytewidth)[:bytewidth] else: # If not string/bytes, expect the input to be a list. # N.B. the recursion is inefficient, but we don't care about that here - result = [_make_bytearray_inner(part, encoding) for part in data] + result = [_make_bytearray_inner(part, bytewidth, encoding) for part in data] return result -def make_bytearray(data, encoding="ascii"): +def make_bytearray(data, bytewidth, encoding="ascii"): """Convert bytes or lists of bytes into a numpy byte array. This is largely to avoid using "encode_stringarray_as_bytearray", since we don't want to depend on that when we should be testing it. So, it mostly replicates the function of that, but it does also support bytes in the - input, and it automatically finds + applies the maximum bytes-lengths in the input. + input. """ # First, Convert to a (list of [lists of]..) length-1 bytes objects - data = _make_bytearray_inner(data, encoding) - - # Numbers of bytes in the inner dimension are the lengths of bytes/strings input, - # so they aren't all the same. - # To enable array conversion, we fix that by expanding all to the max length - - def get_maxlen(data): - # Find the maximum number of bytes in the inner dimension. - if not isinstance(data, list): - # Inner bytes object - assert isinstance(data, bytes) - longest = len(data) - else: - # We have a list: either a list of bytes, or a list of lists. 
- if len(data) == 0 or not isinstance(data[0], list): - # inner-most list, should contain bytes if anything - assert len(data) == 0 or isinstance(data[0], bytes) - # return n-bytes - longest = len(data) - else: - # list of lists: return max length of sub-lists - longest = max(get_maxlen(part) for part in data) - return longest - - maxlen = get_maxlen(data) - - def extend_all_to_maxlen(data, length, filler=b"\0"): - # Extend each "innermost" list (of single bytes) to the required length - if isinstance(data, list): - if len(data) == 0 or not isinstance(data[0], list): - # Pad all the inner-most lists to the required number of elements - n_extra = length - len(data) - if n_extra > 0: - data = data + [filler] * n_extra - else: - data = [extend_all_to_maxlen(part, length, filler) for part in data] - return data - - data = extend_all_to_maxlen(data, maxlen) + data = _make_bytearray_inner(data, bytewidth, encoding) # We should now be able to create an array of single bytes. result = np.array(data) assert result.dtype == " Date: Fri, 5 Dec 2025 16:23:55 +0000 Subject: [PATCH 52/77] Add read tests. --- .../netcdf/_bytecoding_datasets.py | 38 +++- .../netcdf/test_bytecoding_datasets.py | 165 ++++++++++++++++-- 2 files changed, 184 insertions(+), 19 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 62e1dd2ab7..3bdc799d7f 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -175,7 +175,7 @@ def __getitem__(self, keys): if DECODE_TO_STRINGS_ON_READ and self._is_chardata(): encoding = self._get_encoding() or DEFAULT_READ_ENCODING # N.B. 
typically, read encoding default is UTF-8 --> a "usually safe" choice - strlen = self._get_string_length() + strlen = self._get_string_width() try: data = decode_bytesarray_to_stringarray(data, encoding, strlen) except UnicodeDecodeError as err: @@ -194,11 +194,11 @@ def __setitem__(self, keys, data): # N.B. we never need to UNset this, as we totally control it self._contained_instance.set_auto_chartostring(False) - encoding = self._get_encoding() or DEFAULT_WRITE_ENCODING # N.B. typically, write encoding default is "ascii" --> fails bad content if data.dtype.kind == "U": try: - strlen = self._get_string_length() + encoding = self._get_encoding() or DEFAULT_WRITE_ENCODING + strlen = self._get_byte_width() data = encode_stringarray_as_bytearray(data, encoding, strlen) except UnicodeEncodeError as err: msg = ( @@ -230,12 +230,36 @@ def _get_encoding(self) -> str | None: return result - def _get_string_length(self): + def _get_byte_width(self) -> int | None: + if not hasattr(self, "_bytewidth"): + n_bytes = self.group().dimensions[self.dimensions[-1]].size + # Cache this length control on the variable -- but not as a netcdf attribute + self.__dict__["_bytewidth"] = n_bytes + + return self.__dict__["_bytewidth"] + + def _get_string_width(self): """Return the string-length defined for this variable.""" if not hasattr(self, "_strlen"): - # Work out the string length from the parent dataset dimensions. - strlen = self.group().dimensions[self.dimensions[-1]].size - # Cache this on the variable -- but not as a netcdf attribute (!) + if hasattr(self, "iris_string_width"): + strlen = self.get_ncattr("iris_string_width") + else: + # Work out the actual byte width from the parent dataset dimensions. + strlen = self._get_byte_width() + # Convert the string dimension length (i.e. bytes) to a sufficiently-long + # string width, depending on the encoding used. 
+ encoding = self._get_encoding() or DEFAULT_READ_ENCODING + # regularise the name for comparison with recognised ones + encoding = codecs.lookup(encoding).name + if "utf-16" in encoding: + # Each char needs at least 2 bytes -- including a terminator char + strlen = (strlen // 2) - 1 + elif "utf-32" in encoding: + # Each char needs exactly 4 bytes -- including a terminator char + strlen = (strlen // 4) - 1 + # "ELSE": assume there can be (at most) as many chars as bytes + + # Cache this length control on the variable -- but not as a netcdf attribute self.__dict__["_strlen"] = strlen return self._strlen diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py index 9ef354f850..5df511103f 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -9,7 +9,10 @@ import numpy as np import pytest -from iris.fileformats.netcdf._bytecoding_datasets import EncodedDataset +from iris.fileformats.netcdf._bytecoding_datasets import ( + DECODE_TO_STRINGS_ON_READ, + EncodedDataset, +) from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper encoding_options = [None, "ascii", "utf-8", "utf-32"] @@ -62,14 +65,17 @@ def fetch_undecoded_var(path, varname): return v +def check_array_matching(arr1, arr2): + """Check for arrays matching shape, dtype and content.""" + assert ( + arr1.shape == arr2.shape and arr1.dtype == arr2.dtype and np.all(arr1 == arr2) + ) + + def check_raw_content(path, varname, expected_byte_array): v = fetch_undecoded_var(path, varname) bytes_result = v[:] - assert ( - bytes_result.shape == expected_byte_array.shape - and bytes_result.dtype == expected_byte_array.dtype - and np.all(bytes_result == expected_byte_array) - ) + check_array_matching(bytes_result, expected_byte_array) def _make_bytearray_inner(data, bytewidth, encoding): @@ -102,7 +108,7 @@ def 
make_bytearray(data, bytewidth, encoding="ascii"): data = _make_bytearray_inner(data, bytewidth, encoding) # We should now be able to create an array of single bytes. result = np.array(data) - assert result.dtype == " string array + result = v[:] + expected = write_strings + if encoding == "utf-8": + # In this case, with the given non-ascii sample data, the + # "default minimum string length" is overestimated. + assert strlen == 7 and result.dtype == "U7" + # correct the result dtype to pass the write_strings comparison below + truncated_result = result.astype("U4") + # Also check that content is the same (i.e. not actually truncated) + assert np.all(truncated_result == result) + result = truncated_result + else: + # Test "raw" read --> byte array + with DECODE_TO_STRINGS_ON_READ.context(False): + result = v[:] + expected = write_bytes + + check_array_matching(result, expected) + + def test_scalar(self, tempdir, readmode): + # Like 'test_write_strings', but the variable has *only* the string dimension. + path = tempdir / f"test_read_scalar_{readmode}.nc" + + strlen = 5 + ds_encoded = make_encoded_dataset(path, strlen=strlen) + v = ds_encoded.createVariable("v0_scalar", "S1", ("strlen",)) + + data_string = "stuff" + data_bytes = make_bytearray(data_string, 5) + + # Checks that we *can* write a string + v[:] = data_bytes + + if readmode == "strings": + # Test "normal" read --> string array + result = v[:] + expected = np.array(data_string) + else: + # Test "raw" read --> byte array + with DECODE_TO_STRINGS_ON_READ.context(False): + result = v[:] + expected = data_bytes + + check_array_matching(result, expected) + + def test_multidim(self, tempdir, readmode): + # Like 'test_write_strings', but the variable has additional dimensions. 
+ path = tempdir / f"test_read_multidim_{readmode}.nc" + + strlen = 5 + ds_encoded = make_encoded_dataset(path, strlen=strlen) + ds_encoded.createDimension("y", 2) + v = ds_encoded.createVariable( + "vyxn", + "S1", + ( + "y", + "x", + "strlen", + ), + ) + + # Check that we *can* write a multidimensional string array + test_strings = [ + ["one", "n", ""], + ["two", "xxxxx", "four"], + ] + test_bytes = make_bytearray(test_strings, strlen) + v[:] = test_bytes + + if readmode == "strings": + # Test "normal" read --> string array + result = v[:] + expected = np.array(test_strings) + else: + # Test "raw" read --> byte array + with DECODE_TO_STRINGS_ON_READ.context(False): + result = v[:] + expected = test_bytes + + check_array_matching(result, expected) + + def test_read_encoding_failure(self, tempdir, readmode): + path = tempdir / f"test_read_encoding_failure_{readmode}.nc" + strlen = 10 + ds = make_encoded_dataset(path, strlen=strlen, encoding="ascii") + v = ds.variables["vxs"] + test_utf8_bytes = make_bytearray( + samples_3_nonascii, bytewidth=strlen, encoding="utf-8" + ) + v[:] = test_utf8_bytes + + if readmode == "strings": + msg = ( + "Character data in variable 'vxs' could not be decoded " + "with the 'ascii' encoding." + ) + with pytest.raises(ValueError, match=msg): + v[:] + else: + with DECODE_TO_STRINGS_ON_READ.context(False): + result = v[:] # this ought to be ok! - def test_encodings(self, encoding): - pass + assert np.all(result == test_utf8_bytes) From 35749d4a679b27450901e50d5ee98273156326ad Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 5 Dec 2025 16:26:13 +0000 Subject: [PATCH 53/77] Remove iris width control (not in this layer). 
--- .../netcdf/_bytecoding_datasets.py | 31 +++++++++---------- 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 3bdc799d7f..5ed156f3ee 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -241,23 +241,20 @@ def _get_byte_width(self) -> int | None: def _get_string_width(self): """Return the string-length defined for this variable.""" if not hasattr(self, "_strlen"): - if hasattr(self, "iris_string_width"): - strlen = self.get_ncattr("iris_string_width") - else: - # Work out the actual byte width from the parent dataset dimensions. - strlen = self._get_byte_width() - # Convert the string dimension length (i.e. bytes) to a sufficiently-long - # string width, depending on the encoding used. - encoding = self._get_encoding() or DEFAULT_READ_ENCODING - # regularise the name for comparison with recognised ones - encoding = codecs.lookup(encoding).name - if "utf-16" in encoding: - # Each char needs at least 2 bytes -- including a terminator char - strlen = (strlen // 2) - 1 - elif "utf-32" in encoding: - # Each char needs exactly 4 bytes -- including a terminator char - strlen = (strlen // 4) - 1 - # "ELSE": assume there can be (at most) as many chars as bytes + # Work out the actual byte width from the parent dataset dimensions. + strlen = self._get_byte_width() + # Convert the string dimension length (i.e. bytes) to a sufficiently-long + # string width, depending on the encoding used. 
+ encoding = self._get_encoding() or DEFAULT_READ_ENCODING + # regularise the name for comparison with recognised ones + encoding = codecs.lookup(encoding).name + if "utf-16" in encoding: + # Each char needs at least 2 bytes -- including a terminator char + strlen = (strlen // 2) - 1 + elif "utf-32" in encoding: + # Each char needs exactly 4 bytes -- including a terminator char + strlen = (strlen // 4) - 1 + # "ELSE": assume there can be (at most) as many chars as bytes # Cache this length control on the variable -- but not as a netcdf attribute self.__dict__["_strlen"] = strlen From b3fd8b180ddbecd1dc238c8bb0df9b2b96e8bfcf Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 5 Dec 2025 17:55:12 +0000 Subject: [PATCH 54/77] more notes --- .../fileformats/netcdf/encoding_tests.txt | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt index 5fa021ccdd..07a0bc3bcd 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt +++ b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt @@ -146,25 +146,21 @@ Then, as regards the _Encoding .. TO TEST... 
========== -NOTE on length control: - - not an API thing, it's implicit from when you create a variable - - this also applies to how it loads back - - BUT here there may be scope for a control attribute : - +++ create a dataset + write char data +++ - X assign different encodings: makes no difference +++ create a dataset + write STRING data +++ - X encoding=(ascii, utf-8, utf-32, None) +++ - X withnonascii=(T, F) - - X length=(long, short, none) +XXXX - X length=(long, short, none) + ***deferred*** to layer above only -read string data - - X encoding=(ascii, utf-8, utf-32, None) - - X withnonascii=(T, F) ++++ read string data ++++ - X encoding=(ascii, utf-8, utf-32, None) ++++ - X withnonascii=(T, F) -read char data (with control) - - X different encodings: make no difference ++++ read char data (with control) ++++ - X different encodings: make no difference ==rethought== write strings @@ -185,3 +181,11 @@ write char data read char data - X encodings: don't matter +--- +NOTEs on length control: +not an API thing, it's implicit from when you create a variable +this also applies to how it loads back +BUT here there may be scope for a control attribute : + "iris_string_dim" - controls width on creation + reading back + + From e7074935d2426927b1fdfce578f5ff40fb692d0d Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 19 Jan 2026 14:56:33 +0000 Subject: [PATCH 55/77] Remove temporary test code. --- .../unit/fileformats/netcdf/test_nc_dtypes.py | 96 ------------------- 1 file changed, 96 deletions(-) delete mode 100644 lib/iris/tests/unit/fileformats/netcdf/test_nc_dtypes.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_nc_dtypes.py b/lib/iris/tests/unit/fileformats/netcdf/test_nc_dtypes.py deleted file mode 100644 index 0c5d2b279e..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test_nc_dtypes.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. 
-# See LICENSE in the root of the repository for full licensing details. -"""Temporary code to confirm how various numpy dtypes are stored in a netcdf file.""" - -import netCDF4 as nc -import numpy as np -import pytest - -from iris.tests.integration.netcdf.test_chararrays import ncdump - -# types = [ -# "i1", # np.int8 -# "u1", # np.uint8 -# "S1", # np.byte_ -# "U1", # np.str_ -# "S", # multibytes -# "U", # unicode strings, with/without non-ascii content -# ] - -samples = { - "i1": [-5, 7, 35], # np.int8 - "u1": [65, 67, 90], # np.uint8 - "S1": [b"A", b"B", b"Z"], # np.byte_ - "U1": ["A", "B", "C"], # np.str_ - "S": [b"one21", b"three", b""], # multibyte - "U": ["one", "éclair", "nine"], # unicode strings -} -sample_arrays = { - type_code: np.array(values, dtype=type_code) - for type_code, values in samples.items() -} - - -@pytest.fixture(scope="module") -def tmpdir(tmp_path_factory): - return tmp_path_factory.mktemp("netcdf") - - -def create_file(array: np.ndarray, path): - with nc.Dataset(str(path), "w") as ds: - ds.createDimension("x", 3) - v = ds.createVariable("vx", array.dtype, ("x",)) - # v.set_auto_chartostring(False) - v._Encoding = "UTF-8" if array.dtype.kind == "U" else "ascii" - v[:] = array - - -def get_loadback_array(path): - with nc.Dataset(str(path), "r") as ds: - v = ds.variables["vx"] - v.set_auto_chartostring(False) - result = v[:] - return result - - -@pytest.mark.parametrize("dtype", list(samples.keys())) -def test(tmpdir, dtype): - arr = sample_arrays[dtype] - print("\n---") - print(dtype) - path = tmpdir / f"tmp_{dtype}.nc" - create_file(arr, path) - ncdump(path, "-s") - loadback_array = get_loadback_array(path) - print(f" SPEC:{dtype} SAVED-AS:{arr.dtype} RELOAD-AS:{loadback_array.dtype}") - - -# from iris.tests import env_bin_path -# NCGEN_PATHSTR = str(env_bin_path("ncgen")) -# -# -# def ncgen(cdl_path, nc_path, *args): -# """Call ncdump to print a dump of a file.""" -# args = list(args) -# if not any(arg.startswith('-k') for arg in args): 
-# args[:0] = ["-k", "nc4"] # force netcdf4 -# call_args = [NCGEN_PATHSTR] + list(args) + [str(cdl_path), '-o', str(nc_path)] -# subprocess.check_call(call_args) -# -# -# def test_uchar(tmpdir): -# arr = sample_arrays["S1"] -# path = tmpdir / f"tmp_ichar.nc" -# create_file(arr, path) -# text = ncdump(path, "-s") -# text_u = text.replace("\t", " ") -# text_u = text_u.replace(" char ", " unsigned char ") -# cdl_path = tmpdir / f"tmp_uchar.cdl" -# with open(cdl_path, "w") as f_out: -# f_out.write(text_u) -# nc_path_2 = tmpdir / f"tmp_uchar.nc" -# ncgen(cdl_path, nc_path_2) -# loadback_array = get_loadback_array(nc_path_2) -# print(f" netcdf type 'uchar' LOADS-AS:{loadback_array.dtype}") From 74c6d82d770bae82b3613f528ccb97adbaa6aea8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 19 Jan 2026 15:40:26 +0000 Subject: [PATCH 56/77] Use iris categorised warnings for unknown encodings. --- .../netcdf/_bytecoding_datasets.py | 19 +++++++--- .../netcdf/test_bytecoding_datasets.py | 36 ++++++++++++++++--- 2 files changed, 46 insertions(+), 9 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 5ed156f3ee..f1fe184729 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -48,6 +48,8 @@ import numpy as np from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper, VariableWrapper +import iris.warnings +from iris.warnings import IrisCfLoadWarning, IrisCfSaveWarning def decode_bytesarray_to_stringarray( @@ -197,7 +199,9 @@ def __setitem__(self, keys, data): # N.B. 
typically, write encoding default is "ascii" --> fails bad content if data.dtype.kind == "U": try: - encoding = self._get_encoding() or DEFAULT_WRITE_ENCODING + encoding = ( + self._get_encoding(writing=True) or DEFAULT_WRITE_ENCODING + ) strlen = self._get_byte_width() data = encode_stringarray_as_bytearray(data, encoding, strlen) except UnicodeEncodeError as err: @@ -214,7 +218,7 @@ def __setitem__(self, keys, data): def _is_chardata(self): return np.issubdtype(self.dtype, np.bytes_) - def _get_encoding(self) -> str | None: + def _get_encoding(self, writing=False) -> str | None: """Get the byte encoding defined for this variable (or None).""" result = getattr(self, "_Encoding", None) if result is not None: @@ -225,9 +229,14 @@ def _get_encoding(self) -> str | None: # For example, _Encoding = "ascii", with non-ascii content. except LookupError: # Unrecognised encoding name : handle this as just a warning - msg = f"Unknown encoding for variable {self.name!r}: {result!r}" - warnings.warn(msg, UserWarning) - + msg = ( + f"Ignoring unknown encoding for variable {self.name!r}: " + f"_Encoding = {result!r}." 
+ ) + warntype = IrisCfSaveWarning if writing else IrisCfLoadWarning + warnings.warn(msg, warntype) + # Proceed as if there is no specified encoding + result = None return result def _get_byte_width(self) -> int | None: diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py index 5df511103f..861ec2c516 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -14,6 +14,7 @@ EncodedDataset, ) from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper +from iris.warnings import IrisCfLoadWarning, IrisCfSaveWarning encoding_options = [None, "ascii", "utf-8", "utf-32"] @@ -194,17 +195,29 @@ def test_multidim(self, tempdir): expected_bytes = make_bytearray(test_data, strlen) check_raw_content(path, "vyxn", expected_bytes) - def test_write_encoding_failure(self, tempdir): - path = tempdir / "test_writestrings_encoding_failure.nc" - ds = make_encoded_dataset(path, strlen=5, encoding="ascii") + @pytest.mark.parametrize("encoding", [None, "ascii"]) + def test_write_encoding_failure(self, tempdir, encoding): + path = tempdir / f"test_writestrings_encoding_{encoding}_fail.nc" + ds = make_encoded_dataset(path, strlen=5, encoding=encoding) v = ds.variables["vxs"] + encoding_name = encoding + if encoding_name == None: + encoding_name = "ascii" msg = ( "String data written to netcdf character variable 'vxs'.*" - " could not be represented in encoding 'ascii'. " + f" could not be represented in encoding '{encoding_name}'. " ) with pytest.raises(ValueError, match=msg): v[:] = samples_3_nonascii + def test_write_badencoding_ignore(self, tempdir): + path = tempdir / "test_writestrings_badencoding_ignore.nc" + ds = make_encoded_dataset(path, strlen=5, encoding="unknown") + v = ds.variables["vxs"] + msg = r"Ignoring unknown encoding for variable 'vxs': _Encoding = 'unknown'\." 
+ with pytest.warns(IrisCfSaveWarning, match=msg): + v[:] = samples_3_ascii # will work OK + def test_overlength(self, tempdir): # Check expected behaviour with over-length data path = tempdir / "test_writestrings_overlength.nc" @@ -404,3 +417,18 @@ def test_read_encoding_failure(self, tempdir, readmode): result = v[:] # this ought to be ok! assert np.all(result == test_utf8_bytes) + + def test_read_badencoding_ignore(self, tempdir): + path = tempdir / f"test_read_badencoding_ignore.nc" + strlen = 10 + ds = make_encoded_dataset(path, strlen=strlen, encoding="unknown") + v = ds.variables["vxs"] + test_utf8_bytes = make_bytearray( + samples_3_nonascii, bytewidth=strlen, encoding="utf-8" + ) + v[:] = test_utf8_bytes + + msg = r"Ignoring unknown encoding for variable 'vxs': _Encoding = 'unknown'\." + with pytest.warns(IrisCfLoadWarning, match=msg): + # raises warning but succeeds, due to default read encoding of 'utf-8' + v[:] From db123262294325aebe32e0b187b713c5ece3a2a0 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 19 Jan 2026 15:54:46 +0000 Subject: [PATCH 57/77] Clarify the temporary load/save exercising tests (a bit). --- .../tests/integration/netcdf/test_chararrays.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index 4414444733..3a4a3e1879 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -189,7 +189,7 @@ def save_dir(tmp_path_factory): # TODO: the tests don't test things properly yet, they just exercise the code and print # things for manual debugging. 
-tsts = ( +test_encodings = ( None, "ascii", "utf-8", @@ -201,8 +201,13 @@ def save_dir(tmp_path_factory): # tsts = ("utf-8", "ascii", "utf-8") -@pytest.mark.parametrize("encoding", tsts) +@pytest.mark.parametrize("encoding", test_encodings) def test_load_encodings(encoding, save_dir): + """Load exercise. + + Make a testfile with utf-8 content, variously labelled. + Load with Iris + show result (error or cubes). + """ # small change print(f"\n=========\nTesting encoding: {encoding}") filepath = save_dir / f"tmp_load_{str(encoding)}.nc" @@ -219,8 +224,13 @@ def test_load_encodings(encoding, save_dir): show_result(filepath) -@pytest.mark.parametrize("encoding", tsts) +@pytest.mark.parametrize("encoding", test_encodings) def test_save_encodings(encoding, save_dir): + """Save exercise. + + Make test-cube with non-ascii content, and various '_Encoding' labels. + Save with Iris + show result (error or ncdump). + """ cube = make_testcube( dataarray=TEST_STRINGS, coordarray=TEST_COORD_VALS, encoding_str=encoding ) From dc7188a5337b0e18c6ecc4b5e1938a832784c6f3 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sat, 17 Jan 2026 18:11:44 +0000 Subject: [PATCH 58/77] Use bytecoded_datasets in nc load+save, begin fixes. 
--- lib/iris/fileformats/cf.py | 6 +- .../netcdf/_bytecoding_datasets.py | 29 +++++++- .../fileformats/netcdf/_thread_safe_nc.py | 7 +- lib/iris/fileformats/netcdf/loader.py | 4 +- lib/iris/fileformats/netcdf/saver.py | 67 +++++++++++-------- .../integration/netcdf/test_chararrays.py | 13 +++- 6 files changed, 87 insertions(+), 39 deletions(-) diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index b4dc6a259d..9e3f57fa06 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -31,7 +31,7 @@ import iris.exceptions import iris.fileformats._nc_load_rules.helpers as hh -from iris.fileformats.netcdf import _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets from iris.mesh.components import Connectivity import iris.util import iris.warnings @@ -1378,7 +1378,9 @@ def __init__(self, file_source, warn=False, monotonic=False): if isinstance(file_source, str): # Create from filepath : open it + own it (=close when we die). self._filename = os.path.expanduser(file_source) - self._dataset = _thread_safe_nc.DatasetWrapper(self._filename, mode="r") + self._dataset = _bytecoding_datasets.EncodedDataset( + self._filename, mode="r" + ) self._own_file = True else: # We have been passed an open dataset. 
diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index f1fe184729..a8dfca2b21 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -47,7 +47,12 @@ import numpy as np -from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper, VariableWrapper +from iris.fileformats.netcdf._thread_safe_nc import ( + DatasetWrapper, + NetCDFDataProxy, + NetCDFWriteProxy, + VariableWrapper, +) import iris.warnings from iris.warnings import IrisCfLoadWarning, IrisCfSaveWarning @@ -133,7 +138,19 @@ def encode_stringarray_as_bytearray( result = np.zeros(element_shape + (string_dimension_length,), dtype="S1") right_pad = b"\0" * string_dimension_length for index in np.ndindex(element_shape): - bytes = data[index].encode(encoding=encoding) + string = data[index] + bytes = string.encode(encoding=encoding) + n_bytes = len(bytes) + # TODO: may want to issue warning or error if we overflow the length? + if n_bytes > string_dimension_length: + from iris.exceptions import TranslationError + + msg = ( + f"Non-ascii string {string!r} written to netcdf exceeds string " + f"dimension : {n_bytes} > {string_dimension_length}." + ) + raise TranslationError(msg) + # It's all a bit nasty ... bytes = (bytes + right_pad)[:string_dimension_length] result[index] = [bytes[i : i + 1] for i in range(string_dimension_length)] @@ -283,3 +300,11 @@ class EncodedDataset(DatasetWrapper): def set_auto_chartostring(self, onoff: bool): msg = "auto_chartostring is not supported by Iris 'EncodedDataset' type." 
raise TypeError(msg) + + +class EncodedNetCDFDataProxy(NetCDFDataProxy): + DATASET_CLASS = EncodedDataset + + +class EncodedNetCDFWriteProxy(NetCDFWriteProxy): + DATASET_CLASS = EncodedDataset diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 46b8609bb7..cd97452dac 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -315,6 +315,7 @@ class NetCDFDataProxy: """A reference to the data payload of a single NetCDF file variable.""" __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") + DATASET_CLASS = netCDF4.Dataset def __init__(self, shape, dtype, path, variable_name, fill_value): self.shape = shape @@ -337,7 +338,7 @@ def __getitem__(self, keys): # netCDF4 library, presumably because __getitem__ gets called so many # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. with _GLOBAL_NETCDF4_LOCK: - dataset = netCDF4.Dataset(self.path) + dataset = self.DATASET_CLASS(self.path) try: variable = dataset.variables[self.variable_name] # Get the NetCDF variable data and slice. @@ -374,6 +375,8 @@ class NetCDFWriteProxy: TODO: could be improved with a caching scheme, but this just about works. """ + DATASET_CLASS = netCDF4.Dataset + def __init__(self, filepath, cf_var, file_write_lock): self.path = filepath self.varname = cf_var.name @@ -401,7 +404,7 @@ def __setitem__(self, keys, array_data): # investigation needed. 
for attempt in range(5): try: - dataset = netCDF4.Dataset(self.path, "r+") + dataset = self.DATASET_CLASS(self.path, "r+") break except OSError: if attempt < 4: diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 6557f4aebc..ed3a49dfe2 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -41,7 +41,7 @@ import iris.coord_systems import iris.coords import iris.fileformats.cf -from iris.fileformats.netcdf import _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets, _thread_safe_nc from iris.fileformats.netcdf.saver import _CF_ATTRS import iris.io import iris.util @@ -55,7 +55,7 @@ # An expected part of the public loader API, but includes thread safety # concerns so is housed in _thread_safe_nc. -NetCDFDataProxy = _thread_safe_nc.NetCDFDataProxy +NetCDFDataProxy = _bytecoding_datasets.EncodedNetCDFDataProxy class _WarnComboIgnoringBoundsLoad( diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index b2e46c6188..5d4aa62795 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -19,6 +19,7 @@ """ +import codecs import collections from itertools import repeat, zip_longest import os @@ -53,7 +54,8 @@ from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.exceptions import iris.fileformats.cf -from iris.fileformats.netcdf import _dask_locks, _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets as bytecoding_datasets +from iris.fileformats.netcdf import _dask_locks from iris.fileformats.netcdf._attribute_handlers import ATTRIBUTE_HANDLERS import iris.io import iris.util @@ -305,7 +307,7 @@ class VariableEmulator(typing.Protocol): shape: tuple[int, ...] 
-CFVariable = typing.Union[_thread_safe_nc.VariableWrapper, VariableEmulator] +CFVariable = typing.Union[bytecoding_datasets.VariableWrapper, VariableEmulator] class Saver: @@ -408,7 +410,7 @@ def __init__(self, filename, netcdf_format, compute=True): # Put it inside a _thread_safe_nc wrapper to ensure thread-safety. # Except if it already is one, since they forbid "re-wrapping". if not hasattr(self._dataset, "THREAD_SAFE_FLAG"): - self._dataset = _thread_safe_nc.DatasetWrapper.from_existing( + self._dataset = bytecoding_datasets.DatasetWrapper.from_existing( self._dataset ) @@ -419,7 +421,7 @@ def __init__(self, filename, netcdf_format, compute=True): # Given a filepath string/path : create a dataset from that try: self.filepath = os.path.abspath(filename) - self._dataset = _thread_safe_nc.DatasetWrapper( + self._dataset = bytecoding_datasets.EncodedDataset( self.filepath, mode="w", format=netcdf_format ) except RuntimeError: @@ -1823,7 +1825,15 @@ def _create_generic_cf_array_var( # Typically CF label variables, but also possibly ancil-vars ? string_dimension_depth = data.dtype.itemsize if data.dtype.kind == "U": - string_dimension_depth //= 4 + encoding = element.attributes.get("_Encoding", "ascii") + # TODO: this can fail -- use a sensible warning + default? + encoding = codecs.lookup(encoding).name + if encoding == "utf-32": + # UTF-32 is a special case -- always 4 exactly bytes per char, plus 4 + string_dimension_depth += 4 + else: + # generally, 4 bytes per char in numpy --> make bytewidth = string-width + string_dimension_depth //= 4 string_dimension_name = "string%d" % string_dimension_depth # Determine whether to create the string length dimension. @@ -1842,25 +1852,25 @@ def _create_generic_cf_array_var( # Create the label coordinate variable. cf_var = self._dataset.createVariable(cf_name, "|S1", element_dims) - # Convert data from an array of strings into a character array - # with an extra string-length dimension. 
- if len(element_dims) == 1: - # Scalar variable (only has string dimension). - data_first = data[0] - if is_lazy_data(data_first): - data_first = dask.compute(data_first) - data = list("%- *s" % (string_dimension_depth, data_first)) - else: - # NOTE: at present, can't do this lazily?? - orig_shape = data.shape - new_shape = orig_shape + (string_dimension_depth,) - new_data = np.zeros(new_shape, cf_var.dtype) - for index in np.ndindex(orig_shape): - index_slice = tuple(list(index) + [slice(None, None)]) - new_data[index_slice] = list( - "%- *s" % (string_dimension_depth, data[index]) - ) - data = new_data + # # Convert data from an array of strings into a character array + # # with an extra string-length dimension. + # if len(element_dims) == 1: + # # Scalar variable (only has string dimension). + # data_first = data[0] + # if is_lazy_data(data_first): + # data_first = dask.compute(data_first) + # data = list("%- *s" % (string_dimension_depth, data_first)) + # else: + # # NOTE: at present, can't do this lazily?? + # orig_shape = data.shape + # new_shape = orig_shape + (string_dimension_depth,) + # new_data = np.zeros(new_shape, cf_var.dtype) + # for index in np.ndindex(orig_shape): + # index_slice = tuple(list(index) + [slice(None, None)]) + # new_data[index_slice] = list( + # "%- *s" % (string_dimension_depth, data[index]) + # ) + # data = new_data else: # A normal (numeric) variable. # ensure a valid datatype for the file format. @@ -1904,6 +1914,10 @@ def _create_generic_cf_array_var( element, cf_var, cf_name, compression_kwargs=compression_kwargs ) + # Add names + units + # NOTE: *must* now do first, as we may need '_Encoding' set to write it ! + self._set_cf_var_attributes(cf_var, element) + # Add the data to the CF-netCDF variable. 
if not is_dataless: if packing_controls: @@ -1912,9 +1926,6 @@ def _create_generic_cf_array_var( _setncattr(cf_var, key, value) self._lazy_stream_data(data=data, cf_var=cf_var) - # Add names + units - self._set_cf_var_attributes(cf_var, element) - return cf_name def _create_cf_cell_methods(self, cube, dimension_names): @@ -2534,7 +2545,7 @@ def store( ) -> None: # Create a data-writeable object that we can stream into, which # encapsulates the file to be opened + variable to be written. - write_wrapper = _thread_safe_nc.NetCDFWriteProxy( + write_wrapper = bytecoding_datasets.EncodedNetCDFWriteProxy( self.filepath, cf_var, self.file_write_lock ) # Add to the list of delayed writes, used in delayed_completion(). diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index 3a4a3e1879..f3bba81c70 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -12,7 +12,9 @@ import iris from iris.coords import AuxCoord, DimCoord from iris.cube import Cube -from iris.fileformats.netcdf import _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets + +# from iris.fileformats.netcdf import _thread_safe_nc from iris.tests import env_bin_path NX, N_STRLEN = 3, 64 @@ -22,7 +24,8 @@ # VARS_COORDS_SHARE_STRING_DIM = True VARS_COORDS_SHARE_STRING_DIM = False if VARS_COORDS_SHARE_STRING_DIM: - TEST_COORD_VALS[-1] = "Xsandwich" # makes the max coord strlen same as data one + # Fix length so that the max coord strlen will be same as data one + TEST_COORD_VALS[-1] = "Xsandwich" # Ensure all tests run with "split attrs" turned on. 
@@ -68,8 +71,12 @@ def convert_bytesarray_to_strings( # INCLUDE_NUMERIC_AUXCOORD = False +# DATASET_CLASS = _thread_safe_nc.DatasetWrapper +DATASET_CLASS = _bytecoding_datasets.EncodedDataset + + def make_testfile(filepath, chararray, coordarray, encoding_str=None): - ds = _thread_safe_nc.DatasetWrapper(filepath, "w") + ds = DATASET_CLASS(filepath, "w") try: ds.createDimension("x", NX) ds.createDimension("nstr", N_STRLEN) From b859d1bb9388e56588a1453647b74a76d4f6c93a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 19 Jan 2026 16:18:29 +0000 Subject: [PATCH 59/77] Further attempt to satisfy warning cateogry checker. --- lib/iris/fileformats/netcdf/_bytecoding_datasets.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index a8dfca2b21..52e2fe2aa5 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -251,7 +251,7 @@ def _get_encoding(self, writing=False) -> str | None: f"_Encoding = {result!r}." ) warntype = IrisCfSaveWarning if writing else IrisCfLoadWarning - warnings.warn(msg, warntype) + warnings.warn(msg, category=warntype) # Proceed as if there is no specified encoding result = None return result From 43a133fdf03fc692ed4f00d7d013a20f7ade09a8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 19 Jan 2026 16:41:46 +0000 Subject: [PATCH 60/77] Fix overlength error tests. 
--- .../netcdf/_bytecoding_datasets.py | 4 ++-- .../netcdf/test_bytecoding_datasets.py | 20 +++++++++++++++---- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 52e2fe2aa5..a3a13f86f5 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -146,8 +146,8 @@ def encode_stringarray_as_bytearray( from iris.exceptions import TranslationError msg = ( - f"Non-ascii string {string!r} written to netcdf exceeds string " - f"dimension : {n_bytes} > {string_dimension_length}." + f"String {string!r} written to netcdf exceeds string dimension after " + f"encoding : {n_bytes} > {string_dimension_length}." ) raise TranslationError(msg) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py index 861ec2c516..4909d976de 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -9,6 +9,7 @@ import numpy as np import pytest +from iris.exceptions import TranslationError from iris.fileformats.netcdf._bytecoding_datasets import ( DECODE_TO_STRINGS_ON_READ, EncodedDataset, @@ -224,9 +225,9 @@ def test_overlength(self, tempdir): strlen = 5 ds = make_encoded_dataset(path, strlen=strlen, encoding="ascii") v = ds.variables["vxs"] - v[:] = ["1", "123456789", "two"] - expected_bytes = make_bytearray(["1", "12345", "two"], strlen) - check_raw_content(path, "vxs", expected_bytes) + msg = r"String .* written to netcdf exceeds string dimension .* : [0-9]* > 5\." 
+ with pytest.raises(TranslationError, match=msg): + v[:] = ["1", "123456789", "two"] def test_overlength_splitcoding(self, tempdir): # Check expected behaviour when non-ascii multibyte coding gets truncated @@ -234,7 +235,18 @@ def test_overlength_splitcoding(self, tempdir): strlen = 5 ds = make_encoded_dataset(path, strlen=strlen, encoding="utf-8") v = ds.variables["vxs"] - v[:] = ["1", "1234ü", "two"] + # Note: we must do the assignment as a single byte array, to avoid hitting the + # safety check for this exact problem : see previous check. + byte_arrays = [ + string.encode("utf-8")[:strlen] for string in ("1", "1234ü", "two") + ] + nd_bytes_array = np.array( + [ + [bytes[i : i + 1] if i < len(bytes) else b"\0" for i in range(strlen)] + for bytes in byte_arrays + ] + ) + v[:] = nd_bytes_array # This creates a problem: it won't read back msg = ( "Character data in variable 'vxs' could not be decoded " From 4eb2fe1098df501bdb0e80d555c4d38dc3ddb9d5 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 19 Jan 2026 17:10:04 +0000 Subject: [PATCH 61/77] Get temporary iris load/save exercises working (todo: proper tests). --- lib/iris/fileformats/netcdf/saver.py | 6 ++++++ .../integration/netcdf/test_chararrays.py | 20 +++++++++++++++---- 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 5d4aa62795..76cf4533b3 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1718,6 +1718,12 @@ def add_names_attrs(): if element.units.calendar: _setncattr(cf_var, "calendar", str(element.units.calendar)) + # Most attributes are dealt with later. 
+ # But _Encoding need to be defined before we can write to a character variable + if element.dtype.kind in "SU" and "_Encoding" in element.attributes: + encoding = element.attributes.pop("_Encoding") + _setncattr(cf_var, "_Encoding", encoding) + if not isinstance(element, Cube): # Add any other custom coordinate attributes. # N.B. not Cube, which has specific handling in _create_cf_data_variable diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py index f3bba81c70..496867ee8a 100644 --- a/lib/iris/tests/integration/netcdf/test_chararrays.py +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -218,6 +218,7 @@ def test_load_encodings(encoding, save_dir): # small change print(f"\n=========\nTesting encoding: {encoding}") filepath = save_dir / f"tmp_load_{str(encoding)}.nc" + # Actual content is always either utf-8 or utf-32 do_as = encoding if encoding != "utf-32": do_as = "utf-8" @@ -228,7 +229,14 @@ def test_load_encodings(encoding, save_dir): TEST_COORD_VALS, N_STRLEN, encoding=do_as ) make_testfile(filepath, TEST_CHARARRAY, TEST_COORDARRAY, encoding_str=encoding) - show_result(filepath) + if encoding == "ascii": + # If explicitly labelled as ascii, 'utf-8' data will fail to load back ... + msg = r"Character data .* could not be decoded with the 'ascii' encoding\." + with pytest.raises(ValueError, match=msg): + show_result(filepath) + else: + # ... 
otherwise, utf-8 data loads even without a label, as 'utf-8' default used + show_result(filepath) @pytest.mark.parametrize("encoding", test_encodings) @@ -243,10 +251,14 @@ def test_save_encodings(encoding, save_dir): ) print(cube) filepath = save_dir / f"tmp_save_{str(encoding)}.nc" - if encoding == "ascii": + if encoding in ("ascii", None): + msg = ( + "String data written to netcdf character variable 'v' " + "could not be represented in encoding 'ascii'" + ) with pytest.raises( - UnicodeEncodeError, - match="'ascii' codec can't encode character.*not in range", + ValueError, + match=msg, ): iris.save(cube, filepath) else: From 3ccc2e93c44184a54180dfff33438afd98997626 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 21 Jan 2026 16:19:27 +0000 Subject: [PATCH 62/77] Put encoding information into separate converter class, for use in proxies. --- .../netcdf/_bytecoding_datasets.py | 290 +++++++++--------- .../fileformats/netcdf/_thread_safe_nc.py | 27 +- 2 files changed, 161 insertions(+), 156 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index a3a13f86f5..4559f4b78b 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -42,6 +42,7 @@ import codecs import contextlib +import dataclasses import threading import warnings @@ -80,55 +81,6 @@ def decode_bytesarray_to_stringarray( return result -# -# TODO: remove? -# this older version is "overly flexible", less efficient and not needed here. -# -def flexi_encode_stringarray_as_bytearray( - data: np.ndarray, encoding=None, string_dimension_length: int | None = None -) -> np.ndarray: - """Encode strings as bytearray. - - Note: if 'string_dimension_length' is not given (None), it is set to the longest - encoded bytes element, **OR** the dtype size, if that is greater. 
- If 'string_dimension_length' is specified, the last array - dimension is set to this and content strings are truncated or extended as required. - """ - if np.ma.isMaskedArray(data): - # netCDF4-python sees zeros as "missing" -- we don't need or want that - data = data.data - element_shape = data.shape - # Encode all the strings + see which is longest - max_length = 1 # this is a MINIMUM - i.e. not zero! - data_elements = np.zeros(element_shape, dtype=object) - for index in np.ndindex(element_shape): - data_element = data[index].encode(encoding=encoding) - element_length = len(data_element) - data_elements[index] = data_element - if element_length > max_length: - max_length = element_length - - if string_dimension_length is None: - # If the string length was not specified, it is the maximum encoded length - # (n-bytes), **or** the dtype string-length, if greater. - string_dimension_length = max_length - array_string_length = int(str(data.dtype)[2:]) # Yuck. No better public way? - if array_string_length > string_dimension_length: - string_dimension_length = array_string_length - - # We maybe *already* encoded all the strings above, but stored them in an - # object-array as we didn't yet know the fixed byte-length to convert to. 
- # Now convert to a fixed-width byte array with an extra string-length dimension - result = np.zeros(element_shape + (string_dimension_length,), dtype="S1") - right_pad = b"\0" * string_dimension_length - for index in np.ndindex(element_shape): - bytes = data_elements[index] - bytes = (bytes + right_pad)[:string_dimension_length] - result[index] = [bytes[i : i + 1] for i in range(string_dimension_length)] - - return result - - def encode_stringarray_as_bytearray( data: np.typing.ArrayLike, encoding: str, string_dimension_length: int ) -> np.ndarray: @@ -158,6 +110,114 @@ def encode_stringarray_as_bytearray( return result +@dataclasses.dataclass +class VariableEncoder: + """A record of encoding details which can apply them to variable data.""" + + varname: str # just for the error messages + dtype: np.dtype + is_chardata: bool # just a shortcut for the dtype test + read_encoding: str # *always* a valid encoding from the codecs package + write_encoding: str # *always* a valid encoding from the codecs package + n_chars_dim: int # length of associated character dimension + string_width: int # string lengths when viewing as strings (i.e. "Uxx") + + def __init__(self, cf_var): + """Get all the info from an netCDF4 variable (or similar wrapper object). + + Most importantly, we do *not* store 'cf_var' : instead we extract the + necessary information and store it in this object. + So, this object has static state + is serialisable. 
+ """ + self.varname = cf_var.name + self.dtype = cf_var.dtype + self.is_chardata = np.issubdtype(self.dtype, np.bytes_) + self.read_encoding = self._get_encoding(cf_var, writing=False) + self.write_encoding = self._get_encoding(cf_var, writing=True) + self.n_chars_dim = cf_var.group().dimensions[cf_var.dimensions[-1]].size + self.string_width = self._get_string_width(cf_var) + + @staticmethod + def _get_encoding(cf_var, writing=False) -> str: + """Get the byte encoding defined for this variable (or None).""" + result = getattr(cf_var, "_Encoding", None) + if result is not None: + try: + # Accept + normalise naming of encodings + result = codecs.lookup(result).name + # NOTE: if encoding does not suit data, errors can occur. + # For example, _Encoding = "ascii", with non-ascii content. + except LookupError: + # Unrecognised encoding name : handle this as just a warning + msg = ( + f"Ignoring unknown encoding for variable {cf_var.name!r}: " + f"_Encoding = {result!r}." + ) + warntype = IrisCfSaveWarning if writing else IrisCfLoadWarning + warnings.warn(msg, category=warntype) + # Proceed as if there is no specified encoding + result = None + + if result is None: + if writing: + result = DEFAULT_WRITE_ENCODING + else: + result = DEFAULT_READ_ENCODING + return result + + def _get_string_width(self, cf_var) -> int: + """Return the string-length defined for this variable.""" + # Work out the actual byte width from the parent dataset dimensions. + strlen = self.n_chars_dim + # Convert the string dimension length (i.e. bytes) to a sufficiently-long + # string width, depending on the (read) encoding used. 
+ encoding = self.read_encoding + if "utf-16" in encoding: + # Each char needs at least 2 bytes -- including a terminator char + strlen = (strlen // 2) - 1 + elif "utf-32" in encoding: + # Each char needs exactly 4 bytes -- including a terminator char + strlen = (strlen // 4) - 1 + # "ELSE": assume there can be (at most) as many chars as bytes + return strlen + + def decode_bytes_to_stringarray(self, data: np.ndarray) -> np.ndarray: + if self.is_chardata and DECODE_TO_STRINGS_ON_READ: + # N.B. read encoding default is UTF-8 --> a "usually safe" choice + encoding = self.read_encoding + strlen = self.string_width + try: + data = decode_bytesarray_to_stringarray(data, encoding, strlen) + except UnicodeDecodeError as err: + msg = ( + f"Character data in variable {self.varname!r} could not be decoded " + f"with the {encoding!r} encoding. This can be fixed by setting the " + "variable '_Encoding' attribute to suit the content." + ) + raise ValueError(msg) from err + + return data + + def encode_strings_as_bytearray(self, data: np.ndarray) -> np.ndarray: + if data.dtype.kind == "U": + # N.B. it is also possible to pass a byte array (dtype "S1"), + # to be written directly, without processing. + try: + # N.B. write encoding *default* is "ascii" --> fails bad content + encoding = self.write_encoding + strlen = self.n_chars_dim + data = encode_stringarray_as_bytearray(data, encoding, strlen) + except UnicodeEncodeError as err: + msg = ( + f"String data written to netcdf character variable {self.varname!r} " + f"could not be represented in encoding {self.write_encoding!r}. " + "This can be fixed by setting a suitable variable '_Encoding' " + 'attribute, e.g. ._Encoding="UTF-8".' 
+ ) + raise ValueError(msg) from err + return data + + class NetcdfStringDecodeSetting(threading.local): def __init__(self, perform_encoding: bool = True): self.set(perform_encoding) @@ -184,109 +244,24 @@ def context(self, perform_encoding: bool): class EncodedVariable(VariableWrapper): """A variable wrapper that translates variable data according to byte encodings.""" - def __getitem__(self, keys): - if self._is_chardata(): - # N.B. we never need to UNset this, as we totally control it - self._contained_instance.set_auto_chartostring(False) + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + def __getitem__(self, keys): + self._contained_instance.set_auto_chartostring(False) data = super().__getitem__(keys) - - if DECODE_TO_STRINGS_ON_READ and self._is_chardata(): - encoding = self._get_encoding() or DEFAULT_READ_ENCODING - # N.B. typically, read encoding default is UTF-8 --> a "usually safe" choice - strlen = self._get_string_width() - try: - data = decode_bytesarray_to_stringarray(data, encoding, strlen) - except UnicodeDecodeError as err: - msg = ( - f"Character data in variable {self.name!r} could not be decoded " - f"with the {encoding!r} encoding. This can be fixed by setting the " - "variable '_Encoding' attribute to suit the content." - ) - raise ValueError(msg) from err - + # Create a coding spec : redo every time in case "_Encoding" has changed + encoding_spec = VariableEncoder(self._contained_instance) + data = encoding_spec.decode_bytes_to_stringarray(data) return data def __setitem__(self, keys, data): data = np.asanyarray(data) - if self._is_chardata(): - # N.B. we never need to UNset this, as we totally control it - self._contained_instance.set_auto_chartostring(False) - - # N.B. 
typically, write encoding default is "ascii" --> fails bad content - if data.dtype.kind == "U": - try: - encoding = ( - self._get_encoding(writing=True) or DEFAULT_WRITE_ENCODING - ) - strlen = self._get_byte_width() - data = encode_stringarray_as_bytearray(data, encoding, strlen) - except UnicodeEncodeError as err: - msg = ( - f"String data written to netcdf character variable {self.name!r} " - f"could not be represented in encoding {encoding!r}. This can be " - "fixed by setting a suitable variable '_Encoding' attribute, " - 'e.g. ._Encoding="UTF-8".' - ) - raise ValueError(msg) from err - + # Create a coding spec : redo every time in case "_Encoding" has changed + encoding_spec = VariableEncoder(self._contained_instance) + data = encoding_spec.encode_strings_as_bytearray(data) super().__setitem__(keys, data) - def _is_chardata(self): - return np.issubdtype(self.dtype, np.bytes_) - - def _get_encoding(self, writing=False) -> str | None: - """Get the byte encoding defined for this variable (or None).""" - result = getattr(self, "_Encoding", None) - if result is not None: - try: - # Accept + normalise naming of encodings - result = codecs.lookup(result).name - # NOTE: if encoding does not suit data, errors can occur. - # For example, _Encoding = "ascii", with non-ascii content. - except LookupError: - # Unrecognised encoding name : handle this as just a warning - msg = ( - f"Ignoring unknown encoding for variable {self.name!r}: " - f"_Encoding = {result!r}." 
- ) - warntype = IrisCfSaveWarning if writing else IrisCfLoadWarning - warnings.warn(msg, category=warntype) - # Proceed as if there is no specified encoding - result = None - return result - - def _get_byte_width(self) -> int | None: - if not hasattr(self, "_bytewidth"): - n_bytes = self.group().dimensions[self.dimensions[-1]].size - # Cache this length control on the variable -- but not as a netcdf attribute - self.__dict__["_bytewidth"] = n_bytes - - return self.__dict__["_bytewidth"] - - def _get_string_width(self): - """Return the string-length defined for this variable.""" - if not hasattr(self, "_strlen"): - # Work out the actual byte width from the parent dataset dimensions. - strlen = self._get_byte_width() - # Convert the string dimension length (i.e. bytes) to a sufficiently-long - # string width, depending on the encoding used. - encoding = self._get_encoding() or DEFAULT_READ_ENCODING - # regularise the name for comparison with recognised ones - encoding = codecs.lookup(encoding).name - if "utf-16" in encoding: - # Each char needs at least 2 bytes -- including a terminator char - strlen = (strlen // 2) - 1 - elif "utf-32" in encoding: - # Each char needs exactly 4 bytes -- including a terminator char - strlen = (strlen // 4) - 1 - # "ELSE": assume there can be (at most) as many chars as bytes - - # Cache this length control on the variable -- but not as a netcdf attribute - self.__dict__["_strlen"] = strlen - - return self._strlen - def set_auto_chartostring(self, onoff: bool): msg = "auto_chartostring is not supported by Iris 'EncodedVariable' type." raise TypeError(msg) @@ -297,14 +272,37 @@ class EncodedDataset(DatasetWrapper): VAR_WRAPPER_CLS = EncodedVariable + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + def set_auto_chartostring(self, onoff: bool): msg = "auto_chartostring is not supported by Iris 'EncodedDataset' type." 
raise TypeError(msg) class EncodedNetCDFDataProxy(NetCDFDataProxy): - DATASET_CLASS = EncodedDataset + __slots__ = NetCDFDataProxy.__slots__ + ("encoding_details",) + + def __init__(self, cf_var, *args, **kwargs): + # When creating, also capture + record the encoding to be performed. + kwargs["use_byte_data"] = True + super().__init__(cf_var, *args, **kwargs) + self.encoding_details = VariableEncoder(cf_var) + + def __getitem__(self, keys): + data = super().__getitem__(keys) + # Apply the optional bytes-to-strings conversion + data = self.encoding_details.decode_bytes_to_stringarray(data) + return data class EncodedNetCDFWriteProxy(NetCDFWriteProxy): - DATASET_CLASS = EncodedDataset + def __init__(self, filepath, cf_var, file_write_lock): + super.__init__(filepath, cf_var, file_write_lock) + self.encoding_details = VariableEncoder(cf_var) + + def __setitem__(self, key, data): + data = np.asanyarray(data) + # Apply the optional strings-to-bytes conversion + data = self.encoding_details.encode_strings_as_bytearray(data) + super.__setitem__(key, data) diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index cd97452dac..96cee458f7 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -314,15 +314,22 @@ def fromcdl(cls, *args, **kwargs): class NetCDFDataProxy: """A reference to the data payload of a single NetCDF file variable.""" - __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") - DATASET_CLASS = netCDF4.Dataset - - def __init__(self, shape, dtype, path, variable_name, fill_value): - self.shape = shape + __slots__ = ( + "shape", + "dtype", + "path", + "variable_name", + "fill_value", + "use_byte_data", + ) + + def __init__(self, cf_var, dtype, path, fill_value, *, use_byte_data=False): + self.shape = cf_var.shape + self.variable_name = cf_var.name self.dtype = dtype self.path = path - self.variable_name = variable_name 
self.fill_value = fill_value + self.use_byte_data = use_byte_data @property def ndim(self): @@ -338,9 +345,11 @@ def __getitem__(self, keys): # netCDF4 library, presumably because __getitem__ gets called so many # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. with _GLOBAL_NETCDF4_LOCK: - dataset = self.DATASET_CLASS(self.path) + dataset = netCDF4.Dataset(self.path) try: variable = dataset.variables[self.variable_name] + if self.use_byte_data: + variable.set_auto_mask(False) # Get the NetCDF variable data and slice. var = variable[keys] finally: @@ -375,8 +384,6 @@ class NetCDFWriteProxy: TODO: could be improved with a caching scheme, but this just about works. """ - DATASET_CLASS = netCDF4.Dataset - def __init__(self, filepath, cf_var, file_write_lock): self.path = filepath self.varname = cf_var.name @@ -404,7 +411,7 @@ def __setitem__(self, keys, array_data): # investigation needed. for attempt in range(5): try: - dataset = self.DATASET_CLASS(self.path, "r+") + dataset = netCDF4.Dataset(self.path, "r+") break except OSError: if attempt < 4: From e89862c112217baa65143be613d31b7e420d4222 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 21 Jan 2026 18:41:40 +0000 Subject: [PATCH 63/77] First proper testing (reads working). --- lib/iris/fileformats/cf.py | 67 +++--- .../integration/netcdf/test_stringdata.py | 193 ++++++++++++++++++ 2 files changed, 227 insertions(+), 33 deletions(-) create mode 100644 lib/iris/tests/integration/netcdf/test_stringdata.py diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 9e3f57fa06..bd812a9bee 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -816,39 +816,40 @@ def cf_label_data(self, cf_data_var): % self.cf_name ) - label_data = self[:] - - if ma.isMaskedArray(label_data): - label_data = label_data.filled(b"\0") - - # Determine whether we have a string-valued scalar label - # i.e. a character variable that only has one dimension (the length of the string). 
- if self.ndim == 1: - label_string = b"".join(label_data).strip() - label_string = label_string.decode("utf8") - data = np.array([label_string]) - else: - # Determine the index of the string dimension. - str_dim = self.dimensions.index(str_dim_name) - - # Calculate new label data shape (without string dimension) and create payload array. - new_shape = tuple( - dim_len for i, dim_len in enumerate(self.shape) if i != str_dim - ) - string_basetype = "|U%d" - string_dtype = string_basetype % self.shape[str_dim] - data = np.empty(new_shape, dtype=string_dtype) - - for index in np.ndindex(new_shape): - # Create the slice for the label data. - if str_dim == 0: - label_index = (slice(None, None),) + index - else: - label_index = index + (slice(None, None),) - - label_string = b"".join(label_data[label_index]).strip() - label_string = label_string.decode("utf8") - data[index] = label_string + data = self[:] + # label_data = self[:] + # + # if ma.isMaskedArray(label_data): + # label_data = label_data.filled(b"\0") + # + # # Determine whether we have a string-valued scalar label + # # i.e. a character variable that only has one dimension (the length of the string). + # if self.ndim == 1: + # label_string = b"".join(label_data).strip() + # label_string = label_string.decode("utf8") + # data = np.array([label_string]) + # else: + # # Determine the index of the string dimension. + # str_dim = self.dimensions.index(str_dim_name) + # + # # Calculate new label data shape (without string dimension) and create payload array. + # new_shape = tuple( + # dim_len for i, dim_len in enumerate(self.shape) if i != str_dim + # ) + # string_basetype = "|U%d" + # string_dtype = string_basetype % self.shape[str_dim] + # data = np.empty(new_shape, dtype=string_dtype) + # + # for index in np.ndindex(new_shape): + # # Create the slice for the label data. 
+ # if str_dim == 0: + # label_index = (slice(None, None),) + index + # else: + # label_index = index + (slice(None, None),) + # + # label_string = b"".join(label_data[label_index]).strip() + # label_string = label_string.decode("utf8") + # data[index] = label_string return data diff --git a/lib/iris/tests/integration/netcdf/test_stringdata.py b/lib/iris/tests/integration/netcdf/test_stringdata.py new file mode 100644 index 0000000000..44c94ac2cc --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_stringdata.py @@ -0,0 +1,193 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Integration tests for various uses of character/string arrays in netcdf file variables. + +This covers both the loading and saving of variables which are the content of +data-variables, auxiliary coordinates, ancillary variables and -possibly?- cell measures. +""" + +from pathlib import Path + +import numpy as np +import pytest + +import iris +from iris.fileformats.netcdf import _thread_safe_nc + +N_XDIM = 3 +N_CHARS_DIM = 64 +COORD_ON_SEPARATE_DIM = True +PERSIST_TESTFILES = "~/chararray_testfiles" + + +NO_ENCODING_STR = "" +TEST_ENCODINGS = [ + NO_ENCODING_STR, + "ascii", + "utf-8", + # "iso8859-1", # a common one-byte-per-char "codepage" type + # "utf-16", + "utf-32", +] + + +# +# Routines to convert between byte and string arrays. +# Independently defined here, to avoid relying on any code we are testing. +# +def convert_strings_to_chararray( + string_array_1d: np.ndarray, maxlen: int, encoding: str | None = None +): + # Note: this is limited to 1-D arrays of strings. + # Could generalise that if needed, but for now this makes it simpler. 
+ if encoding is None: + encoding = "ascii" + bbytes = [text.encode(encoding) for text in string_array_1d] + pad = b"\0" * maxlen + bbytes = [(x + pad)[:maxlen] for x in bbytes] + chararray = np.array([[bb[i : i + 1] for i in range(maxlen)] for bb in bbytes]) + return chararray + + +def convert_bytearray_to_strings( + byte_array, encoding="utf-8", string_length: int | None = None +): + """Convert bytes to strings. + + N.B. for now at least, we assume the string dim is **always the last one**. + """ + bytes_shape = byte_array.shape + var_shape = bytes_shape[:-1] + if string_length is None: + string_length = bytes_shape[-1] + string_dtype = f"U{string_length}" + result = np.empty(var_shape, dtype=string_dtype) + for ndindex in np.ndindex(var_shape): + element_bytes = byte_array[ndindex] + bytes = b"".join([b if b else b"\0" for b in element_bytes]) + string = bytes.decode(encoding) + result[ndindex] = string + return result + + +def make_testfile(testfile_path: Path, encoding_str: str): + """Create a test netcdf file. + + Also returns content strings (unicode or ascii versions). 
+ """ + if encoding_str == NO_ENCODING_STR: + encoding = None + else: + encoding = encoding_str + + data_is_ascii = encoding in (None, "ascii") + + if data_is_ascii: + coordvar_strings = ["mOnster", "London", "Amsterdam"] + datavar_strings = ["bun", "Eclair", "sandwich"] + else: + coordvar_strings = ["Münster", "London", "Amsterdam"] + datavar_strings = ["bun", "éclair", "sandwich"] + + coordvar_bytearray = convert_strings_to_chararray( + string_array_1d=coordvar_strings, maxlen=N_CHARS_DIM, encoding=encoding + ) + datavar_bytearray = convert_strings_to_chararray( + string_array_1d=datavar_strings, maxlen=N_CHARS_DIM, encoding=encoding + ) + + ds = _thread_safe_nc.DatasetWrapper(testfile_path, "w") + try: + ds.createDimension("x", N_XDIM) + ds.createDimension("nstr", N_CHARS_DIM) + if COORD_ON_SEPARATE_DIM: + ds.createDimension("nstr2", N_CHARS_DIM) + v_xdim = ds.createVariable("x", int, dimensions=("x")) + v_xdim[:] = np.arange(N_XDIM) + + v_co = ds.createVariable( + "v_co", + "S1", + dimensions=( + "x", + "nstr2" if COORD_ON_SEPARATE_DIM else "nstr", + ), + ) + v_co[:] = coordvar_bytearray + + if encoding is not None: + v_co._Encoding = encoding + + v_numeric = ds.createVariable( + "v_numeric", + float, + dimensions=("x",), + ) + v_numeric[:] = np.arange(N_XDIM) + + v_datavar = ds.createVariable( + "v", + "S1", + dimensions=( + "x", + "nstr", + ), + ) + v_datavar[:] = datavar_bytearray + + if encoding is not None: + v_datavar._Encoding = encoding + + v_datavar.coordinates = "v_co v_numeric" + finally: + ds.close() + + return testfile_path, coordvar_strings, datavar_strings + + +@pytest.fixture(params=TEST_ENCODINGS) +def encoding(request): + return request.param + + +class TestReadEncodings: + """Test loading of testfiles with encoded string data.""" + + @pytest.fixture() + def testdata(self, encoding, tmp_path): + """Create a suitable valid testfile, and return expected string content.""" + if PERSIST_TESTFILES: + tmp_path = Path(PERSIST_TESTFILES).expanduser() 
+ if encoding == "": + filetag = "noencoding" + else: + filetag = encoding + tempfile_path = tmp_path / f"sample_read_{filetag}.nc" + testdata = make_testfile(testfile_path=tempfile_path, encoding_str=encoding) + from iris.tests.integration.netcdf.test_chararrays import ncdump + + ncdump(tempfile_path) + yield testdata + + def assert_no_load_problems(self): + if len(iris.loading.LOAD_PROBLEMS.problems): + probs = "\n".join(str(prob) for prob in iris.loading.LOAD_PROBLEMS.problems) + assert probs == "" + + def test_valid_encodings(self, encoding, testdata): + testfile_path, coordvar_strings, datavar_strings = testdata + cube = iris.load_cube(testfile_path) + self.assert_no_load_problems() + assert cube.shape == (N_XDIM,) + + if encoding != "utf-32": + expected_string_width = N_CHARS_DIM + else: + expected_string_width = (N_CHARS_DIM // 4) - 1 + assert cube.dtype == f" Date: Fri, 23 Jan 2026 15:41:17 +0000 Subject: [PATCH 64/77] Encoded reading ~working; new ideas for switching (untested). --- .../fileformats/_nc_load_rules/helpers.py | 8 +-- lib/iris/fileformats/cf.py | 58 ++++++++++--------- .../netcdf/_bytecoding_datasets.py | 42 +++++++++++++- .../fileformats/netcdf/_thread_safe_nc.py | 2 +- lib/iris/fileformats/netcdf/loader.py | 36 ++++++++---- .../integration/netcdf/test_stringdata.py | 52 +++++++++++++---- 6 files changed, 142 insertions(+), 56 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index fa63002f09..a2800dc91d 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -1644,11 +1644,11 @@ def _add_auxiliary_coordinate( # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the coordinate being built. 
coord_dims = cf_coord_var.dimensions - if cf._is_str_dtype(cf_coord_var): - coord_dims = coord_dims[:-1] + # if cf._is_str_dtype(cf_coord_var): + # coord_dims = coord_dims[:-1] datavar_dims = engine.cf_var.dimensions - if cf._is_str_dtype(engine.cf_var): - datavar_dims = datavar_dims[:-1] + # if cf._is_str_dtype(engine.cf_var): + # datavar_dims = datavar_dims[:-1] common_dims = [dim for dim in coord_dims if dim in datavar_dims] data_dims = None if common_dims: diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index bd812a9bee..da397a8b53 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -31,7 +31,7 @@ import iris.exceptions import iris.fileformats._nc_load_rules.helpers as hh -from iris.fileformats.netcdf import _bytecoding_datasets +from iris.fileformats.netcdf import _bytecoding_datasets, _thread_safe_nc from iris.mesh.components import Connectivity import iris.util import iris.warnings @@ -72,7 +72,9 @@ # NetCDF returns a different type for strings depending on Python version. def _is_str_dtype(var): - return np.issubdtype(var.dtype, np.bytes_) + # N.B. use 'datatype' not 'dtype', to "look inside" variable wrappers which + # represent 'S1' type data as 'U'. + return isinstance(var.datatype, np.dtype) and np.issubdtype(var.datatype, np.bytes_) ################################################################################ @@ -793,28 +795,28 @@ def cf_label_data(self, cf_data_var): % type(cf_data_var) ) - # Determine the name of the label string (or length) dimension by - # finding the dimension name that doesn't exist within the data dimensions. - str_dim_names = list(set(self.dimensions) - set(cf_data_var.dimensions)) - n_nondata_dims = len(str_dim_names) - - if n_nondata_dims == 0: - # *All* dims are shared with the data-variable. - # This is only ok if the data-var is *also* a string type. 
- dim_ok = _is_str_dtype(cf_data_var) - # In this case, we must just *assume* that the last dimension is "the" - # string dimension - str_dim_name = self.dimensions[-1] - else: - # If there is exactly one non-data dim, that is the one we want - dim_ok = len(str_dim_names) == 1 - (str_dim_name,) = str_dim_names - - if not dim_ok: - raise ValueError( - "Invalid string dimensions for CF-netCDF label variable %r" - % self.cf_name - ) + # # Determine the name of the label string (or length) dimension by + # # finding the dimension name that doesn't exist within the data dimensions. + # str_dim_names = list(set(self.dimensions) - set(cf_data_var.dimensions)) + # n_nondata_dims = len(str_dim_names) + # + # if n_nondata_dims == 0: + # # *All* dims are shared with the data-variable. + # # This is only ok if the data-var is *also* a string type. + # dim_ok = _is_str_dtype(cf_data_var) + # # In this case, we must just *assume* that the last dimension is "the" + # # string dimension + # str_dim_name = self.dimensions[-1] + # else: + # # If there is exactly one non-data dim, that is the one we want + # dim_ok = len(str_dim_names) == 1 + # (str_dim_name,) = str_dim_names + # + # if not dim_ok: + # raise ValueError( + # "Invalid string dimensions for CF-netCDF label variable %r" + # % self.cf_name + # ) data = self[:] # label_data = self[:] @@ -1379,9 +1381,11 @@ def __init__(self, file_source, warn=False, monotonic=False): if isinstance(file_source, str): # Create from filepath : open it + own it (=close when we die). self._filename = os.path.expanduser(file_source) - self._dataset = _bytecoding_datasets.EncodedDataset( - self._filename, mode="r" - ) + if _bytecoding_datasets.DECODE_TO_STRINGS_ON_READ: + ds_type = _bytecoding_datasets.EncodedDataset + else: + ds_type = _thread_safe_nc.DatasetWrapper + self._dataset = ds_type(self._filename, mode="r") self._own_file = True else: # We have been passed an open dataset. 
diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 4559f4b78b..fa64e570bb 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -182,7 +182,7 @@ def _get_string_width(self, cf_var) -> int: return strlen def decode_bytes_to_stringarray(self, data: np.ndarray) -> np.ndarray: - if self.is_chardata and DECODE_TO_STRINGS_ON_READ: + if self.is_chardata: # N.B. read encoding default is UTF-8 --> a "usually safe" choice encoding = self.read_encoding strlen = self.string_width @@ -247,6 +247,38 @@ class EncodedVariable(VariableWrapper): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) + # Override specific properties of the contained instance, making changes in the case + # that the variable contains char data, which is presented instead as strings + # with one less dimension. + + @property + def shape(self): + shape = self._contained_instance.shape + is_chardata = np.issubdtype(self._contained_instance.dtype, np.bytes_) + if is_chardata: + # Translated char data appears without the final dimension + shape = shape[:-1] # remove final dimension + return shape + + @property + def dimensions(self): + dimensions = self._contained_instance.dimensions + is_chardata = np.issubdtype(self._contained_instance.dtype, np.bytes_) + if is_chardata: + # Translated char data appears without the final dimension + dimensions = dimensions[:-1] # remove final dimension + return dimensions + + @property + def dtype(self): + dtype = self._contained_instance.dtype + is_chardata = np.issubdtype(self._contained_instance.dtype, np.bytes_) + if is_chardata: + # Create a coding spec : redo every time in case "_Encoding" has changed + encoding_spec = VariableEncoder(self._contained_instance) + dtype = np.dtype(f"U{encoding_spec.string_width}") + return dtype + def __getitem__(self, keys): 
self._contained_instance.set_auto_chartostring(False) data = super().__getitem__(keys) @@ -287,7 +319,13 @@ def __init__(self, cf_var, *args, **kwargs): # When creating, also capture + record the encoding to be performed. kwargs["use_byte_data"] = True super().__init__(cf_var, *args, **kwargs) - self.encoding_details = VariableEncoder(cf_var) + if not isinstance(cf_var, EncodedVariable): + msg = ( + f"Unexpected variable type : {type(cf_var)} of variable '{cf_var.name}'" + ": expected EncodedVariable." + ) + raise TypeError(msg) + self.encoding_details = VariableEncoder(cf_var._contained_instance) def __getitem__(self, keys): data = super().__getitem__(keys) diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 96cee458f7..f96312cf79 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -349,7 +349,7 @@ def __getitem__(self, keys): try: variable = dataset.variables[self.variable_name] if self.use_byte_data: - variable.set_auto_mask(False) + variable.set_auto_chartostring(False) # Get the NetCDF variable data and slice. var = variable[keys] finally: diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index ed3a49dfe2..a3322ec2e4 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -55,6 +55,10 @@ # An expected part of the public loader API, but includes thread safety # concerns so is housed in _thread_safe_nc. +# NOTE: this is the *default*, as required for public legacy api +# - in practice, when creating our proxies we dynamically choose between this and +# :class:`_thread_safe_nc.DatasetWrapper`, depending on +# :data:`_bytecoding_datasets.DECODE_TO_STRINGS_ON_READ` NetCDFDataProxy = _bytecoding_datasets.EncodedNetCDFDataProxy @@ -284,7 +288,7 @@ def _get_cf_var_data(cf_var): # correct dtype. 
Note: this is not an issue for masked arrays, # only masked scalar values. if result is np.ma.masked: - result = np.ma.masked_all(1, dtype=cf_var.datatype) + result = np.ma.masked_all(1, dtype=cf_var.dtype) else: # Get lazy chunked data out of a cf variable. # Creates Dask wrappers around data arrays for any cube components which @@ -294,15 +298,27 @@ def _get_cf_var_data(cf_var): # Make a data-proxy that mimics array access and can fetch from the file. # Note: Special handling needed for "variable length string" types which # return a dtype of `str`, rather than a numpy type; use `S1` in this case. - fill_dtype = "S1" if cf_var.dtype is str else cf_var.dtype.str[1:] - fill_value = getattr( - cf_var.cf_data, - "_FillValue", - _thread_safe_nc.default_fillvals[fill_dtype], - ) - proxy = NetCDFDataProxy( - cf_var.shape, dtype, cf_var.filename, cf_var.cf_name, fill_value - ) + if cf_var.dtype.kind == "U": + # Special handling for "string variables". + fill_value = "" + else: + fill_dtype = "S1" if cf_var.dtype is str else cf_var.dtype.str[1:] + fill_value = getattr( + cf_var.cf_data, + "_FillValue", + _thread_safe_nc.default_fillvals[fill_dtype], + ) + + # Switch type of proxy, based on type of variable. + # It is done this way, instead of using an instance variable, because the + # limited nature of the wrappers makes a stateful choice awkward, + # e.g. especially, "variable.group()" is *not* the parent DatasetWrapper. + if isinstance(cf_var.cf_data, _bytecoding_datasets.EncodedVariable): + proxy_class = _bytecoding_datasets.EncodedNetCDFDataProxy + else: + proxy_class = _thread_safe_nc.NetCDFDataProxy + + proxy = proxy_class(cf_var.cf_data, dtype, cf_var.filename, fill_value) # Get the chunking specified for the variable : this is either a shape, or # maybe the string "contiguous". 
if CHUNK_CONTROL.mode is ChunkControl.Modes.AS_DASK: diff --git a/lib/iris/tests/integration/netcdf/test_stringdata.py b/lib/iris/tests/integration/netcdf/test_stringdata.py index 44c94ac2cc..5831f85b41 100644 --- a/lib/iris/tests/integration/netcdf/test_stringdata.py +++ b/lib/iris/tests/integration/netcdf/test_stringdata.py @@ -8,6 +8,7 @@ data-variables, auxiliary coordinates, ancillary variables and -possibly?- cell measures. """ +from dataclasses import dataclass from pathlib import Path import numpy as np @@ -16,9 +17,12 @@ import iris from iris.fileformats.netcdf import _thread_safe_nc +iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = 0 + N_XDIM = 3 N_CHARS_DIM = 64 -COORD_ON_SEPARATE_DIM = True +# COORD_ON_SEPARATE_DIM = True +COORD_ON_SEPARATE_DIM = False PERSIST_TESTFILES = "~/chararray_testfiles" @@ -72,6 +76,14 @@ def convert_bytearray_to_strings( return result +@dataclass +class SamplefileDetails: + filepath: Path + datavar_data: np.ndarray + stringcoord_data: np.ndarray + numericcoord_data: np.ndarray + + def make_testfile(testfile_path: Path, encoding_str: str): """Create a test netcdf file. 
@@ -84,6 +96,7 @@ def make_testfile(testfile_path: Path, encoding_str: str): data_is_ascii = encoding in (None, "ascii") + numeric_values = np.arange(3.0) if data_is_ascii: coordvar_strings = ["mOnster", "London", "Amsterdam"] datavar_strings = ["bun", "Eclair", "sandwich"] @@ -125,7 +138,7 @@ def make_testfile(testfile_path: Path, encoding_str: str): float, dimensions=("x",), ) - v_numeric[:] = np.arange(N_XDIM) + v_numeric[:] = numeric_values v_datavar = ds.createVariable( "v", @@ -144,7 +157,12 @@ def make_testfile(testfile_path: Path, encoding_str: str): finally: ds.close() - return testfile_path, coordvar_strings, datavar_strings + return SamplefileDetails( + filepath=testfile_path, + datavar_data=datavar_strings, + stringcoord_data=coordvar_strings, + numericcoord_data=numeric_values, + ) @pytest.fixture(params=TEST_ENCODINGS) @@ -152,6 +170,10 @@ def encoding(request): return request.param +def load_problems_list(): + return [str(prob) for prob in iris.loading.LOAD_PROBLEMS.problems] + + class TestReadEncodings: """Test loading of testfiles with encoded string data.""" @@ -168,18 +190,19 @@ def testdata(self, encoding, tmp_path): testdata = make_testfile(testfile_path=tempfile_path, encoding_str=encoding) from iris.tests.integration.netcdf.test_chararrays import ncdump + # TODO: temporary for debug -- TO REMOVE ncdump(tempfile_path) yield testdata - def assert_no_load_problems(self): - if len(iris.loading.LOAD_PROBLEMS.problems): - probs = "\n".join(str(prob) for prob in iris.loading.LOAD_PROBLEMS.problems) - assert probs == "" - - def test_valid_encodings(self, encoding, testdata): - testfile_path, coordvar_strings, datavar_strings = testdata + def test_valid_encodings(self, encoding, testdata: SamplefileDetails): + testfile_path, datavar_strings, coordvar_strings, numeric_data = ( + testdata.filepath, + testdata.datavar_data, + testdata.stringcoord_data, + testdata.numericcoord_data, + ) cube = iris.load_cube(testfile_path) - self.assert_no_load_problems() 
+ assert load_problems_list() == [] assert cube.shape == (N_XDIM,) if encoding != "utf-32": @@ -187,7 +210,12 @@ def test_valid_encodings(self, encoding, testdata): else: expected_string_width = (N_CHARS_DIM // 4) - 1 assert cube.dtype == f" Date: Tue, 27 Jan 2026 11:37:10 +0000 Subject: [PATCH 65/77] Check loads when coords do/not share a string dim with data. --- .../integration/netcdf/test_stringdata.py | 47 +++++++++++++++---- 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/lib/iris/tests/integration/netcdf/test_stringdata.py b/lib/iris/tests/integration/netcdf/test_stringdata.py index 5831f85b41..fc5bf5ae3e 100644 --- a/lib/iris/tests/integration/netcdf/test_stringdata.py +++ b/lib/iris/tests/integration/netcdf/test_stringdata.py @@ -8,6 +8,7 @@ data-variables, auxiliary coordinates, ancillary variables and -possibly?- cell measures. """ +from contextlib import contextmanager from dataclasses import dataclass from pathlib import Path @@ -17,12 +18,18 @@ import iris from iris.fileformats.netcdf import _thread_safe_nc -iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = 0 + +@pytest.fixture(scope="module") +def all_lazy_auxcoords(): + """Ensure that *all* aux-coords are loaded lazily, even really small ones.""" + old_minlazybytes = iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = 0 + yield + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = old_minlazybytes + N_XDIM = 3 N_CHARS_DIM = 64 -# COORD_ON_SEPARATE_DIM = True -COORD_ON_SEPARATE_DIM = False PERSIST_TESTFILES = "~/chararray_testfiles" @@ -78,16 +85,22 @@ def convert_bytearray_to_strings( @dataclass class SamplefileDetails: + """Convenience container for information about a sample file.""" + filepath: Path datavar_data: np.ndarray stringcoord_data: np.ndarray numericcoord_data: np.ndarray -def make_testfile(testfile_path: Path, encoding_str: str): +def make_testfile( + testfile_path: Path, + encoding_str: str, + coords_on_separate_dim: 
bool, +) -> SamplefileDetails: """Create a test netcdf file. - Also returns content strings (unicode or ascii versions). + Also returns content information for checking loaded results. """ if encoding_str == NO_ENCODING_STR: encoding = None @@ -115,7 +128,7 @@ def make_testfile(testfile_path: Path, encoding_str: str): try: ds.createDimension("x", N_XDIM) ds.createDimension("nstr", N_CHARS_DIM) - if COORD_ON_SEPARATE_DIM: + if coords_on_separate_dim: ds.createDimension("nstr2", N_CHARS_DIM) v_xdim = ds.createVariable("x", int, dimensions=("x")) v_xdim[:] = np.arange(N_XDIM) @@ -125,7 +138,7 @@ def make_testfile(testfile_path: Path, encoding_str: str): "S1", dimensions=( "x", - "nstr2" if COORD_ON_SEPARATE_DIM else "nstr", + "nstr2" if coords_on_separate_dim else "nstr", ), ) v_co[:] = coordvar_bytearray @@ -177,8 +190,17 @@ def load_problems_list(): class TestReadEncodings: """Test loading of testfiles with encoded string data.""" + @pytest.fixture(params=["coordsSameDim", "coordsOwnDim"]) + def use_separate_dims(self, request): + yield request.param == "coordsOwnDim" + @pytest.fixture() - def testdata(self, encoding, tmp_path): + def testdata( + self, + encoding, + tmp_path, + use_separate_dims, + ): """Create a suitable valid testfile, and return expected string content.""" if PERSIST_TESTFILES: tmp_path = Path(PERSIST_TESTFILES).expanduser() @@ -186,8 +208,13 @@ def testdata(self, encoding, tmp_path): filetag = "noencoding" else: filetag = encoding - tempfile_path = tmp_path / f"sample_read_{filetag}.nc" - testdata = make_testfile(testfile_path=tempfile_path, encoding_str=encoding) + dimtag = "diffdims" if use_separate_dims else "samedims" + tempfile_path = tmp_path / f"sample_read_{filetag}_{dimtag}.nc" + testdata = make_testfile( + testfile_path=tempfile_path, + encoding_str=encoding, + coords_on_separate_dim=use_separate_dims, + ) from iris.tests.integration.netcdf.test_chararrays import ncdump # TODO: temporary for debug -- TO REMOVE From 
4dd5e4e2c955e9f4de6738f4eacdfd418ca21afb Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 27 Jan 2026 15:32:36 +0000 Subject: [PATCH 66/77] Fix nondecoded reference loads in test_byecoded_datasets. --- .../netcdf/test_bytecoding_datasets.py | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py index 4909d976de..f16097bef3 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -304,6 +304,14 @@ class TestRead: def readmode(self, request): return request.param + def undecoded_testvar(self, ds_encoded, varname: str): + path = ds_encoded.filepath() + ds_encoded.close() + ds = DatasetWrapper(path) + v = ds.variables[varname] + v.set_auto_chartostring(False) + return v + def test_encodings(self, encoding, tempdir, readmode): # Create a dataset with the variable path = tempdir / f"test_read_encodings_{encoding!s}_{readmode}.nc" @@ -337,9 +345,9 @@ def test_encodings(self, encoding, tempdir, readmode): assert np.all(truncated_result == result) result = truncated_result else: - # Test "raw" read --> byte array - with DECODE_TO_STRINGS_ON_READ.context(False): - result = v[:] + # Close and re-open as "regular" dataset -- just to check the raw content + v = self.undecoded_testvar(ds_encoded, "vxs") + result = v[:] expected = write_bytes check_array_matching(result, expected) @@ -364,8 +372,8 @@ def test_scalar(self, tempdir, readmode): expected = np.array(data_string) else: # Test "raw" read --> byte array - with DECODE_TO_STRINGS_ON_READ.context(False): - result = v[:] + v = self.undecoded_testvar(ds_encoded, "v0_scalar") + result = v[:] expected = data_bytes check_array_matching(result, expected) @@ -401,8 +409,8 @@ def test_multidim(self, tempdir, readmode): expected = np.array(test_strings) else: # 
Test "raw" read --> byte array - with DECODE_TO_STRINGS_ON_READ.context(False): - result = v[:] + v = self.undecoded_testvar(ds_encoded, "vyxn") + result = v[:] expected = test_bytes check_array_matching(result, expected) @@ -410,8 +418,8 @@ def test_multidim(self, tempdir, readmode): def test_read_encoding_failure(self, tempdir, readmode): path = tempdir / f"test_read_encoding_failure_{readmode}.nc" strlen = 10 - ds = make_encoded_dataset(path, strlen=strlen, encoding="ascii") - v = ds.variables["vxs"] + ds_encoded = make_encoded_dataset(path, strlen=strlen, encoding="ascii") + v = ds_encoded.variables["vxs"] test_utf8_bytes = make_bytearray( samples_3_nonascii, bytewidth=strlen, encoding="utf-8" ) @@ -425,8 +433,8 @@ def test_read_encoding_failure(self, tempdir, readmode): with pytest.raises(ValueError, match=msg): v[:] else: - with DECODE_TO_STRINGS_ON_READ.context(False): - result = v[:] # this ought to be ok! + v = self.undecoded_testvar(ds_encoded, "vxs") + result = v[:] # this ought to be ok! assert np.all(result == test_utf8_bytes) From 32a03558cb485739a682269c31e04ced122b0692 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 27 Jan 2026 18:49:23 +0000 Subject: [PATCH 67/77] Test writing of string data: various encodings, from strings or bytes. --- .../integration/netcdf/test_stringdata.py | 165 ++++++++++++++++-- 1 file changed, 155 insertions(+), 10 deletions(-) diff --git a/lib/iris/tests/integration/netcdf/test_stringdata.py b/lib/iris/tests/integration/netcdf/test_stringdata.py index fc5bf5ae3e..ed6fa576df 100644 --- a/lib/iris/tests/integration/netcdf/test_stringdata.py +++ b/lib/iris/tests/integration/netcdf/test_stringdata.py @@ -8,14 +8,17 @@ data-variables, auxiliary coordinates, ancillary variables and -possibly?- cell measures. 
""" -from contextlib import contextmanager from dataclasses import dataclass from pathlib import Path +from typing import Iterable import numpy as np +from numpy.typing import ArrayLike import pytest import iris +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube from iris.fileformats.netcdf import _thread_safe_nc @@ -49,8 +52,8 @@ def all_lazy_auxcoords(): # Independently defined here, to avoid relying on any code we are testing. # def convert_strings_to_chararray( - string_array_1d: np.ndarray, maxlen: int, encoding: str | None = None -): + string_array_1d: ArrayLike, maxlen: int, encoding: str | None = None +) -> np.ndarray: # Note: this is limited to 1-D arrays of strings. # Could generalise that if needed, but for now this makes it simpler. if encoding is None: @@ -63,12 +66,13 @@ def convert_strings_to_chararray( def convert_bytearray_to_strings( - byte_array, encoding="utf-8", string_length: int | None = None -): + byte_array: ArrayLike, encoding: str = "utf-8", string_length: int | None = None +) -> np.ndarray: """Convert bytes to strings. N.B. for now at least, we assume the string dim is **always the last one**. 
""" + byte_array = np.asanyarray(byte_array) bytes_shape = byte_array.shape var_shape = bytes_shape[:-1] if string_length is None: @@ -88,9 +92,9 @@ class SamplefileDetails: """Convenience container for information about a sample file.""" filepath: Path - datavar_data: np.ndarray - stringcoord_data: np.ndarray - numericcoord_data: np.ndarray + datavar_data: ArrayLike + stringcoord_data: ArrayLike + numericcoord_data: ArrayLike def make_testfile( @@ -200,7 +204,7 @@ def testdata( encoding, tmp_path, use_separate_dims, - ): + ) -> Iterable[SamplefileDetails]: """Create a suitable valid testfile, and return expected string content.""" if PERSIST_TESTFILES: tmp_path = Path(PERSIST_TESTFILES).expanduser() @@ -218,7 +222,7 @@ def testdata( from iris.tests.integration.netcdf.test_chararrays import ncdump # TODO: temporary for debug -- TO REMOVE - ncdump(tempfile_path) + ncdump(str(tempfile_path)) yield testdata def test_valid_encodings(self, encoding, testdata: SamplefileDetails): @@ -246,3 +250,144 @@ def test_valid_encodings(self, encoding, testdata: SamplefileDetails): coord_var_2 = cube.coord("v_numeric") assert coord_var_2.dtype == np.float64 assert np.all(coord_var_2.points == numeric_data) + + +@pytest.fixture(params=["stringdata", "bytedata"]) +def as_bytes(request): + yield request.param == "bytedata" + + +@dataclass +class SampleCubeDetails: + cube: Cube + datavar_data: np.ndarray + stringcoord_data: np.ndarray + save_path: str | Path | None = None + + +def make_testcube( + encoding_str: str | None = None, + byte_data: bool = False, +) -> SampleCubeDetails: + data_is_ascii = encoding_str in (NO_ENCODING_STR, "ascii") + + numeric_values = np.arange(3.0) + if data_is_ascii: + coordvar_strings = ["mOnster", "London", "Amsterdam"] + datavar_strings = ["bun", "Eclair", "sandwich"] + else: + coordvar_strings = ["Münster", "London", "Amsterdam"] + datavar_strings = ["bun", "éclair", "sandwich"] + + if not byte_data: + charlen = N_CHARS_DIM + if encoding_str == 
"utf-32": + charlen = charlen // 4 - 1 + strings_dtype = np.dtype(f"U{charlen}") + coordvar_array = np.array(coordvar_strings, dtype=strings_dtype) + datavar_array = np.array(datavar_strings, dtype=strings_dtype) + else: + write_encoding = encoding_str + if write_encoding == NO_ENCODING_STR: + write_encoding = "ascii" + coordvar_array = convert_strings_to_chararray( + coordvar_strings, maxlen=N_CHARS_DIM, encoding=write_encoding + ) + datavar_array = convert_strings_to_chararray( + datavar_strings, maxlen=N_CHARS_DIM, encoding=write_encoding + ) + + cube = Cube(datavar_array, var_name="v") + cube.add_dim_coord(DimCoord(np.arange(N_XDIM), var_name="x"), 0) + if encoding_str != NO_ENCODING_STR: + cube.attributes["_Encoding"] = encoding_str + co_x = AuxCoord(coordvar_array, var_name="v_co") + if encoding_str != NO_ENCODING_STR: + co_x.attributes["_Encoding"] = encoding_str + co_dims = (0, 1) if byte_data else (0,) + cube.add_aux_coord(co_x, co_dims) + + result = SampleCubeDetails( + cube=cube, + datavar_data=datavar_array, + stringcoord_data=coordvar_array, + ) + return result + + +class TestWriteEncodings: + """Test saving of testfiles with encoded string data. + + To avoid circularity, we generate and save *cube* data. + """ + + @pytest.fixture(params=["dataAsStrings", "dataAsBytes"]) + def write_bytes(self, request): + yield request.param == "dataAsBytes" + + @pytest.fixture() + def testpath(self, encoding, write_bytes, tmp_path): + """Create a suitable test cube, with either string or byte content.""" + if PERSIST_TESTFILES: + tmp_path = Path(PERSIST_TESTFILES).expanduser() + if encoding == "": + filetag = "noencoding" + else: + filetag = encoding + datatag = "writebytes" if write_bytes else "writestrings" + tempfile_path = tmp_path / f"sample_write_{filetag}_{datatag}.nc" + yield tempfile_path + + @pytest.fixture() + def testdata(self, testpath, encoding, write_bytes): + """Create a suitable test cube + save to a file. 
+ + Apply the given encoding to both coord and cube data. + Form the data as bytes, or as strings, depending on 'write_bytes'.' + """ + cube_info = make_testcube(encoding_str=encoding, byte_data=write_bytes) + cube_info.save_path = testpath + cube = cube_info.cube + iris.save(cube, testpath) + yield cube_info + + def test_valid_encodings(self, encoding, testdata, write_bytes): + cube_info = testdata + cube, path = cube_info.cube, cube_info.save_path + # TODO: not testing the "byte read/write" yet + # Make a quick check for cube equality : but the presentation depends on the read mode + # with DECODE_TO_STRINGS_ON_READ.context(not write_bytes): + # read_cube = iris.load_cube(path) + # assert read_cube == cube + + # N.B. file content should not depend on whether bytes or strings were written + vararray, coordarray = cube_info.datavar_data, cube_info.stringcoord_data + ds = _thread_safe_nc.DatasetWrapper(path) + ds.set_auto_chartostring(False) + v_main = ds.variables["v"] + v_co = ds.variables["v_co"] + assert v_main.shape == (N_XDIM, N_CHARS_DIM) + assert v_co.shape == (N_XDIM, N_CHARS_DIM) + assert v_main.dtype == " Date: Wed, 28 Jan 2026 14:39:59 +0000 Subject: [PATCH 68/77] Fix write proxy; tmp_path in stringdata tests; tidy stringdata tests. 
--- .../netcdf/_bytecoding_datasets.py | 4 +- .../integration/netcdf/test_stringdata.py | 54 ++++++++++++------- 2 files changed, 36 insertions(+), 22 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index fa64e570bb..59ad639634 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -336,11 +336,11 @@ def __getitem__(self, keys): class EncodedNetCDFWriteProxy(NetCDFWriteProxy): def __init__(self, filepath, cf_var, file_write_lock): - super.__init__(filepath, cf_var, file_write_lock) + super().__init__(filepath, cf_var, file_write_lock) self.encoding_details = VariableEncoder(cf_var) def __setitem__(self, key, data): data = np.asanyarray(data) # Apply the optional strings-to-bytes conversion data = self.encoding_details.encode_strings_as_bytearray(data) - super.__setitem__(key, data) + super().__setitem__(key, data) diff --git a/lib/iris/tests/integration/netcdf/test_stringdata.py b/lib/iris/tests/integration/netcdf/test_stringdata.py index ed6fa576df..bc308d474c 100644 --- a/lib/iris/tests/integration/netcdf/test_stringdata.py +++ b/lib/iris/tests/integration/netcdf/test_stringdata.py @@ -33,8 +33,9 @@ def all_lazy_auxcoords(): N_XDIM = 3 N_CHARS_DIM = 64 -PERSIST_TESTFILES = "~/chararray_testfiles" - +# TODO: remove (debug) +# PERSIST_TESTFILES: str | None = "~/chararray_testfiles" +PERSIST_TESTFILES: str | None = None NO_ENCODING_STR = "" TEST_ENCODINGS = [ @@ -199,38 +200,51 @@ def use_separate_dims(self, request): yield request.param == "coordsOwnDim" @pytest.fixture() - def testdata( + def readtest_path( self, encoding, tmp_path, use_separate_dims, ) -> Iterable[SamplefileDetails]: """Create a suitable valid testfile, and return expected string content.""" - if PERSIST_TESTFILES: - tmp_path = Path(PERSIST_TESTFILES).expanduser() + match PERSIST_TESTFILES: + case str(): + tmp_path = 
Path(PERSIST_TESTFILES).expanduser() + case _: + pass if encoding == "": filetag = "noencoding" else: filetag = encoding dimtag = "diffdims" if use_separate_dims else "samedims" tempfile_path = tmp_path / f"sample_read_{filetag}_{dimtag}.nc" + yield tempfile_path + + @pytest.fixture() + def readtest_data( + self, + encoding, + readtest_path, + use_separate_dims, + ) -> Iterable[SamplefileDetails]: + """Create a suitable valid testfile, and return expected string content.""" testdata = make_testfile( - testfile_path=tempfile_path, + testfile_path=readtest_path, encoding_str=encoding, coords_on_separate_dim=use_separate_dims, ) - from iris.tests.integration.netcdf.test_chararrays import ncdump - # TODO: temporary for debug -- TO REMOVE - ncdump(str(tempfile_path)) + # # TODO: temporary for debug -- TO REMOVE + # from iris.tests.integration.netcdf.test_chararrays import ncdump + # ncdump(str(tempfile_path)) yield testdata - def test_valid_encodings(self, encoding, testdata: SamplefileDetails): + def test_valid_encodings(self, encoding, readtest_data: SamplefileDetails): testfile_path, datavar_strings, coordvar_strings, numeric_data = ( - testdata.filepath, - testdata.datavar_data, - testdata.stringcoord_data, - testdata.numericcoord_data, + readtest_data.filepath, + readtest_data.datavar_data, + readtest_data.stringcoord_data, + readtest_data.numericcoord_data, ) cube = iris.load_cube(testfile_path) assert load_problems_list() == [] @@ -326,7 +340,7 @@ def write_bytes(self, request): yield request.param == "dataAsBytes" @pytest.fixture() - def testpath(self, encoding, write_bytes, tmp_path): + def writetest_path(self, encoding, write_bytes, tmp_path): """Create a suitable test cube, with either string or byte content.""" if PERSIST_TESTFILES: tmp_path = Path(PERSIST_TESTFILES).expanduser() @@ -339,20 +353,20 @@ def testpath(self, encoding, write_bytes, tmp_path): yield tempfile_path @pytest.fixture() - def testdata(self, testpath, encoding, write_bytes): + def 
writetest_data(self, writetest_path, encoding, write_bytes): """Create a suitable test cube + save to a file. Apply the given encoding to both coord and cube data. Form the data as bytes, or as strings, depending on 'write_bytes'.' """ cube_info = make_testcube(encoding_str=encoding, byte_data=write_bytes) - cube_info.save_path = testpath + cube_info.save_path = writetest_path cube = cube_info.cube - iris.save(cube, testpath) + iris.save(cube, writetest_path) yield cube_info - def test_valid_encodings(self, encoding, testdata, write_bytes): - cube_info = testdata + def test_valid_encodings(self, encoding, writetest_data, write_bytes): + cube_info = writetest_data cube, path = cube_info.cube, cube_info.save_path # TODO: not testing the "byte read/write" yet # Make a quick check for cube equality : but the presentation depends on the read mode From 7bf0b105570793a88ec778a2f3f460d3e45e019d Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 28 Jan 2026 14:58:42 +0000 Subject: [PATCH 69/77] Fix for non-string data. 
--- lib/iris/fileformats/netcdf/_bytecoding_datasets.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py index 59ad639634..22a9011eec 100644 --- a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -132,10 +132,11 @@ def __init__(self, cf_var): self.varname = cf_var.name self.dtype = cf_var.dtype self.is_chardata = np.issubdtype(self.dtype, np.bytes_) - self.read_encoding = self._get_encoding(cf_var, writing=False) - self.write_encoding = self._get_encoding(cf_var, writing=True) - self.n_chars_dim = cf_var.group().dimensions[cf_var.dimensions[-1]].size - self.string_width = self._get_string_width(cf_var) + if self.is_chardata: + self.read_encoding = self._get_encoding(cf_var, writing=False) + self.write_encoding = self._get_encoding(cf_var, writing=True) + self.n_chars_dim = cf_var.group().dimensions[cf_var.dimensions[-1]].size + self.string_width = self._get_string_width(cf_var) @staticmethod def _get_encoding(cf_var, writing=False) -> str: @@ -199,7 +200,7 @@ def decode_bytes_to_stringarray(self, data: np.ndarray) -> np.ndarray: return data def encode_strings_as_bytearray(self, data: np.ndarray) -> np.ndarray: - if data.dtype.kind == "U": + if self.is_chardata and data.dtype.kind == "U": # N.B. it is also possible to pass a byte array (dtype "S1"), # to be written directly, without processing. try: From bd42fa5e6807dcb30c9ef5512b0e65442f9791e9 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 28 Jan 2026 15:15:29 +0000 Subject: [PATCH 70/77] Pre-clear load problems. 
--- lib/iris/tests/integration/netcdf/test_stringdata.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/iris/tests/integration/netcdf/test_stringdata.py b/lib/iris/tests/integration/netcdf/test_stringdata.py index bc308d474c..5050152042 100644 --- a/lib/iris/tests/integration/netcdf/test_stringdata.py +++ b/lib/iris/tests/integration/netcdf/test_stringdata.py @@ -195,6 +195,11 @@ def load_problems_list(): class TestReadEncodings: """Test loading of testfiles with encoded string data.""" + @pytest.fixture(autouse=True) + def _clear_load_problems(self): + iris.loading.LOAD_PROBLEMS.reset() + yield + @pytest.fixture(params=["coordsSameDim", "coordsOwnDim"]) def use_separate_dims(self, request): yield request.param == "coordsOwnDim" From ab3ec12fbb6d52c123f9756866a7bf964f3b314e Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 27 Feb 2026 16:46:23 +0000 Subject: [PATCH 71/77] Fix mock patches. --- .../fileformats/netcdf/saver/test_Saver.py | 182 ++++++++++-------- .../saver/test_Saver__lazy_stream_data.py | 2 +- .../netcdf/saver/test_Saver__ugrid.py | 8 +- 3 files changed, 104 insertions(+), 88 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 5231632252..b1d2a7f370 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -30,6 +30,7 @@ from iris.coords import AncillaryVariable, AuxCoord, DimCoord from iris.cube import Cube from iris.fileformats.netcdf import Saver, _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets as ds_wrappers from iris.tests import _shared_utils from iris.tests._shared_utils import assert_CDL import iris.tests.stock as stock @@ -215,7 +216,7 @@ def test_big_endian(self, request, tmp_path): def test_zlib(self, mocker): cube = self._simple_cube(">f4") - api = mocker.patch("iris.fileformats.netcdf.saver._thread_safe_nc") + api 
= mocker.patch("iris.fileformats.netcdf.saver.bytecoding_datasets") # Define mocked default fill values to prevent deprecation warning (#4374). api.default_fillvals = collections.defaultdict(lambda: -99.0) # Mock the apparent dtype of mocked variables, to avoid an error. @@ -226,7 +227,7 @@ def test_zlib(self, mocker): # a fill-value report on a non-compliant variable in a non-file (!) with Saver("/dummy/path", "NETCDF4", compute=False) as saver: saver.write(cube, zlib=True) - dataset = api.DatasetWrapper.return_value + dataset = api.EncodedDataset.return_value create_var_call = mocker.call( "air_pressure_anomaly", np.dtype("float32"), @@ -266,8 +267,12 @@ def test_compression(self, mocker, tmp_path): with self.temp_filename(suffix=".nc") as nc_path: with Saver(nc_path, "NETCDF4", compute=False) as saver: + tgt = ( + "iris.fileformats.netcdf.saver.bytecoding_datasets" + ".EncodedDataset.createVariable" + ) patch = mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + tgt, # Use 'wraps' to allow the patched methods to function as normal # - the patch object just acts as a 'spy' on its calls. wraps=saver._dataset.createVariable, @@ -302,8 +307,12 @@ def test_non_compression__shape(self, mocker, tmp_path): with self.temp_filename(suffix=".nc") as nc_path: with Saver(nc_path, "NETCDF4", compute=False) as saver: + tgt = ( + "iris.fileformats.netcdf.saver.bytecoding_datasets" + ".EncodedDataset.createVariable" + ) patch = mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + tgt, # Use 'wraps' to allow the patched methods to function as normal # - the patch object just acts as a 'spy' on its calls. 
wraps=saver._dataset.createVariable, @@ -338,8 +347,12 @@ def test_non_compression__dtype(self, mocker, tmp_path): with self.temp_filename(suffix=".nc") as nc_path: with Saver(nc_path, "NETCDF4", compute=False) as saver: + tgt = ( + "iris.fileformats.netcdf.saver.bytecoding_datasets" + ".EncodedDataset.createVariable" + ) patch = self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + tgt, # Use 'wraps' to allow the patched methods to function as normal # - the patch object just acts as a 'spy' on its calls. wraps=saver._dataset.createVariable, @@ -375,23 +388,24 @@ def test_least_significant_digit(self, tmp_path): def test_default_unlimited_dimensions(self, tmp_path): # Default is no unlimited dimensions. cube = self._simple_cube(">f4") - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - assert not ds.dimensions["dim0"].isunlimited() - assert not ds.dimensions["dim1"].isunlimited() - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + ds = ds_wrappers.EncodedDataset(nc_path) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + assert not ds.dimensions["dim0"].isunlimited() + assert not ds.dimensions["dim1"].isunlimited() + ds.close() def test_no_unlimited_dimensions(self, tmp_path): cube = self._simple_cube(">f4") - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=None) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in ds.dimensions.values(): - assert not dim.isunlimited() - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=None) + ds = ds_wrappers.EncodedDataset(nc_path) + for dim in ds.dimensions.values(): + assert not dim.isunlimited() + ds.close() def test_invalid_unlimited_dimensions(self, tmp_path): 
cube = self._simple_cube(">f4") @@ -407,32 +421,33 @@ def test_custom_unlimited_dimensions(self, tmp_path): "projection_x_coordinate", ] # test coordinates by name - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=unlimited_dimensions) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in unlimited_dimensions: - assert ds.dimensions[dim].isunlimited() - ds.close() - # test coordinate arguments - nc_path = tmp_path / "temp2.nc" - coords = [cube.coord(dim) for dim in unlimited_dimensions] - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=coords) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in unlimited_dimensions: - assert ds.dimensions[dim].isunlimited() - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=unlimited_dimensions) + ds = _thread_safe_nc.DatasetWrapper(nc_path) + for dim in unlimited_dimensions: + assert ds.dimensions[dim].isunlimited() + ds.close() + # test coordinate arguments + with self.temp_filename(".nc") as nc_path: + coords = [cube.coord(dim) for dim in unlimited_dimensions] + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=coords) + ds = ds_wrappers.EncodedDataset(nc_path) + for dim in unlimited_dimensions: + assert ds.dimensions[dim].isunlimited() + ds.close() + def test_reserved_attributes(self, tmp_path): cube = self._simple_cube(">f4") cube.attributes["dimensions"] = "something something_else" - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("dimensions") - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + ds = ds_wrappers.EncodedDataset(nc_path) + res = ds.getncattr("dimensions") + ds.close() assert res == "something 
something_else" def test_with_climatology(self, request, tmp_path): @@ -449,14 +464,14 @@ def test_dimensional_to_scalar(self, tmp_path): scalar_data = self.array_lib.zeros(1) scalar_coord = AuxCoord(points=scalar_point, bounds=scalar_bounds) cube = Cube(scalar_data, aux_coords_and_dims=[(scalar_coord, 0)])[0] - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - # Confirm that the only dimension is the one denoting the number - # of bounds - have successfully saved the 2D bounds array into 1D. - assert ["bnds"] == list(ds.dimensions.keys()) - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + ds = ds_wrappers.EncodedDataset(nc_path) + # Confirm that the only dimension is the one denoting the number + # of bounds - have successfully saved the 2D bounds array into 1D. + assert ["bnds"] == list(ds.dimensions.keys()) + ds.close() class Test__create_cf_bounds(MockerMixin): @@ -492,7 +507,7 @@ def _check_bounds_setting(self, climatological=False): saver._ensure_valid_dtype.return_value = self.mocker.Mock( shape=coord.bounds.shape, dtype=coord.bounds.dtype ) - var = self.mocker.MagicMock(spec=_thread_safe_nc.VariableWrapper) + var = self.mocker.MagicMock(spec=ds_wrappers.EncodedVariable) # Make the main call. 
Saver._create_cf_bounds(saver, coord, var, "time") @@ -533,7 +548,7 @@ def test_valid_range_saved(self, tmp_path): nc_path = tmp_path / "temp.nc" with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) + ds = ds_wrappers.EncodedDataset(nc_path) _shared_utils.assert_array_equal(ds.valid_range, vrange) ds.close() @@ -542,24 +557,25 @@ def test_valid_min_saved(self, tmp_path): cube.data = cube.data.astype("int32") cube.attributes["valid_min"] = 1 - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - _shared_utils.assert_array_equal(ds.valid_min, 1) - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.valid_min, 1) + ds.close() def test_valid_max_saved(self, tmp_path): cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.attributes["valid_max"] = 2 - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - _shared_utils.assert_array_equal(ds.valid_max, 2) - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = ds_wrappers.EncodedDataset(nc_path) + self.assertArrayEqual(ds.valid_max, 2) + _shared_utils.assert_array_equal(ds.valid_max, 2) + ds.close() class Test_write__valid_x_coord_attributes: @@ -574,36 +590,36 @@ def test_valid_range_saved(self, tmp_path): vrange = self.array_lib.array([1, 2], dtype="int32") cube.coord(axis="x").attributes["valid_range"] = vrange - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = 
_thread_safe_nc.DatasetWrapper(nc_path) - _shared_utils.assert_array_equal(ds.variables["longitude"].valid_range, vrange) - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_range, vrange) + ds.close() def test_valid_min_saved(self, tmp_path): cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.coord(axis="x").attributes["valid_min"] = 1 - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - _shared_utils.assert_array_equal(ds.variables["longitude"].valid_min, 1) - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_min, 1) + ds.close() def test_valid_max_saved(self, tmp_path): cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.coord(axis="x").attributes["valid_max"] = 2 - nc_path = tmp_path / "temp.nc" - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - _shared_utils.assert_array_equal(ds.variables["longitude"].valid_max, 2) - ds.close() + with self.temp_filename(".nc") as nc_path: + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_max, 2) + ds.close() class Test_write_fill_value: @@ -635,7 +651,7 @@ def netCDF_var(cube, **kwargs): nc_path = tmp_path / "temp.nc" with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, **kwargs) - ds = _thread_safe_nc.DatasetWrapper(nc_path) + ds = 
ds_wrappers.EncodedDataset(nc_path) (var,) = [ var for var in ds.variables.values() @@ -715,7 +731,7 @@ def _setup(self, mocker): ) ) _ = mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", dataset_class, ) diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index 0f3a91fec2..9cb84e81b5 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -29,7 +29,7 @@ def saver_patch(mocker): mock_dataset = mocker.MagicMock() mock_dataset_class = mocker.Mock(return_value=mock_dataset) # Mock the wrapper within the netcdf saver - target1 = "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper" + target1 = "iris.fileformats.netcdf.saver.bytecoding_datasets.DatasetWrapper" # Mock the real netCDF4.Dataset within the threadsafe-nc module, as this is # used by NetCDFDataProxy and NetCDFWriteProxy. target2 = "iris.fileformats.netcdf._thread_safe_nc.netCDF4.Dataset" diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index e5783925b0..2ace3f4f86 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -398,12 +398,12 @@ def test_compression(self, check_save_cubes, mocker): # into the iris.fileformats.netcdf.saver. 
Also we want to check that the # compression kwargs are passed into the NetCDF4 createVariable method patch = mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.createVariable", ) # No need to patch this NetCDF4 variable to compensate for the previous patch # on createVariable, which doesn't actually create the variable. mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.variables" + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.variables" ) cube = make_cube(var_name=(var_name := "a")) compression_kwargs = { @@ -776,10 +776,10 @@ def test_compression(self, check_save_mesh, mocker): """ patch = mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.createVariable", ) mocker.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.variables" + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.variables" ) mesh = make_mesh() compression_kwargs = { From 7240509ab8de0e2e55d8056345cad2234c6119f6 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 27 Feb 2026 16:57:49 +0000 Subject: [PATCH 72/77] Fix patches in test_CFReader. 
--- lib/iris/tests/unit/fileformats/cf/test_CFReader.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index d0dd0175a2..f293c9d77f 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -78,7 +78,7 @@ def _setup(self, mocker): getncattr=getncattr, ) mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=dataset, ) @@ -141,7 +141,7 @@ def _setup(self, mocker): mocker.patch("iris.fileformats.cf.CFReader._build_cf_groups") mocker.patch("iris.fileformats.cf.CFReader._reset") mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=self.dataset, ) @@ -237,7 +237,7 @@ def _setup(self, mocker): # and building first level cf-groups for variables. mocker.patch("iris.fileformats.cf.CFReader._reset") mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=self.dataset, ) @@ -375,7 +375,7 @@ def _setup_class(self, mocker): # translations and building first level cf-groups for variables. mocker.patch("iris.fileformats.cf.CFReader._reset") mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=self.dataset, ) cf_reader = CFReader("dummy") From f910ee3531f801d8957971508c18a56752a44556 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 27 Feb 2026 17:41:22 +0000 Subject: [PATCH 73/77] Fix variable creation in odd cases. 
--- lib/iris/fileformats/netcdf/saver.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 76cf4533b3..8e03776c3d 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -1718,11 +1718,14 @@ def add_names_attrs(): if element.units.calendar: _setncattr(cf_var, "calendar", str(element.units.calendar)) - # Most attributes are dealt with later. - # But _Encoding need to be defined before we can write to a character variable - if element.dtype.kind in "SU" and "_Encoding" in element.attributes: - encoding = element.attributes.pop("_Encoding") - _setncattr(cf_var, "_Encoding", encoding) + # Note: when writing UGRID, "element" can be a Mesh which has no "dtype", + # and for dataless cubes it will have a 'None' dtype. + if getattr(element, "dtype", None) is not None: + # Most attributes are dealt with later. But _Encoding needs to be defined + # *before* we can write to a character variable. + if element.dtype.kind in "SU" and "_Encoding" in element.attributes: + encoding = element.attributes.pop("_Encoding") + _setncattr(cf_var, "_Encoding", encoding) if not isinstance(element, Cube): # Add any other custom coordinate attributes. From ea51696fcbb772542e617f8f7f36246ac7ac6572 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 27 Feb 2026 18:05:43 +0000 Subject: [PATCH 74/77] Ignore attribute reordering in scaling-packed saves. 
--- .../multi_packed_multi_dtype.cdl | 68 ------------------ .../multi_packed_single_dtype.cdl | 70 ------------------- .../TestPackedData/single_packed_manual.cdl | 50 ------------- .../TestPackedData/single_packed_signed.cdl | 50 ------------- .../TestPackedData/single_packed_unsigned.cdl | 50 ------------- 5 files changed, 288 deletions(-) delete mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl delete mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl delete mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl delete mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl delete mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl deleted file mode 100644 index 8a8f481492..0000000000 --- a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl +++ /dev/null @@ -1,68 +0,0 @@ -dimensions: - bnds = 2 ; - latitude = 73 ; - longitude = 96 ; - time = 360 ; -variables: - short air_temperature(time, latitude, longitude) ; - air_temperature:scale_factor = 0.00242575f ; - air_temperature:add_offset = 261.648f ; - air_temperature:standard_name = "air_temperature" ; - air_temperature:units = "K" ; - air_temperature:um_stash_source = "m01s03i236" ; - air_temperature:cell_methods = "time: maximum (interval: 1 hour)" ; - air_temperature:grid_mapping = "latitude_longitude" ; - air_temperature:coordinates = "forecast_period forecast_reference_time height" ; - int latitude_longitude ; - latitude_longitude:grid_mapping_name = "latitude_longitude" ; - 
latitude_longitude:longitude_of_prime_meridian = 0. ; - latitude_longitude:earth_radius = 6371229. ; - double time(time) ; - time:axis = "T" ; - time:bounds = "time_bnds" ; - time:units = "hours since 1970-01-01 00:00:00" ; - time:standard_name = "time" ; - time:calendar = "360_day" ; - double time_bnds(time, bnds) ; - float latitude(latitude) ; - latitude:axis = "Y" ; - latitude:units = "degrees_north" ; - latitude:standard_name = "latitude" ; - float longitude(longitude) ; - longitude:axis = "X" ; - longitude:units = "degrees_east" ; - longitude:standard_name = "longitude" ; - double forecast_period(time) ; - forecast_period:bounds = "forecast_period_bnds" ; - forecast_period:units = "hours" ; - forecast_period:standard_name = "forecast_period" ; - double forecast_period_bnds(time, bnds) ; - double forecast_reference_time ; - forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; - forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "360_day" ; - double height ; - height:units = "m" ; - height:standard_name = "height" ; - height:positive = "up" ; - float precipitation_flux(time, latitude, longitude) ; - precipitation_flux:standard_name = "precipitation_flux" ; - precipitation_flux:units = "kg m-2 s-1" ; - precipitation_flux:um_stash_source = "m01s05i216" ; - precipitation_flux:cell_methods = "time: mean (interval: 1 hour)" ; - precipitation_flux:grid_mapping = "latitude_longitude" ; - precipitation_flux:coordinates = "forecast_period forecast_reference_time" ; - ushort air_temperature_0(time, latitude, longitude) ; - air_temperature_0:scale_factor = 0.002014167f ; - air_temperature_0:add_offset = 176.7872f ; - air_temperature_0:standard_name = "air_temperature" ; - air_temperature_0:units = "K" ; - air_temperature_0:um_stash_source = "m01s03i236" ; - air_temperature_0:cell_methods = "time: minimum (interval: 1 hour)" ; - air_temperature_0:grid_mapping = "latitude_longitude" ; - 
air_temperature_0:coordinates = "forecast_period forecast_reference_time height" ; - -// global attributes: - :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.7" ; -} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl deleted file mode 100644 index 3f2c909ce8..0000000000 --- a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl +++ /dev/null @@ -1,70 +0,0 @@ -dimensions: - bnds = 2 ; - latitude = 73 ; - longitude = 96 ; - time = 360 ; -variables: - short air_temperature(time, latitude, longitude) ; - air_temperature:scale_factor = 0.00242575f ; - air_temperature:add_offset = 261.648f ; - air_temperature:standard_name = "air_temperature" ; - air_temperature:units = "K" ; - air_temperature:um_stash_source = "m01s03i236" ; - air_temperature:cell_methods = "time: maximum (interval: 1 hour)" ; - air_temperature:grid_mapping = "latitude_longitude" ; - air_temperature:coordinates = "forecast_period forecast_reference_time height" ; - int latitude_longitude ; - latitude_longitude:grid_mapping_name = "latitude_longitude" ; - latitude_longitude:longitude_of_prime_meridian = 0. ; - latitude_longitude:earth_radius = 6371229. 
; - double time(time) ; - time:axis = "T" ; - time:bounds = "time_bnds" ; - time:units = "hours since 1970-01-01 00:00:00" ; - time:standard_name = "time" ; - time:calendar = "360_day" ; - double time_bnds(time, bnds) ; - float latitude(latitude) ; - latitude:axis = "Y" ; - latitude:units = "degrees_north" ; - latitude:standard_name = "latitude" ; - float longitude(longitude) ; - longitude:axis = "X" ; - longitude:units = "degrees_east" ; - longitude:standard_name = "longitude" ; - double forecast_period(time) ; - forecast_period:bounds = "forecast_period_bnds" ; - forecast_period:units = "hours" ; - forecast_period:standard_name = "forecast_period" ; - double forecast_period_bnds(time, bnds) ; - double forecast_reference_time ; - forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; - forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "360_day" ; - double height ; - height:units = "m" ; - height:standard_name = "height" ; - height:positive = "up" ; - short precipitation_flux(time, latitude, longitude) ; - precipitation_flux:scale_factor = 2.989738e-08f ; - precipitation_flux:add_offset = 0.0009796774f ; - precipitation_flux:standard_name = "precipitation_flux" ; - precipitation_flux:units = "kg m-2 s-1" ; - precipitation_flux:um_stash_source = "m01s05i216" ; - precipitation_flux:cell_methods = "time: mean (interval: 1 hour)" ; - precipitation_flux:grid_mapping = "latitude_longitude" ; - precipitation_flux:coordinates = "forecast_period forecast_reference_time" ; - short air_temperature_0(time, latitude, longitude) ; - air_temperature_0:scale_factor = 0.002014167f ; - air_temperature_0:add_offset = 242.7874f ; - air_temperature_0:standard_name = "air_temperature" ; - air_temperature_0:units = "K" ; - air_temperature_0:um_stash_source = "m01s03i236" ; - air_temperature_0:cell_methods = "time: minimum (interval: 1 hour)" ; - air_temperature_0:grid_mapping = "latitude_longitude" ; - 
air_temperature_0:coordinates = "forecast_period forecast_reference_time height" ; - -// global attributes: - :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.7" ; -} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl deleted file mode 100644 index 83e7329575..0000000000 --- a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl +++ /dev/null @@ -1,50 +0,0 @@ -dimensions: - bnds = 2 ; - latitude = 73 ; - longitude = 96 ; -variables: - short air_temperature(latitude, longitude) ; - air_temperature:scale_factor = 0.001198068f ; - air_temperature:add_offset = 267.4006f ; - air_temperature:standard_name = "air_temperature" ; - air_temperature:units = "K" ; - air_temperature:um_stash_source = "m01s03i236" ; - air_temperature:cell_methods = "time: mean (interval: 6 hour)" ; - air_temperature:grid_mapping = "latitude_longitude" ; - air_temperature:coordinates = "forecast_period forecast_reference_time height time" ; - int latitude_longitude ; - latitude_longitude:grid_mapping_name = "latitude_longitude" ; - latitude_longitude:longitude_of_prime_meridian = 0. ; - latitude_longitude:earth_radius = 6371229. 
; - float latitude(latitude) ; - latitude:axis = "Y" ; - latitude:units = "degrees_north" ; - latitude:standard_name = "latitude" ; - float longitude(longitude) ; - longitude:axis = "X" ; - longitude:units = "degrees_east" ; - longitude:standard_name = "longitude" ; - double forecast_period ; - forecast_period:bounds = "forecast_period_bnds" ; - forecast_period:units = "hours" ; - forecast_period:standard_name = "forecast_period" ; - double forecast_period_bnds(bnds) ; - double forecast_reference_time ; - forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; - forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "standard" ; - double height ; - height:units = "m" ; - height:standard_name = "height" ; - height:positive = "up" ; - double time ; - time:bounds = "time_bnds" ; - time:units = "hours since 1970-01-01 00:00:00" ; - time:standard_name = "time" ; - time:calendar = "standard" ; - double time_bnds(bnds) ; - -// global attributes: - :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.7" ; -} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl deleted file mode 100644 index 83e7329575..0000000000 --- a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl +++ /dev/null @@ -1,50 +0,0 @@ -dimensions: - bnds = 2 ; - latitude = 73 ; - longitude = 96 ; -variables: - short air_temperature(latitude, longitude) ; - air_temperature:scale_factor = 0.001198068f ; - air_temperature:add_offset = 267.4006f ; - air_temperature:standard_name = "air_temperature" ; - air_temperature:units = "K" ; - air_temperature:um_stash_source = "m01s03i236" ; - air_temperature:cell_methods = "time: mean (interval: 6 hour)" ; - air_temperature:grid_mapping = "latitude_longitude" ; - air_temperature:coordinates = "forecast_period 
forecast_reference_time height time" ; - int latitude_longitude ; - latitude_longitude:grid_mapping_name = "latitude_longitude" ; - latitude_longitude:longitude_of_prime_meridian = 0. ; - latitude_longitude:earth_radius = 6371229. ; - float latitude(latitude) ; - latitude:axis = "Y" ; - latitude:units = "degrees_north" ; - latitude:standard_name = "latitude" ; - float longitude(longitude) ; - longitude:axis = "X" ; - longitude:units = "degrees_east" ; - longitude:standard_name = "longitude" ; - double forecast_period ; - forecast_period:bounds = "forecast_period_bnds" ; - forecast_period:units = "hours" ; - forecast_period:standard_name = "forecast_period" ; - double forecast_period_bnds(bnds) ; - double forecast_reference_time ; - forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; - forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "standard" ; - double height ; - height:units = "m" ; - height:standard_name = "height" ; - height:positive = "up" ; - double time ; - time:bounds = "time_bnds" ; - time:units = "hours since 1970-01-01 00:00:00" ; - time:standard_name = "time" ; - time:calendar = "standard" ; - double time_bnds(bnds) ; - -// global attributes: - :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.7" ; -} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl deleted file mode 100644 index 7b9114309e..0000000000 --- a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl +++ /dev/null @@ -1,50 +0,0 @@ -dimensions: - bnds = 2 ; - latitude = 73 ; - longitude = 96 ; -variables: - ubyte air_temperature(latitude, longitude) ; - air_temperature:scale_factor = 0.3079035f ; - air_temperature:add_offset = 228.1423f ; - air_temperature:standard_name = "air_temperature" ; - air_temperature:units 
= "K" ; - air_temperature:um_stash_source = "m01s03i236" ; - air_temperature:cell_methods = "time: mean (interval: 6 hour)" ; - air_temperature:grid_mapping = "latitude_longitude" ; - air_temperature:coordinates = "forecast_period forecast_reference_time height time" ; - int latitude_longitude ; - latitude_longitude:grid_mapping_name = "latitude_longitude" ; - latitude_longitude:longitude_of_prime_meridian = 0. ; - latitude_longitude:earth_radius = 6371229. ; - float latitude(latitude) ; - latitude:axis = "Y" ; - latitude:units = "degrees_north" ; - latitude:standard_name = "latitude" ; - float longitude(longitude) ; - longitude:axis = "X" ; - longitude:units = "degrees_east" ; - longitude:standard_name = "longitude" ; - double forecast_period ; - forecast_period:bounds = "forecast_period_bnds" ; - forecast_period:units = "hours" ; - forecast_period:standard_name = "forecast_period" ; - double forecast_period_bnds(bnds) ; - double forecast_reference_time ; - forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; - forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "standard" ; - double height ; - height:units = "m" ; - height:standard_name = "height" ; - height:positive = "up" ; - double time ; - time:bounds = "time_bnds" ; - time:units = "hours since 1970-01-01 00:00:00" ; - time:standard_name = "time" ; - time:calendar = "standard" ; - double time_bnds(bnds) ; - -// global attributes: - :source = "Data from Met Office Unified Model" ; - :Conventions = "CF-1.7" ; -} From ea1d3cfa8a6afee1fe8880e434429dc1bf093892 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 27 Feb 2026 18:35:47 +0000 Subject: [PATCH 75/77] Fix test for refactored proxy constructor. 
--- .../helpers/test_build_and_add_auxiliary_coordinate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py index 766f23fe43..b721bbec58 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py @@ -171,7 +171,7 @@ class TestDtype(MockerMixin): def _setup(self, mocker): # Create coordinate cf variables and pyke engine. points = np.arange(6).reshape(2, 3) - cf_data = mocker.MagicMock(_FillValue=None) + cf_data = mocker.MagicMock(_FillValue=None, shape=points.shape) cf_data.chunking = mocker.MagicMock(return_value=points.shape) self.engine = mocker.Mock( From 0acbae916720d549ceee3e140fab14fa0365a545 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 27 Feb 2026 18:56:41 +0000 Subject: [PATCH 76/77] Fix get_cf_var_data to support vlen-string. --- lib/iris/fileformats/netcdf/loader.py | 2 +- .../fileformats/netcdf/loader/test__get_cf_var_data.py | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index a3322ec2e4..5bb20e6585 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -298,7 +298,7 @@ def _get_cf_var_data(cf_var): # Make a data-proxy that mimics array access and can fetch from the file. # Note: Special handling needed for "variable length string" types which # return a dtype of `str`, rather than a numpy type; use `S1` in this case. - if cf_var.dtype.kind == "U": + if getattr(cf_var.dtype, "kind", None) == "U": # Special handling for "string variables". 
fill_value = "" else: diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py index f92e8288b7..8a05bb712d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__get_cf_var_data.py @@ -24,14 +24,15 @@ def _setup(self): self.expected_chunks = _optimum_chunksize(self.shape, self.shape) def _make(self, chunksizes=None, shape=None, dtype="i4", **extra_properties): + if shape is None: + shape = self.shape cf_data = self.mocker.MagicMock( _FillValue=None, __getitem__="", - dimensions=["dim_" + str(x) for x in range(len(shape or "1"))], + dimensions=["dim_" + str(x) for x in range(len(shape))], + shape=shape, ) cf_data.chunking = self.mocker.MagicMock(return_value=chunksizes) - if shape is None: - shape = self.shape if dtype is not str: # for testing VLen str arrays (dtype=`class `) dtype = np.dtype(dtype) cf_var = self.mocker.MagicMock( From 31884e980276be171ce6a8bdafc2a130eb47ffb4 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 27 Feb 2026 18:58:04 +0000 Subject: [PATCH 77/77] Add back new test results, folder removed in error. 
--- .../multi_packed_multi_dtype.cdl | 68 ++++++++++++++++++ .../multi_packed_single_dtype.cdl | 70 +++++++++++++++++++ .../TestPackedData/single_packed_manual.cdl | 50 +++++++++++++ .../TestPackedData/single_packed_signed.cdl | 50 +++++++++++++ .../TestPackedData/single_packed_unsigned.cdl | 50 +++++++++++++ 5 files changed, 288 insertions(+) create mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl create mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl create mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl create mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl create mode 100644 lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl new file mode 100644 index 0000000000..27d8f55a45 --- /dev/null +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl @@ -0,0 +1,68 @@ +dimensions: + bnds = 2 ; + latitude = 73 ; + longitude = 96 ; + time = 360 ; +variables: + short air_temperature(time, latitude, longitude) ; + air_temperature:standard_name = "air_temperature" ; + air_temperature:units = "K" ; + air_temperature:scale_factor = 0.00242575f ; + air_temperature:add_offset = 261.648f ; + air_temperature:um_stash_source = "m01s03i236" ; + air_temperature:cell_methods = "time: maximum (interval: 1 hour)" ; + air_temperature:grid_mapping = "latitude_longitude" ; + air_temperature:coordinates = "forecast_period forecast_reference_time height" ; + int latitude_longitude ; + latitude_longitude:grid_mapping_name = "latitude_longitude" ; + 
latitude_longitude:longitude_of_prime_meridian = 0. ; + latitude_longitude:earth_radius = 6371229. ; + double time(time) ; + time:axis = "T" ; + time:bounds = "time_bnds" ; + time:units = "hours since 1970-01-01 00:00:00" ; + time:standard_name = "time" ; + time:calendar = "360_day" ; + double time_bnds(time, bnds) ; + float latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "degrees_north" ; + latitude:standard_name = "latitude" ; + float longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + double forecast_period(time) ; + forecast_period:bounds = "forecast_period_bnds" ; + forecast_period:units = "hours" ; + forecast_period:standard_name = "forecast_period" ; + double forecast_period_bnds(time, bnds) ; + double forecast_reference_time ; + forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; + forecast_reference_time:standard_name = "forecast_reference_time" ; + forecast_reference_time:calendar = "360_day" ; + double height ; + height:units = "m" ; + height:standard_name = "height" ; + height:positive = "up" ; + float precipitation_flux(time, latitude, longitude) ; + precipitation_flux:standard_name = "precipitation_flux" ; + precipitation_flux:units = "kg m-2 s-1" ; + precipitation_flux:um_stash_source = "m01s05i216" ; + precipitation_flux:cell_methods = "time: mean (interval: 1 hour)" ; + precipitation_flux:grid_mapping = "latitude_longitude" ; + precipitation_flux:coordinates = "forecast_period forecast_reference_time" ; + ushort air_temperature_0(time, latitude, longitude) ; + air_temperature_0:standard_name = "air_temperature" ; + air_temperature_0:units = "K" ; + air_temperature_0:scale_factor = 0.002014167f ; + air_temperature_0:add_offset = 176.7872f ; + air_temperature_0:um_stash_source = "m01s03i236" ; + air_temperature_0:cell_methods = "time: minimum (interval: 1 hour)" ; + air_temperature_0:grid_mapping = "latitude_longitude" ; + 
air_temperature_0:coordinates = "forecast_period forecast_reference_time height" ; + +// global attributes: + :source = "Data from Met Office Unified Model" ; + :Conventions = "CF-1.7" ; +} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl new file mode 100644 index 0000000000..c85fd35efd --- /dev/null +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl @@ -0,0 +1,70 @@ +dimensions: + bnds = 2 ; + latitude = 73 ; + longitude = 96 ; + time = 360 ; +variables: + short air_temperature(time, latitude, longitude) ; + air_temperature:standard_name = "air_temperature" ; + air_temperature:units = "K" ; + air_temperature:scale_factor = 0.00242575f ; + air_temperature:add_offset = 261.648f ; + air_temperature:um_stash_source = "m01s03i236" ; + air_temperature:cell_methods = "time: maximum (interval: 1 hour)" ; + air_temperature:grid_mapping = "latitude_longitude" ; + air_temperature:coordinates = "forecast_period forecast_reference_time height" ; + int latitude_longitude ; + latitude_longitude:grid_mapping_name = "latitude_longitude" ; + latitude_longitude:longitude_of_prime_meridian = 0. ; + latitude_longitude:earth_radius = 6371229. 
; + double time(time) ; + time:axis = "T" ; + time:bounds = "time_bnds" ; + time:units = "hours since 1970-01-01 00:00:00" ; + time:standard_name = "time" ; + time:calendar = "360_day" ; + double time_bnds(time, bnds) ; + float latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "degrees_north" ; + latitude:standard_name = "latitude" ; + float longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + double forecast_period(time) ; + forecast_period:bounds = "forecast_period_bnds" ; + forecast_period:units = "hours" ; + forecast_period:standard_name = "forecast_period" ; + double forecast_period_bnds(time, bnds) ; + double forecast_reference_time ; + forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; + forecast_reference_time:standard_name = "forecast_reference_time" ; + forecast_reference_time:calendar = "360_day" ; + double height ; + height:units = "m" ; + height:standard_name = "height" ; + height:positive = "up" ; + short precipitation_flux(time, latitude, longitude) ; + precipitation_flux:standard_name = "precipitation_flux" ; + precipitation_flux:units = "kg m-2 s-1" ; + precipitation_flux:scale_factor = 2.989738e-08f ; + precipitation_flux:add_offset = 0.0009796774f ; + precipitation_flux:um_stash_source = "m01s05i216" ; + precipitation_flux:cell_methods = "time: mean (interval: 1 hour)" ; + precipitation_flux:grid_mapping = "latitude_longitude" ; + precipitation_flux:coordinates = "forecast_period forecast_reference_time" ; + short air_temperature_0(time, latitude, longitude) ; + air_temperature_0:standard_name = "air_temperature" ; + air_temperature_0:units = "K" ; + air_temperature_0:scale_factor = 0.002014167f ; + air_temperature_0:add_offset = 242.7874f ; + air_temperature_0:um_stash_source = "m01s03i236" ; + air_temperature_0:cell_methods = "time: minimum (interval: 1 hour)" ; + air_temperature_0:grid_mapping = "latitude_longitude" ; + 
air_temperature_0:coordinates = "forecast_period forecast_reference_time height" ; + +// global attributes: + :source = "Data from Met Office Unified Model" ; + :Conventions = "CF-1.7" ; +} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl new file mode 100644 index 0000000000..ed89a25d9f --- /dev/null +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl @@ -0,0 +1,50 @@ +dimensions: + bnds = 2 ; + latitude = 73 ; + longitude = 96 ; +variables: + short air_temperature(latitude, longitude) ; + air_temperature:standard_name = "air_temperature" ; + air_temperature:units = "K" ; + air_temperature:scale_factor = 0.001198068f ; + air_temperature:add_offset = 267.4006f ; + air_temperature:um_stash_source = "m01s03i236" ; + air_temperature:cell_methods = "time: mean (interval: 6 hour)" ; + air_temperature:grid_mapping = "latitude_longitude" ; + air_temperature:coordinates = "forecast_period forecast_reference_time height time" ; + int latitude_longitude ; + latitude_longitude:grid_mapping_name = "latitude_longitude" ; + latitude_longitude:longitude_of_prime_meridian = 0. ; + latitude_longitude:earth_radius = 6371229. 
; + float latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "degrees_north" ; + latitude:standard_name = "latitude" ; + float longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + double forecast_period ; + forecast_period:bounds = "forecast_period_bnds" ; + forecast_period:units = "hours" ; + forecast_period:standard_name = "forecast_period" ; + double forecast_period_bnds(bnds) ; + double forecast_reference_time ; + forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; + forecast_reference_time:standard_name = "forecast_reference_time" ; + forecast_reference_time:calendar = "standard" ; + double height ; + height:units = "m" ; + height:standard_name = "height" ; + height:positive = "up" ; + double time ; + time:bounds = "time_bnds" ; + time:units = "hours since 1970-01-01 00:00:00" ; + time:standard_name = "time" ; + time:calendar = "standard" ; + double time_bnds(bnds) ; + +// global attributes: + :source = "Data from Met Office Unified Model" ; + :Conventions = "CF-1.7" ; +} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl new file mode 100644 index 0000000000..ed89a25d9f --- /dev/null +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl @@ -0,0 +1,50 @@ +dimensions: + bnds = 2 ; + latitude = 73 ; + longitude = 96 ; +variables: + short air_temperature(latitude, longitude) ; + air_temperature:standard_name = "air_temperature" ; + air_temperature:units = "K" ; + air_temperature:scale_factor = 0.001198068f ; + air_temperature:add_offset = 267.4006f ; + air_temperature:um_stash_source = "m01s03i236" ; + air_temperature:cell_methods = "time: mean (interval: 6 hour)" ; + air_temperature:grid_mapping = "latitude_longitude" ; + air_temperature:coordinates = "forecast_period 
forecast_reference_time height time" ; + int latitude_longitude ; + latitude_longitude:grid_mapping_name = "latitude_longitude" ; + latitude_longitude:longitude_of_prime_meridian = 0. ; + latitude_longitude:earth_radius = 6371229. ; + float latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "degrees_north" ; + latitude:standard_name = "latitude" ; + float longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + double forecast_period ; + forecast_period:bounds = "forecast_period_bnds" ; + forecast_period:units = "hours" ; + forecast_period:standard_name = "forecast_period" ; + double forecast_period_bnds(bnds) ; + double forecast_reference_time ; + forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; + forecast_reference_time:standard_name = "forecast_reference_time" ; + forecast_reference_time:calendar = "standard" ; + double height ; + height:units = "m" ; + height:standard_name = "height" ; + height:positive = "up" ; + double time ; + time:bounds = "time_bnds" ; + time:units = "hours since 1970-01-01 00:00:00" ; + time:standard_name = "time" ; + time:calendar = "standard" ; + double time_bnds(bnds) ; + +// global attributes: + :source = "Data from Met Office Unified Model" ; + :Conventions = "CF-1.7" ; +} diff --git a/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl new file mode 100644 index 0000000000..eedad33e03 --- /dev/null +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl @@ -0,0 +1,50 @@ +dimensions: + bnds = 2 ; + latitude = 73 ; + longitude = 96 ; +variables: + ubyte air_temperature(latitude, longitude) ; + air_temperature:standard_name = "air_temperature" ; + air_temperature:units = "K" ; + air_temperature:scale_factor = 0.3079035f ; + air_temperature:add_offset = 
228.1423f ; + air_temperature:um_stash_source = "m01s03i236" ; + air_temperature:cell_methods = "time: mean (interval: 6 hour)" ; + air_temperature:grid_mapping = "latitude_longitude" ; + air_temperature:coordinates = "forecast_period forecast_reference_time height time" ; + int latitude_longitude ; + latitude_longitude:grid_mapping_name = "latitude_longitude" ; + latitude_longitude:longitude_of_prime_meridian = 0. ; + latitude_longitude:earth_radius = 6371229. ; + float latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "degrees_north" ; + latitude:standard_name = "latitude" ; + float longitude(longitude) ; + longitude:axis = "X" ; + longitude:units = "degrees_east" ; + longitude:standard_name = "longitude" ; + double forecast_period ; + forecast_period:bounds = "forecast_period_bnds" ; + forecast_period:units = "hours" ; + forecast_period:standard_name = "forecast_period" ; + double forecast_period_bnds(bnds) ; + double forecast_reference_time ; + forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; + forecast_reference_time:standard_name = "forecast_reference_time" ; + forecast_reference_time:calendar = "standard" ; + double height ; + height:units = "m" ; + height:standard_name = "height" ; + height:positive = "up" ; + double time ; + time:bounds = "time_bnds" ; + time:units = "hours since 1970-01-01 00:00:00" ; + time:standard_name = "time" ; + time:calendar = "standard" ; + double time_bnds(bnds) ; + +// global attributes: + :source = "Data from Met Office Unified Model" ; + :Conventions = "CF-1.7" ; +}