diff --git a/.github/workflows/benchmarks_report.yml b/.github/workflows/benchmarks_report.yml index 8d7dfb6b0b..8caf063e74 100644 --- a/.github/workflows/benchmarks_report.yml +++ b/.github/workflows/benchmarks_report.yml @@ -54,7 +54,7 @@ jobs: echo "reports_exist=$reports_exist" >> "$GITHUB_OUTPUT" - name: Store artifact - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: benchmark_reports path: benchmark_reports @@ -68,7 +68,7 @@ jobs: uses: actions/checkout@v6 - name: Download artifact - uses: actions/download-artifact@v7 + uses: actions/download-artifact@v8 with: name: benchmark_reports path: .github/workflows/benchmark_reports diff --git a/.github/workflows/benchmarks_run.yml b/.github/workflows/benchmarks_run.yml index 9b30d6a848..bfbf5a81f0 100644 --- a/.github/workflows/benchmarks_run.yml +++ b/.github/workflows/benchmarks_run.yml @@ -157,7 +157,7 @@ jobs: - name: Upload any benchmark reports # Uploading enables more downstream processing e.g. posting a PR comment. if: success() || steps.overnight.outcome == 'failure' - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: benchmark_reports path: .github/workflows/benchmark_reports @@ -165,7 +165,7 @@ jobs: - name: Archive asv results # Store the raw ASV database(s) to help manual investigations. 
if: ${{ always() }} - uses: actions/upload-artifact@v6 + uses: actions/upload-artifact@v7 with: name: asv-raw-results path: benchmarks/.asv/results diff --git a/.github/workflows/ci-linkchecks.yml b/.github/workflows/ci-linkchecks.yml index 83ef091a19..6ffdd3df93 100644 --- a/.github/workflows/ci-linkchecks.yml +++ b/.github/workflows/ci-linkchecks.yml @@ -23,7 +23,7 @@ jobs: - name: Link Checker id: lychee - uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 + uses: lycheeverse/lychee-action@8646ba30535128ac92d33dfc9133794bfdd9b411 with: token: ${{secrets.GITHUB_TOKEN}} fail: false diff --git a/.github/workflows/ci-manifest.yml b/.github/workflows/ci-manifest.yml index 6450a2d13a..46f8fda4c8 100644 --- a/.github/workflows/ci-manifest.yml +++ b/.github/workflows/ci-manifest.yml @@ -23,4 +23,4 @@ concurrency: jobs: manifest: name: "check-manifest" - uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.01.0 + uses: scitools/workflows/.github/workflows/ci-manifest.yml@2026.03.2 diff --git a/.github/workflows/ci-template-check.yml b/.github/workflows/ci-template-check.yml index cc9f85ae44..3d0bb0d12d 100644 --- a/.github/workflows/ci-template-check.yml +++ b/.github/workflows/ci-template-check.yml @@ -10,7 +10,7 @@ on: jobs: prompt-share: - uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.01.0 + uses: scitools/workflows/.github/workflows/ci-template-check.yml@2026.03.2 secrets: inherit with: pr_number: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml index adc4892b10..9088aef8f1 100644 --- a/.github/workflows/ci-tests.yml +++ b/.github/workflows/ci-tests.yml @@ -35,18 +35,18 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest"] - python-version: ["3.13"] + python-version: ["3.14"] session: ["doctest", "gallery"] include: - os: "ubuntu-latest" - python-version: "3.13" + python-version: "3.14" session: "tests" coverage: "--coverage" - os: 
"ubuntu-latest" - python-version: "3.12" + python-version: "3.13" session: "tests" - os: "ubuntu-latest" - python-version: "3.11" + python-version: "3.12" session: "tests" env: diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml index d2bf28aa8e..152778ff26 100644 --- a/.github/workflows/ci-wheels.yml +++ b/.github/workflows/ci-wheels.yml @@ -37,7 +37,7 @@ jobs: run: | pipx run build - - uses: actions/upload-artifact@v6 + - uses: actions/upload-artifact@v7 with: name: pypi-artifacts path: ${{ github.workspace }}/dist/* @@ -52,7 +52,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.11", "3.12", "3.13"] + python-version: ["3.12", "3.13", "3.14"] session: ["wheel"] env: ENV_NAME: "ci-wheels" @@ -61,7 +61,7 @@ jobs: with: fetch-depth: 0 - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -116,7 +116,7 @@ jobs: name: "show artifacts" runs-on: ubuntu-latest steps: - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -137,7 +137,7 @@ jobs: # and check for the SciTools repo if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' && github.repository_owner == 'SciTools' steps: - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist @@ -159,7 +159,7 @@ jobs: # upload to PyPI for every tag starting with 'v' if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') && github.repository_owner == 'SciTools' steps: - - uses: actions/download-artifact@v7 + - uses: actions/download-artifact@v8 with: name: pypi-artifacts path: ${{ github.workspace }}/dist diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml index e7d743311c..69eb9a52c3 100644 --- a/.github/workflows/refresh-lockfiles.yml +++ 
b/.github/workflows/refresh-lockfiles.yml @@ -14,5 +14,5 @@ on: jobs: refresh_lockfiles: - uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.01.0 + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@2026.03.2 secrets: inherit diff --git a/.lycheeignore b/.lycheeignore index 1cdb78f91f..6b3103f7c8 100644 --- a/.lycheeignore +++ b/.lycheeignore @@ -4,16 +4,17 @@ file:/// # DEAD : legacy in various old whatsnews https://biggus.readthedocs.io -# unkown problem, works in browser : used in further_topics/ugrid/data_model +# unknown problem, works in browser : used in +# docs/src/user_manual/explanation/mesh_data_model.rst https://doi.org/10.3390/jmse2010194 -# DEAD, todo:remove, used in docs/src/userguide/plotting_a_cube.rst +# DEAD, todo:remove, used in docs/src/user_manual/tutorial/plotting_a_cube.rst https://effbot.org # nonfunctional, found in some code examples https://foo/ -# DEAD, todo:remove, used in docs/src/further_topics/ugrid/data_model.rst +# DEAD, todo:remove, used in docs/src/user_manual/explanation/mesh_data_model.rst https://ibm-design-language.eu-de.mybluemix.net/design/language/resources/color-library # DEAD, legacy in whatsnew/1.4.rst @@ -45,14 +46,14 @@ https://stickler-ci.com # DEAD, todo:remove, used in lib/iris/symbols.py https://www.wmo.int/pages/prog/www/DPFS/documents/485_Vol_I_en_colour.pdf -# DEAD, todo:remove, used in docs/src/userguide/plotting_a_cube.rst +# DEAD, todo:remove, used in docs/src/user_manual/tutorial/plotting_a_cube.rst # unkown problem, works in browser : used in docs/src/index.rst https://www.flaticon.com # nonfunctional example, used in lib/iris/io/__init__.py https://www.thing.com -# DEAD, todo:remove, used in docs/src/userguide/plotting_a_cube.rst +# DEAD, todo:remove, used in docs/src/user_manual/tutorial/plotting_a_cube.rst https://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html # nonfunctional, found in some code examples diff --git a/.pre-commit-config.yaml 
b/.pre-commit-config.yaml index 2c7c6819a8..e4ac43f2f3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -35,7 +35,7 @@ repos: - id: no-commit-to-branch - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.14.10" + rev: "v0.15.4" hooks: - id: ruff types: [file, python] diff --git a/.readthedocs.yml b/.readthedocs.yml index d82bd513ca..6b0c699f21 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -23,7 +23,7 @@ conda: sphinx: configuration: docs/src/conf.py - fail_on_warning: false + fail_on_warning: true python: install: diff --git a/.ruff.toml b/.ruff.toml index 5d78ecdb57..37f2cb7498 100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -96,7 +96,7 @@ lint.ignore = [ # flake8-pytest-style (PT) # https://docs.astral.sh/ruff/rules/#flake8-pytest-style-pt - "PT", + "PT019", # flake8-raise (RSE) # https://docs.astral.sh/ruff/rules/#flake8-raise-rse diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 7623b4458b..79cb0798fb 100755 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -67,7 +67,7 @@ def _check_requirements(package: str) -> None: def _prep_data_gen_env() -> None: """Create or access a separate, unchanging environment for generating test data.""" - python_version = "3.13" + python_version = "3.14" data_gen_var = "DATA_GEN_PYTHON" if data_gen_var in environ: echo("Using existing data generation environment.") diff --git a/docs/gallery_code/general/plot_SOI_filtering.py b/docs/gallery_code/general/plot_SOI_filtering.py index 4b256c894c..68b2812ccd 100644 --- a/docs/gallery_code/general/plot_SOI_filtering.py +++ b/docs/gallery_code/general/plot_SOI_filtering.py @@ -2,6 +2,11 @@ Applying a Filter to a Time-Series ================================== +.. how-to:: Applying a Filter to a Time-Series + :tags: topic_plotting;topic_maths_stats + + How to apply a low pass filter to an Iris Cube via rolling_window(). 
+ This example demonstrates low pass filtering a time-series by applying a weighted running mean over the time dimension. diff --git a/docs/gallery_code/general/plot_anomaly_log_colouring.py b/docs/gallery_code/general/plot_anomaly_log_colouring.py index cd11161041..8ac8c76e0d 100644 --- a/docs/gallery_code/general/plot_anomaly_log_colouring.py +++ b/docs/gallery_code/general/plot_anomaly_log_colouring.py @@ -2,6 +2,11 @@ Colouring Anomaly Data With Logarithmic Scaling =============================================== +.. how-to:: Colouring Anomaly Data With Logarithmic Scaling + :tags: topic_plotting;topic_maths_stats + + How to visualise values using a logarithmic scale. + In this example, we need to plot anomaly data where the values have a "logarithmic" significance -- i.e. we want to give approximately equal ranges of colour between data values of, say, 1 and 10 as between 10 and 100. diff --git a/docs/gallery_code/general/plot_coriolis.py b/docs/gallery_code/general/plot_coriolis.py index 905108abfd..d435023574 100644 --- a/docs/gallery_code/general/plot_coriolis.py +++ b/docs/gallery_code/general/plot_coriolis.py @@ -2,6 +2,11 @@ Deriving the Coriolis Frequency Over the Globe ============================================== +.. how-to:: Deriving the Coriolis Frequency Over the Globe + :tags: topic_plotting;topic_data_model + + How to create your own Cube from computed data and visualise it. + This code computes the Coriolis frequency and stores it in a cube with associated metadata. It then plots the Coriolis frequency on an orthographic projection. diff --git a/docs/gallery_code/general/plot_cross_section.py b/docs/gallery_code/general/plot_cross_section.py index 8e5bee85ed..8309d6d5cf 100644 --- a/docs/gallery_code/general/plot_cross_section.py +++ b/docs/gallery_code/general/plot_cross_section.py @@ -2,6 +2,11 @@ Cross Section Plots =================== +.. 
how-to:: Cross Section Plots + :tags: topic_plotting;topic_slice_combine + + How to visualise cross-sections of multi-dimensional Cubes. + This example demonstrates contour plots of a cross-sectioned multi-dimensional cube which features a hybrid height vertical coordinate system. diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py index 65fadfb473..afeb359409 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -2,6 +2,11 @@ Calculating a Custom Statistic ============================== +.. how-to:: Calculating a Custom Statistic + :tags: topic_plotting;topic_maths_stats + + How to define and use a custom aggregation operation, including visualisation. + This example shows how to define and use a custom :class:`iris.analysis.Aggregator`, that provides a new statistical operator for use with cube aggregation functions such as :meth:`~iris.cube.Cube.collapsed`, diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 06de887614..670575d124 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -2,6 +2,11 @@ Loading a Cube From a Custom File Format ======================================== +.. how-to:: Loading a Cube From a Custom File Format + :tags: topic_plotting;topic_load_save + + How to visualise data from a file Iris does not natively support. + This example shows how a custom text file can be loaded using the standard Iris load mechanism. diff --git a/docs/gallery_code/general/plot_global_map.py b/docs/gallery_code/general/plot_global_map.py index 60ac200a43..7e61565f44 100644 --- a/docs/gallery_code/general/plot_global_map.py +++ b/docs/gallery_code/general/plot_global_map.py @@ -2,6 +2,11 @@ Quickplot of a 2D Cube on a Map =============================== +.. 
how-to:: Quickplot of a 2D Cube on a Map + :tags: topic_plotting + + A demonstration of basic iris.quickplot use. + This example demonstrates a contour plot of global air temperature. The plot title and the labels for the axes are automatically derived from the metadata. diff --git a/docs/gallery_code/general/plot_inset.py b/docs/gallery_code/general/plot_inset.py index 5edd375743..6ea04ffebb 100644 --- a/docs/gallery_code/general/plot_inset.py +++ b/docs/gallery_code/general/plot_inset.py @@ -2,6 +2,11 @@ Test Data Showing Inset Plots ============================= +.. how-to:: Test Data Showing Inset Plots + :tags: topic_plotting;topic_maths_stats + + How to create inset plots within a main plot. + This example demonstrates the use of a single 3D data cube with time, latitude and longitude dimensions to plot a temperature series for a single latitude coordinate, with an inset plot of the data region. diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py index d1b3acd912..bfba085ef2 100644 --- a/docs/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -2,6 +2,11 @@ Multi-Line Temperature Profile Plot =================================== +.. how-to:: Multi-Line Temperature Profile Plot + :tags: topic_plotting + + How to plot multiple lines on a single plot with a legend. + """ # noqa: D205, D212, D400 import matplotlib.pyplot as plt diff --git a/docs/gallery_code/general/plot_polar_stereo.py b/docs/gallery_code/general/plot_polar_stereo.py index 99abbd0ae0..07f0a4c01b 100644 --- a/docs/gallery_code/general/plot_polar_stereo.py +++ b/docs/gallery_code/general/plot_polar_stereo.py @@ -2,6 +2,11 @@ Example of a Polar Stereographic Plot ===================================== +.. how-to:: Example of a Polar Stereographic Plot + :tags: topic_plotting + + A demonstration of plotting data defined on an alternative map projection. 
+ Demonstrates plotting data that are defined on a polar stereographic projection. diff --git a/docs/gallery_code/general/plot_polynomial_fit.py b/docs/gallery_code/general/plot_polynomial_fit.py index 37cc4e283b..adaaf4b94f 100644 --- a/docs/gallery_code/general/plot_polynomial_fit.py +++ b/docs/gallery_code/general/plot_polynomial_fit.py @@ -2,6 +2,11 @@ Fitting a Polynomial ==================== +.. how-to:: Fitting a Polynomial + :tags: topic_plotting;topic_maths_stats;topic_data_model + + How to compute and plot a polynomial fit to 1D data in an Iris cube. + This example demonstrates computing a polynomial fit to 1D data from an Iris cube, adding the fit to the cube's metadata, and plotting both the 1D data and the fit. diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 6e8ba5a5af..441dee245e 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -2,6 +2,11 @@ Plotting in Different Projections ================================= +.. how-to:: Plotting in Different Projections + :tags: topic_plotting;topic_interoperability + + How to overlay data from two different map projections and add graphics. + This example shows how to overlay data and graphics in different projections, demonstrating various features of Iris, Cartopy and matplotlib. diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index e9e3656184..a43a06e7a3 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -2,6 +2,11 @@ Rotated Pole Mapping ==================== +.. how-to:: Rotated Pole Mapping + :tags: topic_plotting + + How to visualise data via different methods and coordinate systems. 
+ This example uses several visualisation methods to achieve an array of differing images, including: diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py index d4ec1eb1fc..af3e83ce41 100644 --- a/docs/gallery_code/general/plot_zonal_means.py +++ b/docs/gallery_code/general/plot_zonal_means.py @@ -2,6 +2,11 @@ Zonal Mean Diagram of Air Temperature ===================================== +.. how-to:: Zonal Mean Diagram of Air Temperature + :tags: topic_plotting;topic_maths_stats + + How to use aligned plots to visualise collapsed dimensional statistics. + This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. diff --git a/docs/gallery_code/meteorology/plot_COP_1d.py b/docs/gallery_code/meteorology/plot_COP_1d.py index 84addd140a..099f3f80e9 100644 --- a/docs/gallery_code/meteorology/plot_COP_1d.py +++ b/docs/gallery_code/meteorology/plot_COP_1d.py @@ -2,6 +2,11 @@ Global Average Annual Temperature Plot ====================================== +.. how-to:: Global Average Annual Temperature Plot + :tags: topic_plotting;topic_slice_combine;topic_maths_stats + + How to spatially constrain data, compute statistics and visualise a comparison. + Produces a time-series plot of North American temperature forecasts for 2 different emission scenarios. Constraining data to a limited spatial area also features in this example. diff --git a/docs/gallery_code/meteorology/plot_COP_maps.py b/docs/gallery_code/meteorology/plot_COP_maps.py index 1c5e865a8f..fca40dc373 100644 --- a/docs/gallery_code/meteorology/plot_COP_maps.py +++ b/docs/gallery_code/meteorology/plot_COP_maps.py @@ -2,6 +2,11 @@ Global Average Annual Temperature Maps ====================================== +.. how-to:: Global Average Annual Temperature Maps + :tags: topic_plotting + + How to produce comparative maps of two files with a shared colour bar. + Produces maps of global temperature forecasts from the A1B and E1 scenarios. 
The data used comes from the HadGEM2-AO model simulations for the A1B and E1 diff --git a/docs/gallery_code/meteorology/plot_TEC.py b/docs/gallery_code/meteorology/plot_TEC.py index e6269eaf9b..67a2b2722e 100644 --- a/docs/gallery_code/meteorology/plot_TEC.py +++ b/docs/gallery_code/meteorology/plot_TEC.py @@ -2,6 +2,11 @@ Ionosphere Space Weather ======================== +.. how-to:: Ionosphere Space Weather + :tags: topic_plotting + + How to mask out values below a threshold in a plot. + This space weather example plots a filled contour of rotated pole point data with a shaded relief image underlay. The plot shows aggregated vertical electron content in the ionosphere. diff --git a/docs/gallery_code/meteorology/plot_deriving_phenomena.py b/docs/gallery_code/meteorology/plot_deriving_phenomena.py index 81a05be9b9..cc9e1c93ca 100644 --- a/docs/gallery_code/meteorology/plot_deriving_phenomena.py +++ b/docs/gallery_code/meteorology/plot_deriving_phenomena.py @@ -2,6 +2,11 @@ Deriving Exner Pressure and Air Temperature =========================================== +.. how-to:: Deriving Exner Pressure and Air Temperature + :tags: topic_plotting;topic_maths_stats + + How to use Iris arithmetic to derive phenomena from existing cubes and plot them. + This example shows some processing of cubes in order to derive further related cubes; in this case the derived cubes are Exner pressure and air temperature which are calculated by combining air pressure, air potential temperature and diff --git a/docs/gallery_code/meteorology/plot_hovmoller.py b/docs/gallery_code/meteorology/plot_hovmoller.py index 829b370d78..ad8297ba79 100644 --- a/docs/gallery_code/meteorology/plot_hovmoller.py +++ b/docs/gallery_code/meteorology/plot_hovmoller.py @@ -2,6 +2,11 @@ Hovmoller Diagram of Monthly Surface Temperature ================================================ +.. 
how-to:: Hovmoller Diagram of Monthly Surface Temperature + :tags: topic_plotting;topic_maths_stats + + How to collapse and plot Cubes to create a Hovmoller diagram. + This example demonstrates the creation of a Hovmoller diagram with fine control over plot ticks and labels. The data comes from the Met Office OSTIA project and has been pre-processed to calculate the monthly mean sea surface diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index 7c34572136..4cb7e9e6ad 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -2,6 +2,11 @@ Seasonal Ensemble Model Plots ============================= +.. how-to:: Seasonal Ensemble Model Plots + :tags: topic_plotting;topic_data_model;topic_maths_stats;topic_slice_combine + + How to use Iris in a complex real-world analysis scenario. + This example demonstrates the loading of a lagged ensemble dataset from the GloSea4 model, which is then used to produce two types of plot: diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index f11c9a7b50..9dce0caad2 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -2,6 +2,11 @@ Plotting Wind Direction Using Barbs =================================== +.. how-to:: Plotting Wind Direction Using Barbs + :tags: topic_plotting;topic_maths_stats + + How to use Iris to derive and plot wind barbs. + This example demonstrates using barbs to plot wind speed contours and wind direction barbs from wind vector component input data. The vector components are co-located in space in this case. 
diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index 5310ad937d..beccb217c2 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -2,6 +2,11 @@ Plotting Wind Direction Using Quiver ==================================== +.. how-to:: Plotting Wind Direction Using Quiver + :tags: topic_plotting + + How to use Iris to plot wind quivers. + This example demonstrates using quiver to plot wind speed contours and wind direction arrows from wind vector component input data. The vector components are co-located in space in this case. diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py index a43fb7f8cb..bd57e1dfe8 100644 --- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -2,6 +2,11 @@ Oceanographic Profiles and T-S Diagrams ======================================= +.. how-to:: Oceanographic Profiles and T-S Diagrams + :tags: topic_plotting;topic_slice_combine + + How to use Iris for visualising oceanographic profile data, including scatter plotting. + This example demonstrates how to plot vertical profiles of different variables in the same axes, and how to make a scatter plot of two variables. There is an oceanographic theme but the same techniques are diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index aac89fec0e..a26b97da31 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -2,6 +2,11 @@ Load a Time Series of Data From the NEMO Model ============================================== +.. 
how-to:: Load a Time Series of Data From the NEMO Model + :tags: topic_plotting;topic_load_save;topic_data_model;topic_slice_combine + + How to concatenate data from multiple NEMO files. + This example demonstrates how to load multiple files containing data output by the NEMO model and combine them into a time series in a single cube. The different time dimensions in these files can prevent Iris from concatenating diff --git a/docs/gallery_code/oceanography/plot_orca_projection.py b/docs/gallery_code/oceanography/plot_orca_projection.py index bb68056cb3..065078e5df 100644 --- a/docs/gallery_code/oceanography/plot_orca_projection.py +++ b/docs/gallery_code/oceanography/plot_orca_projection.py @@ -2,6 +2,11 @@ Tri-Polar Grid Projected Plotting ================================= +.. how-to:: Tri-Polar Grid Projected Plotting + :tags: topic_plotting + + How to visualise data defined on a tri-polar grid using different map projections. + This example demonstrates cell plots of data on the semi-structured ORCA2 model grid. 
diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py index 564a2892a2..2f35659b2e 100644 --- a/docs/gallery_tests/conftest.py +++ b/docs/gallery_tests/conftest.py @@ -47,7 +47,7 @@ def no_show(): if example_dir.is_dir(): monkeypatch.syspath_prepend(example_dir) - yield + return @pytest.fixture diff --git a/docs/src/IEP/IEP001.adoc b/docs/src/IEP/IEP001.adoc deleted file mode 100644 index 2daef2363a..0000000000 --- a/docs/src/IEP/IEP001.adoc +++ /dev/null @@ -1,193 +0,0 @@ -# IEP 1 - Enhanced indexing - -## Background - -Currently, to select a subset of a Cube based on coordinate values we use something like: -[source,python] ----- -cube.extract(iris.Constraint(realization=3, - model_level_number=[1, 5], - latitude=lambda cell: 40 <= cell <= 60)) ----- -On the plus side, this works irrespective of the dimension order of the data, but the drawbacks with this form of indexing include: - -* It uses a completely different syntax to position-based indexing, e.g. `cube[4, 0:6]`. -* It uses a completely different syntax to pandas and xarray value-based indexing, e.g. `df.loc[4, 0:6]`. -* It is long-winded and requires the use of an additional class. -* It requires the use of lambda functions even when just selecting a range. - -Arguably, the situation when subsetting using positional indices but where the dimension order is unknown is even worse - it has no standard syntax _at all_! Instead it requires code akin to: -[source,python] ----- -key = [slice(None)] * cube.ndim -key[cube.coord_dims('model_level_number')[0]] = slice(3, 9, 2) -cube[tuple(key)] ----- - -The only form of indexing that is well supported is indexing by position where the dimension order is known: -[source,python] ----- -cube[4, 0:6, 30:] ----- - -## Proposal - -Provide indexing helpers on the Cube to extend explicit support to all permutations of: - -* implicit dimension vs. named coordinate, -* and positional vs. coordinate-value based selection. 
- -### Helper syntax options - -Commonly, the names of coordinates are also valid Python identifiers. -For names where this is not true, the names can expressed through either the `helper[...]` or `helper(...)` syntax by constructing an explicit dict. -For example: `cube.loc[{'12': 0}]` or `cube.loc(**{'12': 0})`. - -#### Extended pandas style - -Use a single helper for index by position, and a single helper for index by value. Helper names taken from pandas, but their behaviour is extended by making them callable to support named coordinates. - -|=== -.2+| 2+h|Index by -h|Position h|Value - -h|Implicit dimension - -a|[source,python] ----- -cube[:, 2] # No change -cube.iloc[:, 2] ----- - -a|[source,python] ----- -cube.loc[:, 1.5] ----- - -h|Coordinate name - -a|[source,python] ----- -cube[dict(height=2)] -cube.iloc[dict(height=2)] -cube.iloc(height=2) ----- - -a|[source,python] ----- -cube.loc[dict(height=1.5)] -cube.loc(height=1.5) ----- -|=== - -#### xarray style - -xarray introduces a second set of helpers for accessing named dimensions that provide the callable syntax `(foo=...)`. - -|=== -.2+| 2+h|Index by -h|Position h|Value - -h|Implicit dimension - -a|[source,python] ----- -cube[:, 2] # No change ----- - -a|[source,python] ----- -cube.loc[:, 1.5] ----- - -h|Coordinate name - -a|[source,python] ----- - cube[dict(height=2)] - cube.isel(height=2) ----- - -a|[source,python] ----- -cube.loc[dict(height=1.5)] -cube.sel(height=1.5) ----- -|=== - -### Slices - -The semantics of position-based slices will continue to match that of normal Python slices. The start position is included, the end position is excluded. - -Value-based slices will be strictly inclusive, with both the start and end values included. This behaviour differs from normal Python slices but is in common with pandas. - -Just as for normal Python slices, we do not need to provide the ability to control the include/exclude behaviour for slicing. 
- -### Value-based indexing - -#### Equality - -Should the behaviour of value-based equality depend on the data type of the coordinate? - -* integer: exact match -* float: tolerance match, tolerance determined by bit-width -* string: exact match - -#### Scalar/category - -If/how to deal with category selection `cube.loc(season='JJA')`? Defer to `groupby()`? - -`cube.loc[12]` - must always match a single value or raise KeyError, corresponding dimension will be removed -`cube.loc[[12]]` - may match any number of values? (incl. zero?), dimension will be retained - -### Out of scope - -* Deliberately enhancing the performance. -This is a very valuable topic and should be addressed by subsequent efforts. - -* Time/date values as strings. -Providing pandas-style string representations for convenient representation of partial date/times should be addressed in a subsequent effort - perhaps in conjunction with an explicit performance test suite. -There is a risk that this topic could bog down when dealing with non-standard calendars and climatological date ranges. - -## Work required - -* Implementations for each of the new helper objects. -* An update to the documentation to demonstrate best practice. Known impacted areas include: -** The "Subsetting a Cube" chapter of the user guide. - -### TODO -* Multi-dimensional coordinates -* Non-orthogonal coordinates -* Bounds -* Boolean array indexing -* Lambdas? -* What to do about constrained loading? -* Relationship to https://scitools.org.uk/iris/docs/v1.9.2/iris/iris/cube.html#iris.cube.Cube.intersection[iris.cube.Cube.intersection]? -* Relationship to interpolation (especially nearest-neighbour)? -** e.g. What to do about values that don't exist? -*** pandas throws a KeyError -*** xarray supports (several) nearest-neighbour schemes via https://xarray.pydata.org/en/stable/indexing.html#nearest-neighbor-lookups[`data.sel()`] -*** Apparently https://holoviews.org/[holoviews] does nearest-neighbour interpolation. 
-* multi-dimensional coordinate => unroll? -* var_name only selection? `cube.vloc(t0=12)` -* Orthogonal only? Or also independent? `cube.loc_points(lon=[1, 1, 5], lat=[31, 33, 32])` - ** This seems quite closely linked to interpolation. Is the interpolation scheme orthogonal to cross-product vs. independent? -+ -[source,python] ----- -cube.interpolate( - scheme='nearest', - mesh=dict(lon=[5, 10, 15], lat=[40, 50])) -cube.interpolate( - scheme=Nearest(mode='spherical'), - locations=Ortho(lon=[5, 10, 15], lat=[40, 50])) ----- - -## References -. Iris - * https://scitools.org.uk/iris/docs/v1.9.2/iris/iris.html#iris.Constraint[iris.Constraint] - * https://scitools.org.uk/iris/docs/v1.9.2/userguide/subsetting_a_cube.html[Subsetting a cube] -. https://pandas.pydata.org/pandas-docs/stable/indexing.html[pandas indexing] -. https://xarray.pydata.org/en/stable/indexing.html[xarray indexing] -. https://legacy.python.org/dev/peps/pep-0472/[PEP 472 - Support for indexing with keyword arguments] -. https://nbviewer.jupyter.org/gist/rsignell-usgs/13d7ce9d95fddb4983d4cbf98be6c71d[Time slicing NetCDF or OPeNDAP datasets] - Rich Signell's xarray/iris comparison focussing on time handling and performance diff --git a/docs/src/_static/theme_override.css b/docs/src/_static/theme_override.css index 355119f8a5..1d4d7bcb79 100644 --- a/docs/src/_static/theme_override.css +++ b/docs/src/_static/theme_override.css @@ -26,3 +26,32 @@ ul.squarelist { text-indent: 1em; padding-left: 5em; } + +/* custom css for the cards on the homepage */ +.sd-card-img-top { + width: 15% !important; + position: absolute !important; + padding-left: 10px; + min-width: 50px; + top: 50%; + transform: translateY(-50%); +} + +.sd-card-img { + height: auto; +} + +.custom-title { + font-weight: bold; + color: #1B8FB7 !important; + text-align: left; +} + +.custom-body { + text-align: left; + margin-left: max(45px, 15%); +} + +.center { + text-align: center; +} \ No newline at end of file diff --git 
a/docs/src/_templates/tags_links.need b/docs/src/_templates/tags_links.need new file mode 100644 index 0000000000..43c3ca39ce --- /dev/null +++ b/docs/src/_templates/tags_links.need @@ -0,0 +1,11 @@ +{# Render plain clickable text links for each tag #} +{% if tags %} +:strong:`Tags:` {{ " " }} {%- for t in tags -%} + {%- if t and t.startswith('topic_') -%} + :ref:`{{ t }} <{{ t }}>` + {%- else -%} + {{ t }} + {%- endif -%} + {%- if not loop.last %} | {% endif -%} +{%- endfor %} +{% endif %} diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index a516332aaf..0f31261131 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -47,6 +47,8 @@ .. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid .. _netCDF4: https://github.com/Unidata/netcdf4-python .. _SciTools Contributor's License Agreement (CLA): https://cla-assistant.io/SciTools/ +.. _extlinks: https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html +.. _Diataxis: https://diataxis.fr/ .. 
comment diff --git a/docs/src/conf.py b/docs/src/conf.py index fa896aba69..ad80752223 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -19,7 +19,10 @@ """Config for sphinx.""" +import ast +import contextlib import datetime +import importlib from importlib.metadata import version as get_version from inspect import getsource import ntpath @@ -29,6 +32,7 @@ from subprocess import run import sys from tempfile import gettempdir +import textwrap from urllib.parse import quote import warnings @@ -163,6 +167,9 @@ def _dotv(version): "sphinx_gallery.gen_gallery", "matplotlib.sphinxext.mathmpl", "matplotlib.sphinxext.plot_directive", + "sphinx_needs", + "user_manual_directives", + "sphinx_reredirects", ] if skip_api == "1": @@ -219,6 +226,11 @@ def _dotv(version): autoclass_content = "both" modindex_common_prefix = ["iris"] +# if geovista is not installed we need to mock the imports so the autodoc build works: +if importlib.util.find_spec("geovista") is None: + autodoc_mock_imports = ["geovista", "pyvista"] + + # -- apidoc extension --------------------------------------------------------- # See https://github.com/sphinx-contrib/apidoc source_code_root = (Path(__file__).parents[2]).absolute() @@ -279,11 +291,20 @@ def _dotv(version): "https://github.com/SciTools/iris/discussions/%s", "Discussion #%s", ), + "user": ("https://github.com/%s", "@%s"), } # -- Doctest ("make doctest")-------------------------------------------------- -doctest_global_setup = "import iris" +doctest_global_setup = """ +import iris + +# To handle conditional doctest skipping if geovista is not installed: +try: + import geovista as gv +except ImportError: + gv = None +""" # -- Options for HTML output -------------------------------------------------- @@ -412,12 +433,14 @@ def reset_modules(gallery_conf, fname): ) sys.path.insert(0, str(reset_modules_dir)) +GALLERY_CODE: str = "../gallery_code" +GALLERY_DIRS: str = "generated/gallery" sphinx_gallery_conf = { # path to your example scripts - 
"examples_dirs": ["../gallery_code"], + "examples_dirs": GALLERY_CODE, # path to where to save gallery generated output - "gallery_dirs": ["generated/gallery"], + "gallery_dirs": GALLERY_DIRS, # filename pattern for the files in the gallery "filename_pattern": "/plot_", # filename pattern to ignore in the gallery @@ -441,3 +464,270 @@ def reset_modules(gallery_conf, fname): "section": "Section %s", "table": "Table %s", } + +# ============================================================================ +# | Copyright GeoVista | +# | Code from this point unto the termination banner is copyright GeoVista. | +# | Minimal code changes made to make it generic. | +# | | +# | License details can be found at: | +# | https://github.com/bjlittle/geovista/blob/main/LICENSE | +# ============================================================================ + +# Source: https://github.com/bjlittle/geovista/blob/main/docs/src/conf.py + + +def _bool_eval(*, arg: str | bool) -> bool: + """Sanitise to a boolean only configuration.""" + if isinstance(arg, str): + with contextlib.suppress(TypeError): + arg = ast.literal_eval(arg.capitalize()) + + return bool(arg) + + +def generate_carousel( + app: Sphinx, + fname: Path, + ncards: int | None = None, + margin: int | None = None, + width: int | None = None, +) -> None: + """Generate and write the gallery carousel RST file.""" + if ncards is None: + ncards = 3 + + if margin is None: + margin = 4 + + if width is None: + width = "25%" + + base = Path(app.srcdir, *GALLERY_DIRS.split("/")) + cards_by_link = {} + + card = r""".. 
card:: + :img-background: {image} + :link: {link} + :link-type: ref + :width: {width} + :margin: {margin} + :class-card: align-self-center +""" + + # TODO @bjlittle: use Path.walk when python >=3.12 + for root, _, files in os.walk(str(base)): + root = Path(root) # noqa: PLW2901 + if root.name == "images": + root_relative = root.relative_to(app.srcdir) + link_relative = root.parent.relative_to(app.srcdir) + + for file in files: + path = Path(file) + if path.suffix == ".png": + # generate the card "img-background" filename + image = root_relative / path + + # generate the card "link" reference + # remove numeric gallery image index e.g., "001" + parts = path.stem.split("_")[:-1] + link = parts[:2] + list(link_relative.parts) + parts[2:] + link = f"{'_'.join(link)}.py" + + # needed in case a gallery filename has mixed case + link = link.lower() + + kwargs = { + "image": image, + "link": link, + "width": width, + "margin": margin, + } + + cards_by_link[link] = card.format(**kwargs) + + # sort the cards by their link + cards = [cards_by_link[link] for link in sorted(cards_by_link.keys())] + cards = textwrap.indent("\n".join(cards), prefix=" " * 4) + + # now, create the card carousel + carousel = f""".. card-carousel:: {ncards} + +{cards} + +.. 
rst-class:: center + + :fa:`images` Gallery Carousel + +""" + + # finally, write the rst for the gallery carousel + Path(app.srcdir, fname).write_text(carousel) + + +def gallery_carousel( + app: Sphinx, + env: BuildEnvironment, # noqa: ARG001 + docnames: list[str], # noqa: ARG001 +) -> None: + """Create the gallery carousel.""" + # create empty or truncate existing file + fname = Path(app.srcdir, "gallery_carousel.txt") + + with fname.open("w"): + pass + + if _bool_eval(arg=app.builder.config.plot_gallery): + # only generate the carousel if we have a gallery + generate_carousel(app, fname) + + +# ============================================================================ +# | END GeoVista copyright | +# ============================================================================ + + +# -- sphinx-reredirects config ------------------------------------------------ + +redirects = { + # explanation + "further_topics/dataless_cubes": "/user_manual/explanation/dataless_cubes.html", + "userguide/iris_cubes": "/user_manual/explanation/iris_cubes.html", + "userguide/iris_philosophy": "/user_manual/explanation/iris_philosophy.html", + "community/iris_xarray": "/user_manual/explanation/iris_xarray.html", + "further_topics/lenient_maths": "/user_manual/explanation/lenient_maths.html", + "further_topics/lenient_metadata": "/user_manual/explanation/lenient_metadata.html", + "further_topics/ugrid/data_model": "/user_manual/explanation/mesh_data_model.html", + "further_topics/ugrid/partner_packages": "/user_manual/explanation/mesh_partners.html", + "further_topics/metadata": "/user_manual/explanation/metadata.html", + "further_topics/missing_data_handling": "/user_manual/explanation/missing_data_handling.html", + "further_topics/netcdf_io": "/user_manual/explanation/netcdf_io.html", + "userguide/real_and_lazy_data": "/user_manual/explanation/real_and_lazy_data.html", + "further_topics/um_files_loading": "/user_manual/explanation/um_files_loading.html", + 
"further_topics/ux_guide": "/user_manual/explanation/ux_guide.html", + "further_topics/which_regridder_to_use": "/user_manual/explanation/which_regridder_to_use.html", + "why_iris": "/user_manual/explanation/why_iris.html", + # how_to + "further_topics/filtering_warnings": "/user_manual/how_to/filtering_warnings.html", + "installing": "/user_manual/how_to/installing.html", + "further_topics/ugrid/other_meshes": "/user_manual/how_to/mesh_conversions.html", + "further_topics/ugrid/operations": "/user_manual/how_to/mesh_operations.html", + "userguide/navigating_a_cube": "/user_manual/how_to/navigating_a_cube.html", + "community/plugins": "/user_manual/how_to/plugins.html", + # reference + "userguide/citation": "/user_manual/reference/citation.html", + "userguide/glossary": "/user_manual/reference/glossary.html", + "community/phrasebook": "/user_manual/reference/phrasebook.html", + # section indexes + "community/index": "/user_manual/section_indexes/community.html", + "further_topics/dask_best_practices/index": "/user_manual/section_indexes/dask_best_practices.html", + "further_topics/ugrid/index": "/user_manual/section_indexes/mesh_support.html", + "userguide/index": "/user_manual/section_indexes/userguide.html", + # tutorial + "further_topics/controlling_merge": "/user_manual/tutorial/controlling_merge.html", + "userguide/cube_maths": "/user_manual/tutorial/cube_maths.html", + "userguide/cube_statistics": "/user_manual/tutorial/cube_statistics.html", + "further_topics/dask_best_practices/dask_bags_and_greed": "/user_manual/tutorial/dask_bags_and_greed.html", + "further_topics/dask_best_practices/dask_parallel_loop": "/user_manual/tutorial/dask_parallel_loop.html", + "further_topics/dask_best_practices/dask_pp_to_netcdf": "/user_manual/tutorial/dask_pp_to_netcdf.html", + "userguide/interpolation_and_regridding": "/user_manual/tutorial/interpolation_and_regridding.html", + "userguide/loading_iris_cubes": "/user_manual/tutorial/loading_iris_cubes.html", + 
"userguide/merge_and_concat": "/user_manual/tutorial/merge_and_concat.html", + "userguide/plotting_a_cube": "/user_manual/tutorial/plotting_a_cube.html", + "userguide/saving_iris_cubes": "/user_manual/tutorial/saving_iris_cubes.html", + "userguide/subsetting_a_cube": "/user_manual/tutorial/subsetting_a_cube.html", +} + +# -- sphinx-needs config ------------------------------------------------------ +# See https://sphinx-needs.readthedocs.io/en/latest/configuration.html + +# TODO: namespace these types as Diataxis for max clarity? +needs_types = [ + { + "directive": "tutorial", + "title": "Tutorial", + "prefix": "", + "color": "", + "style": "node", + }, + { + "directive": "how-to", + "title": "How To", + "prefix": "", + "color": "", + "style": "node", + }, + { + "directive": "explanation", + "title": "Explanation", + "prefix": "", + "color": "", + "style": "node", + }, + { + # z_ prefix to force to the end of sorted lists. + "directive": "z_reference", + "title": "Reference", + "prefix": "", + "color": "", + "style": "node", + }, +] +# The layout whenever a 'need item' directive is used. I.e. at the top of each +# user manual page. +needs_default_layout = "focus" +# The `tags_links` jinja template displays a list of tags where every topic_* +# tag is a link to the relevant section in user_manual/index.rst. +needs_template_folder = "_templates" +needs_fields = { + "post_template": {"default": "tags_links"}, +} + +from sphinx_needs.data import NeedsCoreFields + +# Known bug in sphinx-needs pre v6.0. 
+# https://github.com/useblocks/sphinx-needs/issues/1420 +if "allow_default" not in NeedsCoreFields["post_template"]: + NeedsCoreFields["post_template"]["allow_default"] = "str" + + +# ------------------------------------------------------------------------------ + + +def setup(app: Sphinx) -> None: + """Configure sphinx application.""" + # Monkeypatch for https://github.com/useblocks/sphinx-needs/issues/723 + import sphinx_needs.directives.needtable as nt + + orig_row_col_maker = nt.row_col_maker + + def row_col_maker_link_title( + app, + fromdocname, + all_needs, + need_info, + need_key, + make_ref=False, + ref_lookup=False, + prefix="", + ): + if need_key == "title": + make_ref = True + return orig_row_col_maker( + app, + fromdocname, + all_needs, + need_info, + need_key, + make_ref, + ref_lookup, + prefix, + ) + + nt.row_col_maker = row_col_maker_link_title + + # we require the output of this extension + app.setup_extension("sphinx_gallery.gen_gallery") + + # register callback to generate gallery carousel + app.connect("env-before-read-docs", gallery_carousel) diff --git a/docs/src/developers_guide/documenting/whats_new_contributions.rst b/docs/src/developers_guide/documenting/whats_new_contributions.rst index 82569e57a0..a6b7d13148 100644 --- a/docs/src/developers_guide/documenting/whats_new_contributions.rst +++ b/docs/src/developers_guide/documenting/whats_new_contributions.rst @@ -82,6 +82,18 @@ The required content, in order, is as follows: .. _@tkknight: https://github.com/tkknight + .. hint:: + + Alternatively adopt the ``:user:`` `extlinks`_ convenience instead. + + For example to reference the ``github`` user ``tkknight`` simply use + :literal:`:user:\`tkknight\``. + + This will be rendered as :user:`tkknight`. + + In addition, there is now no need to add a full reference to the user within + the documentation. + * A succinct summary of the new/changed behaviour. * Context to the change. 
Possible examples include: what this fixes, why @@ -143,3 +155,4 @@ users. To achieve this several categories may be used. **💼 Internal** Changes to any internal or development related topics, such as testing, environment dependencies etc. + diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst deleted file mode 100644 index 122f1746a9..0000000000 --- a/docs/src/further_topics/index.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. _further_topics_index: - - -Further Topics -=============== - -Extra information on specific technical issues. - -.. toctree:: - :maxdepth: 1 - - filtering_warnings - metadata - lenient_metadata - lenient_maths - um_files_loading - missing_data_handling - dataless_cubes - netcdf_io - dask_best_practices/index - ugrid/index - which_regridder_to_use - controlling_merge - ux_guide \ No newline at end of file diff --git a/docs/src/getting_started.rst b/docs/src/getting_started.rst deleted file mode 100644 index 24299a4060..0000000000 --- a/docs/src/getting_started.rst +++ /dev/null @@ -1,15 +0,0 @@ -.. _getting_started_index: - -Getting Started -=============== - -To get started with Iris we recommend reading :ref:`why_iris` was created and to -explore the examples in the :ref:`gallery_index` after :ref:`installing_iris` -Iris. - -.. toctree:: - :maxdepth: 1 - - why_iris - installing - generated/gallery/index \ No newline at end of file diff --git a/docs/src/index.rst b/docs/src/index.rst index 139e54cee0..5059bcd062 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -15,117 +15,80 @@ representations become unwieldy and inefficient. For more information see :ref:`why_iris`. -.. grid:: 3 - .. grid-item-card:: - :text-align: center - :img-top: _static/icon_shuttle.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Information on Iris, how to install and a gallery of examples that - create plots. +.. grid:: 1 1 2 2 + :gutter: 2 - +++ - .. 
button-ref:: getting_started_index - :ref-type: ref - :color: primary - :outline: - :expand: + .. grid-item-card:: Getting Started + :class-title: custom-title + :class-body: custom-body + :link: getting_started_index + :link-type: ref + :img-top: _static/icon_shuttle.svg + :class-img-top: dark-light + :class-card: sd-rounded-3 - Getting Started + Installing and gallery examples. - .. grid-item-card:: - :text-align: center + .. grid-item-card:: User Manual + :class-title: custom-title + :class-body: custom-body + :link: user_manual_index + :link-type: ref :img-top: _static/icon_instructions.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Learn how to use Iris, including loading, navigating, saving, - plotting and more. - - +++ - .. button-ref:: user_guide_index - :ref-type: ref - :color: primary - :outline: - :expand: + :class-img-top: dark-light + :class-card: sd-rounded-3 - User Guide + Learn how to use Iris. - .. grid-item-card:: - :text-align: center + .. grid-item-card:: Developers Guide + :class-title: custom-title + :class-body: custom-body + :link: development_where_to_start + :link-type: ref :img-top: _static/icon_development.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Information on how you can contribute to Iris as a developer. - - +++ - .. button-ref:: development_where_to_start - :ref-type: ref - :color: primary - :outline: - :expand: - - Developers Guide - + :class-img-top: dark-light + :class-card: sd-rounded-3 -.. grid:: 3 + Contribute to Iris as a developer. - .. grid-item-card:: - :text-align: center + .. grid-item-card:: Iris API + :class-title: custom-title + :class-body: custom-body + :link: generated/api/iris.html :img-top: _static/icon_api.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg + :class-img-top: dark-light + :class-card: sd-rounded-3 - Browse full Iris functionality by module. + Iris functionality by module. - +++ - .. 
button-ref:: generated/api/iris - :ref-type: doc - :color: primary - :outline: - :expand: - - Iris API - - .. grid-item-card:: - :text-align: center + .. grid-item-card:: What's New + :class-title: custom-title + :class-body: custom-body + :link: iris_whatsnew + :link-type: ref :img-top: _static/icon_new_product.svg - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg - - Find out what has recently changed in Iris. - - +++ - .. button-ref:: iris_whatsnew - :ref-type: ref - :color: primary - :outline: - :expand: + :class-img-top: dark-light + :class-card: sd-rounded-3 - What's New + Recent changes in Iris. - .. grid-item-card:: - :text-align: center + .. grid-item-card:: Voted Issues + :class-title: custom-title + :class-body: custom-body + :link: voted_issues_top + :link-type: ref :img-top: _static/icon_thumb.png - :class-img-top: w-50 m-auto px-1 py-2 dark-light - :shadow: lg + :class-img-top: dark-light + :class-card: sd-rounded-3 - Raise the profile of issues by voting on them. + Raise the profile of issues by voting. - +++ - .. button-ref:: voted_issues_top - :ref-type: ref - :color: primary - :outline: - :expand: - Voted Issues +Icons made by FreePik from `Flaticon `_ -Icons made by FreePik from `Flaticon `_ +.. include:: gallery_carousel.txt .. _iris_support: @@ -151,20 +114,12 @@ The legacy support resources: .. toctree:: - :caption: Getting Started + :caption: User Manual :maxdepth: 1 + :name: user_manual :hidden: - getting_started - - -.. toctree:: - :caption: User Guide - :maxdepth: 1 - :name: userguide_index - :hidden: - - userguide/index + user_manual/index .. toctree:: @@ -176,15 +131,6 @@ The legacy support resources: developers_guide/contributing_getting_involved -.. toctree:: - :caption: Community - :maxdepth: 1 - :name: community_index - :hidden: - - Community - - .. toctree:: :caption: What's New in Iris :maxdepth: 1 @@ -194,12 +140,4 @@ The legacy support resources: whatsnew/index -.. 
toctree:: - :caption: Iris API - :maxdepth: 1 - :hidden: - - Iris API - - .. todolist:: diff --git a/docs/src/sphinxext/user_manual_directives.py b/docs/src/sphinxext/user_manual_directives.py new file mode 100644 index 0000000000..fa90a8bccf --- /dev/null +++ b/docs/src/sphinxext/user_manual_directives.py @@ -0,0 +1,251 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Sphinx customisations for a Diataxis User Manual (see diataxis.fr).""" + +import enum +from pathlib import Path +import re +from textwrap import indent +import typing + +from docutils import nodes # type: ignore[import-untyped] +from docutils.parsers.rst import Directive # type: ignore[import-untyped] +from docutils.statemachine import StringList # type: ignore[import-untyped] +from sphinx.application import Sphinx +from sphinx.builders import Builder +from sphinx.util import logging as sphinx_logging +from sphinx_needs.api import get_needs_view + +if typing.TYPE_CHECKING: + from sphinx_needs.api.need import NeedsInfoType + +logger = sphinx_logging.getLogger(__name__) + + +class Diataxis(enum.StrEnum): + """The Diataxis-inspired sphinx-needs directives configured in conf.py.""" + + ALL = "all" + TUTORIAL = "tutorial" + EXPLANATION = "explanation" + HOW_TO = "how-to" + # z_ prefix to force to the end of sorted lists. 
+ REFERENCE = "z_reference" + + +DIATAXIS_CAPTIONS = { + Diataxis.TUTORIAL: "Guided lessons for understanding a topic.\n\n(Supports **study**, via **action**)", + Diataxis.EXPLANATION: "In-depth discussion for understanding concepts.\n\n(Supports **study**, via **theory**)", + Diataxis.HOW_TO: "Step by step instructions for achieving a specific goal.\n\n(Supports **work**, via **action**)", + Diataxis.REFERENCE: "Concise information to look up when needed.\n\n(Supports **work**, via **theory**)", +} +"""Text to be displayed at the top of each Diataxis tab.""" + + +class DiataxisDirective(Directive): + """A topic-filtered tab-set block with Diataxis tab-items and topic navigation badges.""" + + has_content = True + """Content = the topic tag to filter by, e.g. `topic_about`.""" + + @staticmethod + def _indent(text: str) -> str: + return indent(text, " ") + + def _needtable(self, types: Diataxis, tags: str) -> str: + """Construct a single sphinx-needs needtable directive string.""" + options = [ + ':columns: title as " ";content as " "', + ":colwidths: 30;60", + ":style: table", + ":sort: type", + ":filter_warning: No pages for this filter.", + ] + if types is not Diataxis.ALL: + options.append(f":types: {types}") + if tags != "topic_all": + options.append(f":tags: {tags}") + options_str = "\n".join(options) + needtable = "\n".join( + [ + ".. needtable::", + self._indent(options_str), + ] + ) + return needtable + + def _tab_item(self, diataxis: Diataxis, tags: str) -> str: + """Construct a single tab-item string for the given Diataxis type.""" + needtable = self._needtable(types=diataxis, tags=tags) + + # Convert the Diataxis directive name to a pretty title. + tab_item_title = str(diataxis) + tab_item_title = tab_item_title.removeprefix("z_") + tab_item_title = tab_item_title.capitalize() + + caption = DIATAXIS_CAPTIONS.get(diataxis, "") + content = [ + # sync means all tab-sets on this page switch tabs together. 
+ f":sync: {diataxis}", + "", + caption, + "", + needtable, + ] + content_str = "\n".join(content) + tab_item = "\n".join( + [ + f".. tab-item:: {tab_item_title}", + self._indent(content_str), + ] + ) + return tab_item + + def run(self): + """Construct the navigation badges followed by the Diataxis tab-set.""" + # Enforce the only valid location for this directive. + rst_path = Path(self.state.document["source"]) + if not (rst_path.parent.name == "user_manual" and rst_path.name == "index.rst"): + message = "Expected directive to only be used in user_manual/index.rst" + error = self.state_machine.reporter.error(message, line=self.lineno) + return [error] + + # Find all the topic labels in this file and construct navigation badges + # for them. + label_pattern = re.compile(r"^\.\. _(topic_.+):$", re.MULTILINE) + topic_labels = label_pattern.findall(rst_path.read_text()) + # The 'current' topic is highlighted differently. + badges = { + label: "bdg-ref-primary" + if label == self.content[0] + else "bdg-ref-primary-line" + for label in topic_labels + } + # Parse the badges as RST. + node = nodes.Element() + self.state.nested_parse( + StringList([f":{badge}:`{label}`" for label, badge in badges.items()]), + self.content_offset, + node, + ) + + # Construct the Diataxis tab-set. + tab_items = [ + self._tab_item(diataxis=diataxis, tags=self.content[0]) + for diataxis in Diataxis + ] + tab_items_str = "\n\n".join(tab_items) + tab_set = "\n".join( + [ + ".. tab-set::", + "", + self._indent(tab_items_str), + ] + ) + # Parse the tab set as RST. + self.state.nested_parse( + StringList(tab_set.splitlines()), self.content_offset, node + ) + + return node.children + + +def validate_items(app: Sphinx, builder: Builder) -> None: + """Validate that each user manual page has a single correctly configured item.""" + env = app.env + found_docs: typing.Iterable[str] = env.found_docs + + # Read-only iterable of all sphinx-needs items; only valid in the write phase. 
+ needs_view = get_needs_view(app) + # Group needs by docname + by_doc: dict[Path, list[NeedsInfoType]] = {} + for need_id in needs_view: + need = needs_view[need_id] + doc_name = need.get("docname") + if not doc_name: + # External/imported needs may have no docname; skip page accounting + continue + by_doc.setdefault(Path(doc_name), []).append(need) + + def _get_expected_type(doc_path: Path) -> typing.Optional[Diataxis]: + """Get the expected Diataxis type for the given document path.""" + parents_and_diataxis = [ + (Path("generated/api"), Diataxis.REFERENCE), + (Path("generated/gallery"), Diataxis.HOW_TO), + (Path("user_manual/tutorial"), Diataxis.TUTORIAL), + (Path("user_manual/explanation"), Diataxis.EXPLANATION), + (Path("user_manual/how_to"), Diataxis.HOW_TO), + (Path("user_manual/reference"), Diataxis.REFERENCE), + ] + expected = None + for parent, diataxis in parents_and_diataxis: + if parent in doc_path.parents: + expected = diataxis + break + if Path("generated/gallery") in doc_path.parents and doc_path.name == "index": + expected = None + if doc_path.name == "sg_execution_times": + expected = None + return expected + + for doc_name in found_docs: + doc_path = Path(doc_name) + expected_type = _get_expected_type(doc_path) + if expected_type is not None: + problem_prefix = "Page expected to have exactly 1 sphinx-needs item;" + try: + (page_need,) = by_doc[doc_path] + except KeyError: + problem = f"{problem_prefix} found 0." + logger.error(problem, location=doc_name) + continue + except ValueError: + count = len(by_doc[doc_path]) + problem = f"{problem_prefix} found {count}." + logger.error(problem, location=doc_name) + continue + + if (page_type := page_need["type"]) != expected_type: + problem = ( + "sphinx-needs item expected to have type " + f"'{expected_type}'; found type '{page_type}'." 
+ ) + logger.error(problem, location=doc_name) + + if (line_no := page_need.get("lineno")) > 25: + # Ensures that links to the needs directive take reader to the + # start of the page. + problem = ( + "sphinx-needs item expected to be defined within " + f"first 25 lines; found at line {line_no}." + ) + logger.error(problem, location=doc_name) + + # Title is not validated as it is always populated. + + if page_need["content"] == "": + problem = "sphinx-needs item must have non-empty content section." + logger.error(problem, location=doc_name) + + tags = page_need.get("tags", []) + if [tag for tag in tags if tag.startswith("topic_")] == []: + problem = ( + "sphinx-needs item must have at least one 'topic_xxx' tag " + "in its 'tags' field." + ) + logger.error(problem, location=doc_name) + + +def setup(app: Sphinx): + """Set up the Sphinx extension. + + This function is expected by Sphinx to register the extension. + """ + # Connect at write-started so needs are fully collected & resolved. + app.connect("write-started", validate_items) + + app.add_directive("diataxis-page-list", DiataxisDirective) + + return {"version": "0.1"} diff --git a/docs/src/further_topics/dataless_cubes.rst b/docs/src/user_manual/explanation/dataless_cubes.rst similarity index 96% rename from docs/src/further_topics/dataless_cubes.rst rename to docs/src/user_manual/explanation/dataless_cubes.rst index d0b592dfb6..1c4ec64a2c 100644 --- a/docs/src/further_topics/dataless_cubes.rst +++ b/docs/src/user_manual/explanation/dataless_cubes.rst @@ -1,3 +1,8 @@ +.. explanation:: Dataless Cubes + :tags: topic_data_model + + Read about Iris' support for Cubes with no data payload. + .. 
_dataless-cubes: ============== diff --git a/docs/src/further_topics/ugrid/images/data_structured_grid.svg b/docs/src/user_manual/explanation/images/data_structured_grid.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/data_structured_grid.svg rename to docs/src/user_manual/explanation/images/data_structured_grid.svg diff --git a/docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg b/docs/src/user_manual/explanation/images/data_ugrid_mesh.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/data_ugrid_mesh.svg rename to docs/src/user_manual/explanation/images/data_ugrid_mesh.svg diff --git a/docs/src/further_topics/ugrid/images/geovistalogo.svg b/docs/src/user_manual/explanation/images/geovistalogo.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/geovistalogo.svg rename to docs/src/user_manual/explanation/images/geovistalogo.svg diff --git a/docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg b/docs/src/user_manual/explanation/images/iris-esmf-regrid.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/iris-esmf-regrid.svg rename to docs/src/user_manual/explanation/images/iris-esmf-regrid.svg diff --git a/docs/src/userguide/multi_array.svg b/docs/src/user_manual/explanation/images/multi_array.svg similarity index 100% rename from docs/src/userguide/multi_array.svg rename to docs/src/user_manual/explanation/images/multi_array.svg diff --git a/docs/src/userguide/multi_array_to_cube.svg b/docs/src/user_manual/explanation/images/multi_array_to_cube.svg similarity index 100% rename from docs/src/userguide/multi_array_to_cube.svg rename to docs/src/user_manual/explanation/images/multi_array_to_cube.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_edge_data.svg b/docs/src/user_manual/explanation/images/ugrid_edge_data.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_edge_data.svg rename to 
docs/src/user_manual/explanation/images/ugrid_edge_data.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_element_centres.svg b/docs/src/user_manual/explanation/images/ugrid_element_centres.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_element_centres.svg rename to docs/src/user_manual/explanation/images/ugrid_element_centres.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_node_independence.svg b/docs/src/user_manual/explanation/images/ugrid_node_independence.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_node_independence.svg rename to docs/src/user_manual/explanation/images/ugrid_node_independence.svg diff --git a/docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg b/docs/src/user_manual/explanation/images/ugrid_variable_faces.svg similarity index 100% rename from docs/src/further_topics/ugrid/images/ugrid_variable_faces.svg rename to docs/src/user_manual/explanation/images/ugrid_variable_faces.svg diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/user_manual/explanation/iris_cubes.rst similarity index 98% rename from docs/src/userguide/iris_cubes.rst rename to docs/src/user_manual/explanation/iris_cubes.rst index 03b5093efc..4b615ba21e 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/user_manual/explanation/iris_cubes.rst @@ -1,3 +1,8 @@ +.. explanation:: Iris Data Structures + :tags: topic_data_model + + Read about the core Iris data model. + .. _iris_data_structures: ==================== @@ -105,7 +110,7 @@ Suppose we have some gridded data which has 24 air temperature readings (in Kelvin) which is located at 4 different longitudes, 2 different latitudes and 3 different heights. Our data array can be represented pictorially: -.. image:: multi_array.svg +.. image:: images/multi_array.svg Where dimensions 0, 1, and 2 have lengths 3, 2 and 4 respectively. 
@@ -135,7 +140,7 @@ The Iris cube to represent this data would consist of: Pictorially the cube has taken on more information than a simple array: -.. image:: multi_array_to_cube.svg +.. image:: images/multi_array_to_cube.svg Additionally further information may be optionally attached to the cube. diff --git a/docs/src/userguide/iris_philosophy.rst b/docs/src/user_manual/explanation/iris_philosophy.rst similarity index 98% rename from docs/src/userguide/iris_philosophy.rst rename to docs/src/user_manual/explanation/iris_philosophy.rst index 4005d915f0..e3f7618f88 100644 --- a/docs/src/userguide/iris_philosophy.rst +++ b/docs/src/user_manual/explanation/iris_philosophy.rst @@ -1,11 +1,14 @@ +.. explanation:: Iris' Philosophy + :tags: topic_about + + Read about how and why Iris is made the way it is. + .. _iris-philosophy: **************** Iris' Philosophy **************** -.. todo:: https://github.com/SciTools/iris/issues/6511; this page belongs in 'Explanation' - .. _code-maintenance: Code Maintenance diff --git a/docs/src/community/iris_xarray.rst b/docs/src/user_manual/explanation/iris_xarray.rst similarity index 97% rename from docs/src/community/iris_xarray.rst rename to docs/src/user_manual/explanation/iris_xarray.rst index f64f64bb32..12d13a08a5 100644 --- a/docs/src/community/iris_xarray.rst +++ b/docs/src/user_manual/explanation/iris_xarray.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. explanation:: Iris ❤️ Xarray + :tags: topic_interoperability + + Read about the similarities and differences between Iris and Xarray. + +.. include:: ../../common_links.inc ====================== Iris ❤️ :term:`Xarray` @@ -7,7 +12,7 @@ Iris ❤️ :term:`Xarray` There is a lot of overlap between Iris and :term:`Xarray`, but some important differences too. Below is a summary of the most important differences, so that you can be prepared, and to help you choose the best package for your use case. -See :doc:`phrasebook` for a broad comparison of terminology. 
+See :doc:`../reference/phrasebook` for a broad comparison of terminology. Overall Experience ------------------ diff --git a/docs/src/further_topics/lenient_maths.rst b/docs/src/user_manual/explanation/lenient_maths.rst similarity index 98% rename from docs/src/further_topics/lenient_maths.rst rename to docs/src/user_manual/explanation/lenient_maths.rst index 51f77fb956..bf297e7e58 100644 --- a/docs/src/further_topics/lenient_maths.rst +++ b/docs/src/user_manual/explanation/lenient_maths.rst @@ -1,3 +1,8 @@ +.. explanation:: Lenient Cube Maths + :tags: topic_data_model;topic_maths_stats + + Read about the options for handling metadata differences during Cube maths. + .. _lenient maths: Lenient Cube Maths diff --git a/docs/src/further_topics/lenient_metadata.rst b/docs/src/user_manual/explanation/lenient_metadata.rst similarity index 99% rename from docs/src/further_topics/lenient_metadata.rst rename to docs/src/user_manual/explanation/lenient_metadata.rst index 5de9ad70c4..7e1b6b26e9 100644 --- a/docs/src/further_topics/lenient_metadata.rst +++ b/docs/src/user_manual/explanation/lenient_metadata.rst @@ -1,3 +1,8 @@ +.. explanation:: Lenient Metadata + :tags: topic_data_model + + Read about the options for handling metadata differences between Cubes. + .. _lenient metadata: Lenient Metadata diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/user_manual/explanation/mesh_data_model.rst similarity index 99% rename from docs/src/further_topics/ugrid/data_model.rst rename to docs/src/user_manual/explanation/mesh_data_model.rst index 1660f6d08c..bbcfd05f64 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/user_manual/explanation/mesh_data_model.rst @@ -1,3 +1,8 @@ +.. explanation:: The Mesh Data Model + :tags: topic_mesh;topic_data_model + + Read about how Iris represents unstructured mesh data. + .. include:: ../../common_links.inc .. _ugrid model: @@ -269,7 +274,7 @@ using packages such as Dask. 
Spatial operations on mesh data are more complex ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Detail: :doc:`operations` +Detail: :doc:`../how_to/mesh_operations` Indexing a mesh data array cannot be used for: diff --git a/docs/src/further_topics/ugrid/partner_packages.rst b/docs/src/user_manual/explanation/mesh_partners.rst similarity index 94% rename from docs/src/further_topics/ugrid/partner_packages.rst rename to docs/src/user_manual/explanation/mesh_partners.rst index f69546446c..0e1f3e341d 100644 --- a/docs/src/further_topics/ugrid/partner_packages.rst +++ b/docs/src/user_manual/explanation/mesh_partners.rst @@ -1,3 +1,8 @@ +.. explanation:: Mesh Partner Packages + :tags: topic_mesh;topic_interoperability + + Read about Python packages you can use alongside Iris to work with mesh data. + .. include:: ../../common_links.inc .. _ugrid partners: @@ -34,7 +39,7 @@ reasons: Below you can learn more about the partner packages and how they are useful. Specifics of what operations would require their installation can be found in: -:doc:`operations`. +:doc:`../how_to/mesh_operations`. .. important:: **Experimental** diff --git a/docs/src/further_topics/metadata.rst b/docs/src/user_manual/explanation/metadata.rst similarity index 99% rename from docs/src/further_topics/metadata.rst rename to docs/src/user_manual/explanation/metadata.rst index f66f253a90..589df672b4 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/user_manual/explanation/metadata.rst @@ -1,4 +1,8 @@ -.. _further topics: +.. explanation:: Metadata + :tags: topic_data_model + + Read about metadata in the Iris data model e.g. names, units. + .. 
_metadata: Metadata diff --git a/docs/src/further_topics/missing_data_handling.rst b/docs/src/user_manual/explanation/missing_data_handling.rst similarity index 94% rename from docs/src/further_topics/missing_data_handling.rst rename to docs/src/user_manual/explanation/missing_data_handling.rst index a461a44456..fe23787bc1 100644 --- a/docs/src/further_topics/missing_data_handling.rst +++ b/docs/src/user_manual/explanation/missing_data_handling.rst @@ -1,3 +1,8 @@ +.. explanation:: Missing Data Handling in Iris + :tags: topic_data_model;topic_maths_stats;topic_load_save + + Read about how Iris handles missing/masked data during Cube load/save/modification. + ============================= Missing Data Handling in Iris ============================= diff --git a/docs/src/further_topics/netcdf_io.rst b/docs/src/user_manual/explanation/netcdf_io.rst similarity index 99% rename from docs/src/further_topics/netcdf_io.rst rename to docs/src/user_manual/explanation/netcdf_io.rst index 1e94123fdf..d3fbf00b10 100644 --- a/docs/src/further_topics/netcdf_io.rst +++ b/docs/src/user_manual/explanation/netcdf_io.rst @@ -1,3 +1,8 @@ +.. explanation:: NetCDF I/O Handling in Iris + :tags: topic_load_save + + Read about how Iris loads and saves NetCDF files. + .. testsetup:: chunk_control import iris diff --git a/docs/src/userguide/real_and_lazy_data.rst b/docs/src/user_manual/explanation/real_and_lazy_data.rst similarity index 98% rename from docs/src/userguide/real_and_lazy_data.rst rename to docs/src/user_manual/explanation/real_and_lazy_data.rst index 2b3ecf9e64..275c870252 100644 --- a/docs/src/userguide/real_and_lazy_data.rst +++ b/docs/src/user_manual/explanation/real_and_lazy_data.rst @@ -1,3 +1,8 @@ +.. explanation:: Real and Lazy Data + :tags: topic_data_model;topic_lazy_data + + Read about how Iris defers data loading from disk. + .. 
_real_and_lazy_data: @@ -14,7 +18,7 @@ Real and Lazy Data ================== -We have seen in the :doc:`iris_cubes` section of the user guide that +We have seen in the :doc:`../explanation/iris_cubes` section of the user guide that Iris cubes contain data and metadata about a phenomenon. The data element of a cube is always an array, but the array may be either "real" or "lazy". diff --git a/docs/src/further_topics/um_files_loading.rst b/docs/src/user_manual/explanation/um_files_loading.rst similarity index 99% rename from docs/src/further_topics/um_files_loading.rst rename to docs/src/user_manual/explanation/um_files_loading.rst index 2d2eb973e4..8c6718805a 100644 --- a/docs/src/further_topics/um_files_loading.rst +++ b/docs/src/user_manual/explanation/um_files_loading.rst @@ -1,3 +1,8 @@ +.. explanation:: Iris Handling of PP and Fieldsfiles + :tags: topic_load_save + + Read about how Iris represents Met Office UM PP and Fieldsfiles data. + .. testsetup:: import numpy as np diff --git a/docs/src/further_topics/ux_guide.rst b/docs/src/user_manual/explanation/ux_guide.rst similarity index 86% rename from docs/src/further_topics/ux_guide.rst rename to docs/src/user_manual/explanation/ux_guide.rst index 6b0599c757..3b62e036f4 100644 --- a/docs/src/further_topics/ux_guide.rst +++ b/docs/src/user_manual/explanation/ux_guide.rst @@ -1,9 +1,14 @@ +.. explanation:: Reviewing the Iris User Experience + :tags: topic_about + + Read about how we plan to review and improve the user experience of Iris. + .. _ux_guide: Reviewing the Iris User Experience ********************************** -.. todo:: https://github.com/SciTools/iris/issues/6511; this page belongs in 'Explanation' +.. todo:: https://github.com/SciTools/iris/issues/6867; this page belongs in 'Get Involved' Often, improving and updating the existing user experience can fall behind fixing create new features, or quashing pesky bugs. 
To combat this, we plan to have regular development discussions to ensure diff --git a/docs/src/further_topics/which_regridder_to_use.rst b/docs/src/user_manual/explanation/which_regridder_to_use.rst similarity index 99% rename from docs/src/further_topics/which_regridder_to_use.rst rename to docs/src/user_manual/explanation/which_regridder_to_use.rst index dae273252d..cd1e227872 100644 --- a/docs/src/further_topics/which_regridder_to_use.rst +++ b/docs/src/user_manual/explanation/which_regridder_to_use.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. explanation:: Which Regridder to Use + :tags: topic_regrid + + Read about the different regridders available in Iris. + +.. include:: ../../common_links.inc .. _which_regridder_to_use: @@ -38,7 +43,7 @@ the following form: | System** | ``src``/``tgt`` cube coordinates. | +-----------------+-----------------------------------------------------------+ | **Lazy | If the result is calculated lazily. See | -| Regridding** | :doc:`real and lazy data `.| +| Regridding** | :doc:`real and lazy data `. | +-----------------+-----------------------------------------------------------+ | **Weights | See `regridder performance`_. | | Caching** | | diff --git a/docs/src/why_iris.rst b/docs/src/user_manual/explanation/why_iris.rst similarity index 94% rename from docs/src/why_iris.rst rename to docs/src/user_manual/explanation/why_iris.rst index a5f137b9b8..d7df72d8ad 100644 --- a/docs/src/why_iris.rst +++ b/docs/src/user_manual/explanation/why_iris.rst @@ -1,3 +1,8 @@ +.. explanation:: Why Iris + :tags: topic_about + + Read about the Iris Python package and why you might want to use it. + .. 
_why_iris: Why Iris diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/user_manual/how_to/filtering_warnings.rst similarity index 93% rename from docs/src/further_topics/filtering_warnings.rst rename to docs/src/user_manual/how_to/filtering_warnings.rst index 204049942b..d2217f326e 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/user_manual/how_to/filtering_warnings.rst @@ -1,3 +1,8 @@ +.. how-to:: Filtering Warnings + :tags: topic_troubleshooting + + How to customise Iris' warnings to only see those you need. + .. _filtering-warnings: ================== @@ -49,9 +54,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:451: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) - iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:777: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -112,7 +117,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... - iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:451: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. 
To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) :: @@ -127,16 +132,16 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=445) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=451) ... my_operation() ... - iris/coord_systems.py:771: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:777: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: - python -W ignore:::iris.coord_systems:445 - export PYTHONWARNINGS=ignore:::iris.coord_systems:445 + python -W ignore:::iris.coord_systems:451 + export PYTHONWARNINGS=ignore:::iris.coord_systems:451 Warnings from a Common Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -190,7 +195,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:445: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:451: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. 
warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) ---- diff --git a/docs/src/further_topics/ugrid/images/fesom_mesh.png b/docs/src/user_manual/how_to/images/fesom_mesh.png similarity index 100% rename from docs/src/further_topics/ugrid/images/fesom_mesh.png rename to docs/src/user_manual/how_to/images/fesom_mesh.png diff --git a/docs/src/further_topics/ugrid/images/orca_grid.png b/docs/src/user_manual/how_to/images/orca_grid.png similarity index 100% rename from docs/src/further_topics/ugrid/images/orca_grid.png rename to docs/src/user_manual/how_to/images/orca_grid.png diff --git a/docs/src/further_topics/ugrid/images/plotting.png b/docs/src/user_manual/how_to/images/plotting.png similarity index 100% rename from docs/src/further_topics/ugrid/images/plotting.png rename to docs/src/user_manual/how_to/images/plotting.png diff --git a/docs/src/further_topics/ugrid/images/smc_mesh.png b/docs/src/user_manual/how_to/images/smc_mesh.png similarity index 100% rename from docs/src/further_topics/ugrid/images/smc_mesh.png rename to docs/src/user_manual/how_to/images/smc_mesh.png diff --git a/docs/src/installing.rst b/docs/src/user_manual/how_to/installing.rst similarity index 97% rename from docs/src/installing.rst rename to docs/src/user_manual/how_to/installing.rst index a0a3fd2c62..d7832733d4 100644 --- a/docs/src/installing.rst +++ b/docs/src/user_manual/how_to/installing.rst @@ -1,3 +1,8 @@ +.. how-to:: Installing + :tags: topic_about + + How to install the Iris Python package. + .. _installing_iris: Installing diff --git a/docs/src/further_topics/ugrid/other_meshes.rst b/docs/src/user_manual/how_to/mesh_conversions.rst similarity index 98% rename from docs/src/further_topics/ugrid/other_meshes.rst rename to docs/src/user_manual/how_to/mesh_conversions.rst index 19f220be82..c465f9e9da 100644 --- a/docs/src/further_topics/ugrid/other_meshes.rst +++ b/docs/src/user_manual/how_to/mesh_conversions.rst @@ -1,10 +1,15 @@ +.. 
how-to:: Converting Other Mesh Formats + :tags: topic_mesh;topic_interoperability + + How to convert other mesh formats into Iris' Mesh Data Model. + .. _other_meshes: Converting Other Mesh Formats ***************************** Iris' Mesh Data Model is based primarily on the CF-UGRID conventions (see -:doc:`data_model`), but other mesh formats can be converted to fit into this +:doc:`../../user_manual/explanation/mesh_data_model`), but other mesh formats can be converted to fit into this model, **enabling use of Iris' specialised mesh support**. Below are some examples demonstrating how this works for various mesh formats. diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/user_manual/how_to/mesh_operations.rst similarity index 98% rename from docs/src/further_topics/ugrid/operations.rst rename to docs/src/user_manual/how_to/mesh_operations.rst index 14e746352f..34cd650b91 100644 --- a/docs/src/further_topics/ugrid/operations.rst +++ b/docs/src/user_manual/how_to/mesh_operations.rst @@ -1,10 +1,15 @@ +.. how-to:: Working with Mesh Data + :tags: topic_mesh;topic_load_save;topic_plotting;topic_regrid;topic_maths_stats + + How to perform common Iris operations on unstructured mesh data. + .. _ugrid operations: Working with Mesh Data ********************** .. note:: Several of the operations below rely on the optional dependencies - mentioned in :doc:`partner_packages`. + mentioned in :doc:`../../user_manual/explanation/mesh_partners`. Operations Summary ------------------ @@ -529,7 +534,7 @@ Region Extraction .. rubric:: |tagline: region extraction| -As described in :doc:`data_model`, indexing for a range along a +As described in :doc:`../../user_manual/explanation/mesh_data_model`, indexing for a range along a :class:`~iris.cube.Cube`\'s :meth:`~iris.cube.Cube.mesh_dim` will not provide a contiguous region, since **position on the unstructured dimension is unrelated to spatial position**. 
This means that subsetted @@ -583,6 +588,7 @@ below: :icon: code .. doctest:: ugrid_operations + :skipif: gv is None >>> from geovista.geodesic import BBox >>> from iris import load_cube, sample_data_path @@ -818,7 +824,7 @@ user. Keep an eye on memory demand when comparing large :class:`~iris.mesh.MeshXY`\es, but note that :class:`~iris.mesh.MeshXY`\ equality is enabled for lazy - processing (:doc:`/userguide/real_and_lazy_data`), so if the + processing (:doc:`/user_manual/explanation/real_and_lazy_data`), so if the :class:`~iris.mesh.MeshXY`\es being compared are lazy the process will use less memory than their total size. @@ -829,7 +835,7 @@ Combining Cubes .. rubric:: |tagline: combining cubes| Merging or concatenating :class:`~iris.cube.Cube`\s (described in -:doc:`/userguide/merge_and_concat`) with two different +:doc:`../tutorial/merge_and_concat`) with two different :class:`~iris.mesh.MeshXY`\es is not possible - a :class:`~iris.cube.Cube` must be associated with just a single :class:`~iris.mesh.MeshXY`, and merge/concatenate are not yet @@ -855,7 +861,7 @@ Arithmetic .. rubric:: |tagline: arithmetic| -Cube Arithmetic (described in :doc:`/userguide/cube_maths`) +Cube Arithmetic (described in :doc:`../tutorial/cube_maths`) has been extended to handle :class:`~iris.cube.Cube`\s that include :class:`~iris.mesh.MeshCoord`\s, and hence have a ``cube.mesh``. diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/user_manual/how_to/navigating_a_cube.rst similarity index 98% rename from docs/src/userguide/navigating_a_cube.rst rename to docs/src/user_manual/how_to/navigating_a_cube.rst index ec3cd8e0dc..2e4f3c0ca9 100644 --- a/docs/src/userguide/navigating_a_cube.rst +++ b/docs/src/user_manual/how_to/navigating_a_cube.rst @@ -1,3 +1,8 @@ +.. how-to:: Navigating a Cube + :tags: topic_data_model + + How to access the properties of a Cube. 
+ ================= Navigating a Cube ================= @@ -66,7 +71,7 @@ and :attr:`Cube.units ` respectively:: Interrogating these with the standard :func:`type` function will tell you that ``standard_name`` and ``long_name`` are either a string or ``None``, and ``units`` is an instance of :class:`iris.unit.Unit`. A more in depth discussion on -the cube units and their functional effects can be found at the end of :doc:`cube_maths`. +the cube units and their functional effects can be found at the end of :doc:`../tutorial/cube_maths`. You can access a string representing the "name" of a cube with the :meth:`Cube.name() ` method:: @@ -94,7 +99,7 @@ Each cube also has a :mod:`numpy` array which represents the phenomenon of the c print(cube.ndim) For more on the benefits, handling and uses of lazy data, see - :doc:`Real and Lazy Data ` + :doc:`Real and Lazy Data ` You can change the units of a cube using the :meth:`~iris.cube.Cube.convert_units` method. For example:: diff --git a/docs/src/community/plugins.rst b/docs/src/user_manual/how_to/plugins.rst similarity index 93% rename from docs/src/community/plugins.rst rename to docs/src/user_manual/how_to/plugins.rst index 0d79d64623..ba993eceeb 100644 --- a/docs/src/community/plugins.rst +++ b/docs/src/user_manual/how_to/plugins.rst @@ -1,3 +1,8 @@ +.. how-to:: Plugins + :tags: topic_interoperability;topic_about + + How to create and use plugins to extend Iris' functionality. + .. _namespace package: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/ .. _community_plugins: diff --git a/docs/src/user_manual/index.rst b/docs/src/user_manual/index.rst new file mode 100644 index 0000000000..0d57721609 --- /dev/null +++ b/docs/src/user_manual/index.rst @@ -0,0 +1,199 @@ +.. include:: /common_links.inc + +.. comment: + now that User Manual is the official top-level, and the User Guide is a + sub-section, the original labels have been relocated here. + +.. _user_guide_index: +.. 
_user_guide_introduction: +.. _user_manual_index: + +User Manual +=========== + +.. hint:: + + If you are new to Iris: check out :ref:`getting_started_index` first. + +Welcome to the Iris User Manual! + +This is designed as a searchable index of **all** our user documentation. Try +the Topic and `Diataxis`_ filters below to find the information you need today. +Alternatively, you can use the sidebar to navigate by section. + +.. tip:: + + - :doc:`/user_manual/index`: a searchable index of **all** user + documentation. + - :doc:`User Guide `: a linear + narrative introduction to Iris' data model and functionality. + +.. comment: + The tree structure for user_manual is specified here. As mentioned in the + text, we prefer readers to use the tabbed sections below, so the toctree is + hidden - not rendered in the text, only in the sidebar. This toctree is + expected to be section_indexes/* pages; with each of those pages + providing the remaining sub-structure. + + +.. toctree:: + :maxdepth: 1 + :hidden: + + section_indexes/get_started + section_indexes/userguide + /generated/gallery/index + Iris API + section_indexes/dask_best_practices + section_indexes/mesh_support + section_indexes/metadata_arithmetic + section_indexes/community + section_indexes/general + +.. _topic_all: + +All +--- + +.. diataxis-page-list:: topic_all + +By Topic +-------- + +.. _topic_data_model: + +topic: ``data_model`` +^^^^^^^^^^^^^^^^^^^^^ + +Pages about the :class:`~iris.cube.Cube` class and its associated components +such as :class:`~iris.coords.Coord` and :class:`~iris.mesh.MeshXY`. + +.. diataxis-page-list:: topic_data_model + + +.. _topic_slice_combine: + +topic: ``slice_combine`` +^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about subsetting and combining :class:`~iris.cube.Cube` and +:class:`~iris.cube.CubeList` data. Examples include slicing, indexing, merging, +concatenating. + +.. diataxis-page-list:: topic_slice_combine + + +.. 
_topic_load_save: + +topic: ``load_save`` +^^^^^^^^^^^^^^^^^^^^ + +Pages about reading from files into the data model, and writing from the data +model to files. + +.. diataxis-page-list:: topic_load_save + + +.. _topic_lazy_data: + +topic: ``lazy_data`` +^^^^^^^^^^^^^^^^^^^^ + +Pages about Iris' implementation of parallel and out-of-core data handling, via +Dask. See :term:`Lazy Data`. + +.. diataxis-page-list:: topic_lazy_data + + +.. _topic_plotting: + +topic: ``plotting`` +^^^^^^^^^^^^^^^^^^^ + +Pages about Iris' use of :term:`Cartopy` or :ref:`ugrid geovista` to plot +:class:`~iris.cube.Cube` data. + +.. diataxis-page-list:: topic_plotting + + +.. _topic_maths_stats: + +topic: ``maths_stats`` +^^^^^^^^^^^^^^^^^^^^^^ + +Pages about statistical and mathematical operations on :class:`~iris.cube.Cube` +data, e.g. computing means, differences, etc. + +.. diataxis-page-list:: topic_maths_stats + + +.. _topic_regrid: + +topic: ``regrid`` +^^^^^^^^^^^^^^^^^ + +Pages about regridding (2D to 2D) and interpolation (ND to 1D) of data from one +set of coordinates to another. Commonly used to move between different XY grids. + +.. diataxis-page-list:: topic_regrid + + +.. _topic_customisation: + +topic: ``customisation`` +^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about configurable Iris behaviour. + +.. diataxis-page-list:: topic_customisation + + +.. _topic_troubleshooting: + +topic: ``troubleshooting`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about problems/exceptions you may encounter when using Iris, and how to +best handle them. + +.. diataxis-page-list:: topic_troubleshooting + + +.. _topic_experimental: + +topic: ``experimental`` +^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about API that is still subject to change. + +.. diataxis-page-list:: topic_experimental + + +.. _topic_interoperability: + +topic: ``interoperability`` +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Pages about using Iris alongside other libraries and tools. + +.. diataxis-page-list:: topic_interoperability + + +.. 
_topic_mesh: + +topic: ``mesh`` +^^^^^^^^^^^^^^^ + +Pages about Iris' support for unstructured mesh data. + +.. diataxis-page-list:: topic_mesh + + +.. _topic_about: + +topic: ``about`` +^^^^^^^^^^^^^^^^ + +Pages about the non-code aspects of Iris: philosophy, installation, etc. + +.. diataxis-page-list:: topic_about diff --git a/docs/src/userguide/citation.rst b/docs/src/user_manual/reference/citation.rst similarity index 88% rename from docs/src/userguide/citation.rst rename to docs/src/user_manual/reference/citation.rst index d0496f4876..00991e1a70 100644 --- a/docs/src/userguide/citation.rst +++ b/docs/src/user_manual/reference/citation.rst @@ -1,3 +1,8 @@ +.. z_reference:: Citing Iris + :tags: topic_about + + Information on the correct way to cite the Iris Python package. + .. _Citing_Iris: =========== diff --git a/docs/src/userguide/glossary.rst b/docs/src/user_manual/reference/glossary.rst similarity index 86% rename from docs/src/userguide/glossary.rst rename to docs/src/user_manual/reference/glossary.rst index 7de88462e2..3c04b1756b 100644 --- a/docs/src/userguide/glossary.rst +++ b/docs/src/user_manual/reference/glossary.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. z_reference:: Glossary + :tags: topic_data_model;topic_about;topic_load_save;topic_lazy_data;topic_plotting;topic_maths_stats;topic_regrid;topic_customisation;topic_troubleshooting;topic_slice_combine + + Information on common terms used within Iris documentation. + +.. include:: ../../common_links.inc .. _glossary: @@ -40,7 +45,7 @@ Glossary representing the cube as a whole. | **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Cube @@ -55,7 +60,7 @@ Glossary - :term:`Coordinate Factories ` | **Related:** :term:`NumPy` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Cell Method @@ -64,7 +69,7 @@ Glossary MEAN or SUM operation. 
| **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Coordinate Factory @@ -75,7 +80,7 @@ Glossary "height above ground level" coordinate. | **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | @@ -85,7 +90,7 @@ Glossary so that not all data is in RAM at once. | **Related:** :term:`Lazy Data` **|** :term:`NumPy` - | **More information:** :doc:`real_and_lazy_data` + | **More information:** :doc:`../explanation/real_and_lazy_data` | Fields File (FF) Format @@ -111,7 +116,7 @@ Glossary thanks to parallel processing. | **Related:** :term:`Dask` **|** :term:`Real Data` - | **More information:** :doc:`real_and_lazy_data` + | **More information:** :doc:`../explanation/real_and_lazy_data` | Long Name @@ -119,7 +124,7 @@ Glossary the same restraints as :term:`standard name`. | **Related:** :term:`Standard Name` **|** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Matplotlib @@ -137,7 +142,7 @@ Glossary e.g. :term:`units ` or :term:`Cell Methods ` | **Related:** :term:`Phenomenon` **|** :term:`Cube` - | **More information:** :doc:`../further_topics/metadata` + | **More information:** :doc:`../explanation/metadata` | NetCDF Format @@ -167,7 +172,7 @@ Glossary | **Related:** :term:`Metadata` **|** :term:`Standard Name` **|** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Post Processing (PP) Format @@ -183,7 +188,7 @@ Glossary on the hard drive. | **Related:** :term:`Lazy Data` **|** :term:`NumPy` - | **More information:** :doc:`real_and_lazy_data` + | **More information:** :doc:`../explanation/real_and_lazy_data` | Standard Name @@ -191,14 +196,14 @@ Glossary defined at `CF Standard Names `_. 
| **Related:** :term:`Long Name` **|** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Unit The unit with which the :term:`phenomenon` is measured e.g. m / sec. | **Related:** :term:`Cube` - | **More information:** :doc:`iris_cubes` + | **More information:** :doc:`../explanation/iris_cubes` | Xarray diff --git a/docs/src/community/phrasebook.rst b/docs/src/user_manual/reference/phrasebook.rst similarity index 92% rename from docs/src/community/phrasebook.rst rename to docs/src/user_manual/reference/phrasebook.rst index bcd91cca83..c952988c82 100644 --- a/docs/src/community/phrasebook.rst +++ b/docs/src/user_manual/reference/phrasebook.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. z_reference:: Phrasebook + :tags: topic_interoperability + + Information on terminology differences between Iris and similar packages. + +.. include:: ../../common_links.inc .. _phrasebook: diff --git a/docs/src/community/index.rst b/docs/src/user_manual/section_indexes/community.rst similarity index 83% rename from docs/src/community/index.rst rename to docs/src/user_manual/section_indexes/community.rst index 1462f881a8..f2bc926b1e 100644 --- a/docs/src/community/index.rst +++ b/docs/src/user_manual/section_indexes/community.rst @@ -1,4 +1,4 @@ -.. include:: ../common_links.inc +.. include:: ../../common_links.inc .. todo: consider scientific-python.org @@ -7,6 +7,8 @@ Iris in the Community ===================== +.. todo:: https://github.com/SciTools/iris/issues/6867 - this section belongs in 'Get Involved' + Iris aims to be a valuable member of the open source scientific Python community. @@ -39,15 +41,15 @@ smoother interoperability: .. not using toctree due to combination of child pages and cross-references. * The :mod:`iris.pandas` module -* :doc:`iris_xarray` -* :doc:`phrasebook` +* :doc:`../explanation/iris_xarray` +* :doc:`../reference/phrasebook` .. 
toctree:: :maxdepth: 1 :hidden: - iris_xarray - phrasebook + ../explanation/iris_xarray + ../reference/phrasebook Plugins ------- @@ -57,4 +59,4 @@ Iris can be extended with **plugins**! See below for further information: .. toctree:: :maxdepth: 2 - plugins + ../how_to/plugins diff --git a/docs/src/further_topics/dask_best_practices/index.rst b/docs/src/user_manual/section_indexes/dask_best_practices.rst similarity index 95% rename from docs/src/further_topics/dask_best_practices/index.rst rename to docs/src/user_manual/section_indexes/dask_best_practices.rst index 79de2692bd..109d328b82 100644 --- a/docs/src/further_topics/dask_best_practices/index.rst +++ b/docs/src/user_manual/section_indexes/dask_best_practices.rst @@ -102,7 +102,7 @@ this with:: dask.config.set(num_workers=N) -For an example, see :doc:`dask_bags_and_greed`. +For an example, see :doc:`../tutorial/dask_bags_and_greed`. Alternatively, when there is only one CPU allocated, it may actually be more efficient to use a "synchronous" scheduler instead, with:: @@ -194,7 +194,7 @@ If the file(s) being loaded contain multiple fields, this can lead to an excessive amount of chunks which will result in poor performance. When the default chunking is not appropriate, it is possible to rechunk. -:doc:`dask_pp_to_netcdf` provides a detailed demonstration of how Dask can optimise +:doc:`../tutorial/dask_pp_to_netcdf` provides a detailed demonstration of how Dask can optimise that process. @@ -208,14 +208,14 @@ If you feel you have an example of a Dask best practice that you think may be he please share them with us by raising a new `discussion on the Iris repository `_. -* :doc:`dask_pp_to_netcdf` -* :doc:`dask_parallel_loop` -* :doc:`dask_bags_and_greed` +* :doc:`../tutorial/dask_pp_to_netcdf` +* :doc:`../tutorial/dask_parallel_loop` +* :doc:`../tutorial/dask_bags_and_greed` .. 
toctree:: :hidden: :maxdepth: 1 - dask_pp_to_netcdf - dask_parallel_loop - dask_bags_and_greed + ../tutorial/dask_pp_to_netcdf + ../tutorial/dask_parallel_loop + ../tutorial/dask_bags_and_greed diff --git a/docs/src/user_manual/section_indexes/general.rst b/docs/src/user_manual/section_indexes/general.rst new file mode 100644 index 0000000000..f3d28824a1 --- /dev/null +++ b/docs/src/user_manual/section_indexes/general.rst @@ -0,0 +1,28 @@ +.. include:: /common_links.inc + +General +======= + +Below are any pages not belonging to any other User Manual section. + +.. tip:: + + To maximise discoverability of **all** pages, the primary design of the + User Manual is based on navigating with the Topic and `Diataxis`_ + filters in the top page: :doc:`../index`. + +.. toctree:: + :maxdepth: 1 + + ../reference/citation + ../tutorial/controlling_merge + ../explanation/dataless_cubes + ../how_to/filtering_warnings + ../reference/glossary + ../explanation/iris_philosophy + ../explanation/missing_data_handling + ../explanation/netcdf_io + ../tutorial/s3_io + ../explanation/um_files_loading + ../explanation/ux_guide + ../explanation/which_regridder_to_use diff --git a/docs/src/user_manual/section_indexes/get_started.rst b/docs/src/user_manual/section_indexes/get_started.rst new file mode 100644 index 0000000000..f95bbf9635 --- /dev/null +++ b/docs/src/user_manual/section_indexes/get_started.rst @@ -0,0 +1,29 @@ +.. _getting_started_index: + +Get Started +=========== + +Quick Start +----------- + +This will get you up and running with just 5 minutes of reading: + +- :ref:`installing_iris` +- :ref:`iris_data_structures` +- The first part of: :ref:`loading_iris_cubes` + +Base Understanding +------------------ + +If you prefer to begin with a richer understanding, these are the pages to read: + +- :ref:`why_iris` +- Browse for an idea of Iris' capabilities: :doc:`/generated/gallery/index` +- :doc:`userguide` + +.. 
toctree:: + :maxdepth: 1 + :hidden: + + ../how_to/installing + ../explanation/why_iris diff --git a/docs/src/further_topics/ugrid/index.rst b/docs/src/user_manual/section_indexes/mesh_support.rst similarity index 75% rename from docs/src/further_topics/ugrid/index.rst rename to docs/src/user_manual/section_indexes/mesh_support.rst index c247a9dc6d..eb046e6c8a 100644 --- a/docs/src/further_topics/ugrid/index.rst +++ b/docs/src/user_manual/section_indexes/mesh_support.rst @@ -35,10 +35,10 @@ Iris' mesh support is experimental Read on to find out more... -* :doc:`data_model` - learn why the mesh experience is so different. -* :doc:`partner_packages` - meet some optional dependencies that provide powerful mesh operations. -* :doc:`operations` - experience how your workflows will look when written for mesh data. -* :doc:`other_meshes` - check out some examples of converting various mesh formats into Iris' mesh format. +* :doc:`../explanation/mesh_data_model` - learn why the mesh experience is so different. +* :doc:`../explanation/mesh_partners` - meet some optional dependencies that provide powerful mesh operations. +* :doc:`../how_to/mesh_operations` - experience how your workflows will look when written for mesh data. +* :doc:`../how_to/mesh_conversions` - check out some examples of converting various mesh formats into Iris' mesh format. .. Need an actual TOC to get Sphinx working properly, but have hidden it in @@ -48,9 +48,9 @@ Read on to find out more... 
:hidden: :maxdepth: 1 - data_model - partner_packages - operations - other_meshes + ../explanation/mesh_data_model + ../explanation/mesh_partners + ../how_to/mesh_operations + ../how_to/mesh_conversions __ CF-UGRID_ diff --git a/docs/src/user_manual/section_indexes/metadata_arithmetic.rst b/docs/src/user_manual/section_indexes/metadata_arithmetic.rst new file mode 100644 index 0000000000..1f5e2c044e --- /dev/null +++ b/docs/src/user_manual/section_indexes/metadata_arithmetic.rst @@ -0,0 +1,12 @@ +Metadata and Arithmetic +======================= + +A small series of detailed pages on how Iris handles metadata, especially when +combining :class:`~iris.cube.Cube` instances via arithmetic operations. + +.. toctree:: + :maxdepth: 1 + + ../explanation/metadata + ../explanation/lenient_metadata + ../explanation/lenient_maths diff --git a/docs/src/user_manual/section_indexes/userguide.rst b/docs/src/user_manual/section_indexes/userguide.rst new file mode 100644 index 0000000000..799e751e59 --- /dev/null +++ b/docs/src/user_manual/section_indexes/userguide.rst @@ -0,0 +1,46 @@ +User Guide: Intro to Iris +========================= + +The User Guide is designed to give an introduction to and a comprehensive +grounding in Iris' data model and functionality. It is presented in a linear +narrative style, with early sections providing a foundation for later sections. + +.. tip:: + + - :doc:`User Guide `: a linear + narrative introduction to Iris' data model and functionality. + - :doc:`/user_manual/index`: a searchable index of **all** user + documentation. + +Much of the content has supplementary links to the reference documentation; +you will not need to follow these links in order to understand the guide but +they may serve as a useful reference for future exploration. + +.. only:: html + + Since later pages depend on earlier ones, try reading this user guide + sequentially using the ``next`` and ``previous`` links at the bottom + of each page. + +.. 
note:: + + There is also useful learning material held in the + https://github.com/scitools-classroom repo, including tutorials, courses + and presentations. + + +.. toctree:: + :maxdepth: 2 + + ../explanation/iris_cubes + ../tutorial/loading_iris_cubes + ../tutorial/saving_iris_cubes + ../how_to/navigating_a_cube + ../tutorial/subsetting_a_cube + ../explanation/real_and_lazy_data + ../tutorial/plotting_a_cube + ../tutorial/interpolation_and_regridding + ../tutorial/merge_and_concat + ../tutorial/cube_statistics + ../tutorial/cube_maths + diff --git a/docs/src/further_topics/controlling_merge.rst b/docs/src/user_manual/tutorial/controlling_merge.rst similarity index 98% rename from docs/src/further_topics/controlling_merge.rst rename to docs/src/user_manual/tutorial/controlling_merge.rst index 8868306d10..de0fe9bfdc 100644 --- a/docs/src/further_topics/controlling_merge.rst +++ b/docs/src/user_manual/tutorial/controlling_merge.rst @@ -1,3 +1,8 @@ +.. tutorial:: Controlling Merge and Concatenate + :tags: topic_slice_combine;topic_customisation;topic_load_save + + A lesson in fine-grain control of combining Iris Cubes. + .. _controlling_merge: ================================= diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/user_manual/tutorial/cube_maths.rst similarity index 98% rename from docs/src/userguide/cube_maths.rst rename to docs/src/user_manual/tutorial/cube_maths.rst index 79c91ca61b..817b496686 100644 --- a/docs/src/userguide/cube_maths.rst +++ b/docs/src/user_manual/tutorial/cube_maths.rst @@ -1,3 +1,8 @@ +.. tutorial:: Cube Maths + :tags: topic_maths_stats + + A lesson in mathematical operations on Iris Cubes. + .. 
_cube maths: ========== @@ -5,7 +10,7 @@ Cube Maths ========== -The section :doc:`navigating_a_cube` highlighted that +The section :doc:`../how_to/navigating_a_cube` highlighted that every cube has a data attribute; this attribute can then be manipulated directly:: diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/user_manual/tutorial/cube_statistics.rst similarity index 99% rename from docs/src/userguide/cube_statistics.rst rename to docs/src/user_manual/tutorial/cube_statistics.rst index efc031aa43..e980fea407 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/user_manual/tutorial/cube_statistics.rst @@ -1,3 +1,8 @@ +.. tutorial:: Cube Statistics + :tags: topic_maths_stats + + A lesson in statistical operations on Iris Cubes. + .. _cube-statistics: =============== diff --git a/docs/src/further_topics/dask_best_practices/dask_bags_and_greed.rst b/docs/src/user_manual/tutorial/dask_bags_and_greed.rst similarity index 98% rename from docs/src/further_topics/dask_best_practices/dask_bags_and_greed.rst rename to docs/src/user_manual/tutorial/dask_bags_and_greed.rst index 272ea6fc08..8d6f073e84 100644 --- a/docs/src/further_topics/dask_best_practices/dask_bags_and_greed.rst +++ b/docs/src/user_manual/tutorial/dask_bags_and_greed.rst @@ -1,3 +1,8 @@ +.. tutorial:: Dask Bags and Greedy Parallelism + :tags: topic_lazy_data + + A real-world demonstration of using Dask Bags with Iris, including hazards to watch out for. + .. _examples_bags_greed: 3. 
Dask Bags and Greedy Parallelism diff --git a/docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst b/docs/src/user_manual/tutorial/dask_parallel_loop.rst similarity index 97% rename from docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst rename to docs/src/user_manual/tutorial/dask_parallel_loop.rst index 2c19196318..977238b52b 100644 --- a/docs/src/further_topics/dask_best_practices/dask_parallel_loop.rst +++ b/docs/src/user_manual/tutorial/dask_parallel_loop.rst @@ -1,3 +1,8 @@ +.. tutorial:: Parallelising a Loop of Multiple Calls to a Third Party Library + :tags: topic_lazy_data + + A real-world demonstration of parallelising function calls with Dask. + .. _examples_parallel_loop: 2. Parallelising a Loop of Multiple Calls to a Third Party Library diff --git a/docs/src/further_topics/dask_best_practices/dask_pp_to_netcdf.rst b/docs/src/user_manual/tutorial/dask_pp_to_netcdf.rst similarity index 94% rename from docs/src/further_topics/dask_best_practices/dask_pp_to_netcdf.rst rename to docs/src/user_manual/tutorial/dask_pp_to_netcdf.rst index 28784154b4..0b8f306026 100644 --- a/docs/src/further_topics/dask_best_practices/dask_pp_to_netcdf.rst +++ b/docs/src/user_manual/tutorial/dask_pp_to_netcdf.rst @@ -1,3 +1,9 @@ +.. tutorial:: Speeding up Converting PP Files to NetCDF + :tags: topic_lazy_data;topic_load_save + + A real-world demonstration of tuning Dask and Iris for better performance + when saving data. + .. _examples_pp_to_ff: 1. 
Speed up Converting PP Files to NetCDF diff --git a/docs/src/userguide/concat.svg b/docs/src/user_manual/tutorial/images/concat.svg similarity index 100% rename from docs/src/userguide/concat.svg rename to docs/src/user_manual/tutorial/images/concat.svg diff --git a/docs/src/further_topics/dask_best_practices/images/grib-bottleneck.png b/docs/src/user_manual/tutorial/images/grib-bottleneck.png similarity index 100% rename from docs/src/further_topics/dask_best_practices/images/grib-bottleneck.png rename to docs/src/user_manual/tutorial/images/grib-bottleneck.png diff --git a/docs/src/further_topics/dask_best_practices/images/loop_third_party_kapture_results.png b/docs/src/user_manual/tutorial/images/loop_third_party_kapture_results.png similarity index 100% rename from docs/src/further_topics/dask_best_practices/images/loop_third_party_kapture_results.png rename to docs/src/user_manual/tutorial/images/loop_third_party_kapture_results.png diff --git a/docs/src/userguide/merge.svg b/docs/src/user_manual/tutorial/images/merge.svg similarity index 100% rename from docs/src/userguide/merge.svg rename to docs/src/user_manual/tutorial/images/merge.svg diff --git a/docs/src/userguide/merge_and_concat.svg b/docs/src/user_manual/tutorial/images/merge_and_concat.svg similarity index 100% rename from docs/src/userguide/merge_and_concat.svg rename to docs/src/user_manual/tutorial/images/merge_and_concat.svg diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/user_manual/tutorial/interpolation_and_regridding.rst similarity index 96% rename from docs/src/userguide/interpolation_and_regridding.rst rename to docs/src/user_manual/tutorial/interpolation_and_regridding.rst index 4a95276ab2..6a888d7549 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/user_manual/tutorial/interpolation_and_regridding.rst @@ -1,3 +1,8 @@ +.. 
tutorial:: Cube Interpolation and Regridding + :tags: topic_regrid + + A lesson in Iris' interpolation and regridding functionality. + .. _interpolation_and_regridding: .. testsetup:: * @@ -32,8 +37,8 @@ The following are the regridding schemes that are currently available in Iris: The linear and nearest-neighbour interpolation schemes, and the linear, nearest-neighbour, and area-weighted regridding schemes support lazy regridding, i.e. if the source cube has lazy data, the resulting cube will also have lazy data. -See :doc:`real_and_lazy_data` for an introduction to lazy data. -See :doc:`../further_topics/which_regridder_to_use` for a more in depth overview of the different regridders. +See :doc:`../explanation/real_and_lazy_data` for an introduction to lazy data. +See :doc:`../explanation/which_regridder_to_use` for a more in depth overview of the different regridders. .. _interpolation: @@ -161,7 +166,7 @@ these sample points: Let's look at the original data, the interpolation line and the new data in a plot. This will help us to see what is going on: -.. plot:: userguide/regridding_plots/interpolate_column.py +.. plot:: user_manual/tutorial/regridding_plots/interpolate_column.py The red diamonds on the extremes of the altitude values show that we have extrapolated data beyond the range of the original data. In some cases this is @@ -222,7 +227,7 @@ Let's load two cubes that have different grids and coordinate systems: We can visually confirm that they are on different grids by plotting the two cubes: -.. plot:: userguide/regridding_plots/regridding_plot.py +.. plot:: user_manual/tutorial/regridding_plots/regridding_plot.py Let's regrid the ``global_air_temp`` cube onto a rotated pole grid using a linear regridding scheme. To achieve this we pass the ``rotated_psl`` @@ -231,7 +236,7 @@ cube onto: >>> rotated_air_temp = global_air_temp.regrid(rotated_psl, iris.analysis.Linear()) -.. plot:: userguide/regridding_plots/regridded_to_rotated.py +.. 
plot:: user_manual/tutorial/regridding_plots/regridded_to_rotated.py We could regrid the pressure values onto the global grid, but this will involve some form of extrapolation. As with interpolation, we can control the extrapolation @@ -260,7 +265,7 @@ pole grid: >>> scheme = iris.analysis.Linear(extrapolation_mode='mask') >>> global_psl = rotated_psl.regrid(global_air_temp, scheme) -.. plot:: userguide/regridding_plots/regridded_to_global.py +.. plot:: user_manual/tutorial/regridding_plots/regridded_to_global.py Notice that although we can still see the approximate shape of the rotated pole grid, the cells have now become rectangular in a plate carrée (equirectangular) projection. @@ -341,7 +346,7 @@ some data will be disregarded if it lies close to masked data. To visualise the above regrid, let's plot the original data, along with 3 distinct ``mdtol`` values to compare the result: -.. plot:: userguide/regridding_plots/regridded_to_global_area_weighted.py +.. plot:: user_manual/tutorial/regridding_plots/regridded_to_global_area_weighted.py .. _caching_a_regridder: diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/user_manual/tutorial/loading_iris_cubes.rst similarity index 98% rename from docs/src/userguide/loading_iris_cubes.rst rename to docs/src/user_manual/tutorial/loading_iris_cubes.rst index ac6b370466..e54dbc9ebd 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/user_manual/tutorial/loading_iris_cubes.rst @@ -1,3 +1,8 @@ +.. tutorial:: Loading Iris Cubes + :tags: topic_load_save + + A lesson in how Iris loading works. + .. _loading_iris_cubes: =================== @@ -114,7 +119,7 @@ Notice that the result of printing a **cube** is a little more verbose than it was when printing a **list of cubes**. In addition to the very short summary which is provided when printing a list of cubes, information is provided on the coordinates which constitute the cube in question. 
-This was the output discussed at the end of the :doc:`iris_cubes` section. +This was the output discussed at the end of the :doc:`../explanation/iris_cubes` section. .. note:: @@ -156,7 +161,7 @@ essential descriptive information or metadata : the bulk of the actual data content will only be loaded later, as it is needed. This is referred to as 'lazy' data. It allows loading to be much quicker, and to occupy less memory. -For more on the benefits, handling and uses of lazy data, see :doc:`Real and Lazy Data `. +For more on the benefits, handling and uses of lazy data, see :doc:`Real and Lazy Data `. .. _constrained-loading: @@ -408,3 +413,4 @@ API documentation for:** :class:`iris.loading.LoadProblems`. warnings.filterwarnings("ignore") helpers.get_names = get_names_original std_names.STD_NAMES["air_temperature"] = air_temperature + iris.FUTURE.date_microseconds = False diff --git a/docs/src/userguide/merge_and_concat.rst b/docs/src/user_manual/tutorial/merge_and_concat.rst similarity index 99% rename from docs/src/userguide/merge_and_concat.rst rename to docs/src/user_manual/tutorial/merge_and_concat.rst index d754e08cc1..3f717f064e 100644 --- a/docs/src/userguide/merge_and_concat.rst +++ b/docs/src/user_manual/tutorial/merge_and_concat.rst @@ -1,3 +1,8 @@ +.. tutorial:: Merge and Concatenate + :tags: topic_slice_combine + + A lesson in the various ways to combine Cubes along different dimensional axes. + .. _merge_and_concat: ===================== @@ -16,7 +21,7 @@ issues from occurring. Both ``merge`` and ``concatenate`` take multiple cubes as input and result in fewer cubes as output. The following diagram illustrates the two processes: -.. image:: merge_and_concat.svg +.. image:: images/merge_and_concat.svg :alt: Pictographic of merge and concatenation. :align: center @@ -128,7 +133,7 @@ make a new ``z`` dimension coordinate: The following diagram illustrates what has taken place in this example: -.. image:: merge.svg +.. 
image:: images/merge.svg :alt: Pictographic of merge. :align: center @@ -294,7 +299,7 @@ cubes to form a new cube with an extended ``t`` coordinate: The following diagram illustrates what has taken place in this example: -.. image:: concat.svg +.. image:: images/concat.svg :alt: Pictographic of concatenate. :align: center diff --git a/docs/src/userguide/plotting_a_cube.rst b/docs/src/user_manual/tutorial/plotting_a_cube.rst similarity index 94% rename from docs/src/userguide/plotting_a_cube.rst rename to docs/src/user_manual/tutorial/plotting_a_cube.rst index f152690835..a32464443f 100644 --- a/docs/src/userguide/plotting_a_cube.rst +++ b/docs/src/user_manual/tutorial/plotting_a_cube.rst @@ -1,3 +1,8 @@ +.. tutorial:: Plotting a Cube + :tags: topic_plotting + + A lesson on visualising Iris Cubes using Cartopy and Matplotlib. + .. _plotting_a_cube: =============== @@ -160,7 +165,7 @@ The syntax is very similar to that which you would provide to Matplotlib's equivalent :py:func:`matplotlib.pyplot.plot` and indeed all of the keyword arguments are equivalent: -.. plot:: userguide/plotting_examples/1d_simple.py +.. plot:: user_manual/tutorial/plotting_examples/1d_simple.py :include-source: For more information on how this example reduced the 2D cube to 1 dimension see @@ -179,7 +184,7 @@ to a plot. For example, the previous plot can be improved quickly by replacing **iris.plot** with **iris.quickplot**: -.. plot:: userguide/plotting_examples/1d_quickplot_simple.py +.. plot:: user_manual/tutorial/plotting_examples/1d_quickplot_simple.py :include-source: @@ -238,7 +243,7 @@ Cube Contour A simple contour plot of a cube can be created with either the :func:`iris.plot.contour` or :func:`iris.quickplot.contour` functions: -.. plot:: userguide/plotting_examples/cube_contour.py +.. 
plot:: user_manual/tutorial/plotting_examples/cube_contour.py :include-source: @@ -247,7 +252,7 @@ Cube Filled Contour Similarly a filled contour plot of a cube can be created with the :func:`iris.plot.contourf` or :func:`iris.quickplot.contourf` functions: -.. plot:: userguide/plotting_examples/cube_contourf.py +.. plot:: user_manual/tutorial/plotting_examples/cube_contourf.py :include-source: @@ -265,7 +270,7 @@ or :func:`iris.quickplot.pcolormesh`. and :func:`iris.quickplot.pcolormesh` will attempt to guess suitable values based on their points (see also :func:`iris.coords.Coord.guess_bounds()`). -.. plot:: userguide/plotting_examples/cube_blockplot.py +.. plot:: user_manual/tutorial/plotting_examples/cube_blockplot.py :include-source: .. _brewer-info: @@ -303,7 +308,7 @@ Available Brewer Schemes The following subset of Brewer palettes found at `colorbrewer2.org `_ are available within Iris. -.. plot:: userguide/plotting_examples/brewer.py +.. plot:: user_manual/tutorial/plotting_examples/brewer.py Plotting With Brewer @@ -313,7 +318,7 @@ To plot a cube using a Brewer colour palette, simply select one of the Iris registered Brewer colour palettes and plot the cube as normal. The Brewer palettes become available once :mod:`iris.plot` or :mod:`iris.quickplot` are imported. -.. plot:: userguide/plotting_examples/cube_brewer_contourf.py +.. plot:: user_manual/tutorial/plotting_examples/cube_brewer_contourf.py :include-source: @@ -327,5 +332,5 @@ Citations can be easily added to a plot using the The recommended text for the Cynthia Brewer citation is provided by :data:`iris.plot.BREWER_CITE`. -.. plot:: userguide/plotting_examples/cube_brewer_cite_contourf.py +.. 
plot:: user_manual/tutorial/plotting_examples/cube_brewer_cite_contourf.py :include-source: diff --git a/docs/src/userguide/plotting_examples/1d_quickplot_simple.py b/docs/src/user_manual/tutorial/plotting_examples/1d_quickplot_simple.py similarity index 100% rename from docs/src/userguide/plotting_examples/1d_quickplot_simple.py rename to docs/src/user_manual/tutorial/plotting_examples/1d_quickplot_simple.py diff --git a/docs/src/userguide/plotting_examples/1d_simple.py b/docs/src/user_manual/tutorial/plotting_examples/1d_simple.py similarity index 100% rename from docs/src/userguide/plotting_examples/1d_simple.py rename to docs/src/user_manual/tutorial/plotting_examples/1d_simple.py diff --git a/docs/src/userguide/plotting_examples/1d_with_legend.py b/docs/src/user_manual/tutorial/plotting_examples/1d_with_legend.py similarity index 100% rename from docs/src/userguide/plotting_examples/1d_with_legend.py rename to docs/src/user_manual/tutorial/plotting_examples/1d_with_legend.py diff --git a/docs/src/userguide/plotting_examples/brewer.py b/docs/src/user_manual/tutorial/plotting_examples/brewer.py similarity index 100% rename from docs/src/userguide/plotting_examples/brewer.py rename to docs/src/user_manual/tutorial/plotting_examples/brewer.py diff --git a/docs/src/userguide/plotting_examples/cube_blockplot.py b/docs/src/user_manual/tutorial/plotting_examples/cube_blockplot.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_blockplot.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_blockplot.py diff --git a/docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py b/docs/src/user_manual/tutorial/plotting_examples/cube_brewer_cite_contourf.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_brewer_cite_contourf.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_brewer_cite_contourf.py diff --git a/docs/src/userguide/plotting_examples/cube_brewer_contourf.py 
b/docs/src/user_manual/tutorial/plotting_examples/cube_brewer_contourf.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_brewer_contourf.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_brewer_contourf.py diff --git a/docs/src/userguide/plotting_examples/cube_contour.py b/docs/src/user_manual/tutorial/plotting_examples/cube_contour.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_contour.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_contour.py diff --git a/docs/src/userguide/plotting_examples/cube_contourf.py b/docs/src/user_manual/tutorial/plotting_examples/cube_contourf.py similarity index 100% rename from docs/src/userguide/plotting_examples/cube_contourf.py rename to docs/src/user_manual/tutorial/plotting_examples/cube_contourf.py diff --git a/docs/src/userguide/plotting_examples/masking_brazil_plot.py b/docs/src/user_manual/tutorial/plotting_examples/masking_brazil_plot.py similarity index 100% rename from docs/src/userguide/plotting_examples/masking_brazil_plot.py rename to docs/src/user_manual/tutorial/plotting_examples/masking_brazil_plot.py diff --git a/docs/src/userguide/plotting_examples/masking_stereographic_plot.py b/docs/src/user_manual/tutorial/plotting_examples/masking_stereographic_plot.py similarity index 100% rename from docs/src/userguide/plotting_examples/masking_stereographic_plot.py rename to docs/src/user_manual/tutorial/plotting_examples/masking_stereographic_plot.py diff --git a/docs/src/userguide/regridding_plots/interpolate_column.py b/docs/src/user_manual/tutorial/regridding_plots/interpolate_column.py similarity index 100% rename from docs/src/userguide/regridding_plots/interpolate_column.py rename to docs/src/user_manual/tutorial/regridding_plots/interpolate_column.py diff --git a/docs/src/userguide/regridding_plots/regridded_to_global.py b/docs/src/user_manual/tutorial/regridding_plots/regridded_to_global.py similarity index 100% rename 
from docs/src/userguide/regridding_plots/regridded_to_global.py rename to docs/src/user_manual/tutorial/regridding_plots/regridded_to_global.py diff --git a/docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py b/docs/src/user_manual/tutorial/regridding_plots/regridded_to_global_area_weighted.py similarity index 100% rename from docs/src/userguide/regridding_plots/regridded_to_global_area_weighted.py rename to docs/src/user_manual/tutorial/regridding_plots/regridded_to_global_area_weighted.py diff --git a/docs/src/userguide/regridding_plots/regridded_to_rotated.py b/docs/src/user_manual/tutorial/regridding_plots/regridded_to_rotated.py similarity index 100% rename from docs/src/userguide/regridding_plots/regridded_to_rotated.py rename to docs/src/user_manual/tutorial/regridding_plots/regridded_to_rotated.py diff --git a/docs/src/userguide/regridding_plots/regridding_plot.py b/docs/src/user_manual/tutorial/regridding_plots/regridding_plot.py similarity index 100% rename from docs/src/userguide/regridding_plots/regridding_plot.py rename to docs/src/user_manual/tutorial/regridding_plots/regridding_plot.py diff --git a/docs/src/user_manual/tutorial/s3_io.rst b/docs/src/user_manual/tutorial/s3_io.rst new file mode 100644 index 0000000000..5c0cd89a2a --- /dev/null +++ b/docs/src/user_manual/tutorial/s3_io.rst @@ -0,0 +1,265 @@ +.. tutorial:: Loading From and Saving To S3 Buckets + :tags: topic_load_save + + A lesson on using s3-fuse with Iris to load/save data from/to S3 buckets. + +.. _s3_io: + +Loading From and Saving To S3 Buckets +===================================== + +For cloud computing, it is natural to want to access data storage based on URIs. +At the present time, by far the most widely used platform for this is +`Amazon S3 "buckets" `_. + +It is common to treat an S3 bucket like a "disk", storing files as individual S3 +objects. 
S3 access URLs can also contain a nested +`'prefix string' `_ +structure, which naturally mirrors sub-directories in a file-system. + +While it would be possible for Iris to support S3 access directly, as it does the +"OpenDAP" protocol for netCDF data, this approach has some serious limitations : most +notably, each supported file format would have to be separately extended to support S3 +URLs in the place of file paths for loading and saving. + +Instead, we have found that it is most practical to perform this access using a virtual +file system approach. However, one drawback is that this is best controlled *outside* +the Python code -- see details below. + + +TL;DR +----- +Install s3-fuse and use its ``s3fs`` command, to create a file-system mount which maps +to an S3 bucket. S3 objects can then be accessed as a regular files (read and write). + + +Fsspec, S3-fs, fuse and s3-fuse +-------------------------------- +This approach depends on a set of related code solutions, as follows: + +`fsspec `_ +is a general framework for implementing Python-file-like access to alternative storage +resources. + +`s3fs `_ +is a package based on fsspec, which enables Python to "open" S3 data objects as Python +file-like objects for reading and writing. + +`fuse `_ +is an interface library that enables a data resource to be "mounted" as a Linux +filesystem, with user (not root) privilege. + +`s3-fuse `_ +is a utility based on s3fs and fuse, which provides a POSIX-compatible "mount" so that +an S3 bucket can be accessed as a regular Unix file system. + + +Practical usage +--------------- +Of the above, the only thing you actually need to know about is **s3-fuse**. + +There is an initial one-time setup, and also actions to take in advance of launching +Python, and after exit, each time you want to access S3 from Python. + +Prior requirements +^^^^^^^^^^^^^^^^^^ + +Install "s3-fuse" +~~~~~~~~~~~~~~~~~ +The most reliable method is to install into your Linux O.S. 
See +`installation instructions `_ . +This presumes that you perform a system installation with ``apt``, ``yum`` or similar. + +If you do not have necessary 'sudo' or root access permissions, we have found that it +is sufficient to install only **into your Python environment**, using conda. +Though not suggested, this appears to work on Unix systems where we have tried it. + +For this, you can use conda -- e.g. + +.. code-block:: bash + + $ conda install s3-fuse + +( Or better, put it into a reusable 'spec file', with all other requirements, and then +use ``$ conda create --file ...`` +). + +.. note:: + + It is **not** possible to install s3fs-fuse into a Python environment with ``pip``, + as it is not a Python package. + + +Create an empty mount directory +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +You need an empty directory in your existing filesystem tree, that you will map your +S3 bucket **onto** -- e.g. + +.. code-block:: bash + + $ mkdir /home/self.me/s3_root/testbucket_mountpoint + + +Setup AWS credentials +~~~~~~~~~~~~~~~~~~~~~ +Provide S3 access credentials in an AWS credentials file, as described +`here in the the s3-fuse documentation `_. + +There is a general introduction to AWS credentials +`here in the AWS documentation `_ +which should explain what you need here. + + +Before use (before each Python invocation) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Activate your Python environment, which then gives access to the **s3-fuse** Linux +command "s3fs". + +Map your S3 bucket "into" the chosen empty directory -- e.g. + +.. code-block:: bash + + $ s3fs my-test-bucket /home/self.me/s3_root/testbucket_mountpoint + +.. note:: + + You can now freely list/access contents of your bucket at this path + -- including updating or writing files. + +.. note:: + + This performs a Unix file-system "mount" operation, which temporarily + modifies your system. This change is not part of the current environment, and is not + limited to the scope of the current process. 
+ + If you reboot, the mount will disappear. If you logout and login again, there can + be problems : ideally you should avoid this by always "unmounting" (see below). + +.. note:: + + The command for mounting an s3-fuse filesystem is ``s3fs`` - this should not be + confused with the similarly named s3fs python package. + + +Within Python code +^^^^^^^^^^^^^^^^^^ +You can now access objects at the remote S3 URL via the mount point on your local file +system you just created with `s3fs`, e.g. + +.. code-block:: python + + >>> path = "/home/self.me/s3_root/testbucket_mountpoint/sub_dir/a_file.nc" + >>> cubes = iris.load(path) + + +After use (after Python exit) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +When you have finished accessing the S3 objects in the mounted virtual filesystem, it +is a good idea to **unmount** it. Before doing this, make sure that all file handles to +the objects have been closed and there are no terminals open in that directory. + +.. code-block:: bash + + $ umount /home/self.me/s3_root/testbucket_mountpoint + +.. note:: + + The ``umount`` command is a standard Unix command. It may not always succeed, in which case + some kind of retry may be needed -- see detail notes below. + + The mount created will not survive a system reboot, nor does it function correctly + if the user logs out + logs in again. + + Presumably, problems could occur if repeated operation were to create a very large + number of mounts, so unmounting after use does seem advisable. + + +Some Pros and Cons of this approach +----------------------------------- + +PROs +^^^^ + +* **s3fs** supports random access to "parts" of a file, allowing efficient handling of + datasets larger than memory without requiring the data to be explicitly sharded + in storage. + +* **s3-fuse** is transparent to file access within Python, including Iris load+save or + other files accessed via a Python 'open' : the S3 data appears to be files in a + regular file-system. 
+ +* the file-system virtualisation approach works for all file formats, since the + mapping occurs in the O.S. rather than in Iris, or Python. + +* "mounting" avoids the need for the Python code to dynamically connect to / + disconnect from an S3 bucket. + +* the "unmount problem" (see below) is managed at the level of the operating system, + where it occurs, instead of trying to allow for it in Python code. This means it + could be managed differently in different operating systems, if needed. + +* it does also work with many other cloud object-storage platforms, though with extra + required dependencies in some cases. + See the s3fs-fuse `Non-Amazon S3`_ docs page for details. + +CONs +^^^^ + +* only works on Unix-like O.S. + +* requires the "fuse" kernel module to be supported in your O.S. + This is usually installed by default, but may not always be. + See `'fuse' kernel module `_ + for more detail. + +* the file-system virtualisation may not be perfect : some file-system operations + might not behave as expected, e.g. with regard to file permissions or system + information. + +* it requires user actions *outside* the Python code. + +* the user must manage the mount/umount context. + +* some similar cloud object-storage platforms are *not* supported. + See the s3fs-fuse `Non-Amazon S3`_ docs page for details of those which are. + + +Background Notes and Details +---------------------------- + +* The file-like objects provided by **fsspec** replicate nearly *all* the behaviours + of a regular Python file. + + However, this is still hard to integrate with regular file access, since you + cannot create one from a regular Python "open" call -- still less + when opening a file with an underlying file-format such as netCDF4 or HDF5 + (since these are usually implemented in other languages such as C). + Nor can you interrogate file paths or system metadata, e.g. permissions. 
+ + So, the key benefit offered by **s3-fuse** is that all functions are mapped + onto regular O.S. file-system calls -- so the file-format never needs to + know that the data is not a "real" file. + +* It would be possible, instead, to copy data into an *actual* file on disk, but the + s3-fuse approach avoids the need for copying, and thus in a cloud environment also + the cost and maintenance of a "local disk". + + s3fs also allows the software to access only *required* parts of a file, without + copying the whole content. This is obviously essential for efficient use of large + datasets, e.g. when larger than available memory. + +* It is also possible to use **s3-fuse** to establish the mounts *from within Python*. + However, we have considered integrating this into Iris and rejected it because of + unavoidable problems : namely, the "umount problem" (see below). + For details, see : https://github.com/SciTools/iris/pull/6731 + +* "Unmounting" must be done via a shell ``umount`` command, and there is no easy way to + guarantee that this succeeds, since it can often get a "target is busy" error. + + This "umount problem" is a known problem in Unix generally : see + `here `_ . + + It can only be resolved by a delay + retry. + + +.. _Non-Amazon S3: https://github.com/s3fs-fuse/s3fs-fuse/wiki/Non-Amazon-S3 diff --git a/docs/src/userguide/saving_iris_cubes.rst b/docs/src/user_manual/tutorial/saving_iris_cubes.rst similarity index 97% rename from docs/src/userguide/saving_iris_cubes.rst rename to docs/src/user_manual/tutorial/saving_iris_cubes.rst index 2ffc8c47d3..50466f8261 100644 --- a/docs/src/userguide/saving_iris_cubes.rst +++ b/docs/src/user_manual/tutorial/saving_iris_cubes.rst @@ -1,3 +1,8 @@ +.. tutorial:: Saving Iris Cubes + :tags: topic_load_save + + A lesson on writing Iris Cubes to file in various formats. + .. 
_saving_iris_cubes: ================== diff --git a/docs/src/userguide/subsetting_a_cube.rst b/docs/src/user_manual/tutorial/subsetting_a_cube.rst similarity index 98% rename from docs/src/userguide/subsetting_a_cube.rst rename to docs/src/user_manual/tutorial/subsetting_a_cube.rst index cbf3cb4c2e..53fe027243 100644 --- a/docs/src/userguide/subsetting_a_cube.rst +++ b/docs/src/user_manual/tutorial/subsetting_a_cube.rst @@ -1,4 +1,9 @@ -.. include:: ../common_links.inc +.. tutorial:: Subsetting a Cube + :tags: topic_slice_combine + + A lesson on subsetting Iris Cubes by extraction, masking, and iteration. + +.. include:: ../../common_links.inc .. _subsetting_a_cube: @@ -437,7 +442,7 @@ function and this returns a copy of the cube with a :py:class:`numpy.masked_arra as the data payload, where the data outside the shape is hidden by the masked array. -.. plot:: userguide/plotting_examples/masking_brazil_plot.py +.. plot:: user_manual/tutorial/plotting_examples/masking_brazil_plot.py :include-source: We can see that the dimensions of the cube haven't changed - the plot still has @@ -460,7 +465,7 @@ data in a stereographic projection (with projected coordinates with units of metres), and mask it to only show data over the United Kingdom, based on a shapefile of the UK boundary defined in WGS84 lat-lon coordinates. -.. plot:: userguide/plotting_examples/masking_stereographic_plot.py +.. 
plot:: user_manual/tutorial/plotting_examples/masking_stereographic_plot.py :include-source: diff --git a/docs/src/userguide/change_management_goals.txt b/docs/src/userguide/change_management_goals.txt deleted file mode 100644 index afed1ebb98..0000000000 --- a/docs/src/userguide/change_management_goals.txt +++ /dev/null @@ -1,9 +0,0 @@ -To reduce code maintenance problems to an absolute minimum, Iris applies -defined change management procedures to ensure that : - - * you can be confident that your code will still work with a future release - - * you will be aware of future incompatibility problems in advance - - * you can defer making code compatibility changes for some time, until it suits you - diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst deleted file mode 100644 index 2b77129a4e..0000000000 --- a/docs/src/userguide/index.rst +++ /dev/null @@ -1,45 +0,0 @@ -.. _user_guide_index: -.. _user_guide_introduction: - -User Guide -========== - -If you are reading this user guide for the first time it is strongly -recommended that you read the user guide fully before experimenting with your -own data files. - -Much of the content has supplementary links to the reference documentation; -you will not need to follow these links in order to understand the guide but -they may serve as a useful reference for future exploration. - -.. only:: html - - Since later pages depend on earlier ones, try reading this user guide - sequentially using the ``next`` and ``previous`` links at the bottom - of each page. - -.. note:: - - There is also useful learning material held in the - https://github.com/scitools-classroom repo, including tutorials, courses - and presentations. - - -.. 
toctree:: - :maxdepth: 2 - - iris_cubes - loading_iris_cubes - saving_iris_cubes - navigating_a_cube - subsetting_a_cube - real_and_lazy_data - plotting_a_cube - interpolation_and_regridding - merge_and_concat - cube_statistics - cube_maths - citation - iris_philosophy - glossary - ../further_topics/index diff --git a/docs/src/voted_issues.rst b/docs/src/voted_issues.rst index a550a997ba..26eaa78b07 100644 --- a/docs/src/voted_issues.rst +++ b/docs/src/voted_issues.rst @@ -5,6 +5,8 @@ Voted Issues ============ +.. todo:: https://github.com/SciTools/iris/issues/6867 - this page belongs in 'Get Involved'. + You can help us to prioritise development of new features by leaving a 👍 reaction on the header (not subsequent comments) of any issue. diff --git a/docs/src/whatsnew/1.7.rst b/docs/src/whatsnew/1.7.rst index 4c3f3197dc..ed135caf3c 100644 --- a/docs/src/whatsnew/1.7.rst +++ b/docs/src/whatsnew/1.7.rst @@ -312,8 +312,8 @@ Documentation ============= * New sections on :ref:`cube broadcasting ` and - :doc:`regridding and interpolation ` - have been added to the :doc:`user guide `. + :doc:`regridding and interpolation ` + have been added to the :doc:`User Guide `. * An example demonstrating custom log-scale colouring has been added. See :ref:`sphx_glr_generated_gallery_general_plot_anomaly_log_colouring.py`. diff --git a/docs/src/whatsnew/1.8.rst b/docs/src/whatsnew/1.8.rst index dd2ca5e155..51ab612d8c 100644 --- a/docs/src/whatsnew/1.8.rst +++ b/docs/src/whatsnew/1.8.rst @@ -220,12 +220,12 @@ Deprecations Documentation ============= -* A chapter on :doc:`merge and concatenate ` has - been added to the :doc:`user guide `. +* A chapter on :doc:`merge and concatenate ` has + been added to the :doc:`User Guide `. * A section on installing Iris using `conda `_ has - been added to the :doc:`install guide `. + been added to the :doc:`install guide `. * Updates to the chapter on - :doc:`regridding and interpolation ` - have been added to the :doc:`user guide `. 
+ :doc:`regridding and interpolation ` + have been added to the :doc:`User Guide `. diff --git a/docs/src/whatsnew/1.9.rst b/docs/src/whatsnew/1.9.rst index 9829d8ff3b..7f16011ba0 100644 --- a/docs/src/whatsnew/1.9.rst +++ b/docs/src/whatsnew/1.9.rst @@ -64,7 +64,7 @@ Features a fixed surface type of 107. * Added several new helper functions for file-save customisation, - (see also : :doc:`Saving Iris Cubes `): + (see also : :doc:`Saving Iris Cubes `): * :meth:`iris.fileformats.grib.as_pairs` * :meth:`iris.fileformats.grib.as_messages` @@ -193,8 +193,8 @@ Deprecations Documentation ============= -* A chapter on :doc:`saving iris cubes ` has been - added to the :doc:`user guide `. +* A chapter on :doc:`saving iris cubes ` has been + added to the :doc:`User Guide `. * Added script and documentation for building a what's new page from developer-submitted contributions. See diff --git a/docs/src/whatsnew/2.0.rst b/docs/src/whatsnew/2.0.rst index 1ee159c662..a2d8bde77b 100644 --- a/docs/src/whatsnew/2.0.rst +++ b/docs/src/whatsnew/2.0.rst @@ -26,7 +26,7 @@ Features In particular, Dask's *threaded*, *multiprocessing* or *distributed* `schedulers`_ can be used in order to best utilise available compute and memory resource. For further details, see :doc:`Real and Lazy Data - `. + `. * Changes to the :class:`iris.cube.Cube`: @@ -290,8 +290,8 @@ Documentation ============= * A new UserGuide chapter on :doc:`Real and Lazy Data - ` has been added, and referenced from key - points in the :doc:`User Guide ` . + ` has been added, and referenced from key + points in the :doc:`User Guide ` . .. _Biggus: https://biggus.readthedocs.io/en/latest/ diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst index 13629cd9de..fc81f67ba4 100644 --- a/docs/src/whatsnew/3.0.rst +++ b/docs/src/whatsnew/3.0.rst @@ -479,7 +479,7 @@ v3.0.4 (22 July 2021) links are more visible to users. This uses the sphinx-panels_ extension. (:pull:`3884`) -#. 
`@bjlittle`_ created the :ref:`Further topics ` section and +#. `@bjlittle`_ created the 'Further Topics' section and included documentation for :ref:`metadata`, :ref:`lenient metadata`, and :ref:`lenient maths`. (:pull:`3890`) diff --git a/docs/src/whatsnew/3.4.rst b/docs/src/whatsnew/3.4.rst index e8d4f0fd2b..cbea3eba8c 100644 --- a/docs/src/whatsnew/3.4.rst +++ b/docs/src/whatsnew/3.4.rst @@ -70,7 +70,7 @@ v3.4.1 (21 Feb 2023) non-existing paths, and added expansion functionality to :func:`~iris.io.save`. (:issue:`4772`, :pull:`4913`) -#. `@trexfeathers`_ and `Julian Heming`_ added new mappings between CF +#. `@trexfeathers`_ and `@mo-jheming`_ added new mappings between CF standard names and UK Met Office LBFC codes. (:pull:`4859`) #. `@pp-mo`_ changed the metadata of a face/edge-type @@ -286,7 +286,7 @@ v3.4.1 (21 Feb 2023) core dev names are automatically included by the common_links.inc: .. _@TTV-Intrepid: https://github.com/TTV-Intrepid -.. _Julian Heming: https://www.metoffice.gov.uk/research/people/julian-heming +.. _@mo-jheming: https://github.com/mo-jheming .. _@hsteptoe: https://github.com/hsteptoe diff --git a/docs/src/whatsnew/3.5.rst b/docs/src/whatsnew/3.5.rst index c6699ee842..686cfdb2ae 100644 --- a/docs/src/whatsnew/3.5.rst +++ b/docs/src/whatsnew/3.5.rst @@ -140,12 +140,12 @@ This document explains the changes made to Iris for this release and removed an ECMWF link in the ``v1.0`` What's New that was failing the linkcheck CI. (:pull:`5109`) -#. `@trexfeathers`_ added a new top-level :doc:`/community/index` section, +#. `@trexfeathers`_ added a new top-level :doc:`/user_manual/section_indexes/community` section, as a one-stop place to find out about getting involved, and how we relate to other projects. (:pull:`5025`) #. 
The **Iris community**, with help from the **Xarray community**, produced - the :doc:`/community/iris_xarray` page, highlighting the similarities and + the :doc:`../user_manual/explanation/iris_xarray` page, highlighting the similarities and differences between the two packages. (:pull:`5025`) #. `@bjlittle`_ added a new section to the `README.md`_ to show our support diff --git a/docs/src/whatsnew/3.7.rst b/docs/src/whatsnew/3.7.rst index fdadb20412..41ca3c4206 100644 --- a/docs/src/whatsnew/3.7.rst +++ b/docs/src/whatsnew/3.7.rst @@ -130,7 +130,7 @@ v3.7.1 (04 Mar 2024) .. _dask_guide: -#. `@HGWright`_ added a :doc:`/further_topics/dask_best_practices/index` +#. `@HGWright`_ added a :doc:`/user_manual/section_indexes/dask_best_practices` section into the user guide, containing advice and use cases to help users get the best out of Dask with Iris. (:pull:`5190`) diff --git a/docs/src/whatsnew/3.8.rst b/docs/src/whatsnew/3.8.rst index 9fa87a9337..9ec2be4722 100644 --- a/docs/src/whatsnew/3.8.rst +++ b/docs/src/whatsnew/3.8.rst @@ -215,9 +215,9 @@ v3.8.1 (04 Mar 2024) #. `@bouweandela`_ updated all hyperlinks to https. (:pull:`5621`) -#. `@ESadek-MO`_ created an index page for :ref:`further_topics_index`, and +#. `@ESadek-MO`_ created an index page for 'Further Topics', and relocated all 'Technical Papers' into - :ref:`further_topics_index`. (:pull:`5602`) + 'Further Topics'. (:pull:`5602`) #. `@trexfeathers`_ made drop-down icons visible to show which pages link to 'sub-pages'. (:pull:`5684`) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index ed0dff6197..f1d8547c75 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -24,7 +24,10 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. N/A +#. We've had a makeover! ✨ All user documentation pages have been reorganised + into a new structure: :doc:`/user_manual/index`. 
This restructure is to + maximise discoverability of the available pages, as well as embracing the + `Diataxis`_ framework for better engagement with user needs going forward. ✨ Features @@ -48,6 +51,8 @@ This document explains the changes made to Iris for this release :func:`~iris.cube.Cube.rolling_window` and :func:`~iris.cube.Cube.intersection` to work with dataless cubes. (:pull:`6860`, :pull:`6757`) +#. `@HGWright`_ added to the Nimrod loader to expand the types of Nimrod files it can load. This includes selecting which Nimrod table to use the data entry headers from. (:issue:`4505`, :pull:`6763`) + 🐛 Bugs Fixed ============= @@ -69,7 +74,12 @@ This document explains the changes made to Iris for this release 🔥 Deprecations =============== -#. N/A +#. `@ESadek-MO`_ has deprecated the :class:`~iris.tests.IrisTest` class, and other unittest-based + testing conveniences in favour of the conveniences found in :mod:`iris/tests/_shared_utils.py`. + (:pull:`6950`) + +#. `@hsteptoe`_ has deprecated the use of the `copy` kwarg across :mod:`iris.pandas` to reflect changes + to the default behaviour of pandas v3 `New pandas v3 copy behaviour`_. (:pull:`6948`) 🔗 Dependencies @@ -81,7 +91,22 @@ This document explains the changes made to Iris for this release 📚 Documentation ================ -#. N/A +#. `@tkknight`_ reduced the space used on the documentation homepage by the quick + link cards to allow for easier reading. (:pull:`6886`) + +#. `@tkknight`_ added a gallery carousel to the documentation homepage. (:pull:`6884`) + +#. :user:`bjlittle` added the ``:user:`` `extlinks`_ ``github`` user convenience. + (:pull:`6931`) + +#. `@pp-mo`_ added a page on how to access datafiles in S3 buckets. + (:issue:`6374`, :pull:`6951`) + +#. `@trexfeathers`_, `@stephenworsley`_, `@ESadek-MO`_ and `@tkknight`_ reorganised **all** + user documentation pages into a new structure: :doc:`/user_manual/index`. 
+ + This restructure is to maximise discoverability of the available pages, as + well as embracing the `Diataxis`_ framework for better engagement with user + needs going forward. (:issue:`6511`, :pull:`6868`) 💼 Internal =========== @@ -98,13 +123,36 @@ This document explains the changes made to Iris for this release #. `@tkknight`_ removed flake8, we have ruff now instead. (:pull:`6889`) +#. `@trexfeathers`_ and `@ukmo-ccbunney`_ updated CI to support Python 3.14 + inline with `SPEC0 Minimum Supported Dependencies`_. Note: `pyvista` (and + hence `geovista`) is not yet compatible with Python 3.14, so + :mod:`iris.experimental.geovista` is currently only available for + Python \<3.14. (:pull:`6816`, :issue:`6775`) + +#. `@ESadek-MO`_, `@trexfeathers`_, `@bjlittle`_, `@HGWright`_, `@pp-mo`_, + `@stephenworsley`_ and `@ukmo-ccbunney`_ converted the entirety of the tests + from unittest to pytest. Iris is now also ruff-PT compliant, save for PT019. + (:issue:`6212`, :pull:`6939`) + +#. `@hsteptoe`_ and `@ESadek-MO`_ (reviewer) updated chained assignment usage within the tests + associated with :mod:`iris.pandas` to reflect changes in pandas v3 `New pandas v3 copy behaviour`_. + (:pull:`6948`, :issue:`6761`) + +#. `@hsteptoe`_ and `@ESadek-MO`_ (reviewer) added static type hinting to :mod:`iris.pandas`. (:pull:`6948`) + +#. `@ukmo-ccbunney`_ changed formatting of numpy scalars attributes when generating a + Cube/Coord summary to use ``str`` representation instead of ``repr``. + (:pull:`6966`, :issue:`6692`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: .. _@hdyson: https://github.com/hdyson - - +.. _@hsteptoe: https://github.com/hsteptoe .. comment Whatsnew resources in alphabetical order: + +.. _New pandas v3 copy behaviour: https://pandas.pydata.org/docs/whatsnew/v3.0.0.html#consistent-copy-view-behaviour-with-copy-on-write +..
_SPEC0 Minimum Supported Dependencies: https://scientific-python.org/specs/spec-0000/ diff --git a/lib/iris/__init__.py b/lib/iris/__init__.py index fc83615a40..90f8c1f51a 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -4,10 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """A package for handling multi-dimensional data and associated metadata. +.. z_reference:: iris + :tags: topic_load_save;topic_data_model;topic_customisation;topic_slice_combine + + API reference + .. note :: The Iris documentation has further usage information, including - a :ref:`user guide ` which should be the first port of + a :ref:`user manual ` which should be the first port of call for new users. The functions in this module provide the main way to load and/or save diff --git a/lib/iris/_deprecation.py b/lib/iris/_deprecation.py index b771883a71..27127ecf5b 100644 --- a/lib/iris/_deprecation.py +++ b/lib/iris/_deprecation.py @@ -4,8 +4,82 @@ # See LICENSE in the root of the repository for full licensing details. """Utilities for producing runtime deprecation messages.""" +from functools import wraps +import inspect import warnings +from iris.warnings import IrisUserWarning + + +def explicit_copy_checker(f): + """Check for explicitly set parameters in a function. + + This is intended to be used as a decorator for functions that take a + variable number of parameters, to allow the function to determine which + parameters were explicitly set by the caller. + + This can be helpful when wanting raise DeprecationWarning of function + parameters, but only when they are explicitly set by the caller, and not + when they are left at their default value. + + Parameters + ---------- + f : function + The function to be decorated. The function must have a signature that + allows for variable parameters (e.g. ``*args`` and/or ``**kwargs``), and + the parameters to be checked must be explicitly listed in the function + signature (i.e. 
not just passed via ``**kwargs``). + + Returns + ------- + function + The decorated function, which will have an additional keyword argument + ``explicit_params`` added to its signature. This argument will be a set + of the names of the parameters that were explicitly set by the caller when + calling the function. + + Examples + -------- + The following example shows how to use the ``explicit_copy_checker`` decorator to + check for explicitly set parameters in a function, and raise a DeprecationWarning + if a deprecated parameter is explicitly set by the caller. + + >>> from iris._deprecation import explicit_copy_checker, IrisDeprecation + >>> @explicit_copy_checker + ... def my_function(a, b=1): + ... print(f"a={a}, b={b}") + ... if "b" in kwargs["explicit_params"]: + ... warnings.warn("Parameter 'b' is deprecated.", IrisDeprecation) + >>> my_function(1) # No warning, 'b' is not explicitly set + >>> my_function(1, b=3) # Warning, 'b' is explicitly set + + """ + varnames = inspect.getfullargspec(f)[0] + + @wraps(f) + def wrapper(*a, **kw): + explicit_params = set(list(varnames[: len(a)]) + list(kw.keys())) + if "copy" in explicit_params: + if kw["copy"] is False: + msg = ( + "Pandas v3 behaviour defaults to copy=True. The `copy`" + f" parameter in `{f.__name__}` is deprecated and" + "will be removed in a future release." + ) + warnings.warn(msg, category=IrisUserWarning) + else: + msg = ( + f"The `copy` parameter in `{f.__name__}` is deprecated and" + " will be removed in a future release. The function will" + " always make a copy of the data array, to ensure that the" + " returned Cubes are independent of the input pandas data." + ) + warn_deprecated(msg) + else: + return f(*a, **kw) + + return wrapper + class IrisDeprecation(UserWarning): """An Iris deprecation warning. 
diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 20d93f1acf..ae0318d728 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -81,7 +81,8 @@ def array_repr(arr): def value_repr(value, quote_strings=False, clip_strings=False): """Produce a single-line printable version of an attribute or scalar value.""" - if hasattr(value, "dtype"): + if hasattr(value, "dtype") and hasattr(value, "shape") and len(value.shape) > 0: + # Only format as array if value is not a scalar. value = array_repr(value) elif isinstance(value, str): value = string_repr( diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index ca530cf931..2520889c88 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """A package providing :class:`iris.cube.Cube` analysis support. +.. z_reference:: iris.analysis + :tags: topic_maths_stats;topic_regrid + + API reference + This module defines a suite of :class:`~iris.analysis.Aggregator` instances, which are used to specify the statistical measure to calculate over a :class:`~iris.cube.Cube`, using methods such as @@ -2281,7 +2286,7 @@ def interp_order(length): Notes ------ This function does not maintain laziness when called; it realises data. -See more at :doc:`/userguide/real_and_lazy_data`. +See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ @@ -2599,7 +2604,7 @@ def clear_phenomenon_identity(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" cube.rename(None) diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 3ba406e02a..e54365e248 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -430,7 +430,7 @@ def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwarg Vector magnitudes will always be the same as the inputs. This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ u_out, v_out = (cube.copy() for cube in (u_cube, v_cube)) diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 4f485c0680..fededb029b 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Calculus operations on :class:`iris.cube.Cube` instances. +.. z_reference:: iris.analysis.calculus + :tags: topic_maths_stats + + API reference + See also: :mod:`NumPy `. """ @@ -150,7 +155,7 @@ def cube_delta(cube, coord): .. note:: This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # handle the case where a user passes a coordinate name @@ -261,7 +266,7 @@ def differentiate(cube, coord_to_differentiate): .. note:: This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # Get the delta cube in the required differential direction. @@ -549,7 +554,7 @@ def curl(i_cube, j_cube, k_cube=None): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" # Get the vector quantity names. @@ -773,7 +778,7 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index d055266d98..e88711d51f 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Various utilities and numeric transformations relevant to cartography.""" +"""Various utilities and numeric transformations relevant to cartography. + +.. z_reference:: iris.analysis.cartography + :tags: topic_maths_stats + + API reference +""" from collections import namedtuple import copy @@ -75,7 +81,7 @@ def wrap_lons(lons, base, period): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # It is important to use 64bit floating precision when changing a floats # numbers range. @@ -279,7 +285,7 @@ def get_xy_grids(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") @@ -317,7 +323,7 @@ def get_xy_contiguous_bounded_grids(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") @@ -552,7 +558,7 @@ def cosine_latitude_weights(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # Find all latitude coordinates, we want one and only one. lat_coords = [coord for coord in cube.coords() if "latitude" in coord.name()] @@ -656,7 +662,7 @@ def project(cube, target_proj, nx=None, ny=None): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. warning:: @@ -1149,7 +1155,7 @@ def rotate_winds(u_cube, v_cube, target_cs): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. warning:: diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index 120b6dfaa6..dc828851c5 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Various utilities related to geometric operations. +.. z_reference:: iris.analysis.geometry + :tags: topic_maths_stats + + API reference + .. note:: This module requires :mod:`shapely`. @@ -162,7 +167,7 @@ def geometry_area_weights(cube, geometry, normalize=False): .. note:: This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
Parameters ---------- diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 24d95153b5..7441cdccfe 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Basic mathematical and statistical operations.""" +"""Basic mathematical and statistical operations. + +.. z_reference:: iris.analysis.maths + :tags: topic_maths_stats + + API reference +""" from functools import lru_cache import inspect @@ -107,7 +113,7 @@ def abs(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -156,7 +162,7 @@ def intersection_of_cubes(cube, other_cube): cube1, cube2 = (intersections[0], intersections[1]) This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ wmsg = ( @@ -237,7 +243,7 @@ def add(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -289,7 +295,7 @@ def subtract(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -396,7 +402,7 @@ def multiply(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. 
+ See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -480,7 +486,7 @@ def divide(cube, other, dim=None, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -545,7 +551,7 @@ def exponentiate(cube, exponent, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -593,7 +599,7 @@ def exp(cube, in_place=False): Taking an exponential will return a cube with dimensionless units. This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -621,7 +627,7 @@ def log(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -653,7 +659,7 @@ def log2(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -681,7 +687,7 @@ def log10(cube, in_place=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" _assert_is_cube(cube) @@ -736,7 +742,7 @@ def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place= This function maintains laziness when called; it does not realise data. This is dependent on `ufunc` argument being a numpy operation that is compatible with lazy operation. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if not isinstance(ufunc, np.ufunc): diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 8df93571f1..478b87ad21 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Statistical operations between cubes.""" +"""Statistical operations between cubes. + +.. z_reference:: iris.analysis.stats + :tags: topic_maths_stats + + API reference +""" import dask.array as da import numpy as np diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 100c6a5de1..43927f9257 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory.""" +"""Defines a Trajectory class, and a routine to extract a sub-cube along a trajectory. + +.. z_reference:: iris.analysis.trajectory + :tags: topic_maths_stats;topic_regrid + + API reference +""" import math @@ -207,7 +213,7 @@ def interpolate(cube, sample_points, method=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" from iris.analysis import Linear diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index e322dfeb84..3952c8c924 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of derived coordinates.""" +"""Definitions of derived coordinates. + +.. z_reference:: iris.aux_factory + :tags: topic_data_model + + API reference +""" from abc import ABCMeta, abstractmethod import warnings diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py index f9ad2bf207..03fc934a42 100644 --- a/lib/iris/common/__init__.py +++ b/lib/iris/common/__init__.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A package for provisioning common Iris infrastructure.""" +"""A package for provisioning common Iris infrastructure. + +.. z_reference:: iris.common + :tags: topic_data_model;topic_maths_stats + + API reference +""" from .lenient import * from .metadata import * diff --git a/lib/iris/common/lenient.py b/lib/iris/common/lenient.py index d6fca461d1..22b9dd3d8b 100644 --- a/lib/iris/common/lenient.py +++ b/lib/iris/common/lenient.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the infrastructure to support lenient client/service behaviour.""" +"""Provides the infrastructure to support lenient client/service behaviour. + +.. 
z_reference:: iris.common.lenient + :tags: topic_data_model;topic_maths_stats + + API reference +""" from collections.abc import Iterable from contextlib import contextmanager diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 55ad4e1319..f4af7584b4 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides the infrastructure to support the common metadata API.""" +"""Provides the infrastructure to support the common metadata API. + +.. z_reference:: iris.common.metadata + :tags: topic_data_model + + API reference +""" from __future__ import annotations @@ -204,13 +210,15 @@ def func(field): # Certain members never participate in strict equivalence, so # are filtered out. fields = filter( - lambda field: field - not in ( - "circular", - "location_axis", - "node_dimension", - "edge_dimension", - "face_dimension", + lambda field: ( + field + not in ( + "circular", + "location_axis", + "node_dimension", + "edge_dimension", + "face_dimension", + ) ), self._fields, ) diff --git a/lib/iris/common/mixin.py b/lib/iris/common/mixin.py index 4cb176dc2f..aab98eb1f7 100644 --- a/lib/iris/common/mixin.py +++ b/lib/iris/common/mixin.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides common metadata mixin behaviour.""" +"""Provides common metadata mixin behaviour. + +.. z_reference:: iris.common.mixin + :tags: topic_data_model + + API reference +""" from __future__ import annotations diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 49df0e66e6..66d91416ca 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Resolve metadata common between two cubes. +.. z_reference:: iris.common.resolve + :tags: topic_data_model;topic_maths_stats + + API reference + Provides the infrastructure to support the analysis, identification and combination of metadata common between two :class:`~iris.cube.Cube` operands into a single resultant :class:`~iris.cube.Cube`, which will be @@ -747,7 +752,8 @@ def _create_prepared_item( # Build a prepared-item to make a MeshCoord. # This case does *NOT* use points + bounds, so alternatives to the # coord content should not have been specified by the caller. - assert points is None and bounds is None + assert points is None + assert bounds is None mesh = coord.mesh location = coord.location axis = coord.axis diff --git a/lib/iris/config.py b/lib/iris/config.py index 9cec602a95..e99beb351d 100644 --- a/lib/iris/config.py +++ b/lib/iris/config.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provides access to Iris-specific configuration values. +.. z_reference:: iris.config + :tags: topic_customisation + + API reference + The default configuration values can be overridden by creating the file ``iris/etc/site.cfg``. If it exists, this file must conform to the format defined by :mod:`configparser`. diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index 460a131a18..a497b1cfd8 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Cube functions for coordinate categorisation. +.. z_reference:: iris.coord_categorisation + :tags: topic_data_model + + API reference + All the functions provided here add a new coordinate to a cube. 
* The function :func:`add_categorised_coord` performs a generic diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index a8f78b0ebf..3b500f43bb 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of coordinate systems.""" +"""Definitions of coordinate systems. + +.. z_reference:: iris.coord_systems + :tags: topic_data_model + + API reference +""" from abc import ABCMeta, abstractmethod from functools import cached_property diff --git a/lib/iris/coords.py b/lib/iris/coords.py index 1013353759..044dc21f0f 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of coordinates and other dimensional metadata.""" +"""Definitions of coordinates and other dimensional metadata. + +.. z_reference:: iris.coords + :tags: topic_data_model + + API reference +""" from abc import ABCMeta, abstractmethod from collections import namedtuple @@ -3149,8 +3155,8 @@ def __init__(self, method, coords=None, intervals=None, comments=None): elif isinstance(coords, str): _coords.append(BaseMetadata.token(coords) or default_name) else: - normalise = ( - lambda coord: coord.name(token=True) + normalise = lambda coord: ( + coord.name(token=True) if isinstance(coord, Coord) else BaseMetadata.token(coord) or default_name ) diff --git a/lib/iris/cube.py b/lib/iris/cube.py index a68e9d7599..44be3a63d7 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Classes for representing multi-dimensional data with metadata.""" +"""Classes for representing multi-dimensional data with metadata. + +.. z_reference:: iris.cube + :tags: topic_data_model;topic_lazy_data;topic_maths_stats;topic_regrid;topic_slice_combine + + API reference +""" from __future__ import annotations @@ -1126,7 +1132,7 @@ class Cube(CFVariableMixin): source 'Data from Met Office Unified Model' - See the :doc:`user guide` for more information. + See the :doc:`user manual` for more information. """ @@ -4923,8 +4929,10 @@ def aggregated_by( # coordinate dimension. shared_coords = list( filter( - lambda coord_: coord_ not in groupby_coords - and dimension_to_groupby in self.coord_dims(coord_), + lambda coord_: ( + coord_ not in groupby_coords + and dimension_to_groupby in self.coord_dims(coord_) + ), self.dim_coords + self.aux_coords, ) ) diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 56e220faf9..5589e03337 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Exceptions specific to the Iris package.""" +"""Exceptions specific to the Iris package. + +.. z_reference:: iris.exceptions + :tags: topic_troubleshooting + + API reference +""" class IrisError(Exception): diff --git a/lib/iris/experimental/__init__.py b/lib/iris/experimental/__init__.py index eea4259355..aa2ae2aec1 100644 --- a/lib/iris/experimental/__init__.py +++ b/lib/iris/experimental/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Experimental code can be introduced to Iris through this package. +.. z_reference:: iris.experimental + :tags: topic_experimental + + API reference + Changes to experimental code may be more extensive than in the rest of the codebase. The code is expected to graduate, eventually, to "full status". 
diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index 13c1613802..74300dbbdc 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Wrapper for animating iris cubes using iris or matplotlib plotting functions. +.. z_reference:: iris.experimental.animate + :tags: topic_experimental;topic_plotting + + API reference + Notes ----- .. deprecated:: 3.4.0 diff --git a/lib/iris/experimental/geovista.py b/lib/iris/experimental/geovista.py index 57cbded2c2..a120877dad 100644 --- a/lib/iris/experimental/geovista.py +++ b/lib/iris/experimental/geovista.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Experimental module for using some GeoVista operations with Iris cubes.""" +"""Experimental module for using some GeoVista operations with Iris cubes. + +.. z_reference:: iris.experimental.geovista + :tags: topic_experimental;topic_interoperability;topic_plotting + + API reference +""" from geovista import Transform from geovista.common import VTK_CELL_IDS, VTK_POINT_IDS @@ -64,64 +70,79 @@ def cube_to_polydata(cube, **kwargs): cube_w_time = load_cube(sample_data_path("A1B_north_america.nc")) cube_mesh = load_cube(sample_data_path("mesh_C4_synthetic_float.nc")) - >>> from iris.experimental.geovista import cube_to_polydata + .. doctest:: + :skipif: gv is None + + >>> from iris.experimental.geovista import cube_to_polydata Converting a standard 2-dimensional :class:`~iris.cube.Cube` with 1-dimensional coordinates: - >>> print(cube.summary(shorten=True)) - air_temperature / (K) (latitude: 73; longitude: 96) - >>> print(cube_to_polydata(cube)) - PolyData (... 
- N Cells: 7008 - N Points: 7178 - N Strips: 0 - X Bounds: -9.992e-01, 9.992e-01 - Y Bounds: -9.992e-01, 9.992e-01 - Z Bounds: -1.000e+00, 1.000e+00 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube.summary(shorten=True)) + air_temperature / (K) (latitude: 73; longitude: 96) + >>> print(cube_to_polydata(cube)) + PolyData (... + N Cells: 7008 + N Points: 7178 + N Strips: 0 + X Bounds: -9.992e-01, 9.992e-01 + Y Bounds: -9.992e-01, 9.992e-01 + Z Bounds: -1.000e+00, 1.000e+00 + N Arrays: 4 Configure the conversion by passing additional keyword arguments: - >>> print(cube_to_polydata(cube, radius=2)) - PolyData (... - N Cells: 7008 - N Points: 7178 - N Strips: 0 - X Bounds: -1.998e+00, 1.998e+00 - Y Bounds: -1.998e+00, 1.998e+00 - Z Bounds: -2.000e+00, 2.000e+00 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube_to_polydata(cube, radius=2)) + PolyData (... + N Cells: 7008 + N Points: 7178 + N Strips: 0 + X Bounds: -1.998e+00, 1.998e+00 + Y Bounds: -1.998e+00, 1.998e+00 + Z Bounds: -2.000e+00, 2.000e+00 + N Arrays: 4 Converting a :class:`~iris.cube.Cube` that has a :attr:`~iris.cube.Cube.mesh` describing its horizontal space: - >>> print(cube_mesh.summary(shorten=True)) - synthetic / (1) (-- : 96) - >>> print(cube_to_polydata(cube_mesh)) - PolyData (... - N Cells: 96 - N Points: 98 - N Strips: 0 - X Bounds: -1.000e+00, 1.000e+00 - Y Bounds: -1.000e+00, 1.000e+00 - Z Bounds: -1.000e+00, 1.000e+00 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube_mesh.summary(shorten=True)) + synthetic / (1) (-- : 96) + >>> print(cube_to_polydata(cube_mesh)) + PolyData (... 
+ N Cells: 96 + N Points: 98 + N Strips: 0 + X Bounds: -1.000e+00, 1.000e+00 + Y Bounds: -1.000e+00, 1.000e+00 + Z Bounds: -1.000e+00, 1.000e+00 + N Arrays: 4 Remember to reduce the dimensionality of your :class:`~iris.cube.Cube` to just be the horizontal space: - >>> print(cube_w_time.summary(shorten=True)) - air_temperature / (K) (time: 240; latitude: 37; longitude: 49) - >>> print(cube_to_polydata(cube_w_time[0, :, :])) - PolyData (... - N Cells: 1813 - N Points: 1900 - N Strips: 0 - X Bounds: -6.961e-01, 6.961e-01 - Y Bounds: -9.686e-01, -3.411e-01 - Z Bounds: 2.483e-01, 8.714e-01 - N Arrays: 4 + .. doctest:: + :skipif: gv is None + + >>> print(cube_w_time.summary(shorten=True)) + air_temperature / (K) (time: 240; latitude: 37; longitude: 49) + >>> print(cube_to_polydata(cube_w_time[0, :, :])) + PolyData (... + N Cells: 1813 + N Points: 1900 + N Strips: 0 + X Bounds: -6.961e-01, 6.961e-01 + Y Bounds: -9.686e-01, -3.411e-01 + Z Bounds: 2.483e-01, 8.714e-01 + N Arrays: 4 """ if cube.mesh: @@ -227,57 +248,69 @@ def extract_unstructured_region(cube, polydata, region, **kwargs): The parameters of :func:`extract_unstructured_region` have been designed with flexibility and reuse in mind. This is demonstrated below. - >>> from geovista.geodesic import BBox - >>> from iris.experimental.geovista import cube_to_polydata, extract_unstructured_region - >>> print(cube_w_mesh.shape) - (72, 96) - >>> # The mesh dimension represents the horizontal space of the cube. - >>> print(cube_w_mesh.shape[cube_w_mesh.mesh_dim()]) - 96 - >>> cube_polydata = cube_to_polydata(cube_w_mesh[0, :]) - >>> extracted_cube = extract_unstructured_region( - ... cube=cube_w_mesh, - ... polydata=cube_polydata, - ... region=BBox(lons=[0, 70, 70, 0], lats=[-25, -25, 45, 45]), - ... ) - >>> print(extracted_cube.shape) - (72, 11) + .. 
doctest:: + :skipif: gv is None + + >>> from geovista.geodesic import BBox + >>> from iris.experimental.geovista import cube_to_polydata, extract_unstructured_region + >>> print(cube_w_mesh.shape) + (72, 96) + >>> # The mesh dimension represents the horizontal space of the cube. + >>> print(cube_w_mesh.shape[cube_w_mesh.mesh_dim()]) + 96 + >>> cube_polydata = cube_to_polydata(cube_w_mesh[0, :]) + >>> extracted_cube = extract_unstructured_region( + ... cube=cube_w_mesh, + ... polydata=cube_polydata, + ... region=BBox(lons=[0, 70, 70, 0], lats=[-25, -25, 45, 45]), + ... ) + >>> print(extracted_cube.shape) + (72, 11) Now reuse the same `cube` and `polydata` to extract a different region: - >>> new_region = BBox(lons=[0, 35, 35, 0], lats=[-25, -25, 45, 45]) - >>> extracted_cube = extract_unstructured_region( - ... cube=cube_w_mesh, - ... polydata=cube_polydata, - ... region=new_region, - ... ) - >>> print(extracted_cube.shape) - (72, 6) + .. doctest:: + :skipif: gv is None + + >>> new_region = BBox(lons=[0, 35, 35, 0], lats=[-25, -25, 45, 45]) + >>> extracted_cube = extract_unstructured_region( + ... cube=cube_w_mesh, + ... polydata=cube_polydata, + ... region=new_region, + ... ) + >>> print(extracted_cube.shape) + (72, 6) Now apply the same region extraction to a different `cube` that has the same horizontal shape: - >>> print(other_cube_w_mesh.shape) - (20, 96) - >>> extracted_cube = extract_unstructured_region( - ... cube=other_cube_w_mesh, - ... polydata=cube_polydata, - ... region=new_region, - ... ) - >>> print(extracted_cube.shape) - (20, 6) + .. doctest:: + :skipif: gv is None + + >>> print(other_cube_w_mesh.shape) + (20, 96) + >>> extracted_cube = extract_unstructured_region( + ... cube=other_cube_w_mesh, + ... polydata=cube_polydata, + ... region=new_region, + ... 
) + >>> print(extracted_cube.shape) + (20, 6) Arbitrary keywords can be passed down to :meth:`geovista.geodesic.BBox.enclosed` (``outside`` in this example): - >>> extracted_cube = extract_unstructured_region( - ... cube=other_cube_w_mesh, - ... polydata=cube_polydata, - ... region=new_region, - ... outside=True, - ... ) - >>> print(extracted_cube.shape) - (20, 90) + .. doctest:: + :skipif: gv is None + + >>> extracted_cube = extract_unstructured_region( + ... cube=other_cube_w_mesh, + ... polydata=cube_polydata, + ... region=new_region, + ... outside=True, + ... ) + >>> print(extracted_cube.shape) + (20, 90) """ if cube.mesh: diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index 7fc9c5153c..0b5057136c 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Experimental module for importing/exporting raster data from Iris cubes using the GDAL library. +.. z_reference:: iris.experimental.raster + :tags: topic_experimental;topic_load_save + + API reference + See also: `GDAL - Geospatial Data Abstraction Library `_. TODO: If this module graduates from experimental the (optional) GDAL diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index 9ad93f83b6..253fa5957e 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Regridding functions. +.. z_reference:: iris.experimental.regrid + :tags: topic_experimental;topic_regrid + + API reference + Notes ----- .. deprecated:: 3.2.0 diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index 886ba8c97f..ca36612330 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -11,6 +11,11 @@ `iris-esmf-regrid `_ instead. +.. 
z_reference:: iris.experimental.regrid_conservative + :tags: topic_experimental;topic_regrid + + API reference + """ import functools diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 4beac376ee..12ace6a33e 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Definitions of how Iris objects should be represented.""" +"""Definitions of how Iris objects should be represented. + +.. z_reference:: iris.experimental.representation + :tags: topic_experimental;topic_data_model + + API reference +""" from html import escape diff --git a/lib/iris/experimental/stratify.py b/lib/iris/experimental/stratify.py index 50f8c21dcf..3a5f823fe6 100644 --- a/lib/iris/experimental/stratify.py +++ b/lib/iris/experimental/stratify.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Routines for putting data on new strata (aka. isosurfaces), often in the Z direction.""" +"""Routines for putting data on new strata (aka. isosurfaces), often in the Z direction. + +.. z_reference:: iris.experimental.stratify + :tags: topic_experimental;topic_interoperability + + API reference +""" from functools import partial diff --git a/lib/iris/experimental/ugrid.py b/lib/iris/experimental/ugrid.py index 7db26ca26b..96f08e76b3 100644 --- a/lib/iris/experimental/ugrid.py +++ b/lib/iris/experimental/ugrid.py @@ -5,6 +5,11 @@ """Legacy import location for mesh support. +.. z_reference:: iris.experimental.ugrid + :tags: topic_experimental;topic_mesh;topic_data_model + + API reference + See :mod:`iris.mesh` for the new, correct import location. 
Notes diff --git a/lib/iris/fileformats/__init__.py b/lib/iris/fileformats/__init__.py index 6729141bf6..0af0888b3f 100644 --- a/lib/iris/fileformats/__init__.py +++ b/lib/iris/fileformats/__init__.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""A package for converting cubes to and from specific file formats.""" +"""A package for converting cubes to and from specific file formats. + +.. z_reference:: iris.fileformats + :tags: topic_load_save + + API reference +""" from iris.io.format_picker import ( DataSourceObjectProtocol, diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 35c2e96924..a2800dc91d 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -708,13 +708,13 @@ def build_and_add_global_attributes(engine: Engine): ), ) if problem is not None: - stack_notes = problem.stack_trace.__notes__ + stack_notes = problem.stack_trace.__notes__ # type: ignore[attr-defined] if stack_notes is None: stack_notes = [] stack_notes.append( f"Skipping disallowed global attribute '{attr_name}' (see above error)" ) - problem.stack_trace.__notes__ = stack_notes + problem.stack_trace.__notes__ = stack_notes # type: ignore[attr-defined] ################################################################################ @@ -1536,14 +1536,14 @@ def build_and_add_dimension_coordinate( ) if problem is not None: coord_var_name = str(cf_coord_var.cf_name) - stack_notes = problem.stack_trace.__notes__ + stack_notes = problem.stack_trace.__notes__ # type: ignore[attr-defined] if stack_notes is None: stack_notes = [] stack_notes.append( f"Failed to create {coord_var_name} dimension coordinate:\n" f"Gracefully creating {coord_var_name!r} auxiliary coordinate instead." 
) - problem.stack_trace.__notes__ = stack_notes + problem.stack_trace.__notes__ = stack_notes # type: ignore[attr-defined] problem.handled = True _ = _add_or_capture( @@ -1643,9 +1643,13 @@ def _add_auxiliary_coordinate( # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the coordinate being built. - common_dims = [ - dim for dim in cf_coord_var.dimensions if dim in engine.cf_var.dimensions - ] + coord_dims = cf_coord_var.dimensions + # if cf._is_str_dtype(cf_coord_var): + # coord_dims = coord_dims[:-1] + datavar_dims = engine.cf_var.dimensions + # if cf._is_str_dtype(engine.cf_var): + # datavar_dims = datavar_dims[:-1] + common_dims = [dim for dim in coord_dims if dim in datavar_dims] data_dims = None if common_dims: # Calculate the offset of each common dimension. diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 1ac95a42eb..c27da55a0f 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provides ABF (and ABL) file format capabilities. +.. z_reference:: iris.fileformats.abf + :tags: topic_load_save + + API reference + ABF and ABL files are satellite file formats defined by Boston University. Including this module adds ABF and ABL loading to the session's capabilities. diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index 2b6568c315..da397a8b53 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provide capability to load netCDF files and interpret them. +.. z_reference:: iris.fileformats.cf + :tags: topic_load_save + + API reference + Provides the capability to load netCDF files and interpret them according to the 'NetCDF Climate and Forecast (CF) Metadata Conventions'. 
@@ -26,7 +31,7 @@ import iris.exceptions import iris.fileformats._nc_load_rules.helpers as hh -from iris.fileformats.netcdf import _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets, _thread_safe_nc from iris.mesh.components import Connectivity import iris.util import iris.warnings @@ -67,7 +72,9 @@ # NetCDF returns a different type for strings depending on Python version. def _is_str_dtype(var): - return np.issubdtype(var.dtype, np.bytes_) + # N.B. use 'datatype' not 'dtype', to "look inside" variable wrappers which + # represent 'S1' type data as 'U'. + return isinstance(var.datatype, np.dtype) and np.issubdtype(var.datatype, np.bytes_) ################################################################################ @@ -788,50 +795,63 @@ def cf_label_data(self, cf_data_var): % type(cf_data_var) ) - # Determine the name of the label string (or length) dimension by - # finding the dimension name that doesn't exist within the data dimensions. - str_dim_name = list(set(self.dimensions) - set(cf_data_var.dimensions)) - - if len(str_dim_name) != 1: - raise ValueError( - "Invalid string dimensions for CF-netCDF label variable %r" - % self.cf_name - ) - - str_dim_name = str_dim_name[0] - label_data = self[:] - - if ma.isMaskedArray(label_data): - label_data = label_data.filled() - - # Determine whether we have a string-valued scalar label - # i.e. a character variable that only has one dimension (the length of the string). - if self.ndim == 1: - label_string = b"".join(label_data).strip() - label_string = label_string.decode("utf8") - data = np.array([label_string]) - else: - # Determine the index of the string dimension. - str_dim = self.dimensions.index(str_dim_name) - - # Calculate new label data shape (without string dimension) and create payload array. 
- new_shape = tuple( - dim_len for i, dim_len in enumerate(self.shape) if i != str_dim - ) - string_basetype = "|U%d" - string_dtype = string_basetype % self.shape[str_dim] - data = np.empty(new_shape, dtype=string_dtype) - - for index in np.ndindex(new_shape): - # Create the slice for the label data. - if str_dim == 0: - label_index = (slice(None, None),) + index - else: - label_index = index + (slice(None, None),) - - label_string = b"".join(label_data[label_index]).strip() - label_string = label_string.decode("utf8") - data[index] = label_string + # # Determine the name of the label string (or length) dimension by + # # finding the dimension name that doesn't exist within the data dimensions. + # str_dim_names = list(set(self.dimensions) - set(cf_data_var.dimensions)) + # n_nondata_dims = len(str_dim_names) + # + # if n_nondata_dims == 0: + # # *All* dims are shared with the data-variable. + # # This is only ok if the data-var is *also* a string type. + # dim_ok = _is_str_dtype(cf_data_var) + # # In this case, we must just *assume* that the last dimension is "the" + # # string dimension + # str_dim_name = self.dimensions[-1] + # else: + # # If there is exactly one non-data dim, that is the one we want + # dim_ok = len(str_dim_names) == 1 + # (str_dim_name,) = str_dim_names + # + # if not dim_ok: + # raise ValueError( + # "Invalid string dimensions for CF-netCDF label variable %r" + # % self.cf_name + # ) + + data = self[:] + # label_data = self[:] + # + # if ma.isMaskedArray(label_data): + # label_data = label_data.filled(b"\0") + # + # # Determine whether we have a string-valued scalar label + # # i.e. a character variable that only has one dimension (the length of the string). + # if self.ndim == 1: + # label_string = b"".join(label_data).strip() + # label_string = label_string.decode("utf8") + # data = np.array([label_string]) + # else: + # # Determine the index of the string dimension. 
+ # str_dim = self.dimensions.index(str_dim_name) + # + # # Calculate new label data shape (without string dimension) and create payload array. + # new_shape = tuple( + # dim_len for i, dim_len in enumerate(self.shape) if i != str_dim + # ) + # string_basetype = "|U%d" + # string_dtype = string_basetype % self.shape[str_dim] + # data = np.empty(new_shape, dtype=string_dtype) + # + # for index in np.ndindex(new_shape): + # # Create the slice for the label data. + # if str_dim == 0: + # label_index = (slice(None, None),) + index + # else: + # label_index = index + (slice(None, None),) + # + # label_string = b"".join(label_data[label_index]).strip() + # label_string = label_string.decode("utf8") + # data[index] = label_string return data @@ -1361,7 +1381,11 @@ def __init__(self, file_source, warn=False, monotonic=False): if isinstance(file_source, str): # Create from filepath : open it + own it (=close when we die). self._filename = os.path.expanduser(file_source) - self._dataset = _thread_safe_nc.DatasetWrapper(self._filename, mode="r") + if _bytecoding_datasets.DECODE_TO_STRINGS_ON_READ: + ds_type = _bytecoding_datasets.EncodedDataset + else: + ds_type = _thread_safe_nc.DatasetWrapper + self._dataset = ds_type(self._filename, mode="r") self._own_file = True else: # We have been passed an open dataset. diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 3c37395f6c..b1047bcffe 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`.""" +"""Provides Creation and saving of DOT graphs for a :class:`iris.cube.Cube`. + +.. 
z_reference:: iris.fileformats.dot + :tags: topic_load_save + + API reference +""" import os import subprocess diff --git a/lib/iris/fileformats/name.py b/lib/iris/fileformats/name.py index bc1bb690c2..7b41d909f7 100644 --- a/lib/iris/fileformats/name.py +++ b/lib/iris/fileformats/name.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides NAME file format loading capabilities.""" +"""Provides NAME file format loading capabilities. + +.. z_reference:: iris.fileformats.name + :tags: topic_load_save + + API reference +""" def _get_NAME_loader(filename): diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index fe53308cb0..b13b33ccd3 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""NAME file format loading functions.""" +"""NAME file format loading functions. + +.. z_reference:: iris.fileformats.name_loaders + :tags: topic_load_save + + API reference +""" import collections import datetime diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py index 992392b9a1..f1e37f2545 100644 --- a/lib/iris/fileformats/netcdf/__init__.py +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Support loading and saving NetCDF files using CF conventions for metadata interpretation. +.. z_reference:: iris.fileformats.netcdf + :tags: topic_load_save + + API reference + See : `NetCDF User's Guide `_ and `netCDF4 python module `_. 
diff --git a/lib/iris/fileformats/netcdf/_bytecoding_datasets.py b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py new file mode 100644 index 0000000000..22a9011eec --- /dev/null +++ b/lib/iris/fileformats/netcdf/_bytecoding_datasets.py @@ -0,0 +1,347 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Module providing access to netcdf datasets with automatic character encoding. + +The requirement is to convert numpy fixed-width unicode arrays on writing to a variable +which is declared as a byte (character) array with a fixed-length string dimension. + +Numpy unicode string arrays are ones with dtypes of the form "U". +Numpy character variables have the dtype "S1", and map to a fixed-length "string +dimension". + +In principle, netCDF4 already performs these translations, but in practice current +releases are not functional for anything other than "ascii" encoding -- including UTF-8, +which is the most obvious and desirable "general" solution. + +There is also the question of whether we should like to implement UTF-8 as our default. +Current discussions on this are inconclusive and neither CF conventions nor the NetCDF +User Guide are definite on what possible values of "_Encoding" are, or what the effective +default is, even though they do both mention the "_Encoding" attribute as a potential +way to handle the issue. + +Because of this, we interpret as follows: + * when reading bytes : in the absence of an "_Encoding" attribute, we will attempt to + decode bytes as UTF-8 + * when writing strings : in the absence of an "_Encoding" attribute (on the Iris + cube or coord object), we will attempt to encode data with "ascii" : If this fails, + it raises an error prompting the user to supply an "_Encoding" attribute.
+ +Where an "_Encoding" attribute is provided to Iris, we will honour it where possible, +identifying with "codecs.lookup" : This means we support the encodings in the Python +Standard Library, and the name aliases which it recognises. + +See: + +* known problems https://github.com/Unidata/netcdf4-python/issues/1440 +* suggestions for how this "ought" to work, discussed in the netcdf-c library + * https://github.com/Unidata/netcdf-c/issues/402 + +""" + +import codecs +import contextlib +import dataclasses +import threading +import warnings + +import numpy as np + +from iris.fileformats.netcdf._thread_safe_nc import ( + DatasetWrapper, + NetCDFDataProxy, + NetCDFWriteProxy, + VariableWrapper, +) +import iris.warnings +from iris.warnings import IrisCfLoadWarning, IrisCfSaveWarning + + +def decode_bytesarray_to_stringarray( + byte_array: np.ndarray, encoding: str, string_width: int +) -> np.ndarray: + """Convert an array of bytes to an array of strings, with one less dimension. + + N.B. for now at least, we assume the string dim is **always the last one**. + The 'string_width' determines the fixed width of the resulting string dtype.
+ """ + if np.ma.isMaskedArray(byte_array): + # netCDF4-python sees zeros as "missing" -- we don't need or want that + byte_array = byte_array.data + bytes_shape = byte_array.shape + var_shape = bytes_shape[:-1] + string_dtype = f"U{string_width}" + result = np.empty(var_shape, dtype=string_dtype) + for ndindex in np.ndindex(var_shape): + element_bytes = byte_array[ndindex] + bytes = b"".join([b if b else b"\0" for b in element_bytes]) + string = bytes.decode(encoding) + result[ndindex] = string + return result + + +def encode_stringarray_as_bytearray( + data: np.typing.ArrayLike, encoding: str, string_dimension_length: int +) -> np.ndarray: + """Encode strings as a bytes array.""" + data = np.asanyarray(data) + element_shape = data.shape + result = np.zeros(element_shape + (string_dimension_length,), dtype="S1") + right_pad = b"\0" * string_dimension_length + for index in np.ndindex(element_shape): + string = data[index] + bytes = string.encode(encoding=encoding) + n_bytes = len(bytes) + # TODO: may want to issue warning or error if we overflow the length? + if n_bytes > string_dimension_length: + from iris.exceptions import TranslationError + + msg = ( + f"String {string!r} written to netcdf exceeds string dimension after " + f"encoding : {n_bytes} > {string_dimension_length}." + ) + raise TranslationError(msg) + + # It's all a bit nasty ... 
+ bytes = (bytes + right_pad)[:string_dimension_length] + result[index] = [bytes[i : i + 1] for i in range(string_dimension_length)] + + return result + + +@dataclasses.dataclass +class VariableEncoder: + """A record of encoding details, with methods to apply them to variable data.""" + + varname: str # just for the error messages + dtype: np.dtype + is_chardata: bool # just a shortcut for the dtype test + read_encoding: str # *always* a valid encoding from the codecs package + write_encoding: str # *always* a valid encoding from the codecs package + n_chars_dim: int # length of associated character dimension + string_width: int # string lengths when viewing as strings (i.e. "Uxx") + + def __init__(self, cf_var): + """Get all the info from a netCDF4 variable (or similar wrapper object). + + Most importantly, we do *not* store 'cf_var' : instead we extract the + necessary information and store it in this object. + So, this object has static state + is serialisable. + """ + self.varname = cf_var.name + self.dtype = cf_var.dtype + self.is_chardata = np.issubdtype(self.dtype, np.bytes_) + if self.is_chardata: + self.read_encoding = self._get_encoding(cf_var, writing=False) + self.write_encoding = self._get_encoding(cf_var, writing=True) + self.n_chars_dim = cf_var.group().dimensions[cf_var.dimensions[-1]].size + self.string_width = self._get_string_width(cf_var) + + @staticmethod + def _get_encoding(cf_var, writing=False) -> str: + """Get the byte encoding defined for this variable, else the relevant default.""" + result = getattr(cf_var, "_Encoding", None) + if result is not None: + try: + # Accept + normalise naming of encodings + result = codecs.lookup(result).name + # NOTE: if encoding does not suit data, errors can occur. + # For example, _Encoding = "ascii", with non-ascii content. + except LookupError: + # Unrecognised encoding name : handle this as just a warning + msg = ( + f"Ignoring unknown encoding for variable {cf_var.name!r}: " + f"_Encoding = {result!r}."
+ ) + warntype = IrisCfSaveWarning if writing else IrisCfLoadWarning + warnings.warn(msg, category=warntype) + # Proceed as if there is no specified encoding + result = None + + if result is None: + if writing: + result = DEFAULT_WRITE_ENCODING + else: + result = DEFAULT_READ_ENCODING + return result + + def _get_string_width(self, cf_var) -> int: + """Return the string-length defined for this variable.""" + # Work out the actual byte width from the parent dataset dimensions. + strlen = self.n_chars_dim + # Convert the string dimension length (i.e. bytes) to a sufficiently-long + # string width, depending on the (read) encoding used. + encoding = self.read_encoding + if "utf-16" in encoding: + # Each char needs at least 2 bytes -- including a terminator char + strlen = (strlen // 2) - 1 + elif "utf-32" in encoding: + # Each char needs exactly 4 bytes -- including a terminator char + strlen = (strlen // 4) - 1 + # "ELSE": assume there can be (at most) as many chars as bytes + return strlen + + def decode_bytes_to_stringarray(self, data: np.ndarray) -> np.ndarray: + if self.is_chardata: + # N.B. read encoding default is UTF-8 --> a "usually safe" choice + encoding = self.read_encoding + strlen = self.string_width + try: + data = decode_bytesarray_to_stringarray(data, encoding, strlen) + except UnicodeDecodeError as err: + msg = ( + f"Character data in variable {self.varname!r} could not be decoded " + f"with the {encoding!r} encoding. This can be fixed by setting the " + "variable '_Encoding' attribute to suit the content." + ) + raise ValueError(msg) from err + + return data + + def encode_strings_as_bytearray(self, data: np.ndarray) -> np.ndarray: + if self.is_chardata and data.dtype.kind == "U": + # N.B. it is also possible to pass a byte array (dtype "S1"), + # to be written directly, without processing. + try: + # N.B. 
write encoding *default* is "ascii" --> fails bad content + encoding = self.write_encoding + strlen = self.n_chars_dim + data = encode_stringarray_as_bytearray(data, encoding, strlen) + except UnicodeEncodeError as err: + msg = ( + f"String data written to netcdf character variable {self.varname!r} " + f"could not be represented in encoding {self.write_encoding!r}. " + "This can be fixed by setting a suitable variable '_Encoding' " + 'attribute, e.g. ._Encoding="UTF-8".' + ) + raise ValueError(msg) from err + return data + + +class NetcdfStringDecodeSetting(threading.local): + def __init__(self, perform_encoding: bool = True): + self.set(perform_encoding) + + def set(self, perform_encoding: bool): + self.perform_encoding = perform_encoding + + def __bool__(self): + return self.perform_encoding + + @contextlib.contextmanager + def context(self, perform_encoding: bool): + old_setting = self.perform_encoding + self.perform_encoding = perform_encoding + yield + self.perform_encoding = old_setting + + +DECODE_TO_STRINGS_ON_READ = NetcdfStringDecodeSetting() +DEFAULT_READ_ENCODING = "utf-8" +DEFAULT_WRITE_ENCODING = "ascii" + + +class EncodedVariable(VariableWrapper): + """A variable wrapper that translates variable data according to byte encodings.""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # Override specific properties of the contained instance, making changes in the case + # that the variable contains char data, which is presented instead as strings + # with one less dimension. 
+ + @property + def shape(self): + shape = self._contained_instance.shape + is_chardata = np.issubdtype(self._contained_instance.dtype, np.bytes_) + if is_chardata: + # Translated char data appears without the final dimension + shape = shape[:-1] # remove final dimension + return shape + + @property + def dimensions(self): + dimensions = self._contained_instance.dimensions + is_chardata = np.issubdtype(self._contained_instance.dtype, np.bytes_) + if is_chardata: + # Translated char data appears without the final dimension + dimensions = dimensions[:-1] # remove final dimension + return dimensions + + @property + def dtype(self): + dtype = self._contained_instance.dtype + is_chardata = np.issubdtype(self._contained_instance.dtype, np.bytes_) + if is_chardata: + # Create a coding spec : redo every time in case "_Encoding" has changed + encoding_spec = VariableEncoder(self._contained_instance) + dtype = np.dtype(f"U{encoding_spec.string_width}") + return dtype + + def __getitem__(self, keys): + self._contained_instance.set_auto_chartostring(False) + data = super().__getitem__(keys) + # Create a coding spec : redo every time in case "_Encoding" has changed + encoding_spec = VariableEncoder(self._contained_instance) + data = encoding_spec.decode_bytes_to_stringarray(data) + return data + + def __setitem__(self, keys, data): + data = np.asanyarray(data) + # Create a coding spec : redo every time in case "_Encoding" has changed + encoding_spec = VariableEncoder(self._contained_instance) + data = encoding_spec.encode_strings_as_bytearray(data) + super().__setitem__(keys, data) + + def set_auto_chartostring(self, onoff: bool): + msg = "auto_chartostring is not supported by Iris 'EncodedVariable' type." 
+ raise TypeError(msg) + + +class EncodedDataset(DatasetWrapper): + """A specialised DatasetWrapper whose variables perform byte encoding.""" + + VAR_WRAPPER_CLS = EncodedVariable + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + def set_auto_chartostring(self, onoff: bool): + msg = "auto_chartostring is not supported by Iris 'EncodedDataset' type." + raise TypeError(msg) + + +class EncodedNetCDFDataProxy(NetCDFDataProxy): + __slots__ = NetCDFDataProxy.__slots__ + ("encoding_details",) + + def __init__(self, cf_var, *args, **kwargs): + # When creating, also capture + record the encoding to be performed. + kwargs["use_byte_data"] = True + super().__init__(cf_var, *args, **kwargs) + if not isinstance(cf_var, EncodedVariable): + msg = ( + f"Unexpected variable type : {type(cf_var)} of variable '{cf_var.name}'" + ": expected EncodedVariable." + ) + raise TypeError(msg) + self.encoding_details = VariableEncoder(cf_var._contained_instance) + + def __getitem__(self, keys): + data = super().__getitem__(keys) + # Apply the optional bytes-to-strings conversion + data = self.encoding_details.decode_bytes_to_stringarray(data) + return data + + +class EncodedNetCDFWriteProxy(NetCDFWriteProxy): + def __init__(self, filepath, cf_var, file_write_lock): + super().__init__(filepath, cf_var, file_write_lock) + self.encoding_details = VariableEncoder(cf_var) + + def __setitem__(self, key, data): + data = np.asanyarray(data) + # Apply the optional strings-to-bytes conversion + data = self.encoding_details.encode_strings_as_bytearray(data) + super().__setitem__(key, data) diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 33183ef0fa..f96312cf79 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -159,6 +159,9 @@ class GroupWrapper(_ThreadSafeWrapper): CONTAINED_CLASS = netCDF4.Group # Note: will also accept a whole Dataset object, 
but that is OK. _DUCKTYPE_CHECK_PROPERTIES = ["createVariable"] + # Class to use when creating variable wrappers (default=VariableWrapper). + # - needed to support _byte_encoded_data.EncodedDataset. + VAR_WRAPPER_CLS = VariableWrapper # All Group API that returns Dimension(s) is wrapped to instead return # DimensionWrapper(s). @@ -203,7 +206,7 @@ def variables(self) -> typing.Dict[str, VariableWrapper]: """ with _GLOBAL_NETCDF4_LOCK: variables_ = self._contained_instance.variables - return {k: VariableWrapper.from_existing(v) for k, v in variables_.items()} + return {k: self.VAR_WRAPPER_CLS.from_existing(v) for k, v in variables_.items()} def createVariable(self, *args, **kwargs) -> VariableWrapper: """Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. @@ -216,7 +219,7 @@ def createVariable(self, *args, **kwargs) -> VariableWrapper: """ with _GLOBAL_NETCDF4_LOCK: new_variable = self._contained_instance.createVariable(*args, **kwargs) - return VariableWrapper.from_existing(new_variable) + return self.VAR_WRAPPER_CLS.from_existing(new_variable) def get_variables_by_attributes( self, *args, **kwargs @@ -234,7 +237,7 @@ def get_variables_by_attributes( variables_ = list( self._contained_instance.get_variables_by_attributes(*args, **kwargs) ) - return [VariableWrapper.from_existing(v) for v in variables_] + return [self.VAR_WRAPPER_CLS.from_existing(v) for v in variables_] # All Group API that returns Group(s) is wrapped to instead return # GroupWrapper(s). 
@@ -252,7 +255,7 @@ def groups(self): """ with _GLOBAL_NETCDF4_LOCK: groups_ = self._contained_instance.groups - return {k: GroupWrapper.from_existing(v) for k, v in groups_.items()} + return {k: self.__class__.from_existing(v) for k, v in groups_.items()} @property def parent(self): @@ -268,7 +271,7 @@ def parent(self): """ with _GLOBAL_NETCDF4_LOCK: parent_ = self._contained_instance.parent - return GroupWrapper.from_existing(parent_) + return self.__class__.from_existing(parent_) def createGroup(self, *args, **kwargs): """Call createGroup() from netCDF4.Group/Dataset. @@ -281,7 +284,7 @@ def createGroup(self, *args, **kwargs): """ with _GLOBAL_NETCDF4_LOCK: new_group = self._contained_instance.createGroup(*args, **kwargs) - return GroupWrapper.from_existing(new_group) + return self.__class__.from_existing(new_group) class DatasetWrapper(GroupWrapper): @@ -311,14 +314,22 @@ def fromcdl(cls, *args, **kwargs): class NetCDFDataProxy: """A reference to the data payload of a single NetCDF file variable.""" - __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") - - def __init__(self, shape, dtype, path, variable_name, fill_value): - self.shape = shape + __slots__ = ( + "shape", + "dtype", + "path", + "variable_name", + "fill_value", + "use_byte_data", + ) + + def __init__(self, cf_var, dtype, path, fill_value, *, use_byte_data=False): + self.shape = cf_var.shape + self.variable_name = cf_var.name self.dtype = dtype self.path = path - self.variable_name = variable_name self.fill_value = fill_value + self.use_byte_data = use_byte_data @property def ndim(self): @@ -337,6 +348,8 @@ def __getitem__(self, keys): dataset = netCDF4.Dataset(self.path) try: variable = dataset.variables[self.variable_name] + if self.use_byte_data: + variable.set_auto_chartostring(False) # Get the NetCDF variable data and slice. 
var = variable[keys] finally: diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 219f681e67..5bb20e6585 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation. +.. z_reference:: iris.fileformats.netcdf.loader + :tags: topic_load_save + + API reference + See : `NetCDF User's Guide `_ and `netCDF4 python module `_. @@ -36,7 +41,7 @@ import iris.coord_systems import iris.coords import iris.fileformats.cf -from iris.fileformats.netcdf import _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets, _thread_safe_nc from iris.fileformats.netcdf.saver import _CF_ATTRS import iris.io import iris.util @@ -50,7 +55,11 @@ # An expected part of the public loader API, but includes thread safety # concerns so is housed in _thread_safe_nc. -NetCDFDataProxy = _thread_safe_nc.NetCDFDataProxy +# NOTE: this is the *default*, as required for public legacy api +# - in practice, when creating our proxies we dynamically choose between this and +# :class:`_thread_safe_nc.DatasetWrapper`, depending on +# :data:`_bytecoding_datasets.DECODE_TO_STRINGS_ON_READ` +NetCDFDataProxy = _bytecoding_datasets.EncodedNetCDFDataProxy class _WarnComboIgnoringBoundsLoad( @@ -279,7 +288,7 @@ def _get_cf_var_data(cf_var): # correct dtype. Note: this is not an issue for masked arrays, # only masked scalar values. if result is np.ma.masked: - result = np.ma.masked_all(1, dtype=cf_var.datatype) + result = np.ma.masked_all(1, dtype=cf_var.dtype) else: # Get lazy chunked data out of a cf variable. # Creates Dask wrappers around data arrays for any cube components which @@ -289,15 +298,27 @@ def _get_cf_var_data(cf_var): # Make a data-proxy that mimics array access and can fetch from the file. 
# Note: Special handling needed for "variable length string" types which # return a dtype of `str`, rather than a numpy type; use `S1` in this case. - fill_dtype = "S1" if cf_var.dtype is str else cf_var.dtype.str[1:] - fill_value = getattr( - cf_var.cf_data, - "_FillValue", - _thread_safe_nc.default_fillvals[fill_dtype], - ) - proxy = NetCDFDataProxy( - cf_var.shape, dtype, cf_var.filename, cf_var.cf_name, fill_value - ) + if getattr(cf_var.dtype, "kind", None) == "U": + # Special handling for "string variables". + fill_value = "" + else: + fill_dtype = "S1" if cf_var.dtype is str else cf_var.dtype.str[1:] + fill_value = getattr( + cf_var.cf_data, + "_FillValue", + _thread_safe_nc.default_fillvals[fill_dtype], + ) + + # Switch type of proxy, based on type of variable. + # It is done this way, instead of using an instance variable, because the + # limited nature of the wrappers makes a stateful choice awkward, + # e.g. especially, "variable.group()" is *not* the parent DatasetWrapper. + if isinstance(cf_var.cf_data, _bytecoding_datasets.EncodedVariable): + proxy_class = _bytecoding_datasets.EncodedNetCDFDataProxy + else: + proxy_class = _thread_safe_nc.NetCDFDataProxy + + proxy = proxy_class(cf_var.cf_data, dtype, cf_var.filename, fill_value) # Get the chunking specified for the variable : this is either a shape, or # maybe the string "contiguous". if CHUNK_CONTROL.mode is ChunkControl.Modes.AS_DASK: diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 5177749c07..8e03776c3d 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Module to support the saving of Iris cubes to a NetCDF file. +.. 
z_reference:: iris.fileformats.netcdf.saver + :tags: topic_load_save + + API reference + Module to support the saving of Iris cubes to a NetCDF file, also using the CF conventions for metadata interpretation. @@ -14,6 +19,7 @@ """ +import codecs import collections from itertools import repeat, zip_longest import os @@ -48,7 +54,8 @@ from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.exceptions import iris.fileformats.cf -from iris.fileformats.netcdf import _dask_locks, _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets as bytecoding_datasets +from iris.fileformats.netcdf import _dask_locks from iris.fileformats.netcdf._attribute_handlers import ATTRIBUTE_HANDLERS import iris.io import iris.util @@ -300,7 +307,7 @@ class VariableEmulator(typing.Protocol): shape: tuple[int, ...] -CFVariable = typing.Union[_thread_safe_nc.VariableWrapper, VariableEmulator] +CFVariable = typing.Union[bytecoding_datasets.VariableWrapper, VariableEmulator] class Saver: @@ -403,7 +410,7 @@ def __init__(self, filename, netcdf_format, compute=True): # Put it inside a _thread_safe_nc wrapper to ensure thread-safety. # Except if it already is one, since they forbid "re-wrapping". 
if not hasattr(self._dataset, "THREAD_SAFE_FLAG"): - self._dataset = _thread_safe_nc.DatasetWrapper.from_existing( + self._dataset = bytecoding_datasets.DatasetWrapper.from_existing( self._dataset ) @@ -414,7 +421,7 @@ def __init__(self, filename, netcdf_format, compute=True): # Given a filepath string/path : create a dataset from that try: self.filepath = os.path.abspath(filename) - self._dataset = _thread_safe_nc.DatasetWrapper( + self._dataset = bytecoding_datasets.EncodedDataset( self.filepath, mode="w", format=netcdf_format ) except RuntimeError: @@ -759,7 +766,7 @@ def _create_cf_dimensions(self, cube, dimension_names, unlimited_dimensions=None # used for a different one pass else: - dim_name = self._get_coord_variable_name(cube, coord) + dim_name = self._get_element_variable_name(cube, coord) unlimited_dim_names.append(dim_name) for dim_name in dimension_names: @@ -990,12 +997,12 @@ def _add_aux_coords( ] # Include any relevant mesh location coordinates. - mesh: MeshXY | None = getattr(cube, "mesh") - mesh_location: str | None = getattr(cube, "location") + mesh: MeshXY | None = getattr(cube, "mesh") # type: ignore[annotation-unchecked] + mesh_location: str | None = getattr(cube, "location") # type: ignore[annotation-unchecked] if mesh and mesh_location: location_coords: MeshNodeCoords | MeshEdgeCoords | MeshFaceCoords = getattr( mesh, f"{mesh_location}_coords" - ) + ) # type: ignore[annotation-unchecked] coords_to_add.extend(list(location_coords)) return self._add_inner_related_vars( @@ -1365,7 +1372,7 @@ def record_dimension(names_list, dim_name, length, matching_coords=None): if dim_name is None: # Not already present : create a unique dimension name # from the coord. - dim_name = self._get_coord_variable_name(cube, coord) + dim_name = self._get_element_variable_name(cube, coord) # Disambiguate if it has the same name as an # existing dimension. # OR if it matches an existing file variable name. 
@@ -1541,38 +1548,14 @@ def _create_cf_bounds(self, coord, cf_var, cf_name, /, *, compression_kwargs=Non ) self._lazy_stream_data(data=bounds, cf_var=cf_var_bounds) - def _get_cube_variable_name(self, cube): - """Return a CF-netCDF variable name for the given cube. - - Parameters - ---------- - cube : :class:`iris.cube.Cube` - An instance of a cube for which a CF-netCDF variable - name is required. - - Returns - ------- - str - A CF-netCDF variable name as a string. - - """ - if cube.var_name is not None: - cf_name = cube.var_name - else: - # Convert to lower case and replace whitespace by underscores. - cf_name = "_".join(cube.name().lower().split()) - - cf_name = self.cf_valid_var_name(cf_name) - return cf_name - - def _get_coord_variable_name(self, cube_or_mesh, coord): - """Return a CF-netCDF variable name for a given coordinate-like element. + def _get_element_variable_name(self, cube_or_mesh, element): + """Return a CF-netCDF variable name for a given coordinate-like element, or cube. Parameters ---------- cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.mesh.MeshXY` The Cube or Mesh being saved to the netCDF file. - coord : :class:`iris.coords._DimensionalMetadata` + element : :class:`iris.coords._DimensionalMetadata` | :class:``iris.cube.Cube`` An instance of a coordinate (or similar), for which a CF-netCDF variable name is required. @@ -1592,17 +1575,21 @@ def _get_coord_variable_name(self, cube_or_mesh, coord): cube = None mesh = cube_or_mesh - if coord.var_name is not None: - cf_name = coord.var_name + if element.var_name is not None: + cf_name = element.var_name + elif isinstance(element, Cube): + # Make name for a Cube without a var_name. 
+ cf_name = "_".join(element.name().lower().split()) else: - name = coord.standard_name or coord.long_name + # Make name for a Coord-like element without a var_name + name = element.standard_name or element.long_name if not name or set(name).intersection(string.whitespace): # We need to invent a name, based on its associated dimensions. - if cube is not None and cube.coords(coord): + if cube is not None and cube.coords(element): # It is a regular cube coordinate. # Auto-generate a name based on the dims. name = "" - for dim in cube.coord_dims(coord): + for dim in cube.coord_dims(element): name += f"dim{dim}" # Handle scalar coordinate (dims == ()). if not name: @@ -1616,8 +1603,8 @@ def _get_coord_variable_name(self, cube_or_mesh, coord): # At present, a location-coord cannot be nameless, as the # MeshXY code relies on guess_coord_axis. - assert isinstance(coord, Connectivity) - location = coord.cf_role.split("_")[0] + assert isinstance(element, Connectivity) + location = element.cf_role.split("_")[0] location_dim_attr = f"{location}_dimension" name = getattr(mesh, location_dim_attr) @@ -1693,6 +1680,8 @@ def _create_mesh(self, mesh): return cf_mesh_name def _set_cf_var_attributes(self, cf_var, element): + from iris.cube import Cube + # Deal with CF-netCDF units, and add the name+units properties. if isinstance(element, iris.coords.Coord): # Fix "degree" units if needed. 
@@ -1700,34 +1689,59 @@ def _set_cf_var_attributes(self, cf_var, element): else: units_str = str(element.units) - if cf_units.as_unit(units_str).is_udunits(): - _setncattr(cf_var, "units", units_str) - - standard_name = element.standard_name - if standard_name is not None: - _setncattr(cf_var, "standard_name", standard_name) - - long_name = element.long_name - if long_name is not None: - _setncattr(cf_var, "long_name", long_name) + # NB this bit is a nasty hack to preserve existing behaviour through a refactor: + # The attributes for Coords are created in the order units, standard_name, + # whereas for data-variables (aka Cubes) it is the other way around. + # Needed now that this routine is also called from _create_cf_data_variable. + # TODO: when we can break things, rationalise these to be the same. + def add_units_attr(): + if cf_units.as_unit(units_str).is_udunits(): + _setncattr(cf_var, "units", units_str) + + def add_names_attrs(): + standard_name = element.standard_name + if standard_name is not None: + _setncattr(cf_var, "standard_name", standard_name) + + long_name = element.long_name + if long_name is not None: + _setncattr(cf_var, "long_name", long_name) + + if isinstance(element, Cube): + add_names_attrs() + add_units_attr() + else: + add_units_attr() + add_names_attrs() # Add the CF-netCDF calendar attribute. if element.units.calendar: _setncattr(cf_var, "calendar", str(element.units.calendar)) - # Add any other custom coordinate attributes. - for name in sorted(element.attributes): - value = element.attributes[name] - - if name == "STASH": - # Adopting provisional Metadata Conventions for representing MO - # Scientific Data encoded in NetCDF Format. - name = "um_stash_source" - value = str(value) - - # Don't clobber existing attributes. - if not hasattr(cf_var, name): - _setncattr(cf_var, name, value) + # Note: when writing UGRID, "element" can be a Mesh which has no "dtype", + # and for dataless cubes it will have a 'None' dtype. 
+ if getattr(element, "dtype", None) is not None: + # Most attributes are dealt with later. But _Encoding needs to be defined + # *before* we can write to a character variable. + if element.dtype.kind in "SU" and "_Encoding" in element.attributes: + encoding = element.attributes.pop("_Encoding") + _setncattr(cf_var, "_Encoding", encoding) + + if not isinstance(element, Cube): + # Add any other custom coordinate attributes. + # N.B. not Cube, which has specific handling in _create_cf_data_variable + for name in sorted(element.attributes): + value = element.attributes[name] + + if name == "STASH": + # Adopting provisional Metadata Conventions for representing MO + # Scientific Data encoded in NetCDF Format. + name = "um_stash_source" + value = str(value) + + # Don't clobber existing attributes. + if not hasattr(cf_var, name): + _setncattr(cf_var, name, value) def _create_generic_cf_array_var( self, @@ -1739,6 +1753,8 @@ def _create_generic_cf_array_var( element_dims=None, fill_value=None, compression_kwargs=None, + packing_controls: dict | None = None, + is_dataless=False, ): """Create theCF-netCDF variable given dimensional_metadata. @@ -1791,7 +1807,7 @@ def _create_generic_cf_array_var( # Work out the var-name to use. # N.B. the only part of this routine that may use a mesh _or_ a cube. - cf_name = self._get_coord_variable_name(cube_or_mesh, element) + cf_name = self._get_element_variable_name(cube_or_mesh, element) while cf_name in self._dataset.variables: cf_name = self._increment_name(cf_name) @@ -1804,18 +1820,29 @@ def _create_generic_cf_array_var( # Get the data values, in a way which works for any element type, as # all are subclasses of _DimensionalMetadata. # (e.g. =points if a coord, =data if an ancillary, etc) - data = element._core_values() + if isinstance(element, Cube): + data = element.core_data() + else: + data = element._core_values() # This compression contract is *not* applicable to a mesh. 
- if cube and cube.shape != data.shape: + if cube is not None and data is not None and cube.shape != data.shape: compression_kwargs = {} - if np.issubdtype(data.dtype, np.str_): + if not is_dataless and np.issubdtype(data.dtype, np.str_): # Deal with string-type variables. # Typically CF label variables, but also possibly ancil-vars ? string_dimension_depth = data.dtype.itemsize if data.dtype.kind == "U": - string_dimension_depth //= 4 + encoding = element.attributes.get("_Encoding", "ascii") + # TODO: this can fail -- use a sensible warning + default? + encoding = codecs.lookup(encoding).name + if encoding == "utf-32": + # UTF-32 is a special case -- always 4 exactly bytes per char, plus 4 + string_dimension_depth += 4 + else: + # generally, 4 bytes per char in numpy --> make bytewidth = string-width + string_dimension_depth //= 4 string_dimension_name = "string%d" % string_dimension_depth # Determine whether to create the string length dimension. @@ -1834,28 +1861,38 @@ def _create_generic_cf_array_var( # Create the label coordinate variable. cf_var = self._dataset.createVariable(cf_name, "|S1", element_dims) - # Convert data from an array of strings into a character array - # with an extra string-length dimension. - if len(element_dims) == 1: - data_first = data[0] - if is_lazy_data(data_first): - data_first = dask.compute(data_first) - data = list("%- *s" % (string_dimension_depth, data_first)) - else: - orig_shape = data.shape - new_shape = orig_shape + (string_dimension_depth,) - new_data = np.zeros(new_shape, cf_var.dtype) - for index in np.ndindex(orig_shape): - index_slice = tuple(list(index) + [slice(None, None)]) - new_data[index_slice] = list( - "%- *s" % (string_dimension_depth, data[index]) - ) - data = new_data + # # Convert data from an array of strings into a character array + # # with an extra string-length dimension. + # if len(element_dims) == 1: + # # Scalar variable (only has string dimension). 
+ # data_first = data[0] + # if is_lazy_data(data_first): + # data_first = dask.compute(data_first) + # data = list("%- *s" % (string_dimension_depth, data_first)) + # else: + # # NOTE: at present, can't do this lazily?? + # orig_shape = data.shape + # new_shape = orig_shape + (string_dimension_depth,) + # new_data = np.zeros(new_shape, cf_var.dtype) + # for index in np.ndindex(orig_shape): + # index_slice = tuple(list(index) + [slice(None, None)]) + # new_data[index_slice] = list( + # "%- *s" % (string_dimension_depth, data[index]) + # ) + # data = new_data else: # A normal (numeric) variable. # ensure a valid datatype for the file format. - element_type = type(element).__name__ - data = self._ensure_valid_dtype(data, element_type, element) + if is_dataless: + dtype = self._DATALESS_DTYPE + fill_value = self._DATALESS_FILLVALUE + else: + element_type = type(element).__name__ + data = self._ensure_valid_dtype(data, element_type, element) + if not packing_controls: + dtype = data.dtype.newbyteorder("=") + else: + dtype = packing_controls["dtype"] # Check if this is a dim-coord. is_dimcoord = cube is not None and element in cube.dim_coords @@ -1869,7 +1906,7 @@ def _create_generic_cf_array_var( # Create the CF-netCDF variable. cf_var = self._dataset.createVariable( cf_name, - data.dtype.newbyteorder("="), + dtype, element_dims, fill_value=fill_value, **compression_kwargs, @@ -1886,12 +1923,18 @@ def _create_generic_cf_array_var( element, cf_var, cf_name, compression_kwargs=compression_kwargs ) - # Add the data to the CF-netCDF variable. - self._lazy_stream_data(data=data, cf_var=cf_var) - # Add names + units + # NOTE: *must* now do first, as we may need '_Encoding' set to write it ! self._set_cf_var_attributes(cf_var, element) + # Add the data to the CF-netCDF variable. + if not is_dataless: + if packing_controls: + # We must set packing attributes (if any), before assigning values. 
+ for key, value in packing_controls["attributes"]: + _setncattr(cf_var, key, value) + self._lazy_stream_data(data=data, cf_var=cf_var) + return cf_name def _create_cf_cell_methods(self, cube, dimension_names): @@ -2238,9 +2281,9 @@ def _create_cf_grid_mapping(self, cube, cf_var_cube): cfvar = self._name_coord_map.name(coord) if not cfvar: # not found - create and store it: - cfvar = self._get_coord_variable_name(cube, coord) + cfvar = self._get_element_variable_name(cube, coord) self._name_coord_map.append( - cfvar, self._get_coord_variable_name(cube, coord) + cfvar, self._get_element_variable_name(cube, coord) ) cfvar_names.append(cfvar) @@ -2320,18 +2363,10 @@ def _create_cf_data_variable( # be removed. # Get the values in a form which is valid for the file format. is_dataless = cube.is_dataless() - if is_dataless: - data = None - else: - data = self._ensure_valid_dtype(cube.core_data(), "cube", cube) - if is_dataless: - # The variable must have *some* dtype, and it must be maskable - dtype = self._DATALESS_DTYPE - fill_value = self._DATALESS_FILLVALUE - elif not packing: - dtype = data.dtype.newbyteorder("=") - else: + packing_controls = None + if packing and not is_dataless: + data = self._ensure_valid_dtype(cube.core_data(), "cube", cube) if isinstance(packing, dict): if "dtype" not in packing: msg = "The dtype attribute is required for packing." @@ -2370,45 +2405,29 @@ def _create_cf_data_variable( else: add_offset = cmin + 2 ** (n - 1) * scale_factor - def set_packing_ncattrs(cfvar): - """Set netCDF packing attributes. - - NOTE: cfvar needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. 
- - """ - assert hasattr(cfvar, "THREAD_SAFE_FLAG") - if packing: - if scale_factor: - _setncattr(cfvar, "scale_factor", scale_factor) - if add_offset: - _setncattr(cfvar, "add_offset", add_offset) - - cf_name = self._get_cube_variable_name(cube) - while cf_name in self._dataset.variables: - cf_name = self._increment_name(cf_name) + packing_controls = { + "dtype": dtype, + "attributes": [ + ("scale_factor", scale_factor), + ("add_offset", add_offset), + ], + } # Create the cube CF-netCDF data variable with data payload. - cf_var = self._dataset.createVariable( - cf_name, dtype, dimension_names, fill_value=fill_value, **kwargs + cf_name = self._create_generic_cf_array_var( + cube, + dimension_names, + cube, + element_dims=dimension_names, + fill_value=fill_value, + compression_kwargs=kwargs, + packing_controls=packing_controls, + is_dataless=is_dataless, ) + cf_var = self._dataset.variables[cf_name] - if not is_dataless: - set_packing_ncattrs(cf_var) - self._lazy_stream_data(data=data, cf_var=cf_var) - - if cube.standard_name: - _setncattr(cf_var, "standard_name", cube.standard_name) - - if cube.long_name: - _setncattr(cf_var, "long_name", cube.long_name) - - if cube.units.is_udunits(): - _setncattr(cf_var, "units", str(cube.units)) - - # Add the CF-netCDF calendar attribute. - if cube.units.calendar: - _setncattr(cf_var, "calendar", cube.units.calendar) - + # Set general attrs: NB this part is cube-specific (not the same for components) + # - so 'set_cf_var_attributes' *doesn't* set these, if element is a Cube if iris.FUTURE.save_split_attrs: attr_names = cube.attributes.locals.keys() else: @@ -2535,7 +2554,7 @@ def store( ) -> None: # Create a data-writeable object that we can stream into, which # encapsulates the file to be opened + variable to be written. 
- write_wrapper = _thread_safe_nc.NetCDFWriteProxy( + write_wrapper = bytecoding_datasets.EncodedNetCDFWriteProxy( self.filepath, cf_var, self.file_write_lock ) # Add to the list of delayed writes, used in delayed_completion(). diff --git a/lib/iris/fileformats/netcdf/ugrid_load.py b/lib/iris/fileformats/netcdf/ugrid_load.py index 0d4766057c..50147f17ef 100644 --- a/lib/iris/fileformats/netcdf/ugrid_load.py +++ b/lib/iris/fileformats/netcdf/ugrid_load.py @@ -5,6 +5,11 @@ r"""Allow the construction of :class:`~iris.mesh.MeshXY`. +.. z_reference:: iris.fileformats.netcdf.ugrid_load + :tags: topic_load_save;topic_mesh + + API reference + Extension functions for Iris NetCDF loading, to construct :class:`~iris.mesh.MeshXY` from UGRID data in files. diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index d318c94882..03a44f1ec2 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -2,8 +2,15 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides NIMROD file format capabilities.""" +"""Provides NIMROD file format capabilities. +.. 
z_reference:: iris.fileformats.nimrod + :tags: topic_load_save + + API reference +""" + +from enum import Enum import glob import os import struct @@ -14,6 +21,7 @@ import iris from iris.exceptions import TranslationError import iris.fileformats.nimrod_load_rules +from iris.fileformats.nimrod_load_rules import Table # general header (int16) elements 1-31 (Fortran bytes 1-62) general_header_int16s = ( @@ -101,59 +109,97 @@ # data specific header (int16) elements 108-159 (Fortran bytes 411-512) -data_header_int16s = ( - "threshold_type", - "probability_method", - "recursive_filter_iterations", - "member_count", - "probability_period_of_event", +table_1_data_header_int16s = ( + ( + "radar_number", + "radar_sites", + "additional_radar_sites", + "clutter_map_number", + "calibration_type", + "bright_band_height", + "bright_band_intensity", + "bright_band_test_param_1", + "bright_band_test_param_2", + "infill_flag", + "stop_elevation", + "copy_vertical_coord", + "copy_reference_vertical_coord", + "copy_y_origin", + "copy_row_step", + "copy_x_origin", + "copy_column_step", + "copy_float32_mdi", + "copy_MKS_data_scaling", + "copy_data_offset", + "copy_x_offset", + "copy_y_offset", + "copy_true_origin_latitude", + "copy_true_origin_longitude", + "copy_tl_y", + "copy_tl_x", + "copy_tr_y", + "copy_tr_x", + "copy_br_y", + "copy_br_x", + "copy_bl_y", + "copy_bl_x", + "sensor_identifier", + "meteosat_identifier", + "availability_of_synop_meteosat", + "software_identifier", + "software_major_version", + "software_minor_version", + "software_micro_version", + ) + + tuple(f"data_header_int16_{i}" for i in range(48, 59)) + + ("period_seconds",) +) + + +# data specific header (int16) elements 108-159 (Fortran bytes 411-512) +table_2_data_header_int16s = ( + ( + "threshold_type", + "probability_method", + "recursive_filter_iterations", + "member_count", + "probability_period_of_event", + ) + + tuple(f"data_header_int16_{i}" for i in range(5, 59)) + + ("period_seconds",) +) + + +# data 
specific header (int16) elements 108-159 (Fortran bytes 411-512) +table_3_data_header_int16s = ( + "data_header_int16_00", + "data_header_int16_01", + "data_header_int16_02", + "data_header_int16_03", + "data_header_int16_04", "data_header_int16_05", "soil_type", + "data_header_int16_07", + "data_header_int16_08", + "data_header_int16_09", + "data_header_int16_10", +) + tuple(f"data_header_int16_{i}" for i in range(11, 60)) + + +# data specific header (int16) elements 108-159 (Fortran bytes 411-512) +table_4_data_header_int16s = ( + "data_header_int16_00", + "data_header_int16_01", + "data_header_int16_02", + "data_header_int16_03", + "data_header_int16_04", + "data_header_int16_05", + "data_header_int16_06", "radiation_code", "data_header_int16_08", "data_header_int16_09", "data_header_int16_10", - "data_header_int16_11", - "data_header_int16_12", - "data_header_int16_13", - "data_header_int16_14", - "data_header_int16_15", - "data_header_int16_16", - "data_header_int16_17", - "data_header_int16_18", - "data_header_int16_19", - "data_header_int16_20", - "data_header_int16_21", - "data_header_int16_22", - "data_header_int16_23", - "data_header_int16_24", - "data_header_int16_25", - "data_header_int16_26", - "data_header_int16_27", - "data_header_int16_28", - "data_header_int16_29", - "data_header_int16_30", - "data_header_int16_31", - "data_header_int16_32", - "data_header_int16_33", - "data_header_int16_34", - "data_header_int16_35", - "data_header_int16_36", - "data_header_int16_37", - "data_header_int16_38", - "data_header_int16_39", - "data_header_int16_40", - "data_header_int16_41", - "data_header_int16_42", - "data_header_int16_43", - "data_header_int16_44", - "data_header_int16_45", - "data_header_int16_46", - "data_header_int16_47", - "data_header_int16_48", - "data_header_int16_49", - "period_seconds", -) +) + tuple(f"data_header_int16_{i}" for i in range(11, 60)) def _read_chars(infile, num): @@ -226,6 +272,53 @@ def _read_header(self, infile): 
self.source = _read_chars(infile, 24) self.title = _read_chars(infile, 24) + # determine which of Table 1, 2, 3 or 4 is being used + Table_3_field_codes = [ + 18, + 144, + 190, + 191, + 192, + 193, + 194, + 196, + 197, + 198, + 199, + 201, + 202, + 203, + 204, + 218, + 219, + 301, + 302, + 901, + 8229, + 8230, + ] + Table_4_field_codes = [90, 91, 92, 93, 96, 303] + default_float_threshold = self.float32_mdi + threshold_set = ( + self.threshold_value != default_float_threshold + or self.threshold_value_alt != default_float_threshold + ) + # The `Table` enum is defined in iris.fileformats.nimrod_load_rules + if self.field_code in Table_3_field_codes: + table = Table.table_3 + data_header_int16s = table_3_data_header_int16s + elif self.field_code in Table_4_field_codes: + table = Table.table_4 + data_header_int16s = table_4_data_header_int16s + elif threshold_set: + table = Table.table_2 + data_header_int16s = table_2_data_header_int16s + else: + table = Table.table_1 + data_header_int16s = table_1_data_header_int16s + + self.table = table + # data specific header (int16) elements 108- (bytes 411-512) self._read_header_subset(infile, data_header_int16s, np.int16) # skip unnamed int16s diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 4b3987003a..1bb58d8b59 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -2,8 +2,15 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Rules for converting NIMROD fields into cubes.""" +"""Rules for converting NIMROD fields into cubes. +.. 
z_reference:: iris.fileformats.nimrod_load_rules + :tags: topic_load_save + + API reference +""" + +from enum import Enum import re import string import warnings @@ -31,6 +38,14 @@ ) +class Table(Enum): + # The NIMROD documentation defines four tables of fields, with different header contents and load rules. + table_1 = "Table_1" + table_2 = "Table_2" + table_3 = "Table_3" + table_4 = "Table_4" + + class TranslationWarning(IrisNimrodTranslationWarning): """Backwards compatible form of :class:`iris.warnings.IrisNimrodTranslationWarning`.""" @@ -636,10 +651,8 @@ def add_attr(item): cube_source = "Nimrod pwind routine" for key in [ "neighbourhood_radius", - "recursive_filter_iterations", "recursive_filter_alpha", "threshold_vicinity_radius", - "probability_period_of_event", ]: add_attr(key) @@ -660,6 +673,60 @@ def add_attr(item): cube.attributes["institution"] = "Met Office" +def table_1_attributes(cube, field): + """Add attributes to the cube.""" + # TODO: This section may need to be changed in the future + # as there may be some of these attributes that can be promoted into coords + # but we in AVD do not have that level of domain knowledge to make those decisions + + def add_attr(item): + """Add an attribute to the cube.""" + if hasattr(field, item): + value = getattr(field, item) + if is_missing(field, value): + return + cube.attributes[item] = value + + for key in [ + "radar_number", + "radar_sites", + "additional_radar_sites", + "clutter_map_number", + "calibration_type", + "bright_band_height", + "bright_band_intensity", + "bright_band_test_param_1", + "bright_band_test_param_2", + "infill_flag", + "stop_elevation", + "sensor_identifier", + "meteosat_identifier", + "software_identifier", + "software_major_version", + "software_minor_version", + "software_micro_version", + ]: + add_attr(key) + + +def table_2_attributes(cube, field): + """Add attributes to the cube.""" + # TODO: This section may need to be changed in the future + # as there may be some of these 
attributes that can be promoted into coords + # but we in AVD do not have that level of domain knowledge to make those decisions + + def add_attr(item): + """Add an attribute to the cube.""" + if hasattr(field, item): + value = getattr(field, item) + if is_missing(field, value): + return + cube.attributes[item] = value + + for key in ["recursive_filter_iterations", "probability_period_of_event"]: + add_attr(key) + + def known_threshold_coord(field): """Supply known threshold coord meta-data for known use cases. @@ -865,6 +932,26 @@ def soil_type_coord(cube, field): ) +def radiation_type_attr(cube, field): + """Decode the Radiation Types codes - similar to time_averaging.""" + radiation_codes = { + 64: 'instantaneous ("corrected")', + 32: "upward_radiation", + 16: "downward_radiation", + 8: "diffuse_radiation", + 4: "direct_radiation", + 2: "clear_sky_radiation", + } + num = field.radiation_code + radiation_types = [] + for key in sorted(radiation_codes.keys(), reverse=True): + if num >= key: + radiation_types.append(radiation_codes[key]) + num = num - key + if radiation_types: + cube.attributes["radiation_type"] = radiation_types + + def time_averaging(cube, field): """Decode the averagingtype code - similar to the PP LBPROC code.""" time_averaging_codes = { @@ -930,9 +1017,18 @@ def run(field, handle_metadata_errors=True): # vertical vertical_coord(cube, field) - # add other stuff, if present - soil_type_coord(cube, field) - probability_coord(cube, field, handle_metadata_errors) + match field.table: + case Table.table_1: + table_1_attributes(cube, field) + case Table.table_2: + probability_coord(cube, field, handle_metadata_errors) + table_2_attributes(cube, field) + case Table.table_3: + soil_type_coord(cube, field) + case Table.table_4: + radiation_type_attr(cube, field) + + # add other generic stuff, if present ensemble_member(cube, field) time_averaging(cube, field) attributes(cube, field) diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py 
index 211ba1621c..971c0ff7ef 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides UK Met Office Post Process (PP) format specific capabilities.""" +"""Provides UK Met Office Post Process (PP) format specific capabilities. + +.. z_reference:: iris.fileformats.pp + :tags: topic_load_save + + API reference +""" from abc import ABCMeta, abstractmethod import collections diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 59e0f31d17..f28e65a878 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -6,7 +6,13 @@ # Historically this was auto-generated from # SciTools/iris-code-generators:tools/gen_rules.py -"""PP Load Rules.""" +"""PP Load Rules. + +.. z_reference:: iris.fileformats.pp_load_rules + :tags: topic_load_save + + API reference +""" import calendar from functools import wraps diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index a6a72393a7..d2932015c3 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""PP Save Rules.""" +"""PP Save Rules. + +.. z_reference:: iris.fileformats.pp_save_rules + :tags: topic_load_save + + API reference +""" import warnings diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index d61389c663..dce36b2554 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Generalised mechanisms for metadata translation and cube construction.""" +"""Generalised mechanisms for metadata translation and cube construction. + +.. z_reference:: iris.fileformats.rules + :tags: topic_load_save + + API reference +""" import collections import threading diff --git a/lib/iris/fileformats/um/__init__.py b/lib/iris/fileformats/um/__init__.py index 3a4bd6c516..863ef6febf 100644 --- a/lib/iris/fileformats/um/__init__.py +++ b/lib/iris/fileformats/um/__init__.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provides iris loading support for UM Fieldsfile-like file types, and PP. +.. z_reference:: iris.fileformats.um + :tags: topic_load_save + + API reference + At present, the only UM file types supported are true FieldsFiles and LBCs. Other types of UM file may fail to load correctly (or at all). diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index d2e51a3257..b230e543c8 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -5,6 +5,10 @@ """ Provides UM/CF phenomenon translations. +.. z_reference:: iris.fileformats.um_cf_map + :tags: topic_load_save + + API reference """ from collections import namedtuple diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 4e32ebf20a..0a7cdd9abb 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Provides an interface to manage URI scheme support in iris.""" +"""Provides an interface to manage URI scheme support in iris. + +.. z_reference:: iris.io + :tags: topic_load_save + + API reference +""" import collections from collections import OrderedDict @@ -54,7 +60,7 @@ def run_callback(callback, cube, field, filename): the caller of this function should handle this case. 
This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.cube import Cube @@ -452,7 +458,7 @@ def save(source, target, saver=None, **kwargs): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.cube import Cube, CubeList diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index c885a55074..3f93b5cfd6 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Provide convenient file format identification. +.. z_reference:: iris.io.format_picker + :tags: topic_load_save + + API reference + A module to provide convenient file format identification through a combination of filename extension and file based *magic* numbers. diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index fd2d2ed139..c2d478806c 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Cube functions for iteration in step.""" +"""Cube functions for iteration in step. + +.. z_reference:: iris.iterate + :tags: topic_slice_combine + + API reference +""" from collections.abc import Iterator import itertools @@ -56,7 +62,7 @@ def izip(*cubes, **kwargs): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" if not cubes: diff --git a/lib/iris/loading.py b/lib/iris/loading.py index b188d5ae9d..68042847c1 100644 --- a/lib/iris/loading.py +++ b/lib/iris/loading.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Iris general file loading mechanism.""" +"""Iris general file loading mechanism. + +.. z_reference:: iris.loading + :tags: topic_load_save + + API reference +""" from contextlib import contextmanager from dataclasses import dataclass @@ -537,6 +543,7 @@ class LoadProblems(threading.local): >>> warnings.filterwarnings("ignore") >>> helpers.get_names = get_names_original >>> std_names.STD_NAMES["air_temperature"] = air_temperature + >>> iris.FUTURE.date_microseconds = False """ diff --git a/lib/iris/mesh/__init__.py b/lib/iris/mesh/__init__.py index ff530a4abd..e872f6f864 100644 --- a/lib/iris/mesh/__init__.py +++ b/lib/iris/mesh/__init__.py @@ -5,6 +5,11 @@ """Infra-structure for unstructured mesh support. +.. z_reference:: iris.mesh + :tags: topic_data_model;topic_mesh + + API reference + Based on CF UGRID Conventions (v1.0), https://ugrid-conventions.github.io/ugrid-conventions/. """ diff --git a/lib/iris/mesh/components.py b/lib/iris/mesh/components.py index 2cc10c18c1..3e61d7c0d4 100644 --- a/lib/iris/mesh/components.py +++ b/lib/iris/mesh/components.py @@ -5,6 +5,11 @@ """Iris data model representation of CF UGrid's Mesh and its constituent parts. +.. z_reference:: iris.mesh.components + :tags: topic_data_model;topic_mesh + + API reference + Eventual destination: dedicated module in :mod:`iris` root. """ diff --git a/lib/iris/mesh/utils.py b/lib/iris/mesh/utils.py index 3930fa3f1b..eebfcb9ae1 100644 --- a/lib/iris/mesh/utils.py +++ b/lib/iris/mesh/utils.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Utility operations specific to unstructured data.""" +"""Utility operations specific to unstructured data. + +.. z_reference:: iris.mesh.utils + :tags: topic_data_model;topic_mesh;topic_slice_combine + + API reference +""" from collections.abc import Sequence from typing import Union diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 500c203a43..7f8046fbf9 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Color map pallettes management. +.. z_reference:: iris.palette + :tags: topic_plotting + + API reference + Load, configure and register color map palettes and initialise color map meta-data mappings. """ @@ -128,7 +133,7 @@ def cmap_norm(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ args, kwargs = _default_cmap_norm((cube,), {}) diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index b23b31dff6..dcdcbd3bb9 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -4,12 +4,18 @@ # See LICENSE in the root of the repository for full licensing details. """Provide conversion to and from Pandas data structures. +.. 
z_reference:: iris.pandas + :tags: topic_interoperability + + API reference + See also: https://pandas.pydata.org/ """ import datetime from itertools import chain, combinations +from typing import TYPE_CHECKING, Optional import warnings import cf_units @@ -18,21 +24,27 @@ import numpy as np import numpy.ma as ma import pandas as pd - -try: - from pandas.core.indexes.datetimes import DatetimeIndex # pandas >=0.20 -except ImportError: - from pandas.tseries.index import DatetimeIndex # pandas <0.20 +from pandas import Index as pd_index import iris -from iris._deprecation import warn_deprecated +from iris._deprecation import explicit_copy_checker, warn_deprecated from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord from iris.cube import Cube, CubeList -from iris.util import new_axis +from iris.util import monotonic, new_axis from iris.warnings import IrisIgnoringWarning +try: + from pandas.core.indexes.datetimes import DatetimeIndex # pandas >=0.20 +except ImportError: + from pandas.tseries.index import DatetimeIndex # pandas <0.20 -def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): + +def _get_dimensional_metadata( + name: str, + values: np.ndarray | DatetimeIndex | pd_index, + calendar: Optional[str] = None, + dm_class: Optional[AuxCoord | DimCoord] = None, +) -> AuxCoord | DimCoord: """Create a Coord or other dimensional metadata from a Pandas index or columns array. If no calendar is specified for a time series, Standard is assumed. @@ -45,7 +57,9 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): # Getting everything into a single datetime format is hard! # Convert out of NumPy's own datetime format. - if np.issubdtype(values.dtype, np.datetime64): + if isinstance(values.dtype, np.dtype) and np.issubdtype( + values.dtype, np.datetime64 + ): values = pd.to_datetime(values) # Convert pandas datetime objects to python datetime objects. 
@@ -61,15 +75,14 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): values = np.array(values) - if dm_class is None: - if np.issubdtype(values.dtype, np.number) and iris.util.monotonic( - values, strict=True - ): - dm_class = DimCoord + if dm_class is not None: + instance = dm_class(values, units=units) # type: ignore[operator] + else: + if np.issubdtype(values.dtype, np.number) and monotonic(values, strict=True): + instance = DimCoord(values, units=units) else: - dm_class = AuxCoord + instance = AuxCoord(values, units=units) - instance = dm_class(values, units=units) if name is not None: # Use rename() to attempt standard_name but fall back on long_name. instance.rename(str(name)) @@ -77,19 +90,25 @@ def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): return instance -def _add_iris_coord(cube, name, points, dim, calendar=None): +def _add_iris_coord( + cube: Cube, + name: str, + points: np.ndarray | DatetimeIndex | pd_index, + dim: int, + calendar: Optional[str] = None, +) -> None: """Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array.""" # Most functionality has been abstracted to _get_dimensional_metadata, # allowing reuse in as_cube() and as_cubes(). coord = _get_dimensional_metadata(name, points, calendar) - if coord.__class__ == DimCoord: + if isinstance(coord, DimCoord): cube.add_dim_coord(coord, dim) else: cube.add_aux_coord(coord, dim) -def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...] | None: +def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...]: """Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. Iterates through grouping single index levels, then combinations of 2 @@ -97,7 +116,10 @@ def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...] | None: group are found. Returns a ``tuple`` of the index levels that group to produce single values, as soon as one is found. 
- Returns ``None`` if no index level combination produces single values. + Raises + ------ + ValueError + If no unique grouping can be found. This would cause problems defining iris coordinates later on. """ unique_number = pandas_series.nunique() @@ -116,14 +138,24 @@ def _series_index_unique(pandas_series: pd.Series) -> tuple[int, ...] | None: result = lc # Escape as early as possible - heavy operation. break + # Protect against the possibility of no unique grouping being found, which would cause problems + # defining iris coordinates later on. + if result is None: + message = ( + "No unique index grouping could be found for this Series. " + "Consider resetting the index or adding a unique index level." + ) + raise ValueError(message) + return result +@explicit_copy_checker def as_cube( - pandas_array, - copy=True, - calendars=None, -): + pandas_array: pd.Series | pd.DataFrame, + copy: bool = True, + calendars: Optional[dict] = None, +) -> Cube: """Convert a Pandas Series/DataFrame into a 1D/2D Iris Cube. Parameters @@ -133,6 +165,14 @@ def as_cube( copy : bool, default=True Whether to copy `pandas_array`, or to create array views where possible. Provided in case of memory limit concerns. + + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned Cube is independent + of the input pandas data and to be consistent with pandas v3 + behaviour. + calendars : dict, optional A dict mapping a dimension to a calendar. Required to convert datetime indices/columns. 
@@ -188,14 +228,15 @@ def as_cube( return cube +@explicit_copy_checker def as_cubes( - pandas_structure, - copy=True, - calendars=None, - aux_coord_cols=None, - cell_measure_cols=None, - ancillary_variable_cols=None, -): + pandas_structure: pd.DataFrame | pd.Series, + copy: bool = True, + calendars: Optional[dict] = None, + aux_coord_cols: Optional[list[str]] = None, + cell_measure_cols: Optional[list[str]] = None, + ancillary_variable_cols: Optional[list[str]] = None, +) -> CubeList: r"""Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. The index of `pandas_structure` will be used for generating the @@ -212,6 +253,14 @@ def as_cubes( `pandas_structure` column, or a view of the same array. Arrays other than the data (coords etc.) are always copies. This option is provided to help with memory size concerns. + + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned Cube is independent + of the input pandas data and to be consistent with pandas v3 + behaviour. + calendars : dict, optional Calendar conversions for individual date-time coordinate columns/index-levels e.g. ``{"my_column": cf_units.CALENDAR_360_DAY}``. @@ -396,7 +445,7 @@ def as_cubes( ) raise ValueError(message) - cube_kwargs = {} + cube_kwargs: dict = {} def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): # Common convenience to get the right DM in the right format for @@ -408,7 +457,7 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): # DimCoords. 
dim_coord_kwarg = [] for ix, dim_name in enumerate(pandas_index.names): - if hasattr(pandas_index, "levels"): + if isinstance(pandas_index, pd.MultiIndex): coord_points = pandas_index.levels[ix] else: coord_points = pandas_index @@ -453,7 +502,9 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): # for this object. _series_index_unique should have ensured # that we are indeed removing the duplicates. shaped = content.reshape(cube_shape) - indices = [0] * len(cube_shape) + # Static typing `indices` needed to avoid mypy call-overload error + # from assuming int instead of list for later slicing + indices: list = [0] * len(cube_shape) for dim in dimensions: indices[dim] = slice(None) collapsed = shaped[tuple(indices)] @@ -486,8 +537,8 @@ def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): return cubes -def _as_pandas_coord(coord): - """Convert an Iris Coord into a Pandas index or columns array.""" +def _as_pandas_coord(coord: DimCoord | AuxCoord) -> np.ndarray: + """Convert an Iris Coord into a numpy array.""" index = coord.points if coord.units.is_time_reference(): index = coord.units.num2date(index) @@ -511,25 +562,25 @@ def _get_base(array): raise AssertionError(msg) -def _make_dim_coord_list(cube): +def _make_dim_coord_list(cube: Cube) -> list: """Get Dimension coordinates.""" outlist = [] for dimn in range(cube.ndim): dimn_coord = cube.coords(dimensions=dimn, dim_coords=True) if dimn_coord: - outlist += [[dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])]] + outlist += [[dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])]] # type: ignore[AttributeAccessIssue] else: outlist += [[f"dim{dimn}", range(cube.shape[dimn])]] return list(zip(*outlist)) -def _make_aux_coord_list(cube): +def _make_aux_coord_list(cube: Cube) -> list: """Get Auxiliary coordinates.""" outlist = [] for aux_coord in cube.coords(dim_coords=False): outlist += [ [ - aux_coord.name(), + aux_coord.name(), # type: ignore[AttributeAccessIssue] 
cube.coord_dims(aux_coord), _as_pandas_coord(aux_coord), ] @@ -537,13 +588,13 @@ def _make_aux_coord_list(cube): return list(chain.from_iterable([outlist])) -def _make_ancillary_variables_list(cube): +def _make_ancillary_variables_list(cube: Cube) -> list: """Get Ancillary variables.""" outlist = [] for ancil_var in cube.ancillary_variables(): outlist += [ [ - ancil_var.name(), + ancil_var.name(), # type: ignore[AttributeAccessIssue] cube.ancillary_variable_dims(ancil_var), ancil_var.data, ] @@ -551,13 +602,13 @@ def _make_ancillary_variables_list(cube): return list(chain.from_iterable([outlist])) -def _make_cell_measures_list(cube): +def _make_cell_measures_list(cube: Cube) -> list: """Get cell measures.""" outlist = [] for cell_measure in cube.cell_measures(): outlist += [ [ - cell_measure.name(), + cell_measure.name(), # type: ignore[AttributeAccessIssue] cube.cell_measure_dims(cell_measure), cell_measure.data, ] @@ -565,7 +616,8 @@ def _make_cell_measures_list(cube): return list(chain.from_iterable([outlist])) -def as_series(cube, copy=True): +@explicit_copy_checker +def as_series(cube: Cube, copy: bool = True) -> pd.Series: """Convert a 1D cube to a Pandas Series. Parameters @@ -576,6 +628,13 @@ def as_series(cube, copy=True): Whether to make a copy of the data. Defaults to True. Must be True for masked data. + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned Series is independent + of the input cube data and to be consistent with pandas v3 + behaviour. + Notes ----- This function will copy your data by default.
@@ -601,7 +660,7 @@ if ma.isMaskedArray(data): if not copy: raise ValueError("Masked arrays must always be copied.") - data = data.astype("f").filled(np.nan) + data = data.astype("f").filled(np.nan) # type: ignore[AttributeAccessIssue] elif copy: data = data.copy() index = None @@ -613,13 +672,14 @@ return series +@explicit_copy_checker def as_data_frame( - cube, - copy=True, - add_aux_coords=False, - add_cell_measures=False, - add_ancillary_variables=False, -): + cube: Cube, + copy: bool = True, + add_aux_coords: bool = False, + add_cell_measures: bool = False, + add_ancillary_variables: bool = False, +) -> pd.DataFrame: r"""Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`. :attr:`~iris.cube.Cube.dim_coords` and :attr:`~iris.cube.Cube.data` are @@ -635,6 +695,14 @@ Whether the :class:`pandas.DataFrame` is a copy of the the Cube :attr:`~iris.cube.Cube.data`. This option is provided to help with memory size concerns. + + .. deprecated:: 3.15.0 + The 'copy' parameter is deprecated and will be removed in a + future release. This function will always make a copy of the + data array, to ensure that the returned DataFrame is independent + of the input cube data and to be consistent with pandas v3 + behaviour. + add_aux_coords : bool, default=False If True, add all :attr:`~iris.cube.Cube.aux_coords` (including scalar coordinates) to the returned :class:`pandas.DataFrame`. @@ -672,6 +740,12 @@ #. Where the :class:`~iris.cube.Cube` contains masked values, these become :data:`numpy.nan` in the returned :class:`~pandas.DataFrame`. + #. If `copy` parameter is explicitly set to True or False, a DeprecationWarning + is raised, as this parameter will be removed in a future release. + This function will always make a copy of the data array, to ensure that the + returned DataFrame is independent of the input cube data and to be consistent + with pandas v3 behaviour.
+ Notes ----- :class:`dask.dataframe.DataFrame` are not supported. @@ -800,8 +874,11 @@ def as_data_frame( Name: surface_temperature, Length: 419904, dtype: float32 """ + data_frame: pd.DataFrame - def merge_metadata(meta_var_list): + def merge_metadata( + meta_var_list: list[tuple[str, list[int], np.ndarray]], + ) -> pd.DataFrame: """Add auxiliary cube metadata to the DataFrame.""" nonlocal data_frame for meta_var_name, meta_var_index, meta_var in meta_var_list: @@ -828,13 +905,14 @@ def merge_metadata(meta_var_list): ) return data_frame - if getattr(cube, "ndim", None) is not None and (is_scalar := cube.ndim == 0): + is_scalar = (getattr(cube, "ndim", None) is not None) and (cube.ndim == 0) + if is_scalar: # promote the scalar cube to a 1D cube, and convert in the same way as a 1D cube cube = new_axis(cube) if iris.FUTURE.pandas_ndim: # Checks - if not isinstance(cube, iris.cube.Cube): + if not isinstance(cube, Cube): raise TypeError( f"Expected input to be iris.cube.Cube instance, got: {type(cube)}" ) @@ -845,13 +923,13 @@ def merge_metadata(meta_var_list): if ma.isMaskedArray(data): if not copy: raise ValueError("Masked arrays must always be copied.") - data = data.astype("f").filled(np.nan) + data = data.astype("f").filled(np.nan) # type: ignore[AttributeAccessIssue] # Extract dim coord information: separate lists for dim names and dim values coord_names, coords = _make_dim_coord_list(cube) # Make base DataFrame index = pd.MultiIndex.from_product(coords, names=coord_names) - data_frame = pd.DataFrame(data.ravel(), columns=[cube.name()], index=index) + data_frame = pd.DataFrame(data.ravel(), columns=[cube.name()], index=index) # type: ignore[AttributeAccessIssue] if add_aux_coords: data_frame = merge_metadata(_make_aux_coord_list(cube)) @@ -882,7 +960,7 @@ def merge_metadata(meta_var_list): if ma.isMaskedArray(data): if not copy: raise ValueError("Masked arrays must always be copied.") - data = data.astype("f").filled(np.nan) + data = 
data.astype("f").filled(np.nan) # type: ignore[AttributeAccessIssue] elif copy: data = data.copy() diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 789d7fbf86..41d6fc73aa 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Iris-specific extensions to matplotlib, mimicking the :mod:`matplotlib.pyplot` interface. +.. z_reference:: iris.plot + :tags: topic_plotting + + API reference + See also: :ref:`matplotlib `. """ @@ -256,7 +261,7 @@ def _broadcast_2d(u, v): def _string_coord_axis_tick_labels(string_axes, axes=None): """Apply tick labels for string coordinates.""" - ax = axes if axes else plt.gca() + ax = axes or plt.gca() for axis, ticks in string_axes.items(): # Define a tick formatter. This will assign a label to all ticks # located precisely on an integer in range(len(ticks)) and assign @@ -290,7 +295,7 @@ def _invert_yaxis(v_coord, axes=None): axes : optional """ - axes = axes if axes else plt.gca() + axes = axes or plt.gca() yaxis_is_inverted = axes.yaxis_inverted() if not yaxis_is_inverted and isinstance(v_coord, iris.coords.Coord): attr_pve = v_coord.attributes.get("positive") @@ -475,7 +480,7 @@ def _draw_2d_from_bounds(draw_method_name, cube, *args, **kwargs): u, v = _broadcast_2d(u, v) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) result = draw_method(u, v, data, *args, **kwargs) # Apply tick labels for string coordinates. 
@@ -566,7 +571,7 @@ def _draw_2d_from_points(draw_method_name, arg_func, cube, *args, **kwargs): u, v = _broadcast_2d(u, v) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) if arg_func is not None: args, kwargs = arg_func(u, v, data, *args, **kwargs) result = draw_method(*args, **kwargs) @@ -822,7 +827,7 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs): u = _shift_plot_sections(u_object, u, v) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) if arg_func is not None: args, kwargs = arg_func(u, v, *args, **kwargs) result = draw_method(*args, **kwargs) @@ -870,7 +875,7 @@ def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): kwargs = _ensure_cartopy_axes_and_determine_kwargs(u_object, v_object1, kwargs) axes = kwargs.pop("axes", None) - draw_method = getattr(axes if axes else plt, draw_method_name) + draw_method = getattr(axes or plt, draw_method_name) if arg_func is not None: args, kwargs = arg_func(u, v1, v2, *args, **kwargs) result = draw_method(*args, **kwargs) @@ -1067,7 +1072,7 @@ def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwar # Draw the contour lines/filled contours. axes = kwargs.pop("axes", None) - plotfn = getattr(axes if axes else plt, draw_method_name) + plotfn = getattr(axes or plt, draw_method_name) return plotfn(*new_args, **kwargs) @@ -1092,7 +1097,7 @@ def contour(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" result = _draw_2d_from_points("contour", None, cube, *args, **kwargs) @@ -1119,7 +1124,7 @@ def contourf(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -1193,7 +1198,7 @@ def default_projection(cube): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # XXX logic seems flawed, but it is what map_setup did... @@ -1216,7 +1221,7 @@ def default_projection_extent(cube, mode=iris.coords.POINT_MODE): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ extents = cartography._xy_range(cube, mode) @@ -1258,7 +1263,7 @@ def orography_at_bounds(cube, facecolor="#888888", coords=None, axes=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # XXX Needs contiguous orography corners to work. raise NotImplementedError( @@ -1296,7 +1301,7 @@ def orography_at_points(cube, facecolor="#888888", coords=None, axes=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ style_args = {"facecolor": facecolor} @@ -1341,7 +1346,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. 
+ See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ result = _draw_2d_from_bounds( @@ -1385,7 +1390,7 @@ def pcolor(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ kwargs.setdefault("antialiased", True) @@ -1421,7 +1426,7 @@ def pcolormesh(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ result = _draw_2d_from_bounds("pcolormesh", cube, *args, **kwargs) @@ -1449,7 +1454,7 @@ def points(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ @@ -1539,7 +1544,7 @@ def barbs(u_cube, v_cube, *args, **kwargs): # numpydoc ignore=PR08 :class:`cartopy.crs.CRS`. This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # @@ -1589,7 +1594,7 @@ def quiver(u_cube, v_cube, *args, **kwargs): # numpydoc ignore=PR08 :class:`cartopy.crs.CRS`. This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # @@ -1616,7 +1621,7 @@ def plot(*args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
Examples -------- @@ -1673,7 +1678,7 @@ def scatter(x, y, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # here we are more specific about argument types than generic 1d plotting @@ -1707,7 +1712,7 @@ def fill_between(x, y1, y2, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # here we are more specific about argument types than generic 1d plotting @@ -1741,7 +1746,7 @@ def hist(x, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if isinstance(x, iris.cube.Cube): @@ -1783,7 +1788,7 @@ def symbols(x, y, symbols, size, axes=None, units="inches"): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if axes is None: @@ -1845,7 +1850,7 @@ def citation(text, figure=None, axes=None): figure = plt.gcf() anchor = AnchoredText(text, prop=dict(size=6), frameon=True, loc=4) anchor.patch.set_boxstyle("round, pad=0, rounding_size=0.2") - axes = axes if axes else figure.gca() + axes = axes or figure.gca() axes.add_artist(anchor) @@ -1902,7 +1907,7 @@ def animate(cube_iterator, plot_func, fig=None, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" kwargs.setdefault("interval", 100) diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 107945677f..240ae56f02 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """High-level plotting extensions to :mod:`iris.plot`. +.. z_reference:: iris.quickplot + :tags: topic_plotting + + API reference + These routines work much like their :mod:`iris.plot` counterparts, but they automatically add a plot title, axis titles, and a colour bar when appropriate. @@ -190,7 +195,7 @@ def contour(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -228,7 +233,7 @@ def contourf(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -260,7 +265,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None, footer=None Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ result = iplt.outline( @@ -285,7 +290,7 @@ def pcolor(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") axes = kwargs.get("axes") @@ -310,7 +315,7 @@ def pcolormesh(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. 
- See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -331,7 +336,7 @@ def points(cube, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ coords = kwargs.get("coords") @@ -352,7 +357,7 @@ def plot(*args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ axes = kwargs.get("axes") @@ -372,7 +377,7 @@ def scatter(x, y, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ axes = kwargs.get("axes") @@ -392,7 +397,7 @@ def fill_between(x, y1, y2, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ axes = kwargs.get("axes") @@ -412,7 +417,7 @@ def hist(x, *args, **kwargs): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ axes = kwargs.get("axes") diff --git a/lib/iris/symbols.py b/lib/iris/symbols.py index e52266b2fe..ad91bdfab5 100644 --- a/lib/iris/symbols.py +++ b/lib/iris/symbols.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Contains symbol definitions for use with :func:`iris.plot.symbols`.""" +"""Contains symbol definitions for use with :func:`iris.plot.symbols`. + +.. z_reference:: iris.symbols + :tags: topic_plotting + + API reference +""" import itertools import math diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 9ac5dc5322..77b78701eb 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -168,7 +168,14 @@ def assert_masked_array_equal(a, b, strict=False): If False (default), the data array equality considers only unmasked elements. + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_masked_array_equal()` + """ + iris._deprecation.warn_deprecated( + "assert_masked_array_equal()` is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_shared_utils.assert_masked_array_equal()` instead." + ) _assert_masked_array(np.testing.assert_array_equal, a, b, strict) @@ -190,7 +197,14 @@ def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): :meth:`numpy.testing.assert_array_almost_equal`, with the meaning 'abs(desired-actual) < 0.5 * 10**(-decimal)' + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_masked_array_almost_equal()` + """ + iris._deprecation.warn_deprecated( + "assert_masked_array_almost_equal()` is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_shared_utils.assert_masked_array_almost_equal()` instead." + ) _assert_masked_array( np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal ) @@ -216,14 +230,30 @@ def assert_cml(cubes, reference_filename=None, checksum=True): When True, causes the CML to include a checksum for each Cube's data. Defaults to True. + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_CML()` + """ + iris._deprecation.warn_deprecated( + "`assert_cml` is now deprecated as part of the efforts " + "to convert from unittest to pytest. 
Please use `_shared_utils.assert_CML()` instead." + ) test = IrisTest() test.assertCML(cubes, reference_filename, checksum) class IrisTest(unittest.TestCase): - """A subclass of unittest.TestCase which provides Iris specific testing functionality.""" + """A subclass of unittest.TestCase which provides Iris specific testing functionality. + + .. deprecated:: v3.15.0 in favour of the private module `_shared_utils`, which contains + the majority of these methods converted to pytest-compliant functions. + """ + + iris._deprecation.warn_deprecated( + "IrisTest class is now deprecated as part of the efforts to migrate from unittest to pytest. " + "The majority of these methods can be found as functions (converted " + "to snake_case) in `_shared_utils`." + ) _assertion_counts: collections.defaultdict[str, int] = collections.defaultdict(int) def _assert_str_same( @@ -906,11 +936,25 @@ def assertEqualAndKind(self, value, expected): class GraphicsTest(graphics.GraphicsTestMixin, IrisTest): + """.. deprecated:: v3.15.0 in favour of `_shared_utils.GraphicsTest`.""" + + iris._deprecation.warn_deprecated( + "`GraphicsTest` has been moved to `_shared_utils` as part of the efforts to convert " + "from unittest to pytest." + ) pass class PPTest: - """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest.""" + """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest. + + .. deprecated:: v3.15.0 in favour of `_shared_utils.pp_cube_save_test()` + """ + + iris._deprecation.warn_deprecated( + "PPTest class is now deprecated as part of the efforts to migrate from unittest to pytest. " + "`cube_save_test()` has been moved to `_shared_utils` as `pp_cube_save_test()`" + ) @contextlib.contextmanager def cube_save_test( @@ -1005,7 +1049,14 @@ def skip_data(fn): class MyDataTests(tests.IrisTest): ... + ..
deprecated:: v3.15.0 in favour of `_shared_utils.skip_data` + """ + iris._deprecation.warn_deprecated( + "`skip_data` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) + no_data = ( not iris.config.TEST_DATA_DIR or not os.path.isdir(iris.config.TEST_DATA_DIR) @@ -1026,7 +1077,13 @@ def skip_gdal(fn): class MyGeoTiffTests(test.IrisTest): ... + .. deprecated:: v3.15.0 in favour of `_shared_utils.skip_gdal` + """ + iris._deprecation.warn_deprecated( + "`skip_gdal` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) skip = unittest.skipIf(condition=not GDAL_AVAILABLE, reason="Test requires 'gdal'.") return skip(fn) @@ -1062,7 +1119,13 @@ def no_warnings(func): """Provides a decorator to ensure that there are no warnings raised within the test, otherwise the test will fail. + .. deprecated:: v3.15.0 in favour of `_shared_utils.no_warnings` + """ + iris._deprecation.warn_deprecated( + "`no_warnings` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) @functools.wraps(func) def wrapped(self, *args, **kwargs): @@ -1096,7 +1159,14 @@ def env_bin_path(exe_name: str | None = None) -> Path | None: For use in tests which spawn commands which should call executables within the Python environment, since many IDEs (Eclipse, PyCharm) don't automatically include this location in $PATH (as opposed to $PYTHONPATH). + + .. deprecated:: v3.15.0 in favour of `_shared_utils.env_bin_path` + """ + iris._deprecation.warn_deprecated( + "`env_bin_path` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." 
+ ) exe_path = Path(os.__file__) exe_path = (exe_path / "../../../bin").resolve() if exe_name is not None: diff --git a/lib/iris/tests/_shared_utils.py b/lib/iris/tests/_shared_utils.py index 7f03ae95f9..3e0ed6ccc4 100644 --- a/lib/iris/tests/_shared_utils.py +++ b/lib/iris/tests/_shared_utils.py @@ -537,9 +537,8 @@ def assert_files_equal(test_filename, reference_filename): reference_path = get_result_path(reference_filename) if _check_reference_file(reference_path): fmt = "test file {!r} does not match reference {!r}." - assert filecmp.cmp(test_filename, reference_path) and fmt.format( - test_filename, reference_path - ) + assert filecmp.cmp(test_filename, reference_path) + assert fmt.format(test_filename, reference_path) else: _ensure_folder(reference_path) shutil.copy(test_filename, reference_path) diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py index 7fb2074ca0..2c9fc0b345 100644 --- a/lib/iris/tests/graphics/__init__.py +++ b/lib/iris/tests/graphics/__init__.py @@ -23,6 +23,8 @@ import filelock import pytest +from iris._deprecation import warn_deprecated + # Test for availability of matplotlib. # (And remove matplotlib as an iris.tests dependency.) try: @@ -241,7 +243,14 @@ def _create_missing(phash: str) -> None: class GraphicsTestMixin: - # TODO: deprecate this in favour of check_graphic_caller. + """.. deprecated:: v3.15.0 in favour of `_check_graphic_caller()`.""" + + warn_deprecated( + "GraphicsTestMixin class is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_check_graphic_caller()` instead." + ) + def setUp(self) -> None: # Acquire threading non re-entrant blocking lock to ensure # thread-safe plotting. 
diff --git a/lib/iris/tests/integration/experimental/geovista/__init__.py b/lib/iris/tests/integration/experimental/geovista/__init__.py index 6a56e09db4..4ac6f3b36e 100644 --- a/lib/iris/tests/integration/experimental/geovista/__init__.py +++ b/lib/iris/tests/integration/experimental/geovista/__init__.py @@ -3,3 +3,11 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Integration tests for the :mod:`iris.experimental.geovista` module.""" + +import pytest + +# Skip this whole package if geovista (and by extension pyvista) is not available: +pytest.importorskip( + "geovista", + reason="Skipping geovista integration tests as `geovista` is not installed", +) diff --git a/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py b/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py index 4985d819a3..4d4b4e5c79 100644 --- a/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py +++ b/lib/iris/tests/integration/netcdf/derived_bounds/test_bounds_files.py @@ -38,7 +38,7 @@ def derived_bounds(request): yield db -@pytest.fixture() +@pytest.fixture def cf_primary_sample_path(tmp_path_factory): cdl = """ netcdf a_new_file { @@ -190,9 +190,9 @@ def test_load_primary_cf_style(derived_bounds, cf_primary_sample_path): assert main_cube.coord_dims(co_P0) == () -@pytest.fixture() +@pytest.fixture def tmp_ncdir(tmp_path_factory): - yield tmp_path_factory.mktemp("_temp_netcdf_dir") + return tmp_path_factory.mktemp("_temp_netcdf_dir") def test_save_primary_cf_style( diff --git a/lib/iris/tests/integration/netcdf/test_chararrays.py b/lib/iris/tests/integration/netcdf/test_chararrays.py new file mode 100644 index 0000000000..496867ee8a --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_chararrays.py @@ -0,0 +1,266 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. +"""Integration tests for string data handling.""" + +import subprocess + +import numpy as np +import pytest + +import iris +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube +from iris.fileformats.netcdf import _bytecoding_datasets + +# from iris.fileformats.netcdf import _thread_safe_nc +from iris.tests import env_bin_path + +NX, N_STRLEN = 3, 64 +TEST_STRINGS = ["Münster", "London", "Amsterdam"] +TEST_COORD_VALS = ["bun", "éclair", "sandwich"] + +# VARS_COORDS_SHARE_STRING_DIM = True +VARS_COORDS_SHARE_STRING_DIM = False +if VARS_COORDS_SHARE_STRING_DIM: + # Fix length so that the max coord strlen will be same as data one + TEST_COORD_VALS[-1] = "Xsandwich" + + +# Ensure all tests run with "split attrs" turned on. +@pytest.fixture(scope="module", autouse=True) +def enable_split_attrs(): + with iris.FUTURE.context(save_split_attrs=True): + yield + + +def convert_strings_to_chararray(string_array_1d, maxlen, encoding="utf-8"): + bbytes = [text.encode(encoding) for text in string_array_1d] + pad = b"\0" * maxlen + bbytes = [(x + pad)[:maxlen] for x in bbytes] + chararray = np.array([[bb[i : i + 1] for i in range(maxlen)] for bb in bbytes]) + return chararray + + +def convert_bytesarray_to_strings( + byte_array, encoding="utf-8", string_length: int | None = None +): + """Convert bytes to strings. + + N.B. for now at least, we assume the string dim is **always the last one**. 
+ """ + bytes_shape = byte_array.shape + var_shape = bytes_shape[:-1] + if string_length is None: + string_length = bytes_shape[-1] + string_dtype = f"U{string_length}" + result = np.empty(var_shape, dtype=string_dtype) + for ndindex in np.ndindex(var_shape): + element_bytes = byte_array[ndindex] + bytes = b"".join([b if b else b"\0" for b in element_bytes]) + string = bytes.decode(encoding) + result[ndindex] = string + return result + + +INCLUDE_COORD = True +# INCLUDE_COORD = False + +INCLUDE_NUMERIC_AUXCOORD = True +# INCLUDE_NUMERIC_AUXCOORD = False + + +# DATASET_CLASS = _thread_safe_nc.DatasetWrapper +DATASET_CLASS = _bytecoding_datasets.EncodedDataset + + +def make_testfile(filepath, chararray, coordarray, encoding_str=None): + ds = DATASET_CLASS(filepath, "w") + try: + ds.createDimension("x", NX) + ds.createDimension("nstr", N_STRLEN) + vx = ds.createVariable("x", int, dimensions=("x")) + vx[:] = np.arange(NX) + if INCLUDE_COORD: + ds.createDimension("nstr2", N_STRLEN) + v_co = ds.createVariable( + "v_co", + "S1", + dimensions=( + "x", + "nstr2", + ), + ) + v_co[:] = coordarray + if encoding_str is not None: + v_co._Encoding = encoding_str + if INCLUDE_NUMERIC_AUXCOORD: + v_num = ds.createVariable( + "v_num", + float, + dimensions=("x",), + ) + v_num[:] = np.arange(NX) + v = ds.createVariable( + "v", + "S1", + dimensions=( + "x", + "nstr", + ), + ) + v[:] = chararray + if encoding_str is not None: + v._Encoding = encoding_str + if INCLUDE_COORD: + coords_str = "v_co" + if INCLUDE_NUMERIC_AUXCOORD: + coords_str += " v_num" + v.coordinates = coords_str + finally: + ds.close() + + +def make_testcube( + dataarray, + coordarray, # for now, these are always *string* arrays + encoding_str: str | None = None, +): + cube = Cube(dataarray, var_name="v") + cube.add_dim_coord(DimCoord(np.arange(NX), var_name="x"), 0) + if encoding_str is not None: + cube.attributes["_Encoding"] = encoding_str + if INCLUDE_COORD: + co_x = AuxCoord(coordarray, var_name="v_co") + if 
encoding_str is not None: + co_x.attributes["_Encoding"] = encoding_str + cube.add_aux_coord(co_x, 0) + return cube + + +NCDUMP_PATHSTR = str(env_bin_path("ncdump")) + + +def ncdump(nc_path: str, *args): + """Call ncdump to print a dump of a file.""" + call_args = [NCDUMP_PATHSTR, nc_path] + list(args) + bytes = subprocess.check_output(call_args) + text = bytes.decode("utf-8") + print(text) + return text + + +def show_result(filepath): + print(f"File {filepath}") + print("NCDUMP:") + ncdump(filepath) + # with nc.Dataset(filepath, "r") as ds: + # v = ds.variables["v"] + # print("\n----\nNetcdf data readback (basic)") + # try: + # print(repr(v[:])) + # except UnicodeDecodeError as err: + # print(repr(err)) + # print("..raw:") + # v.set_auto_chartostring(False) + # print(repr(v[:])) + print("\nAs iris cube..") + try: + iris.loading.LOAD_PROBLEMS.reset() + cube = iris.load_cube(filepath) + print(cube) + if iris.loading.LOAD_PROBLEMS.problems: + print(iris.loading.LOAD_PROBLEMS) + print( + "\n".join(iris.loading.LOAD_PROBLEMS.problems[0].stack_trace.format()) + ) + print("-data-") + print(repr(cube.data)) + print("-numeric auxcoord data-") + print(repr(cube.coord("x").points)) + if INCLUDE_COORD: + print("-string auxcoord data-") + try: + print(repr(cube.coord("v_co").points)) + except Exception as err2: + print(repr(err2)) + except UnicodeDecodeError as err: + print(repr(err)) + + +@pytest.fixture(scope="session") +def save_dir(tmp_path_factory): + return tmp_path_factory.mktemp("save_files") + + +# TODO: the tests don't test things properly yet, they just exercise the code and print +# things for manual debugging. +test_encodings = ( + None, + "ascii", + "utf-8", + "utf-32", +) +# tsts = ("utf-8",) +# tsts = ("utf-8", "utf-32",) +# tsts = ("utf-32",) +# tsts = ("utf-8", "ascii", "utf-8") + + +@pytest.mark.parametrize("encoding", test_encodings) +def test_load_encodings(encoding, save_dir): + """Load exercise. + + Make a testfile with utf-8 content, variously labelled. 
+ Load with Iris + show result (error or cubes). + """ + # small change + print(f"\n=========\nTesting encoding: {encoding}") + filepath = save_dir / f"tmp_load_{str(encoding)}.nc" + # Actual content is always either utf-8 or utf-32 + do_as = encoding + if encoding != "utf-32": + do_as = "utf-8" + TEST_CHARARRAY = convert_strings_to_chararray( + TEST_STRINGS, N_STRLEN, encoding=do_as + ) + TEST_COORDARRAY = convert_strings_to_chararray( + TEST_COORD_VALS, N_STRLEN, encoding=do_as + ) + make_testfile(filepath, TEST_CHARARRAY, TEST_COORDARRAY, encoding_str=encoding) + if encoding == "ascii": + # If explicitly labelled as ascii, 'utf-8' data will fail to load back ... + msg = r"Character data .* could not be decoded with the 'ascii' encoding\." + with pytest.raises(ValueError, match=msg): + show_result(filepath) + else: + # ... otherwise, utf-8 data loads even without a label, as 'utf-8' default used + show_result(filepath) + + +@pytest.mark.parametrize("encoding", test_encodings) +def test_save_encodings(encoding, save_dir): + """Save exercise. + + Make test-cube with non-ascii content, and various '_Encoding' labels. + Save with Iris + show result (error or ncdump). 
+ """ + cube = make_testcube( + dataarray=TEST_STRINGS, coordarray=TEST_COORD_VALS, encoding_str=encoding + ) + print(cube) + filepath = save_dir / f"tmp_save_{str(encoding)}.nc" + if encoding in ("ascii", None): + msg = ( + "String data written to netcdf character variable 'v' " + "could not be represented in encoding 'ascii'" + ) + with pytest.raises( + ValueError, + match=msg, + ): + iris.save(cube, filepath) + else: + iris.save(cube, filepath) + show_result(filepath) diff --git a/lib/iris/tests/integration/netcdf/test_coord_systems.py b/lib/iris/tests/integration/netcdf/test_coord_systems.py index aa7b715912..6c742ae848 100644 --- a/lib/iris/tests/integration/netcdf/test_coord_systems.py +++ b/lib/iris/tests/integration/netcdf/test_coord_systems.py @@ -168,14 +168,16 @@ def multi_cs_osgb_wkt(): """ -@_shared_utils.skip_data -class TestCoordSystem: - @pytest.fixture(autouse=True) - def _setup(self): - tlc.setUpModule() +@pytest.fixture(autouse=True, scope="module") +def _setup(tmp_path_factory): + if not hasattr(tlc, "TMP_DIR"): + tlc.TMP_DIR = tmp_path_factory.mktemp("temp") yield - tlc.tearDownModule() + delattr(tlc, "TMP_DIR") + +@_shared_utils.skip_data +class TestCoordSystem: def test_load_laea_grid(self, request): cube = iris.load_cube( _shared_utils.get_data_path( diff --git a/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py b/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py index b49c092a38..e840606136 100644 --- a/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py +++ b/lib/iris/tests/integration/netcdf/test_load_managed_attributes.py @@ -32,7 +32,7 @@ def tmp_filepath(self, tmp_path_factory): tmp_dir = tmp_path_factory.mktemp("tmp_nc") # We can reuse the same path all over, as it is recreated for each test. self.tmp_ncpath = tmp_dir / "tmp.nc" - yield + return def _check_load_inner(self, iris_name, nc_name, value): # quickly create a valid netcdf file with a simple cube in it. 
diff --git a/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py b/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py index 0c6a5b9151..cf3fa2e4a9 100644 --- a/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py +++ b/lib/iris/tests/integration/netcdf/test_save_managed_attributes.py @@ -24,7 +24,7 @@ def tmp_filepath(self, tmp_path_factory): tmp_dir = tmp_path_factory.mktemp("tmp_nc") # We can reuse the same path all over, as it is recreated for each test. self.tmp_ncpath = tmp_dir / "tmp.nc" - yield + return def _check_save_inner(self, iris_name, nc_name, value): cube = Cube([1], var_name="x", attributes={iris_name: value}) diff --git a/lib/iris/tests/integration/netcdf/test_stringdata.py b/lib/iris/tests/integration/netcdf/test_stringdata.py new file mode 100644 index 0000000000..5050152042 --- /dev/null +++ b/lib/iris/tests/integration/netcdf/test_stringdata.py @@ -0,0 +1,412 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Integration tests for various uses of character/string arrays in netcdf file variables. + +This covers both the loading and saving of variables which are the content of +data-variables, auxiliary coordinates, ancillary variables and -possibly?- cell measures. 
+""" + +from dataclasses import dataclass +from pathlib import Path +from typing import Iterable + +import numpy as np +from numpy.typing import ArrayLike +import pytest + +import iris +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube +from iris.fileformats.netcdf import _thread_safe_nc + + +@pytest.fixture(scope="module") +def all_lazy_auxcoords(): + """Ensure that *all* aux-coords are loaded lazily, even really small ones.""" + old_minlazybytes = iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = 0 + yield + iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES = old_minlazybytes + + +N_XDIM = 3 +N_CHARS_DIM = 64 +# TODO: remove (debug) +# PERSIST_TESTFILES: str | None = "~/chararray_testfiles" +PERSIST_TESTFILES: str | None = None + +NO_ENCODING_STR = "" +TEST_ENCODINGS = [ + NO_ENCODING_STR, + "ascii", + "utf-8", + # "iso8859-1", # a common one-byte-per-char "codepage" type + # "utf-16", + "utf-32", +] + + +# +# Routines to convert between byte and string arrays. +# Independently defined here, to avoid relying on any code we are testing. +# +def convert_strings_to_chararray( + string_array_1d: ArrayLike, maxlen: int, encoding: str | None = None +) -> np.ndarray: + # Note: this is limited to 1-D arrays of strings. + # Could generalise that if needed, but for now this makes it simpler. + if encoding is None: + encoding = "ascii" + bbytes = [text.encode(encoding) for text in string_array_1d] + pad = b"\0" * maxlen + bbytes = [(x + pad)[:maxlen] for x in bbytes] + chararray = np.array([[bb[i : i + 1] for i in range(maxlen)] for bb in bbytes]) + return chararray + + +def convert_bytearray_to_strings( + byte_array: ArrayLike, encoding: str = "utf-8", string_length: int | None = None +) -> np.ndarray: + """Convert bytes to strings. + + N.B. for now at least, we assume the string dim is **always the last one**. 
+ """ + byte_array = np.asanyarray(byte_array) + bytes_shape = byte_array.shape + var_shape = bytes_shape[:-1] + if string_length is None: + string_length = bytes_shape[-1] + string_dtype = f"U{string_length}" + result = np.empty(var_shape, dtype=string_dtype) + for ndindex in np.ndindex(var_shape): + element_bytes = byte_array[ndindex] + bytes = b"".join([b if b else b"\0" for b in element_bytes]) + string = bytes.decode(encoding) + result[ndindex] = string + return result + + +@dataclass +class SamplefileDetails: + """Convenience container for information about a sample file.""" + + filepath: Path + datavar_data: ArrayLike + stringcoord_data: ArrayLike + numericcoord_data: ArrayLike + + +def make_testfile( + testfile_path: Path, + encoding_str: str, + coords_on_separate_dim: bool, +) -> SamplefileDetails: + """Create a test netcdf file. + + Also returns content information for checking loaded results. + """ + if encoding_str == NO_ENCODING_STR: + encoding = None + else: + encoding = encoding_str + + data_is_ascii = encoding in (None, "ascii") + + numeric_values = np.arange(3.0) + if data_is_ascii: + coordvar_strings = ["mOnster", "London", "Amsterdam"] + datavar_strings = ["bun", "Eclair", "sandwich"] + else: + coordvar_strings = ["Münster", "London", "Amsterdam"] + datavar_strings = ["bun", "éclair", "sandwich"] + + coordvar_bytearray = convert_strings_to_chararray( + string_array_1d=coordvar_strings, maxlen=N_CHARS_DIM, encoding=encoding + ) + datavar_bytearray = convert_strings_to_chararray( + string_array_1d=datavar_strings, maxlen=N_CHARS_DIM, encoding=encoding + ) + + ds = _thread_safe_nc.DatasetWrapper(testfile_path, "w") + try: + ds.createDimension("x", N_XDIM) + ds.createDimension("nstr", N_CHARS_DIM) + if coords_on_separate_dim: + ds.createDimension("nstr2", N_CHARS_DIM) + v_xdim = ds.createVariable("x", int, dimensions=("x")) + v_xdim[:] = np.arange(N_XDIM) + + v_co = ds.createVariable( + "v_co", + "S1", + dimensions=( + "x", + "nstr2" if 
coords_on_separate_dim else "nstr", + ), + ) + v_co[:] = coordvar_bytearray + + if encoding is not None: + v_co._Encoding = encoding + + v_numeric = ds.createVariable( + "v_numeric", + float, + dimensions=("x",), + ) + v_numeric[:] = numeric_values + + v_datavar = ds.createVariable( + "v", + "S1", + dimensions=( + "x", + "nstr", + ), + ) + v_datavar[:] = datavar_bytearray + + if encoding is not None: + v_datavar._Encoding = encoding + + v_datavar.coordinates = "v_co v_numeric" + finally: + ds.close() + + return SamplefileDetails( + filepath=testfile_path, + datavar_data=datavar_strings, + stringcoord_data=coordvar_strings, + numericcoord_data=numeric_values, + ) + + +@pytest.fixture(params=TEST_ENCODINGS) +def encoding(request): + return request.param + + +def load_problems_list(): + return [str(prob) for prob in iris.loading.LOAD_PROBLEMS.problems] + + +class TestReadEncodings: + """Test loading of testfiles with encoded string data.""" + + @pytest.fixture(autouse=True) + def _clear_load_problems(self): + iris.loading.LOAD_PROBLEMS.reset() + yield + + @pytest.fixture(params=["coordsSameDim", "coordsOwnDim"]) + def use_separate_dims(self, request): + yield request.param == "coordsOwnDim" + + @pytest.fixture() + def readtest_path( + self, + encoding, + tmp_path, + use_separate_dims, + ) -> Iterable[SamplefileDetails]: + """Create a suitable valid testfile, and return expected string content.""" + match PERSIST_TESTFILES: + case str(): + tmp_path = Path(PERSIST_TESTFILES).expanduser() + case _: + pass + if encoding == "": + filetag = "noencoding" + else: + filetag = encoding + dimtag = "diffdims" if use_separate_dims else "samedims" + tempfile_path = tmp_path / f"sample_read_{filetag}_{dimtag}.nc" + yield tempfile_path + + @pytest.fixture() + def readtest_data( + self, + encoding, + readtest_path, + use_separate_dims, + ) -> Iterable[SamplefileDetails]: + """Create a suitable valid testfile, and return expected string content.""" + testdata = make_testfile( + 
testfile_path=readtest_path, + encoding_str=encoding, + coords_on_separate_dim=use_separate_dims, + ) + + # # TODO: temporary for debug -- TO REMOVE + # from iris.tests.integration.netcdf.test_chararrays import ncdump + # ncdump(str(tempfile_path)) + yield testdata + + def test_valid_encodings(self, encoding, readtest_data: SamplefileDetails): + testfile_path, datavar_strings, coordvar_strings, numeric_data = ( + readtest_data.filepath, + readtest_data.datavar_data, + readtest_data.stringcoord_data, + readtest_data.numericcoord_data, + ) + cube = iris.load_cube(testfile_path) + assert load_problems_list() == [] + assert cube.shape == (N_XDIM,) + + if encoding != "utf-32": + expected_string_width = N_CHARS_DIM + else: + expected_string_width = (N_CHARS_DIM // 4) - 1 + assert cube.dtype == f" SampleCubeDetails: + data_is_ascii = encoding_str in (NO_ENCODING_STR, "ascii") + + numeric_values = np.arange(3.0) + if data_is_ascii: + coordvar_strings = ["mOnster", "London", "Amsterdam"] + datavar_strings = ["bun", "Eclair", "sandwich"] + else: + coordvar_strings = ["Münster", "London", "Amsterdam"] + datavar_strings = ["bun", "éclair", "sandwich"] + + if not byte_data: + charlen = N_CHARS_DIM + if encoding_str == "utf-32": + charlen = charlen // 4 - 1 + strings_dtype = np.dtype(f"U{charlen}") + coordvar_array = np.array(coordvar_strings, dtype=strings_dtype) + datavar_array = np.array(datavar_strings, dtype=strings_dtype) + else: + write_encoding = encoding_str + if write_encoding == NO_ENCODING_STR: + write_encoding = "ascii" + coordvar_array = convert_strings_to_chararray( + coordvar_strings, maxlen=N_CHARS_DIM, encoding=write_encoding + ) + datavar_array = convert_strings_to_chararray( + datavar_strings, maxlen=N_CHARS_DIM, encoding=write_encoding + ) + + cube = Cube(datavar_array, var_name="v") + cube.add_dim_coord(DimCoord(np.arange(N_XDIM), var_name="x"), 0) + if encoding_str != NO_ENCODING_STR: + cube.attributes["_Encoding"] = encoding_str + co_x = 
AuxCoord(coordvar_array, var_name="v_co") + if encoding_str != NO_ENCODING_STR: + co_x.attributes["_Encoding"] = encoding_str + co_dims = (0, 1) if byte_data else (0,) + cube.add_aux_coord(co_x, co_dims) + + result = SampleCubeDetails( + cube=cube, + datavar_data=datavar_array, + stringcoord_data=coordvar_array, + ) + return result + + +class TestWriteEncodings: + """Test saving of testfiles with encoded string data. + + To avoid circularity, we generate and save *cube* data. + """ + + @pytest.fixture(params=["dataAsStrings", "dataAsBytes"]) + def write_bytes(self, request): + yield request.param == "dataAsBytes" + + @pytest.fixture() + def writetest_path(self, encoding, write_bytes, tmp_path): + """Create a suitable test cube, with either string or byte content.""" + if PERSIST_TESTFILES: + tmp_path = Path(PERSIST_TESTFILES).expanduser() + if encoding == "": + filetag = "noencoding" + else: + filetag = encoding + datatag = "writebytes" if write_bytes else "writestrings" + tempfile_path = tmp_path / f"sample_write_{filetag}_{datatag}.nc" + yield tempfile_path + + @pytest.fixture() + def writetest_data(self, writetest_path, encoding, write_bytes): + """Create a suitable test cube + save to a file. + + Apply the given encoding to both coord and cube data. + Form the data as bytes, or as strings, depending on 'write_bytes'.' + """ + cube_info = make_testcube(encoding_str=encoding, byte_data=write_bytes) + cube_info.save_path = writetest_path + cube = cube_info.cube + iris.save(cube, writetest_path) + yield cube_info + + def test_valid_encodings(self, encoding, writetest_data, write_bytes): + cube_info = writetest_data + cube, path = cube_info.cube, cube_info.save_path + # TODO: not testing the "byte read/write" yet + # Make a quick check for cube equality : but the presentation depends on the read mode + # with DECODE_TO_STRINGS_ON_READ.context(not write_bytes): + # read_cube = iris.load_cube(path) + # assert read_cube == cube + + # N.B. 
file content should not depend on whether bytes or strings were written + vararray, coordarray = cube_info.datavar_data, cube_info.stringcoord_data + ds = _thread_safe_nc.DatasetWrapper(path) + ds.set_auto_chartostring(False) + v_main = ds.variables["v"] + v_co = ds.variables["v_co"] + assert v_main.shape == (N_XDIM, N_CHARS_DIM) + assert v_co.shape == (N_XDIM, N_CHARS_DIM) + assert v_main.dtype == " + @@ -92,6 +93,7 @@ + @@ -464,6 +466,7 @@ + @@ -540,6 +543,7 @@ + @@ -667,6 +671,7 @@ + @@ -745,6 +750,7 @@ + @@ -970,6 +976,7 @@ + @@ -1043,7 +1050,6 @@ - @@ -1062,9 +1068,6 @@ - - - @@ -1131,6 +1134,7 @@ + @@ -1214,6 +1218,7 @@ + @@ -1304,6 +1309,7 @@ + @@ -1347,6 +1353,7 @@ + @@ -1496,6 +1503,7 @@ + @@ -1586,6 +1594,7 @@ + @@ -1735,6 +1744,7 @@ + @@ -1778,6 +1788,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml index 3d04a5dd47..65698423a1 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml @@ -7,6 +7,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml index cd9bf79f6a..45890bbaaa 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml @@ -7,6 +7,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml index 5defc4b03c..af271ce196 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml @@ -6,6 +6,7 @@ + @@ -45,6 +46,7 @@ + @@ -123,6 +125,7 @@ + @@ -162,6 +165,7 @@ + @@ -201,6 +205,7 @@ + @@ -240,6 +245,7 @@ + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml 
b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml index 0ae03b18b9..72239f5279 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml @@ -6,6 +6,7 @@ + @@ -45,6 +46,7 @@ + @@ -84,6 +86,7 @@ + diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/SaveUgrid__cube/basic_mesh.cdl similarity index 100% rename from lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/TestSaveUgrid__cube/basic_mesh.cdl rename to lib/iris/tests/results/unit/fileformats/netcdf/saver/Saver__ugrid/SaveUgrid__cube/basic_mesh.cdl diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index bd1aeee40f..061d94e7d5 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -1019,7 +1019,8 @@ def test_max_run_2d(self): self.request, bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False ) - with pytest.raises(ValueError): + msg = "Not possible to calculate runs over more than one dimension" + with pytest.raises(ValueError, match=msg): _ = cube.collapsed( ("foo", "bar"), iris.analysis.MAX_RUN, @@ -1363,12 +1364,14 @@ def test_area_weights_non_contiguous(self): def test_area_weights_no_lon_bounds(self): self.cube.coord("grid_longitude").bounds = None - with pytest.raises(ValueError): + msg = "Coordinates 'grid_latitude' and 'grid_longitude' must have bounds to determine the area weights." 
+ with pytest.raises(ValueError, match=msg): iris.analysis.cartography.area_weights(self.cube) def test_area_weights_no_lat_bounds(self): self.cube.coord("grid_latitude").bounds = None - with pytest.raises(ValueError): + msg = "Coordinates 'grid_latitude' and 'grid_longitude' must have bounds to determine the area weights" + with pytest.raises(ValueError, match=msg): iris.analysis.cartography.area_weights(self.cube) @@ -1510,12 +1513,14 @@ def test_cosine_latitude_weights_2d_latitude_last(self): def test_cosine_latitude_weights_no_latitude(self): # no coordinate identified as latitude self.cube_dim_lat.remove_coord("grid_latitude") - with pytest.raises(ValueError): + msg = "Cannot get latitude coordinate from cube 'precipitation_flux'." + with pytest.raises(ValueError, match=msg): _ = iris.analysis.cartography.cosine_latitude_weights(self.cube_dim_lat) def test_cosine_latitude_weights_multiple_latitude(self): # two coordinates identified as latitude - with pytest.raises(ValueError): + msg = "Multiple latitude coords are currently disallowed." + with pytest.raises(ValueError, match=msg): _ = iris.analysis.cartography.cosine_latitude_weights(self.cube) diff --git a/lib/iris/tests/test_analysis_calculus.py b/lib/iris/tests/test_analysis_calculus.py index 8c33cd8fc2..46ad9779f3 100644 --- a/lib/iris/tests/test_analysis_calculus.py +++ b/lib/iris/tests/test_analysis_calculus.py @@ -24,7 +24,8 @@ def test_invalid(self): _ = iris.analysis.calculus.cube_delta(cube, "surface_altitude") with pytest.raises(iris.exceptions.CoordinateMultiDimError): _ = iris.analysis.calculus.cube_delta(cube, "altitude") - with pytest.raises(ValueError): + msg = "Cannot calculate delta over 'forecast_period' as it has length of 1." 
+ with pytest.raises(ValueError, match=msg): _ = iris.analysis.calculus.cube_delta(cube, "forecast_period") def test_delta_coord_lookup(self): @@ -175,7 +176,8 @@ def test_singular_delta(self): # Test single valued coordinate mid-points when not circular lon.circular = False - with pytest.raises(ValueError): + msg = "Cannot take interval differences of a single valued coordinate." + with pytest.raises(ValueError, match=msg): iris.analysis.calculus._construct_delta_coord(lon) def test_singular_midpoint(self): @@ -196,7 +198,8 @@ def test_singular_midpoint(self): # Test single valued coordinate mid-points when not circular lon.circular = False - with pytest.raises(ValueError): + msg = "Cannot take the midpoints of a single valued coordinate." + with pytest.raises(ValueError, match=msg): iris.analysis.calculus._construct_midpoint_coord(lon) diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index f51a531721..58f5ef05fe 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -12,6 +12,7 @@ import subprocess from typing import List, Tuple +from packaging.version import Version import pytest import iris @@ -70,8 +71,9 @@ def test_python_versions(): Test is designed to fail whenever Iris' supported Python versions are updated, insisting that versions are updated EVERYWHERE in-sync. 
""" - latest_supported = "3.13" - all_supported = ["3.11", "3.12", latest_supported] + all_supported = ["3.12", "3.13", "3.14"] + _parsed = [Version(v) for v in all_supported] + latest_supported = str(max(_parsed)) root_dir = Path(__file__).parents[3] workflows_dir = root_dir / ".github" / "workflows" @@ -245,8 +247,8 @@ def test_license_headers(self): "dist/*", "docs/gallery_code/*/*.py", "docs/src/developers_guide/documenting/*.py", - "docs/src/userguide/plotting_examples/*.py", - "docs/src/userguide/regridding_plots/*.py", + "docs/src/user_manual/tutorial/plotting_examples/*.py", + "docs/src/user_manual/tutorial/regridding_plots/*.py", "docs/src/_build/*", "lib/iris/analysis/_scipy_interpolate.py", ) diff --git a/lib/iris/tests/test_nimrod.py b/lib/iris/tests/test_nimrod.py index 93b028aff2..625404e96c 100644 --- a/lib/iris/tests/test_nimrod.py +++ b/lib/iris/tests/test_nimrod.py @@ -9,6 +9,7 @@ import iris from iris.exceptions import TranslationError import iris.fileformats.nimrod_load_rules as nimrod_load_rules +from iris.fileformats.nimrod_load_rules import radiation_type_attr from iris.tests import _shared_utils @@ -158,3 +159,56 @@ def test_period_of_interest(self, request): nimrod_load_rules.time(cube, field) _shared_utils.assert_CML(request, cube, ("nimrod", "period_of_interest.cml")) + + +class TestNimrodTables: + # Testing that the table-based load rules work as expected + + def test_table_1(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_1" + field.clutter_map_number = 5 + + nimrod_load_rules.table_1_attributes(cube, field) + + assert "clutter_map_number" in cube.attributes + + def test_table_2(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_2" + field.field_code = 45 + field.threshold_type = 2 + field.threshold_value_alt = 2 + field.threshold_fuzziness = 1 + field.probability_method = 1 + 
field.probability_field_of_event = 3 + + nimrod_load_rules.probability_coord(cube, field, handle_metadata_errors=False) + + assert "Probability methods" in cube.attributes + + def test_table_3(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_3" + field.soil_type = 8 + + nimrod_load_rules.soil_type_coord(cube, field) + + assert cube.coord("soil_type") + + def test_table_4(self): + field = mock_nimrod_field() + cube = iris.cube.Cube(np.arange(100).reshape(10, 10)) + + field.table = "Table_4" + field.radiation_code = 16 + + nimrod_load_rules.radiation_type_attr(cube, field) + + assert "radiation_type" in cube.attributes diff --git a/lib/iris/tests/test_pp_module.py b/lib/iris/tests/test_pp_module.py index 9b8babd862..2100e0b132 100644 --- a/lib/iris/tests/test_pp_module.py +++ b/lib/iris/tests/test_pp_module.py @@ -124,7 +124,8 @@ def test_set_stash(self): assert self.pp.lbuser[3] == self.pp.stash.lbuser3() assert self.pp.lbuser[6] == self.pp.stash.lbuser6() - with pytest.raises(ValueError): + msg = r"Cannot set stash to \(4, 15, 5\)" + with pytest.raises(ValueError, match=msg): self.pp.stash = (4, 15, 5) def test_lbproc_bad_access(self): diff --git a/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py b/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py index b4d943bbc5..32ca99d48d 100644 --- a/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py +++ b/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py @@ -224,7 +224,7 @@ def test_invalid_cube_crs(self, square_polygon, wgs84_crs): ) @pytest.mark.parametrize( - "minimum_weight, error_type", + ("minimum_weight", "error_type"), [(-1, ValueError), (2, ValueError)], ) def test_invalid_minimum_weight( @@ -242,7 +242,7 @@ def test_invalid_minimum_weight( ) @pytest.mark.parametrize( - "minimum_weight, error_type", + ("minimum_weight", "error_type"), [(-1, ValueError), (2, ValueError)], ) def 
test_invalid_minimum_weight_with_all_touched( diff --git a/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py b/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py index 406a9dbc59..6863fb1847 100644 --- a/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py +++ b/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py @@ -43,7 +43,7 @@ def mock_cube(): @pytest.mark.parametrize( - "minimum_weight, expected_mask", + ("minimum_weight", "expected_mask"), [ ( 0.0, diff --git a/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py b/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py index 46ff058af2..8605d72d8b 100644 --- a/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py +++ b/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py @@ -22,17 +22,17 @@ # Shareable shape fixtures used in: # - util/test_mask_cube_from_shapefile.py # - _shapefiles/test_is_geometry_valid.py -@pytest.fixture() +@pytest.fixture def wgs84_crs(): return CRS.from_epsg(4326) -@pytest.fixture() +@pytest.fixture def osgb_crs(): return CRS.from_epsg(27700) -@pytest.fixture() +@pytest.fixture def basic_polygon_geometry(): # Define the coordinates of a basic rectangle min_lon = -90 @@ -44,7 +44,7 @@ def basic_polygon_geometry(): return box(min_lon, min_lat, max_lon, max_lat) -@pytest.fixture() +@pytest.fixture def basic_wide_polygon_geometry(): # Define the coordinates of a basic rectangle min_lon = -170 @@ -56,7 +56,7 @@ def basic_wide_polygon_geometry(): return box(min_lon, min_lat, max_lon, max_lat) -@pytest.fixture() +@pytest.fixture def basic_multipolygon_geometry(): # Define the coordinates of a basic rectangle min_lon = 0 @@ -73,25 +73,25 @@ def basic_multipolygon_geometry(): ) -@pytest.fixture() +@pytest.fixture def basic_point_geometry(): # Define the coordinates of a basic point (lon, lat) return Point((-3.476204, 50.727059)) -@pytest.fixture() +@pytest.fixture def basic_line_geometry(): # Define the coordinates of a basic line return LineString([(0, 
0), (10, 10)]) -@pytest.fixture() +@pytest.fixture def basic_multiline_geometry(): # Define the coordinates of a basic line return MultiLineString([[(0, 0), (10, 10)], [(20, 20), (30, 30)]]) -@pytest.fixture() +@pytest.fixture def basic_point_collection(): # Define the coordinates of a basic collection of points # as (lon, lat) tuples, assuming a WGS84 projection. @@ -109,37 +109,37 @@ def basic_point_collection(): return points -@pytest.fixture() +@pytest.fixture def canada_geometry(): # Define the coordinates of a rectangle that covers Canada return box(-143.5, 42.6, -37.8, 84.0) -@pytest.fixture() +@pytest.fixture def bering_sea_geometry(): # Define the coordinates of a rectangle that covers the Bering Sea return box(148.42, 49.1, -138.74, 73.12) -@pytest.fixture() +@pytest.fixture def uk_geometry(): # Define the coordinates of a rectangle that covers the UK return box(-10, 49, 2, 61) -@pytest.fixture() +@pytest.fixture def invalid_geometry_poles(): # Define the coordinates of a rectangle that crosses the poles return box(-10, -90, 10, 90) -@pytest.fixture() +@pytest.fixture def invalid_geometry_bounds(): # Define the coordinates of a rectangle that is outside the bounds of the coordinate system return box(-200, -100, 200, 100) -@pytest.fixture() +@pytest.fixture def not_a_valid_geometry(): # Return an invalid geometry type # This is not a valid geometry, e.g., a string @@ -168,7 +168,7 @@ def test_valid_geometry(test_input, request, wgs84_crs): # N.B. 
error message comparison is done with regex so # any parentheses in the error message must be escaped (\) @pytest.mark.parametrize( - "test_input, errortype, err_message", + ("test_input", "errortype", "err_message"), [ ( "invalid_geometry_poles", @@ -195,10 +195,10 @@ def test_invalid_geometry(test_input, errortype, err_message, request, wgs84_crs @pytest.mark.parametrize( "test_input", - ( + [ "basic_wide_polygon_geometry", "bering_sea_geometry", - ), + ], ) def test_warning_geometry(test_input, request, wgs84_crs): # Assert that all invalid geometries raise the expected error diff --git a/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py b/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py index ca5debb9a9..6aff0931c5 100644 --- a/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py +++ b/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py @@ -23,7 +23,7 @@ def wgs84_crs(): @pytest.mark.parametrize( - "input_geometry, wgs84_crs, input_cube_crs, output_expected_geometry", + ("input_geometry", "wgs84_crs", "input_cube_crs", "output_expected_geometry"), [ ( # Basic geometry in WGS84, no transformation needed shapely.geometry.box(-10, 50, 2, 60), @@ -90,7 +90,7 @@ def test_transform_geometry( # Assert that an invalid inputs raise the expected errors @pytest.mark.parametrize( - "input_geometry, input_geometry_crs, input_cube_crs, expected_error", + ("input_geometry", "input_geometry_crs", "input_cube_crs", "expected_error"), [ ( # Basic geometry in WGS84, no transformation needed "bad_input_geometry", @@ -120,7 +120,7 @@ def test_transform_geometry_invalid_input( @pytest.mark.parametrize( - "input_geometry, wgs84_crs, input_cube_crs", + ("input_geometry", "wgs84_crs", "input_cube_crs"), [ ( # Basic geometry in WGS84, transformed to OSGB shapely.geometry.box(np.inf, np.inf, np.inf, np.inf), diff --git a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py index 
7d78217388..12c8e64ef7 100644 --- a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py @@ -7,6 +7,8 @@ """ +import re + from cf_units import Unit import numpy as np import pytest @@ -28,11 +30,13 @@ def _setup(self, mocker): ) def test_insufficient_coordinates_no_args(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory() def test_insufficient_coordinates_no_ptop(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory( pressure_at_top=None, sigma=self.sigma, @@ -40,7 +44,8 @@ def test_insufficient_coordinates_no_ptop(self): ) def test_insufficient_coordinates_no_sigma(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory( pressure_at_top=self.pressure_at_top, sigma=None, @@ -48,7 +53,8 @@ def test_insufficient_coordinates_no_sigma(self): ) def test_insufficient_coordinates_no_ps(self): - with pytest.raises(ValueError): + msg = "Unable to construct atmosphere sigma coordinate factory due to insufficient source coordinates" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory( pressure_at_top=self.pressure_at_top, sigma=self.sigma, @@ -62,8 +68,11 @@ def test_ptop_shapes(self): def test_ptop_invalid_shapes(self): for shape in [(2,), (1, 1)]: + msg = re.escape( + f"Expected scalar 'pressure_at_top' coordinate, got shape {shape}" + ) self.pressure_at_top.shape = shape - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def 
test_sigma_bounds(self): @@ -73,8 +82,9 @@ def test_sigma_bounds(self): def test_sigma_invalid_bounds(self): for n_bounds in [-1, 1, 3]: + msg = f"Invalid 'sigma' coordinate: must have either 0 or 2 bounds, got {n_bounds}" self.sigma.nbounds = n_bounds - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def test_sigma_units(self): @@ -85,7 +95,8 @@ def test_sigma_units(self): def test_sigma_invalid_units(self): for units in ["Pa", "m"]: self.sigma.units = Unit(units) - with pytest.raises(ValueError): + msg = f"Invalid units: 'sigma' must be dimensionless, got '{units}'" + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def test_ptop_ps_units(self): @@ -96,9 +107,10 @@ def test_ptop_ps_units(self): def test_ptop_ps_invalid_units(self): for units in [("Pa", "1"), ("1", "Pa"), ("bar", "Pa"), ("Pa", "hPa")]: + msg = f"Incompatible units: 'pressure_at_top' and 'surface_air_pressure' must have the same units, got '{units[0]}' and '{units[1]}'" self.pressure_at_top.units = Unit(units[0]) self.surface_air_pressure.units = Unit(units[1]) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) def test_ptop_units(self): @@ -109,14 +121,15 @@ def test_ptop_units(self): def test_ptop_invalid_units(self): for units in ["1", "m", "kg", None]: + msg = "Invalid units: 'pressure_at_top' and 'surface_air_pressure' must have units of pressure" self.pressure_at_top.units = Unit(units) self.surface_air_pressure.units = Unit(units) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): AtmosphereSigmaFactory(**self.kwargs) class Test_dependencies: - @pytest.fixture() + @pytest.fixture def sample_kwargs(self, mocker): pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=()) sigma = mocker.Mock(units=Unit("1"), nbounds=0) @@ -247,7 +260,10 @@ def test_pressure_at_top(self, mocker): def 
test_pressure_at_top_wrong_shape(self, mocker): new_pressure_at_top = mocker.Mock(units=Unit("Pa"), nbounds=0, shape=(2,)) - with pytest.raises(ValueError): + msg = re.escape( + "Failed to update dependencies. Expected scalar 'pressure_at_top' coordinate, got shape (2,)" + ) + with pytest.raises(ValueError, match=msg): self.factory.update(self.pressure_at_top, new_pressure_at_top) def test_sigma(self, mocker): @@ -257,12 +273,14 @@ def test_sigma(self, mocker): def test_sigma_too_many_bounds(self, mocker): new_sigma = mocker.Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid 'sigma' coordinate: must have either 0 or 2 bounds, got 4" + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self, mocker): new_sigma = mocker.Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: 'sigma' must be dimensionless, got 'Pa'" + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_surface_air_pressure(self, mocker): @@ -272,5 +290,6 @@ def test_surface_air_pressure(self, mocker): def test_surface_air_pressure_incompatible_units(self, mocker): new_surface_air_pressure = mocker.Mock(units=Unit("mbar"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. 
Incompatible units: 'pressure_at_top' and 'surface_air_pressure' must have the same units, got 'Pa' and 'mbar'" + with pytest.raises(ValueError, match=msg): self.factory.update(self.surface_air_pressure, new_surface_air_pressure) diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py index a9d1c6548a..faf57d0e65 100644 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py @@ -140,7 +140,7 @@ def test_lazy_complex(self): @skip_data class Test_lazy_aux_coords: - @pytest.fixture() + @pytest.fixture def sample_cube(self, mocker): path = get_data_path(["NetCDF", "testing", "small_theta_colpex.nc"]) # While loading, "turn off" loading small variables as real data. diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py index e3caf0c114..5eb0f8f0c9 100644 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py @@ -34,13 +34,14 @@ def _setup(self): create_default_sample_parts(self) def test_insufficient_coords(self): - with pytest.raises(ValueError): + msg = "Unable to construct hybrid pressure coordinate factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): HybridPressureFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=None, sigma=self.sigma, surface_air_pressure=None ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=None, sigma=None, @@ -49,7 +50,8 @@ def test_insufficient_coords(self): def test_incompatible_delta_units(self): self.delta.units = cf_units.Unit("m") - with pytest.raises(ValueError): + msg = "Incompatible units: delta and surface_air_pressure must have the same units." 
+ with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -58,7 +60,8 @@ def test_incompatible_delta_units(self): def test_incompatible_sigma_units(self): self.sigma.units = cf_units.Unit("Pa") - with pytest.raises(ValueError): + msg = "Invalid units: sigma must be dimensionless." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -67,7 +70,8 @@ def test_incompatible_sigma_units(self): def test_incompatible_surface_air_pressure_units(self): self.surface_air_pressure.units = cf_units.Unit("unknown") - with pytest.raises(ValueError): + msg = "Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -77,7 +81,8 @@ def test_incompatible_surface_air_pressure_units(self): def test_different_pressure_units(self): self.delta.units = cf_units.Unit("hPa") self.surface_air_pressure.units = cf_units.Unit("Pa") - with pytest.raises(ValueError): + msg = "Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -86,7 +91,8 @@ def test_different_pressure_units(self): def test_too_many_delta_bounds(self): self.delta.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid delta coordinate: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -95,7 +101,8 @@ def test_too_many_delta_bounds(self): def test_too_many_sigma_bounds(self): self.sigma.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate: must have either 0 or 2 bounds." 
+ with pytest.raises(ValueError, match=msg): HybridPressureFactory( delta=self.delta, sigma=self.sigma, @@ -257,12 +264,14 @@ def test_good_delta(self): def test_bad_delta(self): new_delta_coord = Mock(units=cf_units.Unit("1"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.delta, new_delta_coord) def test_alternative_bad_delta(self): new_delta_coord = Mock(units=cf_units.Unit("Pa"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid delta coordinate: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.delta, new_delta_coord) def test_good_surface_air_pressure(self): @@ -272,7 +281,8 @@ def test_good_surface_air_pressure(self): def test_bad_surface_air_pressure(self): new_surface_p_coord = Mock(units=cf_units.Unit("km"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: delta and surface_air_pressure must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.surface_air_pressure, new_surface_p_coord) def test_non_dependency(self): @@ -292,5 +302,6 @@ def test_none_sigma(self): def test_insufficient_coords(self): self.factory.update(self.delta, None) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Unable to construct hybrid pressure coordinate factory due to insufficient source coordinates." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.surface_air_pressure, None) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py index e702b8d2e2..a1e92c03c5 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py @@ -36,9 +36,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct Ocean s-coordinate factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): OceanSFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=None, eta=self.eta, @@ -47,7 +48,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=None, @@ -56,7 +57,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -65,7 +66,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -74,7 +75,7 @@ def test_insufficient_coordinates(self): b=self.b, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -83,7 +84,7 @@ def test_insufficient_coordinates(self): b=None, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSFactory( s=self.s, eta=self.eta, @@ -95,42 +96,50 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid s coordinate .*: 
must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_a_non_scalar(self): self.a.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar a coordinate .*: got shape \(2,\)\." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_b_non_scalar(self): self.b.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar b coordinate .*: got shape \(2,\)\." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar depth_c coordinate .*: got shape \(2,\)\." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units" + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): OceanSFactory(**self.kwargs) def test_promote_s_units_unknown_to_dimensionless(self): @@ -254,12 +263,14 @@ def test_s(self): def test_s_too_many_bounds(self): new_s = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid s coordinate .*: must have either 0 or 2 bounds" + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_s_incompatible_units(self): new_s = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_eta(self): @@ -269,7 +280,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -279,7 +291,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_a(self): @@ -289,7 +302,8 @@ def test_a(self): def test_a_non_scalar(self): new_a = Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar a coordinate .*: got shape \(10,\)." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.a, new_a) def test_b(self): @@ -299,7 +313,8 @@ def test_b(self): def test_b_non_scalar(self): new_b = Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar b coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.b, new_b) def test_depth_c(self): @@ -309,10 +324,12 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar depth_c coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py index 82e7cd2a7b..6e45c49ec2 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py @@ -34,9 +34,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct Ocean s-coordinate, generic form 1 factory due to insufficient source coordinates." 
+ with pytest.raises(ValueError, match=msg): OceanSg1Factory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=None, c=self.c, @@ -44,7 +45,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=None, @@ -52,7 +53,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=self.c, @@ -60,7 +61,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=self.c, @@ -68,7 +69,7 @@ def test_insufficient_coordinates(self): depth=None, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg1Factory( s=self.s, c=self.c, @@ -79,42 +80,50 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid s coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_c_too_many_bounds(self): self.c.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = "Expected scalar depth coordinate .*: got shape .*." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: s coordinate .*must be dimensionless." 
+ with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_c_incompatible_units(self): self.c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg1Factory(**self.kwargs) def test_promote_c_and_s_units_unknown_to_dimensionless(self): @@ -235,22 +244,26 @@ def test_c(self): def test_s_too_many_bounds(self): new_s = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid s coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_too_many_bounds(self): new_c = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_s_incompatible_units(self): new_s = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. 
Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_incompatible_units(self): new_c = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_eta(self): @@ -260,7 +273,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -270,7 +284,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_depth_c(self): @@ -280,10 +295,12 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Expected scalar depth coordinate .*: got shape .*" + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py index ecb8593e99..bd80de9dfe 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py @@ -34,9 +34,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct Ocean s-coordinate, generic form 2 factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): OceanSg2Factory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=None, c=self.c, @@ -44,7 +45,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=None, @@ -52,7 +53,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=self.c, @@ -60,7 +61,7 @@ def test_insufficient_coordinates(self): depth=self.depth, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=self.c, @@ -68,7 +69,7 @@ def test_insufficient_coordinates(self): depth=None, depth_c=self.depth_c, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSg2Factory( s=self.s, c=self.c, @@ -79,42 +80,50 @@ def test_insufficient_coordinates(self): def test_s_too_many_bounds(self): self.s.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid s coordinate .*: must have either 0 or 2 bounds." 
+ with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_c_too_many_bounds(self): self.c.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar depth_c coordinate .*: got shape \(2,\)." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_s_incompatible_units(self): self.s.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_c_incompatible_units(self): self.c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." 
+ with pytest.raises(ValueError, match=msg): OceanSg2Factory(**self.kwargs) def test_promote_c_and_s_units_unknown_to_dimensionless(self): @@ -235,22 +244,26 @@ def test_c(self): def test_s_too_many_bounds(self): new_s = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid s coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_too_many_bounds(self): new_c = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid c coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_s_incompatible_units(self): new_s = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: s coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.s, new_s) def test_c_incompatible_units(self): new_c = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: c coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.c, new_c) def test_eta(self): @@ -260,7 +273,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -270,7 +284,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. 
Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_depth_c(self): @@ -280,10 +295,12 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar depth_c coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py index 910e897590..f203718c8c 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py @@ -26,33 +26,38 @@ def _setup(self): self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to construct ocean sigma coordinate factory due to insufficient source coordinates." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(sigma=None, eta=self.eta, depth=self.depth) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(sigma=self.sigma, eta=None, depth=self.depth) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(sigma=self.sigma, eta=self.eta, depth=None) def test_sigma_too_many_bounds(self): self.sigma.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_sigma_incompatible_units(self): self.sigma.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .*and depth coordinate .*must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaFactory(**self.kwargs) def test_promote_sigma_units_unknown_to_dimensionless(self): @@ -138,12 +143,14 @@ def test_sigma(self): def test_sigma_too_many_bounds(self): new_sigma = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid sigma coordinate .*: must have either 0 or 2 bounds." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self): new_sigma = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_eta(self): @@ -153,7 +160,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -163,5 +171,6 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and depth coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py index e6ec074ba4..3b2f7178ef 100644 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py @@ -36,9 +36,10 @@ def _setup(self): ) def test_insufficient_coordinates(self): - with pytest.raises(ValueError): + msg = "Unable to determine units: no zlev coordinate available." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaZFactory() - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=self.eta, @@ -47,7 +48,8 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=None, ) - with pytest.raises(ValueError): + msg = "Unable to construct ocean sigma over z coordinate factory due to insufficient source coordinates." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=None, eta=None, @@ -56,7 +58,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=None, @@ -65,7 +67,7 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=None, @@ -74,7 +76,8 @@ def test_insufficient_coordinates(self): nsigma=self.nsigma, zlev=self.zlev, ) - with pytest.raises(ValueError): + msg = "Missing nsigma coordinate." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory( sigma=self.sigma, eta=self.eta, @@ -86,52 +89,62 @@ def test_insufficient_coordinates(self): def test_sigma_too_many_bounds(self): self.sigma.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_zlev_too_many_bounds(self): self.zlev.nbounds = 4 - with pytest.raises(ValueError): + msg = "Invalid zlev coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_sigma_zlev_same_boundedness(self): self.zlev.nbounds = 2 - with pytest.raises(ValueError): + msg = "The sigma coordinate .* and zlev coordinate .* must be equally bounded." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_depth_c_non_scalar(self): self.depth_c.shape = (2,) - with pytest.raises(ValueError): + msg = r"Expected scalar depth_c coordinate .*: got shape \(2,\)." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_nsigma_non_scalar(self): self.nsigma.shape = (4,) - with pytest.raises(ValueError): + msg = r"Expected scalar nsigma coordinate .*: got shape \(4,\)." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_zlev_incompatible_units(self): self.zlev.units = Unit("Pa") - with pytest.raises(ValueError): + msg = "Invalid units: zlev coordinate .* must have units of distance." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_sigma_incompatible_units(self): self.sigma.units = Unit("km") - with pytest.raises(ValueError): + msg = "Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_eta_incompatible_units(self): self.eta.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: eta coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_depth_c_incompatible_units(self): self.depth_c.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth_c coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_depth_incompatible_units(self): self.depth.units = Unit("km") - with pytest.raises(ValueError): + msg = "Incompatible units: depth coordinate .* and zlev coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): OceanSigmaZFactory(**self.kwargs) def test_promote_sigma_units_unknown_to_dimensionless(self): @@ -356,17 +369,20 @@ def test_sigma(self): def test_sigma_too_many_bounds(self): new_sigma = Mock(units=Unit("1"), nbounds=4) - with pytest.raises(ValueError): + msg = "Invalid sigma coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_zlev_same_boundedness(self): new_sigma = Mock(units=Unit("1"), nbounds=2) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. The sigma coordinate .* and zlev coordinate .* must be equally bounded." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_sigma_incompatible_units(self): new_sigma = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Invalid units: sigma coordinate .* must be dimensionless." + with pytest.raises(ValueError, match=msg): self.factory.update(self.sigma, new_sigma) def test_eta(self): @@ -376,7 +392,8 @@ def test_eta(self): def test_eta_incompatible_units(self): new_eta = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: eta coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.eta, new_eta) def test_depth(self): @@ -386,7 +403,8 @@ def test_depth(self): def test_depth_incompatible_units(self): new_depth = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth coordinate .* and zlev coordinate .* must have the same units." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.depth, new_depth) def test_depth_c(self): @@ -396,12 +414,14 @@ def test_depth_c(self): def test_depth_c_non_scalar(self): new_depth_c = Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar depth_c coordinate .*: got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_depth_c_incompatible_units(self): new_depth_c = Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Incompatible units: depth_c coordinate .* and zlev coordinate .* must have the same units." + with pytest.raises(ValueError, match=msg): self.factory.update(self.depth_c, new_depth_c) def test_nsigma(self): @@ -410,12 +430,14 @@ def test_nsigma(self): assert self.factory.nsigma is new_nsigma def test_nsigma_missing(self): - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Missing nsigma coordinate." + with pytest.raises(ValueError, match=msg): self.factory.update(self.nsigma, None) def test_nsigma_non_scalar(self): new_nsigma = Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with pytest.raises(ValueError): + msg = r"Failed to update dependencies. Expected scalar nsigma coordinate .* got shape \(10,\)." + with pytest.raises(ValueError, match=msg): self.factory.update(self.nsigma, new_nsigma) def test_zlev(self): @@ -424,20 +446,24 @@ def test_zlev(self): assert self.factory.zlev is new_zlev def test_zlev_missing(self): - with pytest.raises(ValueError): + msg = "Failed to update dependencies. Unable to determine units: no zlev coordinate available." 
+ with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, None) def test_zlev_too_many_bounds(self): new_zlev = Mock(units=Unit("m"), nbounds=4) - with pytest.raises(ValueError): + msg = "Invalid zlev coordinate .*: must have either 0 or 2 bounds." + with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, new_zlev) def test_zlev_same_boundedness(self): new_zlev = Mock(units=Unit("m"), nbounds=2) - with pytest.raises(ValueError): + msg = "Failed to update dependencies. The sigma coordinate .*and zlev coordinate .* must be equally bounded." + with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, new_zlev) def test_zlev_incompatible_units(self): - new_zlev = new_zlev = Mock(units=Unit("Pa"), nbounds=0) - with pytest.raises(ValueError): + new_zlev = Mock(units=Unit("Pa"), nbounds=0) + msg = "Failed to update dependencies. Invalid units: zlev coordinate .* must have units of distance." + with pytest.raises(ValueError, match=msg): self.factory.update(self.zlev, new_zlev) diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py index cbc1c8fe1f..7f7ecde5fd 100644 --- a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test_Lenient.py @@ -9,7 +9,7 @@ from iris.common.lenient import _LENIENT, _LENIENT_PROTECTED, Lenient -@pytest.fixture() +@pytest.fixture def lenient(): # setup state = {key: _LENIENT.__dict__[key] for key in _LENIENT_PROTECTED} diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index bd19c3922e..f932555dce 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -16,7 +16,7 @@ ) -@pytest.fixture() +@pytest.fixture def lenient(): return _Lenient() diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py 
b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py index 0b4725da42..a83f13e1df 100644 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py @@ -814,7 +814,7 @@ def test_op_same(self, leniency, mocker): def test_op_different__none(self, fieldname, leniency, mocker): # One side has field=value, and the other field=None, both strict + lenient. - if fieldname in ("attributes",): + if fieldname == "attributes": # These cannot properly be set to 'None'. Tested elsewhere. pytest.skip() diff --git a/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py index 5e9c06cc8e..8811cc15b1 100644 --- a/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_MeshMetadata.py @@ -94,8 +94,9 @@ def _setup(self, mocker): # The "node_dimension", "edge_dimension" and "face_dimension" members # are stateful only; they do not participate in lenient/strict equivalence. 
self.members_dim_names = filter( - lambda member: member - in ("node_dimension", "edge_dimension", "face_dimension"), + lambda member: ( + member in ("node_dimension", "edge_dimension", "face_dimension") + ), self.cls._members, ) diff --git a/lib/iris/tests/unit/common/metadata/test_microsecond_future.py b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py index b86ebf06d4..994456aea0 100644 --- a/lib/iris/tests/unit/common/metadata/test_microsecond_future.py +++ b/lib/iris/tests/unit/common/metadata/test_microsecond_future.py @@ -61,7 +61,7 @@ def _op(): @pytest.mark.parametrize( "indexing", - (np.s_[0], np.s_[:], np.s_[:, np.newaxis]), + [np.s_[0], np.s_[:], np.s_[:, np.newaxis]], ids=("single", "array", "array_2d"), ) def test_num2date(time_coord, future_date_microseconds, indexing): diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py b/lib/iris/tests/unit/concatenate/test__CubeSignature.py index c6736f73bd..88c5da6f12 100644 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ b/lib/iris/tests/unit/concatenate/test__CubeSignature.py @@ -28,7 +28,7 @@ class SampleData: class Test__coordinate_dim_metadata_equality: - @pytest.fixture() + @pytest.fixture def sample_data(self) -> SampleData: # Return a standard set of test items, wrapped in a data object diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py index 448ffb5e7b..c08aba238a 100644 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ b/lib/iris/tests/unit/concatenate/test_concatenate.py @@ -19,7 +19,7 @@ class TestEpoch: - @pytest.fixture() + @pytest.fixture def simple_1d_time_cubes(self): reftimes = [ "hours since 1970-01-01 00:00:00", @@ -51,12 +51,12 @@ def test_concat_1d_with_same_time_units(self, simple_1d_time_cubes): class _MessagesMixin: - @pytest.fixture() + @pytest.fixture def placeholder(self): # Shim to allow sample_cubes to have identical signature in both parent and subclasses 
return [] - @pytest.fixture() + @pytest.fixture def sample_cubes(self, placeholder): # Construct and return a pair of identical cubes data = np.arange(24, dtype=np.float32).reshape(2, 3, 4) @@ -238,7 +238,7 @@ def test_dim_coords_overlap_message(self, sample_cubes): class TestNonMetadataMessages(_MessagesMixin): parent_cubes = _MessagesMixin.sample_cubes - @pytest.fixture() + @pytest.fixture def sample_cubes(self, parent_cubes): coord = parent_cubes[1].coord("time") parent_cubes[1].replace_coord(coord.copy(points=coord.points + 2)) @@ -390,7 +390,7 @@ def test_desc_bounds_all_singleton(self): class TestConcatenate__dask: - @pytest.fixture() + @pytest.fixture def sample_lazy_cubes(self): # Make a pair of concatenatable cubes, with dim points [1, 2] and [3, 4, 5] def build_lazy_cube(points): diff --git a/lib/iris/tests/unit/concatenate/test_hashing.py b/lib/iris/tests/unit/concatenate/test_hashing.py index 88064e4e46..3fafc408e6 100644 --- a/lib/iris/tests/unit/concatenate/test_hashing.py +++ b/lib/iris/tests/unit/concatenate/test_hashing.py @@ -14,7 +14,7 @@ @pytest.mark.parametrize( - "a,b,eq", + ("a", "b", "eq"), [ (np.arange(2), da.arange(2), True), (np.arange(2), np.arange(2).reshape((1, 2)), False), @@ -78,7 +78,7 @@ def test_compute_hashes(a, b, eq): @pytest.mark.parametrize( - "a,b", + ("a", "b"), [ (a, b) for (a, b, withnans, eq) in TEST_CASES diff --git a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py index 95069ba378..a7855ce21a 100644 --- a/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py +++ b/lib/iris/tests/unit/coord_systems/test_ObliqueMercator.py @@ -125,11 +125,11 @@ def make_variant_inputs(self, request) -> None: def make_instance(self) -> ObliqueMercator: return ObliqueMercator(**self.class_kwargs) - @pytest.fixture() + @pytest.fixture def instance(self): return self.make_instance() - @pytest.fixture() + @pytest.fixture def mock_ccrs(self, mocker): return 
mocker.patch("cartopy.crs.ObliqueMercator", autospec=True) @@ -144,7 +144,7 @@ def test_cartopy_projection(self, instance, mock_ccrs): instance.as_cartopy_projection() mock_ccrs.assert_called_with(**self.cartopy_kwargs_expected) - @pytest.fixture() + @pytest.fixture def label_class(self, instance): """Make the tested coordinate system available, even for subclasses.""" from iris import coord_systems diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py index fbcc8f7f0a..d91c7e81c0 100644 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ b/lib/iris/tests/unit/cube/test_Cube.py @@ -1466,17 +1466,20 @@ def test_global_wrapped(self, dataless): class Test_intersection__Invalid: def test_reversed_min_max(self, dataless): cube = create_cube(0, 360, dataless=dataless) - with pytest.raises(ValueError): + msg = "minimum greater than maximum" + with pytest.raises(ValueError, match=msg): cube.intersection(longitude=(30, 10)) def test_dest_too_large(self, dataless): cube = create_cube(0, 360, dataless=dataless) - with pytest.raises(ValueError): + msg = "requested range greater than coordinate's unit's modulus" + with pytest.raises(ValueError, match=msg): cube.intersection(longitude=(30, 500)) def test_src_too_large(self, dataless): cube = create_cube(0, 400, dataless=dataless) - with pytest.raises(ValueError): + msg = "coordinate's range greater than coordinate's unit's modulus" + with pytest.raises(ValueError, match=msg): cube.intersection(longitude=(10, 30)) def test_missing_coord(self, dataless): @@ -2886,7 +2889,7 @@ def test_lazy_data_masked__mask_set(self): class TestSubset: @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_scalar_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="apricot", units="1") @@ -2895,8 +2898,8 @@ def test_scalar_coordinate(self, data, shape): 
assert cube == result @pytest.mark.parametrize( - ["data", "shape"], - [[np.zeros(4), None], [None, (4,)]], + ("data", "shape"), + [(np.zeros(4), None), (None, (4,))], ids=["with_data", "dataless"], ) def test_dimensional_coordinate(self, data, shape): @@ -2909,7 +2912,7 @@ def test_dimensional_coordinate(self, data, shape): assert cube == result @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_missing_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="raspberry", units="1") @@ -2918,7 +2921,7 @@ def test_missing_coordinate(self, data, shape): pytest.raises(CoordinateNotFoundError, cube.subset, bad_coord) @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_different_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="raspberry", units="1") @@ -2928,7 +2931,7 @@ def test_different_coordinate(self, data, shape): assert result is None @pytest.mark.parametrize( - ["data", "shape"], [[[0, 1], None], [None, (2,)]], ids=["with_data", "dataless"] + ("data", "shape"), [([0, 1], None), (None, (2,))], ids=["with_data", "dataless"] ) def test_different_coordinate_vector(self, data, shape): cube = Cube(data=data, shape=shape, long_name="raspberry", units="1") @@ -2938,7 +2941,7 @@ def test_different_coordinate_vector(self, data, shape): assert result is None @pytest.mark.parametrize( - ["data", "shape"], [[0, None], [None, ()]], ids=["with_data", "dataless"] + ("data", "shape"), [(0, None), (None, ())], ids=["with_data", "dataless"] ) def test_not_coordinate(self, data, shape): cube = Cube(data=data, shape=shape, long_name="peach", units="1") @@ -3712,7 +3715,7 @@ def test_cell_method_correct_order(self): assert cube1 == cube2 -@pytest.fixture() 
+@pytest.fixture def simplecube(): return stock.simple_2d_w_cell_measure_ancil_var() @@ -3790,14 +3793,14 @@ class TestReprs: """ # Note: logically this could be a staticmethod, but that seems to upset Pytest - @pytest.fixture() + @pytest.fixture def patched_cubeprinter(self, mocker): target = "iris._representation.cube_printout.CubePrinter" instance_mock = mock.MagicMock( to_string=mock.MagicMock(return_value="") # NB this must return a string ) class_mock = mocker.patch(target, return_value=instance_mock) - yield class_mock, instance_mock + return class_mock, instance_mock @staticmethod def _check_expected_effects(simplecube, patched_cubeprinter, oneline, padding): @@ -3847,14 +3850,14 @@ class TestHtmlRepr: """ # Note: logically this could be a staticmethod, but that seems to upset Pytest - @pytest.fixture() + @pytest.fixture def patched_cubehtml(self, mocker): target = "iris.experimental.representation.CubeRepresentation" instance_mock = mock.MagicMock( repr_html=mock.MagicMock(return_value="") # NB this must return a string ) class_mock = mocker.patch(target, return_value=instance_mock) - yield class_mock, instance_mock + return class_mock, instance_mock @staticmethod def test__repr_html__effects(simplecube, patched_cubehtml): diff --git a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py index 943a3268fa..6864780dee 100644 --- a/lib/iris/tests/unit/cube/test_CubeAttrsDict.py +++ b/lib/iris/tests/unit/cube/test_CubeAttrsDict.py @@ -14,7 +14,7 @@ from iris.fileformats.netcdf.saver import _CF_GLOBAL_ATTRS -@pytest.fixture() +@pytest.fixture def sample_attrs() -> CubeAttrsDict: return CubeAttrsDict(locals={"a": 1, "z": "this"}, globals={"b": 2, "z": "that"}) diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py index 440e0950b8..26497048b0 100644 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ b/lib/iris/tests/unit/cube/test_CubeList.py @@ -256,7 +256,8 @@ def 
test_fail(self): CubeList([self.cube1, cube2]).merge_cube() def test_empty(self): - with pytest.raises(ValueError): + msg = "can't merge an empty CubeList" + with pytest.raises(ValueError, match=msg): CubeList([]).merge_cube() def test_single_cube(self): diff --git a/lib/iris/tests/unit/experimental/geovista/__init__.py b/lib/iris/tests/unit/experimental/geovista/__init__.py index b2024ce97d..731c8736ad 100644 --- a/lib/iris/tests/unit/experimental/geovista/__init__.py +++ b/lib/iris/tests/unit/experimental/geovista/__init__.py @@ -3,3 +3,10 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.experimental.geovista` module.""" + +import pytest + +# Skip this whole package if geovista (and by extension pyvista) is not available: +pytest.importorskip( + "geovista", reason="Skipping geovista unit tests as `geovista` is not installed" +) diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py index c5982fc475..bebe6301eb 100644 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ b/lib/iris/tests/unit/fileformats/__init__.py @@ -3,3 +3,14 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :mod:`iris.fileformats` package.""" + +import pytest +from pytest_mock import MockerFixture + + +class MockerMixin: + mocker: MockerFixture + + @pytest.fixture(autouse=True) + def _mocker_mixin_setup(self, mocker): + self.mocker = mocker diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py index 7f37eb9f24..f293c9d77f 100644 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py @@ -78,7 +78,7 @@ def _setup(self, mocker): getncattr=getncattr, ) mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=dataset, ) @@ -141,7 +141,7 @@ def _setup(self, mocker): mocker.patch("iris.fileformats.cf.CFReader._build_cf_groups") mocker.patch("iris.fileformats.cf.CFReader._reset") mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=self.dataset, ) @@ -237,7 +237,7 @@ def _setup(self, mocker): # and building first level cf-groups for variables. 
mocker.patch("iris.fileformats.cf.CFReader._reset") mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=self.dataset, ) @@ -301,32 +301,34 @@ def test_formula_terms_ignore(self): self.orography.dimensions = ["lat", "wibble"] with pytest.warns(match="Ignoring formula terms variable"): cf_group = CFReader("dummy").cf_group - group = cf_group.promoted - assert list(group.keys()) == ["orography"] - assert group["orography"].cf_data == self.orography + group = cf_group.promoted + assert list(group.keys()) == ["orography"] + assert group["orography"].cf_data == self.orography def test_auxiliary_ignore(self): self.x.dimensions = ["lat", "wibble"] with pytest.warns(match=r"Ignoring variable x"): cf_group = CFReader("dummy").cf_group - promoted = ["x", "orography"] - group = cf_group.promoted - assert set(group.keys()) == set(promoted) - for name in promoted: - assert group[name].cf_data == getattr(self, name) + promoted = ["x", "orography"] + group = cf_group.promoted + assert set(group.keys()) == set(promoted) + for name in promoted: + assert group[name].cf_data == getattr(self, name) def test_promoted_auxiliary_ignore(self): self.wibble = netcdf_variable("wibble", "lat wibble", np.float64) self.variables["wibble"] = self.wibble self.orography.coordinates = "wibble" + with pytest.warns(match="Ignoring variable wibble") as warns: cf_group = CFReader("dummy").cf_group.promoted - promoted = ["wibble", "orography"] - assert set(cf_group.keys()) == set(promoted) - for name in promoted: - assert cf_group[name].cf_data == getattr(self, name) - # we should have got 2 warnings - assert len(warns.list) == 2 + + promoted = ["wibble", "orography"] + assert set(cf_group.keys()) == set(promoted) + for name in promoted: + assert cf_group[name].cf_data == getattr(self, name) + # we should have got 2 warnings + assert len(warns.list) == 2 class Test_build_cf_groups__ugrid: @@ -373,7 +375,7 
@@ def _setup_class(self, mocker): # translations and building first level cf-groups for variables. mocker.patch("iris.fileformats.cf.CFReader._reset") mocker.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", return_value=self.dataset, ) cf_reader = CFReader("dummy") diff --git a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py index 679f74c51d..c01e1516b0 100644 --- a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py +++ b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py @@ -27,7 +27,8 @@ def _setup(self, mocker): def test_valid_absolute_path(self, mocker): # Override the configuration value for System.dot_path real_path = os.path.abspath(__file__) - assert os.path.exists(real_path) and os.path.isabs(real_path) + assert os.path.exists(real_path) + assert os.path.isabs(real_path) mocker.patch("iris.config.get_option", return_value=real_path) result = _dot_path() assert result == real_path diff --git a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py index 5e731632c6..42451e2af9 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py +++ b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py @@ -4,22 +4,19 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.ArakawaC`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats._ff import ArakawaC +from iris.tests import _shared_utils -class Test__x_vectors(tests.IrisTest): +class Test__x_vectors: def _test(self, column, horiz_grid_type, xp, xu): reals = np.arange(6) + 100 grid = ArakawaC(column, None, reals, horiz_grid_type) result_xp, result_xu = grid._x_vectors() - self.assertArrayEqual(result_xp, xp) - self.assertArrayEqual(result_xu, xu) + _shared_utils.assert_array_equal(result_xp, xp) + _shared_utils.assert_array_equal(result_xu, xu) def test_none(self): self._test(column=None, horiz_grid_type=None, xp=None, xu=None) @@ -49,12 +46,12 @@ def test_2d_with_wrap(self): ) -class Test_regular_x(tests.IrisTest): +class Test_regular_x: def _test(self, subgrid, bzx, bdx): grid = ArakawaC(None, None, [4.0, None, None, -5.0, None, None], None) result_bzx, result_bdx = grid.regular_x(subgrid) - self.assertEqual(result_bzx, bzx) - self.assertEqual(result_bdx, bdx) + assert result_bzx == bzx + assert result_bdx == bdx def test_theta_subgrid(self): self._test(1, -9.0, 4.0) @@ -63,13 +60,13 @@ def test_u_subgrid(self): self._test(11, -7.0, 4.0) -class Test_regular_y(tests.IrisTest): +class Test_regular_y: def _test(self, v_offset, subgrid, bzy, bdy): grid = ArakawaC(None, None, [None, 4.0, 45.0, None, None, None], None) grid._v_offset = v_offset result_bzy, result_bdy = grid.regular_y(subgrid) - self.assertEqual(result_bzy, bzy) - self.assertEqual(result_bdy, bdy) + assert result_bzy == bzy + assert result_bdy == bdy def test_theta_subgrid_NewDynamics(self): self._test(0.5, 1, 41.0, 4.0) @@ -82,7 +79,3 @@ def test_theta_subgrid_ENDGame(self): def test_v_subgrid_ENDGame(self): self._test(-0.5, 11, 39.0, 4.0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py index 2a09a60275..865cdf7691 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py +++ 
b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py @@ -4,29 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.ENDGame`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats._ff import ENDGame +from iris.tests import _shared_utils -class Test(tests.IrisTest): +class Test: def test_class_attributes(self): reals = np.arange(6) + 100 grid = ENDGame(None, None, reals, None) - self.assertEqual(grid._v_offset, -0.5) + assert grid._v_offset == -0.5 -class Test__y_vectors(tests.IrisTest): +class Test__y_vectors: def _test(self, row, yp, yv): reals = np.arange(6) + 100 grid = ENDGame(None, row, reals, None) result_yp, result_yv = grid._y_vectors() - self.assertArrayEqual(result_yp, yp) - self.assertArrayEqual(result_yv, yv) + _shared_utils.assert_array_equal(result_yp, yp) + _shared_utils.assert_array_equal(result_yv, yv) def test_none(self): self._test(row=None, yp=None, yv=None) @@ -40,7 +37,3 @@ def test_2d(self): yp=np.array([0, 1, 2]), yv=np.array([0, 10, 20, 30]), ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py index c21fc39821..e1cd1f5912 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py @@ -4,20 +4,18 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.fileformat.ff.FF2PP` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import collections import contextlib -from unittest import mock import numpy as np +import pytest from iris.exceptions import NotYetImplementedError import iris.fileformats._ff as ff from iris.fileformats._ff import FF2PP import iris.fileformats.pp as pp +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin from iris.warnings import IrisLoadWarning # PP-field: LBPACK N1 values. @@ -42,32 +40,31 @@ ) -class Test____iter__(tests.IrisTest): - @mock.patch("iris.fileformats._ff.FFHeader") - def test_call_structure(self, _FFHeader): +class Test____iter__(MockerMixin): + def test_call_structure(self, mocker): # Check that the iter method calls the two necessary utility # functions - extract_result = mock.Mock() - interpret_patch = mock.patch( + _FFHeader = mocker.patch("iris.fileformats._ff.FFHeader") + extract_result = mocker.Mock() + interpret_patch = mocker.patch( "iris.fileformats.pp._interpret_fields", autospec=True, return_value=iter([]), ) - extract_patch = mock.patch( + extract_patch = mocker.patch( "iris.fileformats._ff.FF2PP._extract_field", autospec=True, return_value=extract_result, ) FF2PP_instance = ff.FF2PP("mock") - with interpret_patch as interpret, extract_patch as extract: - list(iter(FF2PP_instance)) + list(iter(FF2PP_instance)) - interpret.assert_called_once_with(extract_result) - extract.assert_called_once_with(FF2PP_instance) + interpret_patch.assert_called_once_with(extract_result) + extract_patch.assert_called_once_with(FF2PP_instance) -class Test__extract_field__LBC_format(tests.IrisTest): +class Test__extract_field__LBC_format(MockerMixin): @contextlib.contextmanager def mock_for_extract_field(self, fields, x=None, y=None): """A context manager to ensure FF2PP._extract_field gets a field @@ -75,22 +72,26 @@ def mock_for_extract_field(self, fields, x=None, y=None): the "make_pp_field" call. 
""" - with mock.patch("iris.fileformats._ff.FFHeader"): + with self.mocker.patch("iris.fileformats._ff.FFHeader"): ff2pp = ff.FF2PP("mock") ff2pp._ff_header.lookup_table = [0, 0, len(fields)] # Fake level constants, with shape specifying just one model-level. ff2pp._ff_header.level_dependent_constants = np.zeros(1) - grid = mock.Mock() - grid.vectors = mock.Mock(return_value=(x, y)) - ff2pp._ff_header.grid = mock.Mock(return_value=grid) + grid = self.mocker.Mock() + grid.vectors = self.mocker.Mock(return_value=(x, y)) + ff2pp._ff_header.grid = self.mocker.Mock(return_value=grid) open_func = "builtins.open" with ( - mock.patch("iris.fileformats._ff._parse_binary_stream", return_value=[0]), - mock.patch(open_func), - mock.patch("struct.unpack_from", return_value=[4]), - mock.patch("iris.fileformats.pp.make_pp_field", side_effect=fields), - mock.patch("iris.fileformats._ff.FF2PP._payload", return_value=(0, 0)), + self.mocker.patch( + "iris.fileformats._ff._parse_binary_stream", return_value=[0] + ), + self.mocker.patch(open_func), + self.mocker.patch("struct.unpack_from", return_value=[4]), + self.mocker.patch("iris.fileformats.pp.make_pp_field", side_effect=fields), + self.mocker.patch( + "iris.fileformats._ff.FF2PP._payload", return_value=(0, 0) + ), ): yield ff2pp @@ -101,7 +102,7 @@ def _mock_lbc(self, **kwargs): # Apply provided args (replacing any defaults if specified). field_kwargs.update(kwargs) # Return a mock with just those properties pre-defined. 
- return mock.Mock(**field_kwargs) + return self.mocker.Mock(**field_kwargs) def test_LBC_header(self): bzx, bzy = -10, 15 @@ -121,18 +122,18 @@ def test_LBC_header(self): ff2pp._ff_header.dataset_type = 5 result = list(ff2pp._extract_field()) - self.assertEqual([field], result) - self.assertEqual(field.lbrow, 10 + 14 * 2) - self.assertEqual(field.lbnpt, 12 + 16 * 2) + assert [field] == result + assert 10 + 14 * 2 == field.lbrow + assert 12 + 16 * 2 == field.lbnpt name_mapping_dict = dict( rim_width=slice(4, 6), y_halo=slice(2, 4), x_halo=slice(0, 2) ) boundary_packing = pp.SplittableInt(121416, name_mapping_dict) - self.assertEqual(field.boundary_packing, boundary_packing) - self.assertEqual(field.bzy, bzy - boundary_packing.y_halo * field.bdy) - self.assertEqual(field.bzx, bzx - boundary_packing.x_halo * field.bdx) + assert field.boundary_packing == boundary_packing + assert field.bzy == bzy - boundary_packing.y_halo * field.bdy + assert field.bzx == bzx - boundary_packing.x_halo * field.bdx def check_non_trivial_coordinate_warning(self, field): field.lbegin = 0 @@ -146,25 +147,17 @@ def check_non_trivial_coordinate_warning(self, field): y = np.array([1, 2, 6]) with self.mock_for_extract_field([field], x, y) as ff2pp: ff2pp._ff_header.dataset_type = 5 - with mock.patch("warnings.warn") as warn: + msg = ( + "The x or y coordinates of your boundary condition field may " + "be incorrect, not having taken into account the boundary " + "size." + ) + with pytest.warns(IrisLoadWarning, match=msg): list(ff2pp._extract_field()) # Check the values are unchanged. - self.assertEqual(field.bdy, orig_bdy) - self.assertEqual(field.bdx, orig_bdx) - - # Check a warning was raised with a suitable message. - warn_error_tmplt = "Unexpected warning message: {}" - non_trivial_coord_warn_msg = warn.call_args[0][0] - msg = ( - "The x or y coordinates of your boundary condition field may " - "be incorrect, not having taken into account the boundary " - "size." 
- ) - self.assertTrue( - non_trivial_coord_warn_msg.startswith(msg), - warn_error_tmplt.format(non_trivial_coord_warn_msg), - ) + assert field.bdy == orig_bdy + assert field.bdx == orig_bdx def test_LBC_header_non_trivial_coords_both(self): # Check a warning is raised when both bdx and bdy are bad. @@ -205,19 +198,16 @@ def test_negative_bdy(self): ) with self.mock_for_extract_field([field]) as ff2pp: ff2pp._ff_header.dataset_type = 5 - with mock.patch("warnings.warn") as warn: + msg = "The LBC has a bdy less than 0." + with pytest.warns(IrisLoadWarning, match=msg): list(ff2pp._extract_field()) - msg = "The LBC has a bdy less than 0." - self.assertTrue( - warn.call_args[0][0].startswith(msg), - "Northwards bdy warning not correctly raised.", - ) -class Test__payload(tests.IrisTest): - def setUp(self): +class Test__payload(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create a mock LBC type PPField. - self.mock_field = mock.Mock() + self.mock_field = mocker.Mock() field = self.mock_field field.raw_lbpack = _UNPACKED field.lbuser = [_REAL] @@ -227,14 +217,14 @@ def setUp(self): field.boundary_packing = None def _test(self, mock_field, expected_depth, expected_dtype, word_depth=None): - with mock.patch("iris.fileformats._ff.FFHeader", return_value=None): + with self.mocker.patch("iris.fileformats._ff.FFHeader", return_value=None): kwargs = {} if word_depth is not None: kwargs["word_depth"] = word_depth ff2pp = FF2PP("dummy_filename", **kwargs) data_depth, data_dtype = ff2pp._payload(mock_field) - self.assertEqual(data_depth, expected_depth) - self.assertEqual(data_dtype, expected_dtype) + assert data_depth == expected_depth + assert data_dtype == expected_dtype def test_unpacked_real(self): mock_field = _DummyField( @@ -388,9 +378,9 @@ def test_lbpack_unsupported(self): lbuser=[_INTEGER], boundary_packing=None, ) - with self.assertRaisesRegex( + with pytest.raises( NotYetImplementedError, - "PP fields with LBPACK of 1239 are not 
supported.", + match="PP fields with LBPACK of 1239 are not supported.", ): self._test(mock_field, None, None) @@ -418,7 +408,7 @@ def test_lbc_wgdos_unsupported(self): # Anything not None will do here. boundary_packing=0, ) - with self.assertRaisesRegex(ValueError, "packed LBC data is not supported"): + with pytest.raises(ValueError, match="packed LBC data is not supported"): self._test(mock_field, None, None) def test_lbc_cray(self): @@ -436,13 +426,13 @@ def test_lbc_cray(self): self._test(mock_field, ((47 * 34) - (19 * 14)) * 4, ">f4") -class Test__det_border(tests.IrisTest): - def setUp(self): - _FFH_patch = mock.patch("iris.fileformats._ff.FFHeader") +class Test__det_border: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + _FFH_patch = mocker.patch("iris.fileformats._ff.FFHeader") _FFH_patch.start() - self.addCleanup(_FFH_patch.stop) - def test_unequal_spacing_eitherside(self): + def test_unequal_spacing_eitherside(self, mocker): # Ensure that we do not interpret the case where there is not the same # spacing on the lower edge as the upper edge. ff2pp = FF2PP("dummy") @@ -454,10 +444,9 @@ def test_unequal_spacing_eitherside(self): "size." ) - with mock.patch("warnings.warn") as warn: + with pytest.warns(IrisLoadWarning, match=msg): result = ff2pp._det_border(field_x, None) - warn.assert_called_with(msg, category=IrisLoadWarning) - self.assertIs(result, field_x) + assert result is field_x def test_increasing_field_values(self): # Field where its values a increasing. @@ -465,7 +454,7 @@ def test_increasing_field_values(self): field_x = np.array([1, 2, 3]) com = np.array([0, 1, 2, 3, 4]) result = ff2pp._det_border(field_x, 1) - self.assertArrayEqual(result, com) + _shared_utils.assert_array_equal(result, com) def test_decreasing_field_values(self): # Field where its values a decreasing. 
@@ -473,20 +462,21 @@ def test_decreasing_field_values(self): field_x = np.array([3, 2, 1]) com = np.array([4, 3, 2, 1, 0]) result = ff2pp._det_border(field_x, 1) - self.assertArrayEqual(result, com) + _shared_utils.assert_array_equal(result, com) -class Test__adjust_field_for_lbc(tests.IrisTest): - def setUp(self): +class Test__adjust_field_for_lbc: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Patch FFHeader to produce a mock header instead of opening a file. - self.mock_ff_header = mock.Mock() + self.mock_ff_header = mocker.Mock() self.mock_ff_header.dataset_type = 5 - self.mock_ff = self.patch( + self.mock_ff = mocker.patch( "iris.fileformats._ff.FFHeader", return_value=self.mock_ff_header ) # Create a mock LBC type PPField. - self.mock_field = mock.Mock() + self.mock_field = mocker.Mock() field = self.mock_field field.lbtim = 0 field.lblev = 7777 @@ -505,54 +495,55 @@ def test__basic(self): ff2pp = FF2PP("dummy_filename") field = self.mock_field ff2pp._adjust_field_for_lbc(field) - self.assertEqual(field.lbtim, 11) - self.assertEqual(field.lbvc, 65) - self.assertEqual(field.boundary_packing.rim_width, 8) - self.assertEqual(field.boundary_packing.y_halo, 5) - self.assertEqual(field.boundary_packing.x_halo, 4) - self.assertEqual(field.lbnpt, 1009) - self.assertEqual(field.lbrow, 2011) + assert field.lbtim == 11 + assert field.lbvc == 65 + assert field.boundary_packing.rim_width == 8 + assert field.boundary_packing.y_halo == 5 + assert field.boundary_packing.x_halo == 4 + assert field.lbnpt == 1009 + assert field.lbrow == 2011 def test__bad_lbtim(self): self.mock_field.lbtim = 717 ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex(ValueError, "LBTIM of 717, expected only 0 or 11"): + with pytest.raises(ValueError, match="LBTIM of 717, expected only 0 or 11"): ff2pp._adjust_field_for_lbc(self.mock_field) def test__bad_lbvc(self): self.mock_field.lbvc = 312 ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex(ValueError, "LBVC of 
312, expected only 0 or 65"): + with pytest.raises(ValueError, match="LBVC of 312, expected only 0 or 65"): ff2pp._adjust_field_for_lbc(self.mock_field) -class Test__fields_over_all_levels(tests.IrisTest): - def setUp(self): +class Test__fields_over_all_levels: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Patch FFHeader to produce a mock header instead of opening a file. - self.mock_ff_header = mock.Mock() + self.mock_ff_header = mocker.Mock() self.mock_ff_header.dataset_type = 5 # Fake the level constants to look like 3 model levels. self.n_all_levels = 3 self.mock_ff_header.level_dependent_constants = np.zeros((self.n_all_levels)) - self.mock_ff = self.patch( + self.mock_ff = mocker.patch( "iris.fileformats._ff.FFHeader", return_value=self.mock_ff_header ) # Create a simple mock for a test field. - self.mock_field = mock.Mock() + self.mock_field = mocker.Mock() field = self.mock_field field.lbhem = 103 - self.original_lblev = mock.sentinel.untouched_lbev + self.original_lblev = mocker.sentinel.untouched_lbev field.lblev = self.original_lblev def _check_expected_levels(self, results, n_levels): if n_levels == 0: - self.assertEqual(len(results), 1) - self.assertEqual(results[0].lblev, self.original_lblev) + assert len(results) == 1 + assert results[0].lblev == self.original_lblev else: - self.assertEqual(len(results), n_levels) - self.assertEqual([fld.lblev for fld in results], list(range(n_levels))) + assert len(results) == n_levels + assert [fld.lblev for fld in results] == list(range(n_levels)) def test__is_lbc(self): ff2pp = FF2PP("dummy_filename") @@ -564,18 +555,14 @@ def test__lbhem_too_small(self): ff2pp = FF2PP("dummy_filename") field = self.mock_field field.lbhem = 100 - with self.assertRaisesRegex(ValueError, "hence >= 101"): + with pytest.raises(ValueError, match="hence >= 101"): _ = list(ff2pp._fields_over_all_levels(field)) def test__lbhem_too_large(self): ff2pp = FF2PP("dummy_filename") field = self.mock_field field.lbhem = 105 - 
with self.assertRaisesRegex( - ValueError, "more than the total number of levels in the file = 3" + with pytest.raises( + ValueError, match="more than the total number of levels in the file = 3" ): _ = list(ff2pp._fields_over_all_levels(field)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py index 1c20acd39d..d855f194a2 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py +++ b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py @@ -4,40 +4,42 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.FFHeader`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import collections -from unittest import mock import numpy as np +import pytest from iris.fileformats._ff import FFHeader, _WarnComboLoadingDefaulting +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin MyGrid = collections.namedtuple("MyGrid", "column row real horiz_grid_type") -class Test_grid(tests.IrisTest): +class Test_grid(MockerMixin): def _header(self, grid_staggering): - with mock.patch.object(FFHeader, "__init__", mock.Mock(return_value=None)): - header = FFHeader() + _ = self.mocker.patch.object( + FFHeader, "__init__", self.mocker.Mock(return_value=None) + ) + header = FFHeader() header.grid_staggering = grid_staggering - header.column_dependent_constants = mock.sentinel.column - header.row_dependent_constants = mock.sentinel.row - header.real_constants = mock.sentinel.real - header.horiz_grid_type = mock.sentinel.horiz_grid_type + header.column_dependent_constants = self.mocker.sentinel.column + header.row_dependent_constants = self.mocker.sentinel.row + header.real_constants = self.mocker.sentinel.real + header.horiz_grid_type = self.mocker.sentinel.horiz_grid_type return 
header def _test_grid_staggering(self, grid_staggering): header = self._header(grid_staggering) - with mock.patch.dict(FFHeader.GRID_STAGGERING_CLASS, {grid_staggering: MyGrid}): - grid = header.grid() - self.assertIsInstance(grid, MyGrid) - self.assertIs(grid.column, mock.sentinel.column) - self.assertIs(grid.row, mock.sentinel.row) - self.assertIs(grid.real, mock.sentinel.real) - self.assertIs(grid.horiz_grid_type, mock.sentinel.horiz_grid_type) + _ = self.mocker.patch.dict( + FFHeader.GRID_STAGGERING_CLASS, {grid_staggering: MyGrid} + ) + grid = header.grid() + assert isinstance(grid, MyGrid) + assert grid.column is self.mocker.sentinel.column + assert grid.row is self.mocker.sentinel.row + assert grid.real is self.mocker.sentinel.real + assert grid.horiz_grid_type is self.mocker.sentinel.horiz_grid_type def test_new_dynamics(self): self._test_grid_staggering(3) @@ -45,29 +47,23 @@ def test_new_dynamics(self): def test_end_game(self): self._test_grid_staggering(6) - def test_unknown(self): + def test_unknown(self, mocker): header = self._header(0) - with mock.patch( + _ = mocker.patch( "iris.fileformats._ff.NewDynamics", - mock.Mock(return_value=mock.sentinel.grid), - ): - with mock.patch("warnings.warn") as warn: - grid = header.grid() - warn.assert_called_with( - "Staggered grid type: 0 not currently" - " interpreted, assuming standard C-grid", - category=_WarnComboLoadingDefaulting, + mocker.Mock(return_value=mocker.sentinel.grid), ) - self.assertIs(grid, mock.sentinel.grid) + msg = ( + "Staggered grid type: 0 not currently interpreted, assuming standard C-grid" + ) + with pytest.warns(_WarnComboLoadingDefaulting, match=msg): + grid = header.grid() + assert grid is mocker.sentinel.grid -@tests.skip_data -class Test_integer_constants(tests.IrisTest): +@_shared_utils.skip_data +class Test_integer_constants: def test_read_ints(self): - test_file_path = tests.get_data_path(("FF", "structured", "small")) + test_file_path = _shared_utils.get_data_path(("FF", 
"structured", "small")) ff_header = FFHeader(test_file_path) - self.assertEqual(ff_header.integer_constants.dtype, np.dtype(">i8")) - - -if __name__ == "__main__": - tests.main() + assert ff_header.integer_constants.dtype == np.dtype(">i8") diff --git a/lib/iris/tests/unit/fileformats/ff/test_Grid.py b/lib/iris/tests/unit/fileformats/ff/test_Grid.py index 1bb9688c1a..d3ece16916 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_Grid.py +++ b/lib/iris/tests/unit/fileformats/ff/test_Grid.py @@ -4,57 +4,55 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.Grid`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock +import pytest from iris.fileformats._ff import Grid +from iris.tests.unit.fileformats import MockerMixin -class Test___init__(tests.IrisTest): - def test_attributes(self): +class Test___init__: + def test_attributes(self, mocker): # Ensure the constructor initialises all the grid's attributes # correctly, including unpacking values from the REAL constants. 
reals = ( - mock.sentinel.ew, - mock.sentinel.ns, - mock.sentinel.first_lat, - mock.sentinel.first_lon, - mock.sentinel.pole_lat, - mock.sentinel.pole_lon, + mocker.sentinel.ew, + mocker.sentinel.ns, + mocker.sentinel.first_lat, + mocker.sentinel.first_lon, + mocker.sentinel.pole_lat, + mocker.sentinel.pole_lon, ) grid = Grid( - mock.sentinel.column, - mock.sentinel.row, + mocker.sentinel.column, + mocker.sentinel.row, reals, - mock.sentinel.horiz_grid_type, + mocker.sentinel.horiz_grid_type, ) - self.assertIs(grid.column_dependent_constants, mock.sentinel.column) - self.assertIs(grid.row_dependent_constants, mock.sentinel.row) - self.assertIs(grid.ew_spacing, mock.sentinel.ew) - self.assertIs(grid.ns_spacing, mock.sentinel.ns) - self.assertIs(grid.first_lat, mock.sentinel.first_lat) - self.assertIs(grid.first_lon, mock.sentinel.first_lon) - self.assertIs(grid.pole_lat, mock.sentinel.pole_lat) - self.assertIs(grid.pole_lon, mock.sentinel.pole_lon) - self.assertIs(grid.horiz_grid_type, mock.sentinel.horiz_grid_type) - - -class Test_vectors(tests.IrisTest): - def setUp(self): - self.xp = mock.sentinel.xp - self.xu = mock.sentinel.xu - self.yp = mock.sentinel.yp - self.yv = mock.sentinel.yv + assert grid.column_dependent_constants is mocker.sentinel.column + assert grid.row_dependent_constants is mocker.sentinel.row + assert grid.ew_spacing is mocker.sentinel.ew + assert grid.ns_spacing is mocker.sentinel.ns + assert grid.first_lat is mocker.sentinel.first_lat + assert grid.first_lon is mocker.sentinel.first_lon + assert grid.pole_lat is mocker.sentinel.pole_lat + assert grid.pole_lon is mocker.sentinel.pole_lon + assert grid.horiz_grid_type is mocker.sentinel.horiz_grid_type + + +class Test_vectors(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.xp = mocker.sentinel.xp + self.xu = mocker.sentinel.xu + self.yp = mocker.sentinel.yp + self.yv = mocker.sentinel.yv def _test_subgrid_vectors(self, subgrid, expected): grid = Grid(None, None, 
(None,) * 6, None) - grid._x_vectors = mock.Mock(return_value=(self.xp, self.xu)) - grid._y_vectors = mock.Mock(return_value=(self.yp, self.yv)) + grid._x_vectors = self.mocker.Mock(return_value=(self.xp, self.xu)) + grid._y_vectors = self.mocker.Mock(return_value=(self.yp, self.yv)) result = grid.vectors(subgrid) - self.assertEqual(result, expected) + assert result == expected def test_1(self): # Data on atmospheric theta points. @@ -103,7 +101,3 @@ def test_28(self): def test_29(self): # Orography field for atmospheric LBCs. self._test_subgrid_vectors(29, (self.xp, self.yp)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py index f3cc41aa82..e623afdeb6 100644 --- a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py +++ b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py @@ -4,29 +4,26 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :class:`iris.fileformat.ff.NewDynamics`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np from iris.fileformats._ff import NewDynamics +from iris.tests import _shared_utils -class Test(tests.IrisTest): +class Test: def test_class_attributes(self): reals = np.arange(6) + 100 grid = NewDynamics(None, None, reals, None) - self.assertEqual(grid._v_offset, 0.5) + assert grid._v_offset == 0.5 -class Test__y_vectors(tests.IrisTest): +class Test__y_vectors: def _test(self, row, yp, yv): reals = np.arange(6) + 100 grid = NewDynamics(None, row, reals, None) result_yp, result_yv = grid._y_vectors() - self.assertArrayEqual(result_yp, yp) - self.assertArrayEqual(result_yv, yv) + _shared_utils.assert_array_equal(result_yp, yp) + _shared_utils.assert_array_equal(result_yv, yv) def test_none(self): self._test(row=None, yp=None, yv=None) @@ -44,7 +41,3 @@ def test_2d(self): yp=np.array([0, 1, 2, 3]), yv=np.array([0, 10, 20]), ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 296765f853..0b9caabdf7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -4,15 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`.""" -from pathlib import Path -import shutil -import tempfile import warnings +import pytest + import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf.loader import _load_cube +from iris.tests import _shared_utils from iris.tests.stock.netcdf import ncgen_from_cdl from iris.warnings import IrisLoadWarning @@ -35,11 +35,8 @@ class Mixin__nc_load_actions: """Class to make testcases for rules or actions code, and check results. 
- Defines standard setUpClass/tearDownClass methods, to create a temporary + Defines standard setup method, to create a temporary directory for intermediate files. - NOTE: owing to peculiarities of unittest, these must be explicitly called - from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the - actual Test_XXX class which also inherits unittest.TestCase. Testcases are manufactured by the '_make_testcase_cdl' method. The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes @@ -55,15 +52,10 @@ class Mixin__nc_load_actions: # "global" test setting : whether to output various debug info debug_info = False - @classmethod - def setUpClass(cls): + @pytest.fixture(autouse=True, scope="class") + def setup_mixin(self, request, tmp_path_factory): # Create a temp directory for temp files. - cls.temp_dirpath = Path(tempfile.mkdtemp()) - - @classmethod - def tearDownClass(cls): - # Destroy a temp directory for temp files. - shutil.rmtree(cls.temp_dirpath) + request.cls.temp_dirpath = tmp_path_factory.mktemp("temp") def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path, mocker=None): """Load the 'phenom' data variable in a CDL testcase, as a cube. 
@@ -135,9 +127,9 @@ def run_testcase(self, warning_regex=None, **testcase_kwargs): print("------\n") if warning_regex is None: - context = self.assertNoWarningsRegexp() + context = _shared_utils.assert_no_warnings_regexp() else: - context = self.assertWarnsRegex(IrisLoadWarning, warning_regex) + context = pytest.warns(IrisLoadWarning, match=warning_regex) with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index 202fb0fa16..41d5c33623 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -9,12 +9,11 @@ """ -import iris.coord_systems - -import iris.tests as tests # isort: skip +import re import pytest +import iris.coord_systems import iris.coord_systems as ics import iris.fileformats._nc_load_rules.helpers as hh from iris.loading import LOAD_PROBLEMS @@ -178,7 +177,7 @@ def _make_testcase_cdl( {g_varname}:{lonpo_name} = 0.0 ; """ # Those which require 'longitude of central meridian' - if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + if mapping_type_name == hh.CF_GRID_MAPPING_TRANSVERSE: latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN g_string += f""" {g_varname}:{latcm_name} = 0.0 ; @@ -193,7 +192,7 @@ def _make_testcase_cdl( {g_varname}:{pph_name} = 600000.0 ; """ # Those which require 'sweep angle axis' - if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + if mapping_type_name == hh.CF_GRID_MAPPING_GEOSTATIONARY: saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS g_string += f""" {g_varname}:{saa_name} = "y" ; @@ -201,7 +200,7 @@ def _make_testcase_cdl( # Polar stereo needs a special 'latitude of projection origin', a # 'straight_vertical_longitude_from_pole' and a `standard_parallel` or # `scale_factor_at_projection_origin` so treat it 
specially - if mapping_type_name in (hh.CF_GRID_MAPPING_POLAR,): + if mapping_type_name == hh.CF_GRID_MAPPING_POLAR: latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN g_string += f""" {g_varname}:{latpo_name} = 90.0 ; @@ -267,8 +266,8 @@ def check_result( Various options control the expected things which are tested. """ - self.assertEqual(cube.standard_name, "air_temperature") - self.assertEqual(cube.var_name, "phenom") + assert cube.standard_name == "air_temperature" + assert cube.var_name == "phenom" x_coords = cube.coords(dimensions=(1,)) y_coords = cube.coords(dimensions=(0,)) @@ -283,40 +282,40 @@ def check_result( else: expected_dim_coords += x_coords - self.assertEqual(set(expected_dim_coords), set(cube.coords(dim_coords=True))) + assert set(expected_dim_coords) == set(cube.coords(dim_coords=True)) if cube_no_xycoords: - self.assertEqual(expected_dim_coords, []) + assert expected_dim_coords == [] x_coord = None y_coord = None else: - self.assertEqual(len(x_coords), 1) + assert len(x_coords) == 1 (x_coord,) = x_coords - self.assertEqual(len(y_coords), 1) + assert len(y_coords) == 1 (y_coord,) = y_coords - self.assertEqual(set(expected_aux_coords), set(cube.coords(dim_coords=False))) + assert set(expected_aux_coords) == set(cube.coords(dim_coords=False)) if x_coord: if xco_stdname is None: # no check pass elif xco_stdname is True: - self.assertIsNotNone(x_coord.standard_name) + assert x_coord.standard_name is not None elif xco_stdname is False: - self.assertIsNone(x_coord.standard_name) + assert x_coord.standard_name is None else: - self.assertEqual(x_coord.standard_name, xco_stdname) + assert x_coord.standard_name == xco_stdname if y_coord: if yco_stdname is None: # no check pass if yco_stdname is True: - self.assertIsNotNone(y_coord.standard_name) + assert y_coord.standard_name is not None elif yco_stdname is False: - self.assertIsNone(y_coord.standard_name) + assert y_coord.standard_name is None else: - self.assertEqual(y_coord.standard_name, yco_stdname) + 
assert y_coord.standard_name == yco_stdname cube_cs = cube.coord_system() if cube_no_xycoords: @@ -326,36 +325,29 @@ def check_result( yco_cs = y_coord.coord_system xco_cs = x_coord.coord_system if cube_no_cs: - self.assertIsNone(cube_cs) - self.assertIsNone(yco_cs) - self.assertIsNone(xco_cs) + assert cube_cs is None + assert yco_cs is None + assert xco_cs is None else: - self.assertIsNotNone(cube_cs) + assert cube_cs is not None if cube_cstype is not None: - self.assertIsInstance(cube_cs, cube_cstype) + assert isinstance(cube_cs, cube_cstype) if xco_no_cs: - self.assertIsNone(xco_cs) + assert xco_cs is None else: - self.assertEqual(xco_cs, cube_cs) + assert xco_cs == cube_cs if yco_no_cs: - self.assertIsNone(yco_cs) + assert yco_cs is None else: - self.assertEqual(yco_cs, cube_cs) + assert yco_cs == cube_cs if load_problems_regex is not None: load_problem = LOAD_PROBLEMS.problems[-1] - self.assertRegex(str(load_problem.stack_trace), load_problems_regex) + assert re.search(load_problems_regex, str(load_problem.stack_trace)) -class Test__grid_mapping(Mixin__grid_mapping, tests.IrisTest): +class Test__grid_mapping(Mixin__grid_mapping): # Various testcases for translation of grid-mappings - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def test_basic_latlon(self): # A basic reference example with a lat-long grid. @@ -801,15 +793,8 @@ def test_extended_mapping_basic_latlon_missing_coords(self): self.check_result(result, xco_no_cs=True) -class Test__aux_latlons(Mixin__grid_mapping, tests.IrisTest): +class Test__aux_latlons(Mixin__grid_mapping): # Testcases for translating auxiliary latitude+longitude variables - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def test_aux_lon(self): # Change the name of xdim, and put xco on the coords list. 
@@ -933,15 +918,7 @@ def test_extended_grid_mapping_aux_lat_and_lon(self): self.check_result(result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=False) -class Test__nondimcoords(Mixin__grid_mapping, tests.IrisTest): - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - +class Test__nondimcoords(Mixin__grid_mapping): def test_nondim_lats(self): # Fix a coord's values so it cannot be a dim-coord. # @@ -1235,7 +1212,3 @@ def test_one_coord_system_simple(self, osgb_cs, latlon_cs, mocker, tmp_path): # Loading multiple coord systems or using extended grid mapping implies ordered axes: assert cube.extended_grid_mapping is False - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index 65b0ecd94e..838cb8b9c7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -9,21 +9,11 @@ """ -import iris.tests as tests # isort: skip - import iris.fileformats._nc_load_rules.helpers as hh from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions -class Test__formulae_tests(Mixin__nc_load_actions, tests.IrisTest): - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - +class Test__formulae_tests(Mixin__nc_load_actions): def _make_testcase_cdl( self, formula_root_name=None, term_names=None, extra_formula_type=None ): @@ -111,7 +101,7 @@ def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): # replace with our 'default', which is hybrid-height. # N.B. 'None' is different: it means expect *no* factory. 
factory_type = "atmosphere_hybrid_height_coordinate" - self.assertEqual(cube._formula_type_name, factory_type) + assert cube._formula_type_name == factory_type if formula_terms == "_auto": # Set default terms-expected, according to the expected factory @@ -130,12 +120,12 @@ def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): # N.B. the terms dictionary can be missing, if there were none actual_terms = cube._formula_terms_byname or {} - self.assertEqual(sorted(formula_terms), sorted(actual_terms.keys())) + assert sorted(formula_terms) == sorted(actual_terms.keys()) # Check that there is an aux-coord of the expected name for each term for var_name in actual_terms.values(): coords = cube.coords(var_name=var_name, dim_coords=False) - self.assertEqual(len(coords), 1) + assert len(coords) == 1 # # Actual testcase routines @@ -272,7 +262,3 @@ def test_ocean_s_coordinate_g2(self): term_names = hh.CF_COORD_VERTICAL[hybrid_type] result = self.run_testcase(formula_root_name=hybrid_type, term_names=term_names) self.check_result(result, factory_type=hybrid_type, formula_terms=term_names) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py index 0694ebe250..9e534521f1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py @@ -10,9 +10,10 @@ """ +import re from typing import Literal -import iris.tests as tests # isort: skip +import pytest from iris.common import LimitedAttributeDict from iris.coord_systems import GeogCS, RotatedGeogCS @@ -27,8 +28,8 @@ class Mixin_latlon_dimcoords(Mixin__nc_load_actions): # Set by inheritor classes, which are actual TestCases. 
lat_1_or_lon_0: Literal[0, 1] - def setUp(self): - super().setUp() + @pytest.fixture(autouse=True) + def _setup(self): # Generate some useful settings : just to generalise operation over # both latitude and longitude. islat = self.lat_1_or_lon_0 @@ -133,9 +134,9 @@ def check_result( # affect the results here, in some cases. coords = cube.coords() # There should be one and only one coord. - self.assertEqual(1, len(coords)) + assert 1 == len(coords) # It should also be a dim-coord - self.assertEqual(1, len(cube.coords(dim_coords=True))) + assert 1 == len(cube.coords(dim_coords=True)) (coord,) = coords if self.debug_info: print() @@ -146,24 +147,24 @@ def check_result( getattr(coord, name) for name in ("standard_name", "long_name", "units", "coord_system") ] - self.assertEqual(standard_name, coord_stdname, context_message) - self.assertEqual(long_name, coord_longname, context_message) - self.assertEqual(units, coord_units, context_message) + assert standard_name == coord_stdname, context_message + assert long_name == coord_longname, context_message + assert units == coord_units, context_message assert crs in (None, "latlon", "rotated") if crs is None: - self.assertEqual(None, coord_crs, context_message) + assert None is coord_crs, context_message elif crs == "latlon": - self.assertIsInstance(coord_crs, GeogCS, context_message) + assert isinstance(coord_crs, GeogCS), context_message elif crs == "rotated": - self.assertIsInstance(coord_crs, RotatedGeogCS, context_message) + assert isinstance(coord_crs, RotatedGeogCS), context_message def check_load_problem(self, setup_kwargs, expected_msg): # Check that the expected load problem is stored. 
_ = self.run_testcase(**setup_kwargs) load_problem = LOAD_PROBLEMS.problems[-1] attributes = load_problem.loaded.attributes[LimitedAttributeDict.IRIS_RAW] - self.assertEqual(attributes["standard_name"], setup_kwargs["standard_name"]) - self.assertRegex("".join(load_problem.stack_trace.format()), expected_msg) + assert attributes["standard_name"] == setup_kwargs["standard_name"] + assert re.search(expected_msg, "".join(load_problem.stack_trace.format())) # # Testcase routines @@ -330,35 +331,9 @@ def test_fail_projected(self): ) -class Test__longitude_coords(Mixin_latlon_dimcoords, tests.IrisTest): +class Test__longitude_coords(Mixin_latlon_dimcoords): lat_1_or_lon_0 = 0 - @classmethod - def setUpClass(cls): - super().setUpClass() - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def setUp(self): - super().setUp() - - -class Test__latitude_coords(Mixin_latlon_dimcoords, tests.IrisTest): +class Test__latitude_coords(Mixin_latlon_dimcoords): lat_1_or_lon_0 = 1 - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def setUp(self): - super().setUp() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py index 7f4c84ec78..283f01920c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -13,22 +13,13 @@ """ -import iris.tests as tests # isort: skip - from iris.coords import AncillaryVariable, AuxCoord, CellMeasure from iris.fileformats.pp import STASH from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions -class Test__ukmo_attributes(Mixin__nc_load_actions, tests.IrisTest): +class Test__ukmo_attributes(Mixin__nc_load_actions): # Tests for handling of the special 
UM-specific data-var attributes. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def _make_testcase_cdl(self, **add_attrs): phenom_attrs_string = "" @@ -55,16 +46,16 @@ def check_result(self, cube, stashcode=None, processflags=None): cube_processflags = cube.attributes.get("ukmo__process_flags") if stashcode is not None: - self.assertIsInstance(cube_stashattr, STASH) - self.assertEqual(str(stashcode), str(cube_stashattr)) + assert isinstance(cube_stashattr, STASH) + assert str(stashcode) == str(cube_stashattr) else: - self.assertIsNone(cube_stashattr) + assert cube_stashattr is None if processflags is not None: - self.assertIsInstance(cube_processflags, tuple) - self.assertEqual(set(cube_processflags), set(processflags)) + assert isinstance(cube_processflags, tuple) + assert set(cube_processflags) == set(processflags) else: - self.assertIsNone(cube_processflags) + assert cube_processflags is None # # Testcase routines @@ -85,8 +76,8 @@ def test_stash_empty(self): ukmo__um_stash_source=value, warning_regex="Invalid content for managed attribute name 'um_stash_source'", ) - self.assertNotIn("STASH", cube.attributes) - self.assertEqual(cube.attributes["ukmo__um_stash_source"], value) + assert "STASH" not in cube.attributes + assert cube.attributes["ukmo__um_stash_source"] == value def test_stash_invalid(self): value = "XXX" @@ -94,8 +85,8 @@ def test_stash_invalid(self): ukmo__um_stash_source="XXX", warning_regex="Invalid content for managed attribute name 'um_stash_source'", ) - self.assertNotIn("STASH", cube.attributes) - self.assertEqual(cube.attributes["ukmo__um_stash_source"], value) + assert "STASH" not in cube.attributes + assert cube.attributes["ukmo__um_stash_source"] == value def test_processflags_single(self): cube = self.run_testcase(ukmo__process_flags="this") @@ -113,17 +104,9 @@ def test_processflags_empty(self): self.check_result(cube, processflags=expected_result) -class 
Test__labels_cellmeasures_ancils(Mixin__nc_load_actions, tests.IrisTest): +class Test__labels_cellmeasures_ancils(Mixin__nc_load_actions): # Tests for some simple rules that translate facts directly into cube data, # with no alternative actions, complications or failure modes to test. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - def _make_testcase_cdl( self, include_label=False, @@ -185,28 +168,28 @@ def check_result( ): label_coords = cube.coords(var_name="v_label") if expect_label: - self.assertEqual(len(label_coords), 1) + assert len(label_coords) == 1 (coord,) = label_coords - self.assertIsInstance(coord, AuxCoord) - self.assertEqual(coord.dtype.kind, "U") + assert isinstance(coord, AuxCoord) + assert coord.dtype.kind == "U" else: - self.assertEqual(len(label_coords), 0) + assert len(label_coords) == 0 cell_measures = cube.cell_measures() if expect_cellmeasure: - self.assertEqual(len(cell_measures), 1) + assert len(cell_measures) == 1 (cellm,) = cell_measures - self.assertIsInstance(cellm, CellMeasure) + assert isinstance(cellm, CellMeasure) else: - self.assertEqual(len(cell_measures), 0) + assert len(cell_measures) == 0 ancils = cube.ancillary_variables() if expect_ancil: - self.assertEqual(len(ancils), 1) + assert len(ancils) == 1 (ancil,) = ancils - self.assertIsInstance(ancil, AncillaryVariable) + assert isinstance(ancil, AncillaryVariable) else: - self.assertEqual(len(ancils), 0) + assert len(ancils) == 0 def test_label(self): cube = self.run_testcase(include_label=True) @@ -219,7 +202,3 @@ def test_ancil(self): def test_cellmeasure(self): cube = self.run_testcase(include_cellmeasure=True) self.check_result(cube, expect_cellmeasure=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 
ab7eedb7e8..6c13c35144 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -9,10 +9,9 @@ """ +import re from typing import ClassVar -import iris.tests as tests # isort: skip - from iris.coords import AuxCoord, DimCoord from iris.loading import LOAD_PROBLEMS from iris.tests.unit.fileformats.nc_load_rules.actions import Mixin__nc_load_actions @@ -187,39 +186,39 @@ def check_result( period_auxcos = cube.coords(period_name, dim_coords=False) if time_is == "dim": - self.assertEqual(len(time_dimcos), 1) - self.assertEqual(len(time_auxcos), 0) + assert len(time_dimcos) == 1 + assert len(time_auxcos) == 0 elif time_is == "aux": - self.assertEqual(len(time_dimcos), 0) - self.assertEqual(len(time_auxcos), 1) + assert len(time_dimcos) == 0 + assert len(time_auxcos) == 1 else: - self.assertEqual(len(time_dimcos), 0) - self.assertEqual(len(time_auxcos), 0) + assert len(time_dimcos) == 0 + assert len(time_auxcos) == 0 if period_is == "dim": - self.assertEqual(len(period_dimcos), 1) - self.assertEqual(len(period_auxcos), 0) + assert len(period_dimcos) == 1 + assert len(period_auxcos) == 0 elif period_is == "aux": - self.assertEqual(len(period_dimcos), 0) - self.assertEqual(len(period_auxcos), 1) + assert len(period_dimcos) == 0 + assert len(period_auxcos) == 1 else: - self.assertEqual(len(period_dimcos), 0) - self.assertEqual(len(period_auxcos), 0) + assert len(period_dimcos) == 0 + assert len(period_auxcos) == 0 # Also check expected built Coord types. 
if time_is == "dim": - self.assertIsInstance(time_dimcos[0], DimCoord) + assert isinstance(time_dimcos[0], DimCoord) elif time_is == "aux": - self.assertIsInstance(time_auxcos[0], AuxCoord) + assert isinstance(time_auxcos[0], AuxCoord) if period_is == "dim": - self.assertIsInstance(period_dimcos[0], DimCoord) + assert isinstance(period_dimcos[0], DimCoord) elif period_is == "aux": - self.assertIsInstance(period_auxcos[0], AuxCoord) + assert isinstance(period_auxcos[0], AuxCoord) if load_problems_regex is not None: load_problem = LOAD_PROBLEMS.problems[-1] - self.assertRegex(str(load_problem.stack_trace), load_problems_regex) + assert re.search(load_problems_regex, str(load_problem.stack_trace)) class Mixin__singlecoord__tests(Mixin__timecoords__common): @@ -396,43 +395,20 @@ def test_aux_fails_typeident(self): self.check_result(result, "aux") -class Test__time(Mixin__singlecoord__tests, tests.IrisTest): +class Test__time(Mixin__singlecoord__tests): # Run 'time' coord tests which = "time" - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - -class Test__period(Mixin__singlecoord__tests, tests.IrisTest): +class Test__period(Mixin__singlecoord__tests): # Run 'time_period' coord tests which = "period" - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - -class Test__dualcoord(Mixin__timecoords__common, tests.IrisTest): +class Test__dualcoord(Mixin__timecoords__common): # Coordinate tests for a combination of 'time' and 'time_period'. # Not strictly necessary, as handling is independent, but a handy check # on typical usage. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() def test_time_and_period(self): # Test case with both 'time' and 'period', with separate dims. 
@@ -461,7 +437,3 @@ def test_time_dim_period_aux(self): ), ) self.check_result(result, time_is="dim", period_is="aux") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index 71280e5f60..a0cdf99eb1 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -4,14 +4,14 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module.""" -from unittest import mock +import pytest from iris.fileformats._nc_load_rules.engine import Engine, FactEntity -import iris.tests as tests -class Test_Engine(tests.IrisTest): - def setUp(self): +class Test_Engine: + @pytest.fixture(autouse=True) + def _setup(self): self.empty_engine = Engine() engine = Engine() engine.add_fact("this", ("that", "other")) @@ -20,74 +20,69 @@ def setUp(self): def test__init(self): # Check that init creates an empty Engine. engine = Engine() - self.assertIsInstance(engine, Engine) - self.assertIsInstance(engine.facts, FactEntity) - self.assertEqual(list(engine.facts.entity_lists.keys()), []) + assert isinstance(engine, Engine) + assert isinstance(engine.facts, FactEntity) + assert list(engine.facts.entity_lists.keys()) == [] def test_reset(self): # Check that calling reset() causes a non-empty engine to be emptied. 
engine = self.nonempty_engine fact_names = list(engine.facts.entity_lists.keys()) - self.assertNotEqual(len(fact_names), 0) + assert len(fact_names) != 0 engine.reset() fact_names = list(engine.facts.entity_lists.keys()) - self.assertEqual(len(fact_names), 0) + assert len(fact_names) == 0 - def test_activate(self): + def test_activate(self, mocker): # Check that calling engine.activate() --> actions.run_actions(engine) engine = self.empty_engine target = "iris.fileformats._nc_load_rules.engine.run_actions" - run_call = self.patch(target) + run_call = mocker.patch(target) engine.activate() - self.assertEqual(run_call.call_args_list, [mock.call(engine)]) + assert run_call.call_args_list == [mocker.call(engine)] def test_add_case_specific_fact__newname(self): # Adding a new fact to a new fact-name records as expected. engine = self.nonempty_engine engine.add_case_specific_fact("new_fact", ("a1", "a2")) - self.assertEqual(engine.fact_list("new_fact"), [("a1", "a2")]) + assert engine.fact_list("new_fact") == [("a1", "a2")] def test_add_case_specific_fact__existingname(self): # Adding a new fact to an existing fact-name records as expected. engine = self.nonempty_engine name = "this" - self.assertEqual(engine.fact_list(name), [("that", "other")]) + assert engine.fact_list(name) == [("that", "other")] engine.add_case_specific_fact(name, ("yetanother",)) - self.assertEqual(engine.fact_list(name), [("that", "other"), ("yetanother",)]) + assert engine.fact_list(name) == [("that", "other"), ("yetanother",)] def test_add_case_specific_fact__emptyargs(self): # Check that empty args work ok, and will create a new fact. 
engine = self.empty_engine engine.add_case_specific_fact("new_fact", ()) - self.assertIn("new_fact", engine.facts.entity_lists) - self.assertEqual(engine.fact_list("new_fact"), [()]) + assert "new_fact" in engine.facts.entity_lists + assert engine.fact_list("new_fact") == [()] - def test_add_fact(self): + def test_add_fact(self, mocker): # Check that 'add_fact' is equivalent to (short for) a call to # 'add_case_specific_fact'. engine = self.empty_engine target = "iris.fileformats._nc_load_rules.engine.Engine.add_case_specific_fact" - acsf_call = self.patch(target) + acsf_call = mocker.patch(target) engine.add_fact("extra", ()) - self.assertEqual(acsf_call.call_count, 1) - self.assertEqual( - acsf_call.call_args_list, - [mock.call(fact_name="extra", fact_arglist=())], - ) + assert acsf_call.call_count == 1 + assert acsf_call.call_args_list == [ + mocker.call(fact_name="extra", fact_arglist=()) + ] def test_get_kb(self): # Check that this stub just returns the facts database. engine = self.nonempty_engine kb = engine.get_kb() - self.assertIsInstance(kb, FactEntity) - self.assertIs(kb, engine.facts) + assert isinstance(kb, FactEntity) + assert kb is engine.facts def test_fact_list__existing(self): - self.assertEqual(self.nonempty_engine.fact_list("this"), [("that", "other")]) + assert self.nonempty_engine.fact_list("this") == [("that", "other")] def test_fact_list__nonexisting(self): - self.assertEqual(self.empty_engine.fact_list("odd-unknown"), []) - - -if __name__ == "__main__": - tests.main() + assert self.empty_engine.fact_list("odd-unknown") == [] diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py index e151d92aa8..36ea8c9953 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py @@ -6,3 +6,14 @@ :mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . 
""" + +import pytest +from pytest_mock import MockerFixture + + +class MockerMixin: + mocker: MockerFixture + + @pytest.fixture(autouse=True) + def _mocker_mixin_setup(self, mocker): + self.mocker = mocker diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py index eecc23b653..cadd2efa62 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__add_or_capture.py @@ -4,9 +4,8 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers._add_or_capture`.""" -from unittest.mock import MagicMock - import pytest +from pytest_mock import MockType from iris.cube import Cube from iris.fileformats._nc_load_rules import helpers @@ -15,9 +14,9 @@ class Mixin: - build_func: MagicMock - add_method: MagicMock - cf_var: MagicMock + build_func: MockType + add_method: MockType + cf_var: MockType filename: str = "test__add_or_capture.nc" attr_key: str = "attr_key" @@ -63,7 +62,7 @@ def _setup(self, make_args): @pytest.fixture def patch_build_raw_cube(self, mocker): patch = mocker.patch.object(helpers, "build_raw_cube", return_value="RAW_CUBE") - yield patch + return patch @pytest.fixture def cause_build_raw_cube_error(self, patch_build_raw_cube): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py index 337279426e..4d85ccacd6 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test__normalise_bounds_units.py @@ -4,99 +4,99 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers._normalise_bounds_units`.""" -# import iris tests first so that some things can be initialised before -# importing anything else from typing import Optional -from unittest import mock import numpy as np import pytest +from pytest_mock import MockType from iris.fileformats._nc_load_rules.helpers import ( _normalise_bounds_units, _WarnComboIgnoringCfLoad, ) +from iris.tests import _shared_utils +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin from iris.warnings import IrisCfLoadWarning -BOUNDS = mock.sentinel.bounds CF_NAME = "dummy_bnds" -def _make_cf_bounds_var( - units: Optional[str] = None, - unitless: bool = False, -) -> mock.MagicMock: - """Construct a mock CF bounds variable.""" - if units is None: +class Test(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self): + self.bounds = self.mocker.sentinel.bounds + + def _make_cf_bounds_var( + self, + units: Optional[str] = None, + unitless: bool = False, + ) -> MockType: + """Construct a mock CF bounds variable.""" + if units is None: + units = "days since 1970-01-01" + + cf_data = self.mocker.Mock(spec=[]) + # we want to mock the absence of flag attributes to helpers.get_attr_units + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + del cf_data.flag_values + del cf_data.flag_masks + del cf_data.flag_meanings + + cf_var = self.mocker.MagicMock( + cf_name=CF_NAME, + cf_data=cf_data, + units=units, + calendar=None, + dtype=float, + ) + + if unitless: + del cf_var.units + + return cf_var + + def test_unitless(self) -> None: + """Test bounds variable with no units.""" + cf_bounds_var = self._make_cf_bounds_var(unitless=True) + result = _normalise_bounds_units(None, cf_bounds_var, self.bounds) + assert result == self.bounds + + def test_invalid_units__pass_through(self) -> None: + """Test bounds variable with invalid units.""" + units = "invalid" + cf_bounds_var = 
self._make_cf_bounds_var(units=units) + wmsg = f"Ignoring invalid units {units!r} on netCDF variable {CF_NAME!r}" + with pytest.warns(_WarnComboIgnoringCfLoad, match=wmsg): + result = _normalise_bounds_units(None, cf_bounds_var, self.bounds) + assert result == self.bounds + + @pytest.mark.parametrize("units", ["unknown", "no_unit", "1", "kelvin"]) + def test_ignore_bounds(self, units) -> None: + """Test bounds variable with incompatible units compared to points.""" + points_units = "km" + cf_bounds_var = self._make_cf_bounds_var(units=units) + wmsg = ( + f"Ignoring bounds on NetCDF variable {CF_NAME!r}. " + f"Expected units compatible with {points_units!r}" + ) + with pytest.warns(IrisCfLoadWarning, match=wmsg): + result = _normalise_bounds_units(points_units, cf_bounds_var, self.bounds) + assert result is None + + def test_compatible(self) -> None: + """Test bounds variable with compatible units requiring conversion.""" + points_units, bounds_units = "days since 1970-01-01", "hours since 1970-01-01" + cf_bounds_var = self._make_cf_bounds_var(units=bounds_units) + bounds = np.arange(10, dtype=float) * 24 + result = _normalise_bounds_units(points_units, cf_bounds_var, bounds) + expected = bounds / 24 + _shared_utils.assert_array_equal(result, expected) + + def test_same_units(self) -> None: + """Test bounds variable with same units as points.""" units = "days since 1970-01-01" - - cf_data = mock.Mock(spec=[]) - # we want to mock the absence of flag attributes to helpers.get_attr_units - # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes - del cf_data.flag_values - del cf_data.flag_masks - del cf_data.flag_meanings - - cf_var = mock.MagicMock( - cf_name=CF_NAME, - cf_data=cf_data, - units=units, - calendar=None, - dtype=float, - ) - - if unitless: - del cf_var.units - - return cf_var - - -def test_unitless() -> None: - """Test bounds variable with no units.""" - cf_bounds_var = _make_cf_bounds_var(unitless=True) - result = 
_normalise_bounds_units(None, cf_bounds_var, BOUNDS) - assert result == BOUNDS - - -def test_invalid_units__pass_through() -> None: - """Test bounds variable with invalid units.""" - units = "invalid" - cf_bounds_var = _make_cf_bounds_var(units=units) - wmsg = f"Ignoring invalid units {units!r} on netCDF variable {CF_NAME!r}" - with pytest.warns(_WarnComboIgnoringCfLoad, match=wmsg): - result = _normalise_bounds_units(None, cf_bounds_var, BOUNDS) - assert result == BOUNDS - - -@pytest.mark.parametrize("units", ["unknown", "no_unit", "1", "kelvin"]) -def test_ignore_bounds(units) -> None: - """Test bounds variable with incompatible units compared to points.""" - points_units = "km" - cf_bounds_var = _make_cf_bounds_var(units=units) - wmsg = ( - f"Ignoring bounds on NetCDF variable {CF_NAME!r}. " - f"Expected units compatible with {points_units!r}" - ) - with pytest.warns(IrisCfLoadWarning, match=wmsg): - result = _normalise_bounds_units(points_units, cf_bounds_var, BOUNDS) - assert result is None - - -def test_compatible() -> None: - """Test bounds variable with compatible units requiring conversion.""" - points_units, bounds_units = "days since 1970-01-01", "hours since 1970-01-01" - cf_bounds_var = _make_cf_bounds_var(units=bounds_units) - bounds = np.arange(10, dtype=float) * 24 - result = _normalise_bounds_units(points_units, cf_bounds_var, bounds) - expected = bounds / 24 - np.testing.assert_array_equal(result, expected) - - -def test_same_units() -> None: - """Test bounds variable with same units as points.""" - units = "days since 1970-01-01" - cf_bounds_var = _make_cf_bounds_var(units=units) - bounds = np.arange(10, dtype=float) - result = _normalise_bounds_units(units, cf_bounds_var, bounds) - np.testing.assert_array_equal(result, bounds) - assert result is bounds + cf_bounds_var = self._make_cf_bounds_var(units=units) + bounds = np.arange(10, dtype=float) + result = _normalise_bounds_units(units, cf_bounds_var, bounds) + 
_shared_utils.assert_array_equal(result, bounds) + assert result is bounds diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index 7d5aa24219..d9a33dd948 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import AlbersEqualArea from iris.fileformats._nc_load_rules.helpers import ( build_albers_equal_area_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildAlbersEqualAreaCoordinateSystem(tests.IrisTest): +class TestBuildAlbersEqualAreaCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_optionals=False): if no_optionals: # Most properties are optional for this system. @@ -57,7 +52,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_albers_equal_area_coordinate_system(None, cf_grid_var) @@ -70,7 +65,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -82,7 +77,3 @@ def test_inverse_flattening(self): def test_no_optionals(self): # Check defaults, when all optional attributes are absent. 
self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py index dc1dfcc052..ce2fd7bf8b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_ancil_var.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_ancil_var`.""" -from unittest import mock - import numpy as np import pytest @@ -18,25 +16,25 @@ @pytest.fixture -def mock_engine(): - return mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), +def mock_engine(mocker): + return mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), filename="DUMMY", cube_parts=dict(ancillary_variables=[]), ) @pytest.fixture -def mock_cf_av_var(monkeypatch, mock_engine): +def mock_cf_av_var(mocker, monkeypatch, mock_engine): data = np.arange(6) - output = mock.Mock( + output = mocker.Mock( spec=CFAncillaryDataVariable, dimensions=("foo",), scale_factor=1, add_offset=0, cf_name="wibble", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_data=mocker.MagicMock(chunking=mocker.Mock(return_value=None), spec=[]), filename=mock_engine.filename, standard_name=None, long_name="wibble", diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py index a44986ec98..b721bbec58 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py +++ 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_auxiliary_coordinate.py @@ -7,12 +7,7 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - import contextlib -from unittest import mock import numpy as np import pytest @@ -24,9 +19,10 @@ from iris.fileformats._nc_load_rules.helpers import build_and_add_auxiliary_coordinate from iris.fileformats.cf import CFVariable from iris.loading import LOAD_PROBLEMS +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBoundsVertexDim(tests.IrisTest): +class TestBoundsVertexDim(MockerMixin): # Lookup for various tests (which change the dimension order). dim_names_lens = { "foo": 2, @@ -37,19 +33,24 @@ class TestBoundsVertexDim(tests.IrisTest): "y": 3, } - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create coordinate cf variables and pyke engine. dimension_names = ("foo", "bar") - points, cf_data = self._make_array_and_cf_data(dimension_names) - - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar"), cf_data=cf_data), + points, cf_data = self._make_array_and_cf_data(mocker, dimension_names) + + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock( + dimensions=("foo", "bar"), + cf_data=cf_data, + dtype=np.int32, + ), filename="DUMMY", cube_parts=dict(coordinates=[]), ) - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( spec=CFVariable, dimensions=dimension_names, cf_name="wibble", @@ -65,7 +66,7 @@ def setUp(self): ) expected_bounds, _ = self._make_array_and_cf_data( - dimension_names=("foo", "bar", "nv") + mocker, dimension_names=("foo", "bar", "nv") ) self.expected_coord = AuxCoord( self.cf_coord_var[:], @@ -83,7 +84,7 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch( + mocker.patch( 
"iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, ) @@ -95,16 +96,16 @@ def _get_per_test_bounds_var(_coord_unused): # Return the 'cf_bounds_var' created by the current test. return (self.cf_bounds_var, False) - self.patch( + mocker.patch( "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", new=_get_per_test_bounds_var, ) @classmethod - def _make_array_and_cf_data(cls, dimension_names, rollaxis=False): + def _make_array_and_cf_data(cls, mocker, dimension_names, rollaxis=False): shape = tuple(cls.dim_names_lens[name] for name in dimension_names) - cf_data = mock.MagicMock(_FillValue=None, spec=[]) - cf_data.chunking = mock.MagicMock(return_value=shape) + cf_data = mocker.MagicMock(_FillValue=None, spec=[]) + cf_data.chunking = mocker.MagicMock(return_value=shape) data = np.arange(np.prod(shape), dtype=float) if rollaxis: shape = shape[1:] + (shape[0],) @@ -114,13 +115,13 @@ def _make_array_and_cf_data(cls, dimension_names, rollaxis=False): data = data.reshape(shape) return data, cf_data - def _make_cf_bounds_var(self, dimension_names, rollaxis=False): + def _make_cf_bounds_var(self, mocker, dimension_names, rollaxis=False): # Create the bounds cf variable. bounds, cf_data = self._make_array_and_cf_data( - dimension_names, rollaxis=rollaxis + mocker, dimension_names, rollaxis=rollaxis ) bounds *= 1000 # Convert to metres. - cf_bounds_var = mock.Mock( + cf_bounds_var = self.mocker.Mock( spec=CFVariable, dimensions=dimension_names, cf_name="wibble_bnds", @@ -137,7 +138,7 @@ def _make_cf_bounds_var(self, dimension_names, rollaxis=False): def _check_case(self, dimension_names, rollaxis=False): self.cf_bounds_var = self._make_cf_bounds_var( - dimension_names, rollaxis=rollaxis + self.mocker, dimension_names, rollaxis=rollaxis ) # Asserts must lie within context manager because of deferred loading. @@ -148,7 +149,7 @@ def _check_case(self, dimension_names, rollaxis=False): # Test that engine.cube_parts container is correctly populated. 
expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + assert self.engine.cube_parts["coordinates"] == expected_list def test_fastest_varying_vertex_dim__normalise_bounds(self): # The usual order. @@ -165,21 +166,22 @@ def test_fastest_with_different_dim_names__normalise_bounds(self): self._check_case(dimension_names=("x", "y", "nv")) -class TestDtype(tests.IrisTest): - def setUp(self): +class TestDtype(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create coordinate cf variables and pyke engine. points = np.arange(6).reshape(2, 3) - cf_data = mock.MagicMock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=points.shape) + cf_data = mocker.MagicMock(_FillValue=None, shape=points.shape) + cf_data.chunking = mocker.MagicMock(return_value=points.shape) - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar"), dtype=np.int32), filename="DUMMY", cube_parts=dict(coordinates=[]), ) - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( spec=CFVariable, dimensions=("foo", "bar"), cf_name="wibble", @@ -202,23 +204,22 @@ def patched__getitem__(proxy_self, keys): raise RuntimeError() # Fix for deferred load, *AND* avoid loading small variable data in real arrays. - with mock.patch( + self.mocker.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, - ): - # While loading, "turn off" loading small variables as real data. - with mock.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0): - yield + ) + # While loading, "turn off" loading small variables as real data. 
+ self.mocker.patch("iris.fileformats.netcdf.loader._LAZYVAR_MIN_BYTES", 0) + yield def test_scale_factor_add_offset_int(self): self.cf_coord_var.scale_factor = 3 self.cf_coord_var.add_offset = 5 - with self.deferred_load_patch(): - build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) + build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "i") + assert coord.dtype.kind == "i" def test_scale_factor_float(self): self.cf_coord_var.scale_factor = 3.0 @@ -227,7 +228,7 @@ def test_scale_factor_float(self): build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "f") + assert coord.dtype.kind == "f" def test_add_offset_float(self): self.cf_coord_var.add_offset = 5.0 @@ -236,28 +237,30 @@ def test_add_offset_float(self): build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "f") + assert coord.dtype.kind == "f" -class TestCoordConstruction(tests.IrisTest): - def setUp(self): +class TestCoordConstruction: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create dummy pyke engine. 
- self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), + dtype=np.float32, filename="DUMMY", cube_parts=dict(coordinates=[]), ) points = np.arange(6) units = "days since 1970-01-01" - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( spec=CFVariable, dimensions=("foo",), scale_factor=1, add_offset=0, cf_name="wibble", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_data=mocker.MagicMock(chunking=mocker.Mock(return_value=None), spec=[]), filename=self.engine.filename, standard_name=None, long_name="wibble", @@ -270,13 +273,13 @@ def setUp(self): ) bounds = np.arange(12).reshape(6, 2) - cf_data = mock.MagicMock(chunking=mock.Mock(return_value=None)) + cf_data = mocker.MagicMock(chunking=mocker.Mock(return_value=None)) # we want to mock the absence of flag attributes to helpers.get_attr_units # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes del cf_data.flag_values del cf_data.flag_masks del cf_data.flag_meanings - self.cf_bounds_var = mock.Mock( + self.cf_bounds_var = mocker.Mock( spec=CFVariable, dimensions=("x", "nv"), scale_factor=1, @@ -300,7 +303,7 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch( + mocker.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, ) @@ -312,7 +315,7 @@ def patched__getitem__(proxy_self, keys): def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds - self.patch( + mocker.patch( "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", new=get_cf_bounds_var, ) @@ -368,7 +371,7 @@ def test_with_coord_system(self): # Test that expected coord is built and added to cube. 
self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) - def test_bad_coord_system(self): + def test_bad_coord_system(self, mocker): coord_system = RotatedGeogCS( grid_north_pole_latitude=45.0, grid_north_pole_longitude=45.0 ) @@ -382,19 +385,16 @@ def mock_setter(self, value): else: self._metadata_manager.coord_system = value - with mock.patch.object( + mocker.patch.object( AuxCoord, "coord_system", new=property(AuxCoord.coord_system.fget, mock_setter), - ): - build_and_add_auxiliary_coordinate( - self.engine, self.cf_coord_var, coord_system=coord_system - ) - load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "test_bad_coord_system", - "".join(load_problem.stack_trace.format()), - ) + ) + build_and_add_auxiliary_coordinate( + self.engine, self.cf_coord_var, coord_system=coord_system + ) + load_problem = LOAD_PROBLEMS.problems[-1] + assert "test_bad_coord_system" in "".join(load_problem.stack_trace.format()) def test_not_added(self): # Confirm that the coord will be skipped if a CannotAddError is raised @@ -418,7 +418,7 @@ def test_unhandlable_error(self): m.setattr(self.engine, "cube", "foo") n_problems = len(LOAD_PROBLEMS.problems) build_and_add_auxiliary_coordinate(self.engine, self.cf_coord_var) - self.assertTrue(len(LOAD_PROBLEMS.problems) > n_problems) + assert len(LOAD_PROBLEMS.problems) > n_problems assert self.engine.cube_parts["coordinates"] == [] @@ -433,7 +433,3 @@ def test_problem_destination(self): assert destination.identifier == self.engine.cf_var.cf_name assert self.engine.cube_parts["coordinates"] == [] - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py index 7e55366a5a..aea5061f1e 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py +++ 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_measure.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_cell_measure`.""" -from unittest import mock - import numpy as np import pytest @@ -18,25 +16,25 @@ @pytest.fixture -def mock_engine(): - return mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), +def mock_engine(mocker): + return mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), filename="DUMMY", cube_parts=dict(cell_measures=[]), ) @pytest.fixture -def mock_cf_cm_var(monkeypatch, mock_engine): +def mock_cf_cm_var(monkeypatch, mock_engine, mocker): data = np.arange(6) - output = mock.Mock( + output = mocker.Mock( spec=CFMeasureVariable, dimensions=("foo",), scale_factor=1, add_offset=0, cf_name="wibble", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_data=mocker.MagicMock(chunking=mocker.Mock(return_value=None), spec=[]), filename=mock_engine.filename, standard_name=None, long_name="wibble", diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py index 19782a0f8c..31c063a90d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_cell_methods.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_cell_methods`.""" -from unittest import mock - import pytest from iris.coords import CellMethod @@ -16,8 +14,8 @@ @pytest.fixture -def mock_cf_data_var(): - yield mock.Mock( +def mock_cf_data_var(mocker): + return mocker.Mock( spec=CFDataVariable, cell_methods="time: mean", cf_name="wibble", @@ -26,9 +24,9 @@ def mock_cf_data_var(): @pytest.fixture -def mock_engine(mock_cf_data_var): - yield mock.Mock( - cube=mock.Mock(), +def mock_engine(mock_cf_data_var, mocker): + return mocker.Mock( + cube=mocker.Mock(), cf_var=mock_cf_data_var, filename=mock_cf_data_var.filename, ) @@ -55,10 +53,10 @@ def mock_parse_cell_methods(nc_cell_methods, cf_name=None): assert mock_engine.cube.cell_methods == cm_original -def test_not_added(monkeypatch, mock_engine, mock_cf_data_var): +def test_not_added(monkeypatch, mock_engine, mock_cf_data_var, mocker): cm_original = mock_engine.cube.cell_methods - class NoCellMethods(mock.Mock): + class NoCellMethods(mocker.Mock): def __setattr__(self, key, value): if key == "cell_methods": raise RuntimeError("Not added") diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py index a871c967ab..f52d18a076 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_dimension_coordinate.py @@ -4,11 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_dimension_coordinate`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock import warnings import numpy as np @@ -20,37 +15,17 @@ from iris.exceptions import CannotAddError from iris.fileformats._nc_load_rules.helpers import build_and_add_dimension_coordinate from iris.loading import LOAD_PROBLEMS +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -def _make_bounds_var(bounds, dimensions, units): - bounds = np.array(bounds) - cf_data = mock.Mock(spec=[]) - # we want to mock the absence of flag attributes to helpers.get_attr_units - # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes - del cf_data.flag_values - del cf_data.flag_masks - del cf_data.flag_meanings - result = mock.Mock( - dimensions=dimensions, - cf_name="wibble_bnds", - cf_data=cf_data, - units=units, - calendar=None, - shape=bounds.shape, - size=np.prod(bounds.shape), - dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key], - ) - delattr(result, "_data_array") - return result - - -class RulesTestMixin: - def setUp(self): +class RulesTestMixin(MockerMixin): + @pytest.fixture(autouse=True) + def _mixin_setup(self, mocker): # Create dummy pyke engine. 
- self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), + self.engine = mocker.Mock( + cube=mocker.Mock(), + cf_var=mocker.Mock(dimensions=("foo", "bar")), + dtype=np.int32, filename="DUMMY", cube_parts=dict(coordinates=[]), ) @@ -64,7 +39,7 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.deferred_load_patch = mock.patch( + self.deferred_load_patch = mocker.patch( "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", new=patched__getitem__, ) @@ -76,21 +51,41 @@ def patched__getitem__(proxy_self, keys): def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds - self.get_cf_bounds_var_patch = mock.patch( + self.get_cf_bounds_var_patch = mocker.patch( "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", new=get_cf_bounds_var, ) + def _make_bounds_var(self, bounds, dimensions, units): + bounds = np.array(bounds) + cf_data = self.mocker.Mock(spec=[]) + # we want to mock the absence of flag attributes to helpers.get_attr_units + # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes + del cf_data.flag_values + del cf_data.flag_masks + del cf_data.flag_meanings + result = self.mocker.Mock( + dimensions=dimensions, + cf_name="wibble_bnds", + cf_data=cf_data, + units=units, + calendar=None, + shape=bounds.shape, + size=np.prod(bounds.shape), + dtype=bounds.dtype, + __getitem__=lambda self, key: bounds[key], + ) + delattr(result, "_data_array") + return result -class TestCoordConstruction(tests.IrisTest, RulesTestMixin): - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. 
- RulesTestMixin.setUp(self) +class TestCoordConstruction(RulesTestMixin, MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self): bounds = np.arange(12).reshape(6, 2) dimensions = ("x", "nv") units = "days since 1970-01-01" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) self.bounds = bounds # test_dimcoord_not_added() and test_auxcoord_not_added have been @@ -100,10 +95,10 @@ def setUp(self): self.monkeypatch = pytest.MonkeyPatch() def _set_cf_coord_var(self, points): - self.cf_coord_var = mock.Mock( + self.cf_coord_var = self.mocker.Mock( dimensions=("foo",), cf_name="wibble", - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), standard_name=None, long_name="wibble", units="days since 1970-01-01", @@ -131,12 +126,10 @@ def check_case_dim_coord_construction(self, climatology=False): climatological=climatology, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) def test_dim_coord_construction(self): self.check_case_dim_coord_construction(climatology=False) @@ -162,13 +155,10 @@ def test_dim_coord_construction_masked_array(self): ) with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. 
- with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) # Assert warning is raised assert len(w) == 1 @@ -191,13 +181,10 @@ def test_dim_coord_construction_masked_array_mask_does_nothing(self): ) with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) # Assert no warning is raised assert len(w) == 0 @@ -215,13 +202,10 @@ def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): ) with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. 
+ self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) # Assert no warning is raised assert len(w) == 0 @@ -241,16 +225,14 @@ def test_with_coord_system(self): coord_system=coord_system, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_system=coord_system - ) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_system=coord_system + ) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - def test_bad_coord_system(self): + def test_bad_coord_system(self, mocker): self._set_cf_coord_var(np.arange(6)) coord_system = RotatedGeogCS( grid_north_pole_latitude=45.0, grid_north_pole_longitude=45.0 @@ -265,20 +247,16 @@ def mock_setter(self, value): else: self._metadata_manager.coord_system = value - with mock.patch.object( + _ = mocker.patch.object( DimCoord, "coord_system", new=property(DimCoord.coord_system.fget, mock_setter), - ): - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_system=coord_system - ) - load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "test_bad_coord_system", - "".join(load_problem.stack_trace.format()), - ) + ) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_system=coord_system + ) + load_problem = LOAD_PROBLEMS.problems[-1] + assert "test_bad_coord_system" in "".join(load_problem.stack_trace.format()) def test_aux_coord_construction(self): # Use non monotonically increasing coordinates to force aux coord @@ -293,18 +271,15 @@ def test_aux_coord_construction(self): bounds=self.bounds, ) - # Asserts must lie within 
context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) - load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "creating 'wibble' auxiliary coordinate instead", - "".join(load_problem.stack_trace.format()), - ) - self.assertTrue(load_problem.handled) + # Test that expected coord is built and added to cube. + self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) + load_problem = LOAD_PROBLEMS.problems[-1] + assert "creating 'wibble' auxiliary coordinate instead" in "".join( + load_problem.stack_trace.format() + ) + assert load_problem.handled def test_dimcoord_not_added(self): # Confirm that the coord will be skipped if a CannotAddError is raised @@ -317,8 +292,7 @@ def mock_add_dim_coord(_, __): self._set_cf_coord_var(np.arange(6)) - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) load_problem = LOAD_PROBLEMS.problems[-1] assert load_problem.stack_trace.exc_type is CannotAddError @@ -336,8 +310,7 @@ def mock_add_aux_coord(_, __): self._set_cf_coord_var(np.array([1, 3, 2, 4, 6, 5])) - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) load_problem = LOAD_PROBLEMS.problems[-1] assert load_problem.stack_trace.exc_type is CannotAddError @@ -351,7 +324,7 @@ def test_unhandlable_error(self): n_problems = len(LOAD_PROBLEMS.problems) self._set_cf_coord_var(np.array([1, 3, 2, 4, 6, 5])) build_and_add_dimension_coordinate(self.engine, 
self.cf_coord_var) - self.assertTrue(len(LOAD_PROBLEMS.problems) > n_problems) + assert len(LOAD_PROBLEMS.problems) > n_problems assert self.engine.cube_parts["coordinates"] == [] @@ -369,18 +342,17 @@ def test_problem_destination(self): assert self.engine.cube_parts["coordinates"] == [] -class TestBoundsVertexDim(tests.IrisTest, RulesTestMixin): - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. - RulesTestMixin.setUp(self) +class TestBoundsVertexDim(RulesTestMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create test coordinate cf variable. points = np.arange(6) - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mocker.Mock( dimensions=("foo",), cf_name="wibble", standard_name=None, long_name="wibble", - cf_data=mock.Mock(spec=[]), + cf_data=mocker.Mock(spec=[]), units="km", shape=points.shape, dtype=points.dtype, @@ -392,7 +364,7 @@ def test_slowest_varying_vertex_dim__normalise_bounds(self): bounds = np.arange(12).reshape(2, 6) * 1000 dimensions = ("nv", "foo") units = "m" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) # Expected bounds on the resulting coordinate should be rolled so that # the vertex dimension is at the end. @@ -405,22 +377,20 @@ def test_slowest_varying_vertex_dim__normalise_bounds(self): bounds=expected_bounds, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. 
+ self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + # Test that engine.cube_parts container is correctly populated. + expected_list = [(expected_coord, self.cf_coord_var.cf_name)] + assert self.engine.cube_parts["coordinates"] == expected_list def test_fastest_varying_vertex_dim__normalise_bounds(self): bounds = np.arange(12).reshape(6, 2) * 1000 dimensions = ("foo", "nv") units = "m" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) expected_coord = DimCoord( self.cf_coord_var[:], @@ -430,16 +400,14 @@ def test_fastest_varying_vertex_dim__normalise_bounds(self): bounds=bounds / 1000, ) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + # Test that engine.cube_parts container is correctly populated. 
+ expected_list = [(expected_coord, self.cf_coord_var.cf_name)] + assert self.engine.cube_parts["coordinates"] == expected_list def test_fastest_with_different_dim_names__normalise_bounds(self): # Despite the dimension names 'x' differing from the coord's @@ -448,7 +416,7 @@ def test_fastest_with_different_dim_names__normalise_bounds(self): bounds = np.arange(12).reshape(6, 2) * 1000 dimensions = ("x", "nv") units = "m" - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) expected_coord = DimCoord( self.cf_coord_var[:], @@ -457,34 +425,30 @@ def test_fastest_with_different_dim_names__normalise_bounds(self): units=self.cf_coord_var.units, bounds=bounds / 1000, ) + build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) + # Test that expected coord is built and added to cube. + self.engine.cube.add_dim_coord.assert_called_with(expected_coord, 0) - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) + # Test that engine.cube_parts container is correctly populated. + expected_list = [(expected_coord, self.cf_coord_var.cf_name)] + assert self.engine.cube_parts["coordinates"] == expected_list -class TestCircular(tests.IrisTest, RulesTestMixin): +class TestCircular(RulesTestMixin): # Test the rules logic for marking a coordinate "circular". - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. 
- RulesTestMixin.setUp(self) + @pytest.fixture(autouse=True) + def _setup(self): self.cf_bounds_var = None def _make_vars(self, points, bounds=None, units="degrees"): points = np.array(points) - self.cf_coord_var = mock.MagicMock( + self.cf_coord_var = self.mocker.MagicMock( dimensions=("foo",), cf_name="wibble", standard_name=None, long_name="wibble", - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), units=units, shape=points.shape, dtype=points.dtype, @@ -493,7 +457,7 @@ def _make_vars(self, points, bounds=None, units="degrees"): if bounds: bounds = np.array(bounds).reshape(self.cf_coord_var.shape + (2,)) dimensions = ("x", "nv") - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) def _check_circular(self, circular, *args, **kwargs): if "coord_name" in kwargs: @@ -501,13 +465,12 @@ def _check_circular(self, circular, *args, **kwargs): else: coord_name = "longitude" self._make_vars(*args, **kwargs) - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_name=coord_name - ) - self.assertEqual(self.engine.cube.add_dim_coord.call_count, 1) - coord, dims = self.engine.cube.add_dim_coord.call_args[0] - self.assertEqual(coord.circular, circular) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name=coord_name + ) + assert self.engine.cube.add_dim_coord.call_count == 1 + coord, dims = self.engine.cube.add_dim_coord.call_args[0] + assert coord.circular == circular def check_circular(self, *args, **kwargs): self._check_circular(True, *args, **kwargs) @@ -566,23 +529,20 @@ def test_multiple_bounded_noncircular(self): ) -class TestCircularScalar(tests.IrisTest, RulesTestMixin): - def setUp(self): - RulesTestMixin.setUp(self) - +class TestCircularScalar(RulesTestMixin): def _make_vars(self, bounds): # Create cf vars for the coordinate and its bounds. 
# Note that for a scalar the shape of the array from # the cf var is (), rather than (1,). points = np.array([0.0]) units = "degrees" - self.cf_coord_var = mock.Mock( + self.cf_coord_var = self.mocker.Mock( dimensions=(), cf_name="wibble", standard_name=None, long_name="wibble", units=units, - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), shape=(), dtype=points.dtype, __getitem__=lambda self, key: points[key], @@ -590,16 +550,15 @@ def _make_vars(self, bounds): bounds = np.array(bounds) dimensions = ("bnds",) - self.cf_bounds_var = _make_bounds_var(bounds, dimensions, units) + self.cf_bounds_var = self._make_bounds_var(bounds, dimensions, units) def _assert_circular(self, value): - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_and_add_dimension_coordinate( - self.engine, self.cf_coord_var, coord_name="longitude" - ) - self.assertEqual(self.engine.cube.add_aux_coord.call_count, 1) - coord, dims = self.engine.cube.add_aux_coord.call_args[0] - self.assertEqual(coord.circular, value) + build_and_add_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name="longitude" + ) + assert self.engine.cube.add_aux_coord.call_count == 1 + coord, dims = self.engine.cube.add_aux_coord.call_args[0] + assert coord.circular == value def test_two_bounds_noncircular(self): self._make_vars([0.0, 180.0]) @@ -624,7 +583,3 @@ def test_two_bounds_circular_alt_decreasing(self): def test_four_bounds(self): self._make_vars([0.0, 10.0, 20.0, 30.0]) self._assert_circular(False) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py index 1b4d48f6bf..b906f076c7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py +++ 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_global_attributes.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_global_attributes`.""" -from unittest import mock - import numpy as np import pytest @@ -15,13 +13,13 @@ @pytest.fixture -def mock_engine(): +def mock_engine(mocker): global_attributes = { "Conventions": "CF-1.5", "comment": "Mocked test object", } - cf_group = mock.Mock(global_attributes=global_attributes) - cf_var = mock.MagicMock( + cf_group = mocker.Mock(global_attributes=global_attributes) + cf_var = mocker.MagicMock( cf_name="wibble", standard_name=None, long_name=None, @@ -30,8 +28,8 @@ def mock_engine(): cell_methods=None, cf_group=cf_group, ) - engine = mock.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") - yield engine + engine = mocker.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") + return engine def test_construction(mock_engine): diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py index 357a199546..ba6a289a08 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_names.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_names`.""" -from unittest import mock - import numpy as np import pytest @@ -15,13 +13,13 @@ @pytest.fixture -def mock_engine(): +def mock_engine(mocker): global_attributes = { "Conventions": "CF-1.5", "comment": "Mocked test object", } - cf_group = mock.Mock(global_attributes=global_attributes) - cf_var = mock.MagicMock( + cf_group = mocker.Mock(global_attributes=global_attributes) + cf_var = mocker.MagicMock( cf_name="wibble", standard_name=None, long_name=None, @@ -30,8 +28,8 @@ def mock_engine(): cell_methods=None, cf_group=cf_group, ) - engine = mock.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") - yield engine + engine = mocker.Mock(cube=Cube([23]), cf_var=cf_var, filename="foo.nc") + return engine class TestCubeName: diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py index f0e7d16113..ef2931963f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_and_add_units.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Test function :func:`iris.fileformats._nc_load_rules.helpers.build_and_add_units`.""" -from unittest import mock - from cf_units import Unit import pytest @@ -16,21 +14,21 @@ @pytest.fixture -def mock_cf_data_var(): - yield mock.Mock( +def mock_cf_data_var(mocker): + return mocker.Mock( spec=CFDataVariable, units="kelvin", cf_name="wibble", filename="DUMMY", dtype=float, - cf_data=mock.Mock(spec=[]), + cf_data=mocker.Mock(spec=[]), ) @pytest.fixture -def mock_engine(mock_cf_data_var): - yield mock.Mock( - cube=mock.Mock(attributes={}), +def mock_engine(mock_cf_data_var, mocker): + return mocker.Mock( + cube=mocker.Mock(attributes={}), cf_var=mock_cf_data_var, filename=mock_cf_data_var.filename, ) @@ -65,10 +63,10 @@ def mock_get_attr_units(cf_var, attributes, capture_invalid=False): assert mock_engine.cube.units == units_original -def test_not_added(monkeypatch, mock_engine, mock_cf_data_var): +def test_not_added(monkeypatch, mock_engine, mock_cf_data_var, mocker): units_original = mock_engine.cube.units - class NoUnits(mock.Mock): + class NoUnits(mocker.Mock): def __setattr__(self, key, value): if key == "units": raise RuntimeError("Not added") diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index 41be1ea932..d810152196 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import Geostationary from iris.fileformats._nc_load_rules.helpers import ( build_geostationary_coordinate_system, ) +from 
iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildGeostationaryCoordinateSystem(tests.IrisTest): +class TestBuildGeostationaryCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, replace_props=None, remove_props=None): """Generic test that can check vertical perspective validity with or without inverse flattening. @@ -54,11 +49,11 @@ def _test(self, inverse_flattening=False, replace_props=None, remove_props=None) cf_grid_var_kwargs = non_ellipsoid_kwargs.copy() cf_grid_var_kwargs.update(ellipsoid_kwargs) - cf_grid_var = mock.Mock(spec=[], **cf_grid_var_kwargs) + cf_grid_var = self.mocker.Mock(spec=[], **cf_grid_var_kwargs) cs = build_geostationary_coordinate_system(None, cf_grid_var) ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs) expected = Geostationary(ellipsoid=ellipsoid, **non_ellipsoid_kwargs) - self.assertEqual(cs, expected) + assert cs == expected def test_valid(self): self._test(inverse_flattening=False) @@ -71,7 +66,3 @@ def test_false_offsets_missing(self): def test_false_offsets_none(self): self._test(replace_props={"false_easting": None, "false_northing": None}) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index 45241fbced..0d4af82889 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import LambertAzimuthalEqualArea from 
iris.fileformats._nc_load_rules.helpers import ( build_lambert_azimuthal_equal_area_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildLambertAzimuthalEqualAreaCoordinateSystem(tests.IrisTest): +class TestBuildLambertAzimuthalEqualAreaCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_optionals=False): if no_optionals: # Most properties are optional for this system. @@ -54,7 +49,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_lambert_azimuthal_equal_area_coordinate_system(None, cf_grid_var) @@ -66,7 +61,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -78,7 +73,3 @@ def test_inverse_flattening(self): def test_no_optionals(self): # Check defaults, when all optional attributes are absent. 
self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index fc45a6eab8..9d91c2a3f7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import LambertConformal from iris.fileformats._nc_load_rules.helpers import ( build_lambert_conformal_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildLambertConformalCoordinateSystem(tests.IrisTest): +class TestBuildLambertConformalCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_optionals=False): if no_optionals: # Most properties are optional in this case. @@ -57,7 +52,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS(6377563.396, 6356256.909) - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_lambert_conformal_coordinate_system(None, cf_grid_var) @@ -70,7 +65,7 @@ def _test(self, inverse_flattening=False, no_optionals=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -82,7 +77,3 @@ def test_inverse_flattening(self): def test_no_optionals(self): # Check defaults, when all optional attributes are absent. 
self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index dc2188b65e..3752337ea9 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -7,20 +7,14 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import Mercator from iris.fileformats._nc_load_rules.helpers import build_mercator_coordinate_system -class TestBuildMercatorCoordinateSystem(tests.IrisTest): - def test_valid(self): - cf_grid_var = mock.Mock( +class TestBuildMercatorCoordinateSystem: + def test_valid(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -37,10 +31,10 @@ def test_valid(self): ), standard_parallel=(cf_grid_var.standard_parallel), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_inverse_flattening(self): - cf_grid_var = mock.Mock( + def test_inverse_flattening(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -58,10 +52,10 @@ def test_inverse_flattening(self): ), standard_parallel=(cf_grid_var.standard_parallel), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_longitude_missing(self): - cf_grid_var = mock.Mock( + def test_longitude_missing(self, mocker): + cf_grid_var = mocker.Mock( spec=[], semi_major_axis=6377563.396, inverse_flattening=299.3249646, @@ -77,10 +71,10 @@ def test_longitude_missing(self): ), standard_parallel=(cf_grid_var.standard_parallel), ) - 
self.assertEqual(cs, expected) + assert cs == expected - def test_standard_parallel_missing(self): - cf_grid_var = mock.Mock( + def test_standard_parallel_missing(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -95,10 +89,10 @@ def test_standard_parallel_missing(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_scale_factor_at_projection_origin(self): - cf_grid_var = mock.Mock( + def test_scale_factor_at_projection_origin(self, mocker): + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, @@ -117,8 +111,4 @@ def test_scale_factor_at_projection_origin(self): cf_grid_var.scale_factor_at_projection_origin ), ) - self.assertEqual(cs, expected) - - -if __name__ == "__main__": - tests.main() + assert cs == expected diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py index 50b171655e..ed4395fffa 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_oblique_mercator_coordinate_system.py @@ -5,7 +5,6 @@ """Test function :func:`iris.fileformats._nc_load_rules.helpers.build_oblique_mercator_coordinate_system`.""" from typing import List, NamedTuple, Type -from unittest import mock import pytest @@ -148,22 +147,22 @@ def make_variant_inputs(self, request) -> None: self.coord_system_args_expected = list(coord_system_kwargs_expected.values()) - def test_attributes(self): - cf_var_mock = mock.Mock(spec=[], **self.nc_attributes) - coord_system_mock = mock.Mock(spec=self.expected_class) + def test_attributes(self, mocker): + cf_var_mock = mocker.Mock(spec=[], 
**self.nc_attributes) + coord_system_mock = mocker.Mock(spec=self.expected_class) setattr(coord_systems, self.expected_class.__name__, coord_system_mock) _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) coord_system_mock.assert_called_with(*self.coord_system_args_expected) -def test_deprecation(): +def test_deprecation(mocker): nc_attributes = dict( grid_mapping_name="rotated_mercator", latitude_of_projection_origin=0.0, longitude_of_projection_origin=0.0, scale_factor_at_projection_origin=1.0, ) - cf_var_mock = mock.Mock(spec=[], **nc_attributes) + cf_var_mock = mocker.Mock(spec=[], **nc_attributes) with pytest.warns(IrisDeprecation, match="azimuth_of_central_line = 90"): _ = build_oblique_mercator_coordinate_system(None, cf_var_mock) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py index 4661ea5449..3e9396cca4 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_polar_stereographic_coordinate_system.py @@ -7,12 +7,6 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import PolarStereographic from iris.fileformats._nc_load_rules.helpers import ( @@ -20,9 +14,9 @@ ) -class TestBuildPolarStereographicCoordinateSystem(tests.IrisTest): - def test_valid_north(self): - cf_grid_var = mock.Mock( +class TestBuildPolarStereographicCoordinateSystem: + def test_valid_north(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -43,10 +37,10 @@ def test_valid_north(self): cf_grid_var.semi_major_axis, 
cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_south(self): - cf_grid_var = mock.Mock( + def test_valid_south(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=-90, @@ -67,10 +61,10 @@ def test_valid_south(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_with_standard_parallel(self): - cf_grid_var = mock.Mock( + def test_valid_with_standard_parallel(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -89,10 +83,10 @@ def test_valid_with_standard_parallel(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_with_false_easting_northing(self): - cf_grid_var = mock.Mock( + def test_valid_with_false_easting_northing(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -117,10 +111,10 @@ def test_valid_with_false_easting_northing(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) + assert cs == expected - def test_valid_nonzero_veritcal_lon(self): - cf_grid_var = mock.Mock( + def test_valid_nonzero_veritcal_lon(self, mocker): + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=30, latitude_of_projection_origin=90, @@ -141,8 +135,4 @@ def test_valid_nonzero_veritcal_lon(self): cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis ), ) - self.assertEqual(cs, expected) - - -if __name__ == "__main__": - tests.main() + assert cs == expected diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py 
b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index 4928631336..481d4441f8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import Stereographic from iris.fileformats._nc_load_rules.helpers import ( build_stereographic_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildStereographicCoordinateSystem(tests.IrisTest): +class TestBuildStereographicCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_offsets=False): test_easting = -100 test_northing = 200 @@ -49,7 +44,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): test_easting = 0 test_northing = 0 - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_stereographic_coordinate_system(None, cf_grid_var) @@ -62,7 +57,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -74,7 +69,3 @@ def test_inverse_flattening(self): def test_no_offsets(self): # Check when false_easting/northing attributes are absent. 
self._test(no_offsets=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index ad61c485e0..f63402dcc2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import TransverseMercator from iris.fileformats._nc_load_rules.helpers import ( build_transverse_mercator_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildTransverseMercatorCoordinateSystem(tests.IrisTest): +class TestBuildTransverseMercatorCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_options=False): test_easting = -100 test_northing = 200 @@ -51,7 +46,7 @@ def _test(self, inverse_flattening=False, no_options=False): test_northing = 0 test_scale_factor = 1.0 - cf_grid_var = mock.Mock(spec=[], **gridvar_props) + cf_grid_var = self.mocker.Mock(spec=[], **gridvar_props) cs = build_transverse_mercator_coordinate_system(None, cf_grid_var) @@ -64,7 +59,7 @@ def _test(self, inverse_flattening=False, no_options=False): ellipsoid=expected_ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_basic(self): self._test() @@ -74,7 +69,3 @@ def test_inverse_flattening(self): def test_missing_optionals(self): self._test(no_options=True) - - -if __name__ == "__main__": - tests.main() diff --git 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py index 2c65e09c3f..932e1d085d 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py @@ -7,20 +7,15 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.coord_systems import VerticalPerspective from iris.fileformats._nc_load_rules.helpers import ( build_vertical_perspective_coordinate_system, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestBuildVerticalPerspectiveCoordinateSystem(tests.IrisTest): +class TestBuildVerticalPerspectiveCoordinateSystem(MockerMixin): def _test(self, inverse_flattening=False, no_offsets=False): """Generic test that can check vertical perspective validity with or without inverse flattening, and false_east/northing-s. @@ -50,7 +45,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): test_easting = 0 test_northing = 0 - cf_grid_var = mock.Mock(**cf_grid_var_kwargs) + cf_grid_var = self.mocker.Mock(**cf_grid_var_kwargs) ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs) cs = build_vertical_perspective_coordinate_system(None, cf_grid_var) @@ -63,7 +58,7 @@ def _test(self, inverse_flattening=False, no_offsets=False): ellipsoid=ellipsoid, ) - self.assertEqual(cs, expected) + assert cs == expected def test_valid(self): self._test(inverse_flattening=False) @@ -75,7 +70,3 @@ def test_inverse_flattening(self): def test_no_offsets(self): # Check when false_easting/northing attributes are absent. 
self._test(no_offsets=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py index e7e49879cd..50698e72f8 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py @@ -7,32 +7,28 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.fileformats._nc_load_rules.helpers import get_attr_units from iris.fileformats.cf import CFDataVariable from iris.loading import LOAD_PROBLEMS +from iris.tests import _shared_utils +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin from iris.warnings import IrisCfLoadWarning -class TestGetAttrUnits(tests.IrisTest): - @staticmethod - def _make_cf_var(global_attributes=None): +class TestGetAttrUnits(MockerMixin): + def _make_cf_var(self, global_attributes=None): if global_attributes is None: global_attributes = {} - cf_group = mock.Mock(global_attributes=global_attributes) + cf_group = self.mocker.Mock(global_attributes=global_attributes) - cf_var = mock.MagicMock( + cf_var = self.mocker.MagicMock( spec=CFDataVariable, cf_name="sound_frequency", - cf_data=mock.Mock(spec=[]), + cf_data=self.mocker.Mock(spec=[]), filename="DUMMY", standard_name=None, long_name=None, @@ -48,30 +44,26 @@ def test_unicode_character(self): expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() attr_units = get_attr_units(cf_var, attributes) - self.assertEqual(attr_units, "?") - self.assertEqual(attributes, expected_attributes) + assert attr_units == "?" 
+ assert attributes == expected_attributes def test_warn(self): attributes = {} expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() - with self.assertWarns(IrisCfLoadWarning, msg="Ignoring invalid units"): + with pytest.warns(IrisCfLoadWarning, match="Ignoring invalid units"): attr_units = get_attr_units(cf_var, attributes) - self.assertEqual(attr_units, "?") - self.assertEqual(attributes, expected_attributes) + assert attr_units == "?" + assert attributes == expected_attributes def test_capture(self): attributes = {} expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() - with self.assertNoWarningsRegexp("Ignoring invalid units"): + with _shared_utils.assert_no_warnings_regexp("Ignoring invalid units"): attr_units = get_attr_units(cf_var, attributes, capture_invalid=True) - self.assertEqual(attr_units, "?") - self.assertEqual(attributes, expected_attributes) + assert attr_units == "?" + assert attributes == expected_attributes load_problem = LOAD_PROBLEMS.problems[-1] - self.assertEqual(load_problem.loaded, {"units": "\u266b"}) - - -if __name__ == "__main__": - tests.main() + assert load_problem.loaded == {"units": "\u266b"} diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py index 43a07fe17b..39a2e6caa7 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py @@ -7,26 +7,21 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats._nc_load_rules.helpers import ( CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY, get_cf_bounds_var, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class 
TestGetCFBoundsVar(tests.IrisTest): +class TestGetCFBoundsVar(MockerMixin): # Tests to check that get_cf_bounds_var will return the bounds_var and # the correct climatological flag. def _generic_test(self, test_climatological_bounds=False): - cf_coord_var = mock.MagicMock() + cf_coord_var = self.mocker.MagicMock() - cf_group_dict = {"TEST": mock.sentinel.bounds_var} + cf_group_dict = {"TEST": self.mocker.sentinel.bounds_var} if test_climatological_bounds: cf_coord_var.cf_group.climatology = cf_group_dict test_attr = CF_ATTR_CLIMATOLOGY @@ -39,15 +34,11 @@ def _generic_test(self, test_climatological_bounds=False): setattr(cf_coord_var, attr, attr_val) bounds_var, climatological = get_cf_bounds_var(cf_coord_var) - self.assertIs(bounds_var, mock.sentinel.bounds_var) - self.assertEqual(climatological, test_climatological_bounds) + assert bounds_var is self.mocker.sentinel.bounds_var + assert climatological == test_climatological_bounds def test_bounds_normal(self): self._generic_test(test_climatological_bounds=False) def test_bounds_climatological(self): self._generic_test(test_climatological_bounds=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py index 5817a4228d..d54aec0692 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py @@ -7,18 +7,13 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np from iris.fileformats._nc_load_rules.helpers import get_names +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -class TestGetNames(tests.IrisTest): +class TestGetNames(MockerMixin): """The tests included in this class cover all the variations 
of possible combinations of the following inputs: * standard_name = [None, 'projection_y_coordinate', 'latitude_coordinate'] @@ -32,16 +27,15 @@ class TestGetNames(tests.IrisTest): """ - @staticmethod - def _make_cf_var(standard_name, long_name, cf_name): - cf_var = mock.Mock( + def _make_cf_var(self, standard_name, long_name, cf_name): + cf_var = self.mocker.Mock( cf_name=cf_name, standard_name=standard_name, long_name=long_name, units="degrees", dtype=np.float64, cell_methods=None, - cf_group=mock.Mock(global_attributes={}), + cf_group=self.mocker.Mock(global_attributes={}), ) return cf_var @@ -61,10 +55,10 @@ def check_names(self, inputs, expected): ) # Check the names and attributes are as expected. - self.assertEqual(res_standard_name, exp_std_name) - self.assertEqual(res_long_name, exp_long_name) - self.assertEqual(res_var_name, exp_var_name) - self.assertEqual(attributes, exp_attributes) + assert res_standard_name == exp_std_name + assert res_long_name == exp_long_name + assert res_var_name == exp_var_name + assert attributes == exp_attributes def test_var_name_valid(self): # Only var_name is set and it is set to a valid standard name. 
@@ -283,7 +277,3 @@ def test_std_name_invalid_long_name_set_var_name_invalid_coord_name_set( {"invalid_standard_name": "latitude_coord"}, ) self.check_names(inp, exp) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index f0dd80de85..8ac08c330c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -7,26 +7,24 @@ """ -from unittest import mock +import re import warnings from iris.fileformats._nc_load_rules.helpers import has_supported_mercator_parameters +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip +class _EngineMixin(MockerMixin): + def engine(self, cf_grid_var, cf_name): + cf_group = {cf_name: cf_grid_var} + cf_var = self.mocker.Mock(cf_group=cf_group) + return self.mocker.Mock(cf_var=cf_var) -def _engine(cf_grid_var, cf_name): - cf_group = {cf_name: cf_grid_var} - cf_var = mock.Mock(cf_group=cf_group) - return mock.Mock(cf_var=cf_var) - -class TestHasSupportedMercatorParameters(tests.IrisTest): - def test_valid_base(self): +class TestHasSupportedMercatorParameters(_EngineMixin): + def test_valid_base(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, false_easting=0, @@ -35,15 +33,15 @@ def test_valid_base(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert 
is_valid - def test_valid_false_easting_northing(self): + def test_valid_false_easting_northing(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, false_easting=15, @@ -52,15 +50,15 @@ def test_valid_false_easting_northing(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_standard_parallel(self): + def test_valid_standard_parallel(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=-90, false_easting=0, @@ -69,15 +67,15 @@ def test_valid_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_scale_factor(self): + def test_valid_scale_factor(self, mocker): cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=0, false_easting=0, @@ -86,17 +84,17 @@ def test_valid_scale_factor(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_invalid_scale_factor_and_standard_parallel(self): + def test_invalid_scale_factor_and_standard_parallel(self, mocker): # Scale factor and standard parallel cannot both be specified for # Mercator projections cf_name = "mercator" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], longitude_of_projection_origin=0, false_easting=0, 
@@ -106,19 +104,16 @@ def test_invalid_scale_factor_and_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_mercator_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - 'both "scale_factor_at_projection_origin" and "standard_parallel"', - ) - + assert not is_valid + assert len(warns) == 1 -if __name__ == "__main__": - tests.main() + msg = re.escape( + 'both "scale_factor_at_projection_origin" and "standard_parallel"' + ) + assert re.search(msg, str(warns[0])) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py index 8ced149ff1..2bfc801af2 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_polar_stereographic_parameters.py @@ -7,28 +7,26 @@ """ -from unittest import mock +import re import warnings from iris.fileformats._nc_load_rules.helpers import ( has_supported_polar_stereographic_parameters, ) +from iris.tests.unit.fileformats.nc_load_rules.helpers import MockerMixin -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip +class _EngineMixin(MockerMixin): + def engine(self, cf_grid_var, cf_name): + cf_group = {cf_name: cf_grid_var} + cf_var = self.mocker.Mock(cf_group=cf_group) + return self.mocker.Mock(cf_var=cf_var) -def _engine(cf_grid_var, cf_name): - cf_group = {cf_name: cf_grid_var} - cf_var = mock.Mock(cf_group=cf_group) - return 
mock.Mock(cf_var=cf_var) - -class TestHasSupportedPolarStereographicParameters(tests.IrisTest): - def test_valid_base_north(self): +class TestHasSupportedPolarStereographicParameters(_EngineMixin): + def test_valid_base_north(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -38,15 +36,15 @@ def test_valid_base_north(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_base_south(self): + def test_valid_base_south(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=-90, @@ -56,15 +54,15 @@ def test_valid_base_south(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_straight_vertical_longitude(self): + def test_valid_straight_vertical_longitude(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=30, latitude_of_projection_origin=90, @@ -74,15 +72,15 @@ def test_valid_straight_vertical_longitude(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_false_easting_northing(self): + def 
test_valid_false_easting_northing(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -92,15 +90,15 @@ def test_valid_false_easting_northing(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_standard_parallel(self): + def test_valid_standard_parallel(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -110,15 +108,15 @@ def test_valid_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_valid_scale_factor(self): + def test_valid_scale_factor(self, mocker): cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -128,17 +126,17 @@ def test_valid_scale_factor(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertTrue(is_valid) + assert is_valid - def test_invalid_scale_factor_and_standard_parallel(self): + def test_invalid_scale_factor_and_standard_parallel(self, mocker): # Scale factor and standard parallel cannot both be specified for # Polar Stereographic projections cf_name = "polar_stereographic" - cf_grid_var = 
mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -149,24 +147,25 @@ def test_invalid_scale_factor_and_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - 'both "scale_factor_at_projection_origin" and "standard_parallel"', + assert not is_valid + assert len(warns) == 1 + + msg = re.escape( + 'both "scale_factor_at_projection_origin" and "standard_parallel"' ) + assert re.search(msg, str(warns[0])) - def test_absent_scale_factor_and_standard_parallel(self): + def test_absent_scale_factor_and_standard_parallel(self, mocker): # Scale factor and standard parallel cannot both be specified for # Polar Stereographic projections cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=90, @@ -175,25 +174,25 @@ def test_absent_scale_factor_and_standard_parallel(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - 'One of "scale_factor_at_projection_origin" and ' - '"standard_parallel" is required.', + assert not is_valid + assert len(warns) == 1 + + msg = re.escape( + 'One of "scale_factor_at_projection_origin" and "standard_parallel" is required.' 
) + assert re.search(msg, str(warns[0])) - def test_invalid_latitude_of_projection_origin(self): + def test_invalid_latitude_of_projection_origin(self, mocker): # Scale factor and standard parallel cannot both be specified for # Polar Stereographic projections cf_name = "polar_stereographic" - cf_grid_var = mock.Mock( + cf_grid_var = mocker.Mock( spec=[], straight_vertical_longitude_from_pole=0, latitude_of_projection_origin=45, @@ -203,19 +202,14 @@ def test_invalid_latitude_of_projection_origin(self): semi_major_axis=6377563.396, semi_minor_axis=6356256.909, ) - engine = _engine(cf_grid_var, cf_name) + engine = self.engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: warnings.simplefilter("always") is_valid = has_supported_polar_stereographic_parameters(engine, cf_name) - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex( - str(warns[0]), - r'"latitude_of_projection_origin" must be \+90 or -90\.', - ) - + assert not is_valid + assert len(warns) == 1 -if __name__ == "__main__": - tests.main() + msg = r'"latitude_of_projection_origin" must be \+90 or -90\.' + assert re.search(msg, str(warns[0])) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py index 528e9d7579..e0da327693 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_parse_cell_methods.py @@ -4,18 +4,16 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`.""" -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip +import re -from unittest import mock +import pytest from iris.coords import CellMethod from iris.fileformats._nc_load_rules.helpers import parse_cell_methods from iris.warnings import IrisCfLoadWarning -class Test(tests.IrisTest): +class Test: def test_simple(self): cell_method_strings = [ "time: mean", @@ -25,7 +23,7 @@ def test_simple(self): expected = (CellMethod(method="mean", coords="time"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_with_interval(self): cell_method_strings = [ @@ -35,7 +33,7 @@ def test_with_interval(self): expected = (CellMethod(method="variance", coords="time", intervals="1 hr"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_multiple_axes(self): cell_method_strings = [ @@ -47,7 +45,7 @@ def test_multiple_axes(self): expected = (CellMethod(method="standard_deviation", coords=["lat", "lon"]),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_multiple(self): cell_method_strings = [ @@ -62,7 +60,7 @@ def test_multiple(self): ) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_comment(self): cell_method_strings = [ @@ -91,7 +89,7 @@ def test_comment(self): ) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_comment_brackets(self): cell_method_strings = [ @@ -108,7 +106,7 @@ def test_comment_brackets(self): ) 
for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_comment_bracket_mismatch_warning(self): cell_method_strings = [ @@ -116,9 +114,9 @@ def test_comment_bracket_mismatch_warning(self): "time : minimum within days (comment: 18h day-1)-18h)", ] for cell_method_str in cell_method_strings: - with self.assertWarns( + with pytest.warns( IrisCfLoadWarning, - msg="Cell methods may be incorrectly parsed due to mismatched brackets", + match="Cell methods may be incorrectly parsed due to mismatched brackets", ): _ = parse_cell_methods(cell_method_str) @@ -133,9 +131,12 @@ def test_badly_formatted_warning(self): "time: (interval: 1 day comment: second bit)", ] for cell_method_str in cell_method_strings: - with self.assertWarns( + msg = ( + rf"^Failed to .*parse cell method string: {re.escape(cell_method_str)}$" + ) + with pytest.warns( IrisCfLoadWarning, - msg=f"Failed to fully parse cell method string: {cell_method_str}", + match=msg, ): _ = parse_cell_methods(cell_method_str) @@ -147,7 +148,7 @@ def test_portions_of_cells(self): expected = (CellMethod(method="mean where sea_ice over sea", coords="area"),) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected def test_climatology(self): cell_method_strings = [ @@ -162,9 +163,9 @@ def test_climatology(self): ) for cell_method_str in cell_method_strings: res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) + assert res == expected - def test_climatology_with_unknown_method(self): + def test_climatology_with_unknown_method(self, mocker): cell_method_strings = [ "time: min within days time: mean over days", "time : min within days time: mean over days", @@ -176,14 +177,10 @@ def test_climatology_with_unknown_method(self): CellMethod(method="mean over days", coords="time"), ) for cell_method_str in cell_method_strings: 
- with mock.patch("warnings.warn") as warn: - res = parse_cell_methods(cell_method_str) - self.assertIn( - "NetCDF variable contains unknown cell method 'min'", - warn.call_args[0][0], + warn = mocker.patch("warnings.warn") + res = parse_cell_methods(cell_method_str) + assert ( + "NetCDF variable contains unknown cell method 'min'" + in warn.call_args[0][0] ) - self.assertEqual(res, expected) - - -if __name__ == "__main__": - tests.main() + assert res == expected diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py index 66620166c5..e98de2cbb3 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py @@ -7,48 +7,40 @@ """ -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.fileformats._nc_load_rules.helpers import reorder_bounds_data +from iris.tests import _shared_utils -class Test(tests.IrisTest): - def test_fastest_varying(self): +class Test: + def test_fastest_varying(self, mocker): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock( + cf_bounds_var = mocker.Mock( dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" ) - cf_coord_var = mock.Mock(dimensions=("foo", "bar")) + cf_coord_var = mocker.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Vertex dimension (nv) is already at the end. 
- self.assertArrayEqual(res, bounds_data) + _shared_utils.assert_array_equal(res, bounds_data) - def test_slowest_varying(self): + def test_slowest_varying(self, mocker): bounds_data = np.arange(24).reshape(4, 2, 3) - cf_bounds_var = mock.Mock(dimensions=("nv", "foo", "bar")) - cf_coord_var = mock.Mock(dimensions=("foo", "bar")) + cf_bounds_var = mocker.Mock(dimensions=("nv", "foo", "bar")) + cf_coord_var = mocker.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Move zeroth dimension (nv) to the end. expected = np.rollaxis(bounds_data, 0, bounds_data.ndim) - self.assertArrayEqual(res, expected) + _shared_utils.assert_array_equal(res, expected) - def test_different_dim_names(self): + def test_different_dim_names(self, mocker): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock( + cf_bounds_var = mocker.Mock( dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" ) - cf_coord_var = mock.Mock(dimensions=("x", "y"), cf_name="wibble") - with self.assertRaisesRegex(ValueError, "dimension names"): + cf_coord_var = mocker.Mock(dimensions=("x", "y"), cf_name="wibble") + with pytest.raises(ValueError, match="dimension names"): reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py b/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py index b33cb515a2..9f6d26f975 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/_thread_safe_nc/test_NetCDFWriteProxy.py @@ -64,11 +64,15 @@ def __call__(self, *args, **kwargs) -> nc.Dataset: def test_handle_hdf_locking_error(dataset_path, monkeypatch, write_proxy): """Test that NetCDFWriteProxy can handle non-deterministic HDF locking errors.""" monkeypatch.setattr(nc, "Dataset", UnreliableDatasetMaker()) - with 
pytest.raises(OSError, match="Simulated non-deterministic HDF locking error"): + + def _file_lock_failure(): dataset = nc.Dataset(write_proxy.path, "r+") var = dataset.variables[write_proxy.varname] var[0] = 1.0 + with pytest.raises(OSError, match="Simulated non-deterministic HDF locking error"): + _file_lock_failure() + # Reset. monkeypatch.setattr(nc, "Dataset", UnreliableDatasetMaker()) try: diff --git a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py index d0189b474f..9e9ad017e2 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py +++ b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_GribParamHandler.py @@ -104,5 +104,5 @@ def test_odd_array_case(self): ) def test_badvalue__fail(self, badval): # It can convert random values to strings, but they mostly won't satisfy. - with pytest.raises(ValueError): + with pytest.raises(ValueError, match=r"Invalid.*"): GP_HANDLER.decode_attribute(badval) diff --git a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py index 7f6eda4397..585d3d966f 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py +++ b/lib/iris/tests/unit/fileformats/netcdf/attribute_handlers/test_UkmoProcessFlagsHandler.py @@ -107,7 +107,7 @@ def test_junk_string(self): result = UPF_HANDLER.decode_attribute(test_string) assert result == ("xxx",) - @pytest.mark.parametrize("badtype", ("int", "intarray", "floatarray")) + @pytest.mark.parametrize("badtype", ["int", "intarray", "floatarray"]) def test_numeric_values(self, badtype): """Even array attributes get converted to a string + split.""" if badtype == "int": diff --git a/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt 
b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt new file mode 100644 index 0000000000..07a0bc3bcd --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/encoding_tests.txt @@ -0,0 +1,191 @@ +=========== +Outstanding Qs +* What would we like to do with all this IN IRIS?? + - generally present as string arrays (Uxx) + - existing scheme of naming dims for length + re-using is quite cunning! + - choice of seeing actual character arrays as alternative to string conversions? + +* string length handling for load/save/roundtrip + - on SAVE, we need some control so we can create files which are compatible, + irrespective of the data (which currently we are not doing) + - ALSO this is wanted to ensure that multiple vars (e.g. string cubes or string coords) + will share the string dim -- instead of creating arbitrary different ones + - presumably, if encoding blows the max-len, we must get a warning/error + + - on LOAD, we may want to *capture* the actual original string dim length, so it can be + re-created on save (by some scheme, as per previous) -- i.e. enable roundtripping. + I don't really want to preserve the name of the string dim, but this could be a + slightly tender point. To consider also : the impact of this on the non-equivalence + of loaded cubes, if we use actual *attributes* to carry this info (see below). + - **if not** : just load data + convert to string arrays as seems best + - this will also lead to incompatible cubes. + + - on SAVE, in the absence of strlen-controls, what is a reasonable default choice? + - take longest encoded + - set nbytes = NEXPAND(encoding) * nchars + - sensible values would depend on the encoding... + : ascii -> 1 + : utf-8 -> 1 or 4 ??? + : utf-16 -> 2 or 4 ??? + : utf-32 -> 4 + + - on LOAD, in absence of strlen controls, how do we choose the result DTYPE (i.e. character length)? + - again, may depend on the encoding: + : ascii = "U" + : UTF-8 = "U" + : UTF-16 = "U" + : UTF-32 = "U" + - N.B. 
these are ll at least "safe" - i.e. won't lose characters + + +separately from these, there is the question of how the controls affect "normal" +cube operations. + - the easiest approach is to define a "special" attribute, + which can be set on any cube/component + - using the dtype-length of the data would be *possible*, in conjunction with the + above-proposed "default rules" for choosing strlen from the dtype. + But this might not round-trip in all cases. + +within the actual data arrays + - we can't really expect any different to what numpy does + - that is, the dtype-length of any element <= that of the array (and not ==) + this may be tricky, but we can't easily prevent it. + >>> a = np.array(['', 'a', 'bb']) + >>> a + array(['', 'a', 'bb'], dtype='>> a[0].dtype + dtype('>> a[1].dtype + dtype('>> a[2].dtype + dtype('>> a.dtype + dtype('>> + - likewise, we can't assign without possible truncation. + If you **want** to expand the supported width, can use ".astype()" first ? + + +======================== +========================= + +forms in files: + * char chardata(dim1, dim2, strlen_xx); # char data + * string data(dim1, dim2); + +netcdf types: +(netcdf docs terms) + NC_BYTE 8-bit signed integer + NC_UBYTE 8-bit unsigned integer + NC_CHAR 8-bit character + NC_STRING variable length character string + +***NOTE*** there is no NC_UCHAR or "unsigned char" type + + +relevant numpy base types (scalar dtypes): + * "S" bytes : np.bytes_ == np.int8 + * "B" unsigned bytes : np.ubyte == np.uint8 + * 'i' ints : np.int_ + * 'u' unsigned ints : np.int_ + * "U" unicode string : np.str_ + +forms in numpy: + * np.ndarray(dtype="S1") # char data + * np.ndarray(dtype="Snn") # char data + * np.ndarray(dtype="Unn") # strings + * np.ndarray(dtype="") + +possibilities in createVariable: +""" + The datatype can be a numpy datatype object, or a string that describes a numpy dtype object ... 
+ datatype can also be a CompoundType instance (for a structured, or compound array), a VLType instance (for a variable-length array), +** or the python str builtin (for a variable-length string array). +** Numpy string and unicode datatypes with length greater than one are aliases for str. +""" + +test types: + "i1" : np.int8 + "u1" : np.uint8 + "S1" : np.byte_ + "U1" : np.str_ + "S" : + "U" : with/without non-ascii content + +save all these to files... +outputs from "test_nc_dtypes.py" test run: + SPEC:i1 SAVED-AS:int8 byte RELOAD-AS:int8 + SPEC:u1 SAVED-AS:uint8 ubyte RELOAD-AS:uint8 + SPEC:S1 SAVED-AS:|S1 char RELOAD-AS: () + SPEC:U1 SAVED-AS:`) dtype = np.dtype(dtype) - cf_var = mock.MagicMock( + cf_var = self.mocker.MagicMock( spec=iris.fileformats.cf.CFVariable, dtype=dtype, cf_data=cf_data, @@ -46,15 +45,15 @@ def _make(self, chunksizes=None, shape=None, dtype="i4", **extra_properties): size=np.prod(shape), **extra_properties, ) - cf_var.__getitem__.return_value = mock.sentinel.real_data_accessed + cf_var.__getitem__.return_value = self.mocker.sentinel.real_data_accessed return cf_var def test_cf_data_type(self): chunks = [1, 12, 100] cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var) - self.assertIsInstance(lazy_data, da.Array) - self.assertIsInstance(da.utils.meta_from_array(lazy_data), np.ma.MaskedArray) + assert isinstance(lazy_data, da.Array) + assert isinstance(da.utils.meta_from_array(lazy_data), np.ma.MaskedArray) def test_cf_data_chunks(self): chunks = [2500, 240, 200] @@ -62,7 +61,7 @@ def test_cf_data_chunks(self): lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] expected_chunks = _optimum_chunksize(chunks, self.shape) - self.assertArrayEqual(lazy_data_chunks, expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, expected_chunks) def test_cf_data_chunk_control(self): # more thorough testing can be found at `test__chunk_control` @@ -72,7 +71,7 @@ def 
test_cf_data_chunk_control(self): lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] expected_chunks = (25, 24, 20) - self.assertArrayEqual(lazy_data_chunks, expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, expected_chunks) def test_cf_data_no_chunks(self): # No chunks means chunks are calculated from the array's shape by @@ -81,7 +80,7 @@ def test_cf_data_no_chunks(self): cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] - self.assertArrayEqual(lazy_data_chunks, self.expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, self.expected_chunks) def test_cf_data_contiguous(self): # Chunks 'contiguous' is equivalent to no chunks. @@ -89,77 +88,73 @@ def test_cf_data_contiguous(self): cf_var = self._make(chunks) lazy_data = _get_cf_var_data(cf_var) lazy_data_chunks = [c[0] for c in lazy_data.chunks] - self.assertArrayEqual(lazy_data_chunks, self.expected_chunks) + _shared_utils.assert_array_equal(lazy_data_chunks, self.expected_chunks) def test_type__1kf8_is_lazy(self): cf_var = self._make(shape=(1000,), dtype="f8") var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert isinstance(var_data, da.Array) - def test_arraytype__1ki2_is_real(self): + def test_arraytype__1ki2_is_real(self, mocker): cf_var = self._make(shape=(1000,), dtype="i2") var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_arraytype__100f8_is_real(self): + def test_arraytype__100f8_is_real(self, mocker): cf_var = self._make(shape=(100,), dtype="f8") var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_vltype__1000str_is_lazy(self): + def test_vltype__1000str_is_lazy(self, mocker): # Variable length string type - 
mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(1000,), dtype=str, datatype=mock_vltype) var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert isinstance(var_data, da.Array) - def test_vltype__1000str_is_real_with_hint(self): + def test_vltype__1000str_is_real_with_hint(self, mocker): # Variable length string type with a hint on the array variable length size - mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(100,), dtype=str, datatype=mock_vltype) with CHUNK_CONTROL.set("DUMMY_VAR", _vl_hint=1): var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_vltype__100str_is_real(self): + def test_vltype__100str_is_real(self, mocker): # Variable length string type - mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(100,), dtype=str, datatype=mock_vltype) var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_vltype__100str_is_lazy_with_hint(self): + def test_vltype__100str_is_lazy_with_hint(self, mocker): # Variable length string type with a hint on the array variable length size - mock_vltype = mock.Mock(spec=VLType, dtype=str, name="varlen string type") + mock_vltype = mocker.Mock(spec=VLType, dtype=str, name="varlen string type") cf_var = self._make(shape=(100,), dtype=str, datatype=mock_vltype) with CHUNK_CONTROL.set("DUMMY_VAR", _vl_hint=50): var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert 
isinstance(var_data, da.Array) - def test_vltype__100f8_is_lazy(self): + def test_vltype__100f8_is_lazy(self, mocker): # Variable length float64 type - mock_vltype = mock.Mock(spec=VLType, dtype="f8", name="varlen float64 type") + mock_vltype = mocker.Mock(spec=VLType, dtype="f8", name="varlen float64 type") cf_var = self._make(shape=(1000,), dtype="f8", datatype=mock_vltype) var_data = _get_cf_var_data(cf_var) - self.assertIsInstance(var_data, da.Array) + assert isinstance(var_data, da.Array) - def test_vltype__100f8_is_real_with_hint(self): + def test_vltype__100f8_is_real_with_hint(self, mocker): # Variable length float64 type with a hint on the array variable length size - mock_vltype = mock.Mock(spec=VLType, dtype="f8", name="varlen float64 type") + mock_vltype = mocker.Mock(spec=VLType, dtype="f8", name="varlen float64 type") cf_var = self._make(shape=(100,), dtype="f8", datatype=mock_vltype) with CHUNK_CONTROL.set("DUMMY_VAR", _vl_hint=2): var_data = _get_cf_var_data(cf_var) - self.assertIs(var_data, mock.sentinel.real_data_accessed) + assert var_data is mocker.sentinel.real_data_accessed - def test_cf_data_emulation(self): + def test_cf_data_emulation(self, mocker): # Check that a variable emulation object passes its real data directly. - emulated_data = mock.Mock() + emulated_data = mocker.Mock() # Make a cf_var with a special extra '_data_array' property. cf_var = self._make(chunksizes=None, _data_array=emulated_data) result = _get_cf_var_data(cf_var) # This should get directly returned. 
- self.assertIs(emulated_data, result) - - -if __name__ == "__main__": - tests.main() + assert emulated_data is result diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py index 5aafeaf0fc..da3ceaf77a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_aux_factory.py @@ -4,53 +4,50 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.netcdf._load_aux_factory` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock +import re import warnings import numpy as np +import pytest from iris.coords import DimCoord from iris.cube import Cube from iris.fileformats.netcdf.loader import _load_aux_factory +from iris.tests.unit.fileformats import MockerMixin from iris.warnings import IrisFactoryCoordNotFoundWarning -class TestAtmosphereHybridSigmaPressureCoordinate(tests.IrisTest): - def setUp(self): +class TestAtmosphereHybridSigmaPressureCoordinate(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" self.requires = dict(formula_type=standard_name) - self.ap = mock.MagicMock(units="units") - self.ps = mock.MagicMock(units="units") - coordinates = [(mock.sentinel.b, "b"), (self.ps, "ps")] + self.ap = mocker.MagicMock(units="units") + self.ps = mocker.MagicMock(units="units") + coordinates = [(mocker.sentinel.b, "b"), (self.ps, "ps")] self.cube_parts = dict(coordinates=coordinates) - self.engine = mock.Mock(requires=self.requires, cube_parts=self.cube_parts) - self.cube = mock.create_autospec(Cube, spec_set=True, instance=True) + self.engine = mocker.Mock(requires=self.requires, cube_parts=self.cube_parts) + 
self.cube = mocker.create_autospec(Cube, spec_set=True, instance=True) # Patch out the check_dependencies functionality. func = "iris.aux_factory.HybridPressureFactory._check_dependencies" - patcher = mock.patch(func) - patcher.start() - self.addCleanup(patcher.stop) + _ = mocker.patch(func) - def test_formula_terms_ap(self): + def test_formula_terms_ap(self, mocker): self.cube_parts["coordinates"].append((self.ap, "ap")) self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 0) + assert self.cube.add_aux_coord.call_count == 0 # Check cube.add_aux_factory method. - self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] - self.assertEqual(factory.delta, self.ap) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == self.ap + assert factory.sigma == mocker.sentinel.b + assert factory.surface_air_pressure == self.ps - def test_formula_terms_a_p0(self): + def test_formula_terms_a_p0(self, mocker): coord_a = DimCoord(np.arange(5), units="1") coord_p0 = DimCoord(10, units="Pa") coord_expected = DimCoord( @@ -63,28 +60,26 @@ def test_formula_terms_a_p0(self): self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") _load_aux_factory(self.engine, self.cube) # Check cube.coord_dims method. - self.assertEqual(self.cube.coord_dims.call_count, 1) + assert self.cube.coord_dims.call_count == 1 args, _ = self.cube.coord_dims.call_args - self.assertEqual(len(args), 1) - self.assertIs(args[0], coord_a) + assert len(args) == 1 + assert args[0] is coord_a # Check cube.add_aux_coord method. 
- self.assertEqual(self.cube.add_aux_coord.call_count, 1) + assert self.cube.add_aux_coord.call_count == 1 args, _ = self.cube.add_aux_coord.call_args - self.assertEqual(len(args), 2) - self.assertEqual(args[0], coord_expected) - self.assertIsInstance(args[1], mock.Mock) + assert len(args) == 2 + assert args[0] == coord_expected + assert isinstance(args[1], mocker.Mock) # Check cube.add_aux_factory method. - self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] - self.assertEqual(factory.delta, coord_expected) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == coord_expected + assert factory.sigma == mocker.sentinel.b + assert factory.surface_air_pressure == self.ps - def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless( - self, - ): + def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless(self, mocker): coord_a = DimCoord(np.arange(5), units="unknown") coord_p0 = DimCoord(10, units="Pa") coord_expected = DimCoord( @@ -97,31 +92,34 @@ def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless( self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") _load_aux_factory(self.engine, self.cube) # Check cube.coord_dims method. - self.assertEqual(self.cube.coord_dims.call_count, 1) + assert self.cube.coord_dims.call_count == 1 args, _ = self.cube.coord_dims.call_args - self.assertEqual(len(args), 1) - self.assertIs(args[0], coord_a) - self.assertEqual("1", args[0].units) + assert len(args) == 1 + assert args[0] is coord_a + assert "1" == args[0].units # Check cube.add_aux_coord method. 
- self.assertEqual(self.cube.add_aux_coord.call_count, 1) + assert self.cube.add_aux_coord.call_count == 1 args, _ = self.cube.add_aux_coord.call_args - self.assertEqual(len(args), 2) - self.assertEqual(args[0], coord_expected) - self.assertIsInstance(args[1], mock.Mock) + assert len(args) == 2 + assert args[0] == coord_expected + assert isinstance(args[1], mocker.Mock) # Check cube.add_aux_factory method. - self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] - self.assertEqual(factory.delta, coord_expected) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == coord_expected + assert factory.sigma == mocker.sentinel.b + assert factory.surface_air_pressure == self.ps def test_formula_terms_p0_non_scalar(self): coord_p0 = DimCoord(np.arange(5)) + msg = re.escape( + "Expecting None to be a scalar reference pressure coordinate, got shape (5,)" + ) self.cube_parts["coordinates"].append((coord_p0, "p0")) self.requires["formula_terms"] = dict(p0="p0") - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match=msg): _load_aux_factory(self.engine, self.cube) def test_formula_terms_p0_bounded(self): @@ -132,34 +130,34 @@ def test_formula_terms_p0_bounded(self): with warnings.catch_warnings(record=True) as warn: warnings.simplefilter("always") _load_aux_factory(self.engine, self.cube) - self.assertEqual(len(warn), 1) + assert len(warn) == 1 msg = ( "Ignoring atmosphere hybrid sigma pressure scalar " "coordinate {!r} bounds.".format(coord_p0.name()) ) - self.assertEqual(msg, str(warn[0].message)) + assert msg == str(warn[0].message) def _check_no_delta(self): # Check cube.add_aux_coord method. 
- self.assertEqual(self.cube.add_aux_coord.call_count, 0) + assert self.cube.add_aux_coord.call_count == 0 # Check cube.add_aux_factory method. - self.assertEqual(self.cube.add_aux_factory.call_count, 1) + assert self.cube.add_aux_factory.call_count == 1 args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) + assert len(args) == 1 factory = args[0] # Check that the factory has no delta term - self.assertEqual(factory.delta, None) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) + assert factory.delta == None + assert factory.sigma == self.mocker.sentinel.b + assert factory.surface_air_pressure == self.ps def test_formula_terms_ap_missing_coords(self): self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") - with mock.patch("warnings.warn") as warn: + with pytest.warns( + IrisFactoryCoordNotFoundWarning, + match="Unable to find coordinate for variable 'ap'", + ) as warn: _load_aux_factory(self.engine, self.cube) - warn.assert_called_once_with( - "Unable to find coordinate for variable 'ap'", - category=IrisFactoryCoordNotFoundWarning, - ) + assert len(warn) == 1 self._check_no_delta() def test_formula_terms_no_delta_terms(self): @@ -180,7 +178,3 @@ def test_formula_terms_no_a_term(self): self.requires["formula_terms"] = dict(a="p0", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) self._check_no_delta() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py index ad6017c4be..cc6e03a2cc 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__load_cube.py @@ -4,18 +4,14 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest from iris.coords import DimCoord import iris.fileformats.cf from iris.fileformats.netcdf.loader import _load_cube from iris.loading import LOAD_PROBLEMS +from iris.tests.unit.fileformats import MockerMixin class NoStr: @@ -23,7 +19,7 @@ def __str__(self): raise RuntimeError("No string representation") -class TestCoordAttributes(tests.IrisTest): +class TestCoordAttributes(MockerMixin): @staticmethod def _patcher(engine, cf, cf_group): coordinates = [] @@ -32,19 +28,18 @@ def _patcher(engine, cf, cf_group): coordinates.append((coord, coord.name())) engine.cube_parts["coordinates"] = coordinates - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self, mocker): this = "iris.fileformats.netcdf.loader._assert_case_specific_facts" - patch = mock.patch(this, side_effect=self._patcher) - patch.start() - self.addCleanup(patch.stop) - self.engine = mock.Mock() + _ = mocker.patch(this, side_effect=self._patcher) + self.engine = mocker.Mock() self.filename = "DUMMY" - self.flag_masks = mock.sentinel.flag_masks - self.flag_meanings = mock.sentinel.flag_meanings - self.flag_values = mock.sentinel.flag_values - self.valid_range = mock.sentinel.valid_range - self.valid_min = mock.sentinel.valid_min - self.valid_max = mock.sentinel.valid_max + self.flag_masks = mocker.sentinel.flag_masks + self.flag_meanings = mocker.sentinel.flag_meanings + self.flag_values = mocker.sentinel.flag_values + self.valid_range = mocker.sentinel.valid_range + self.valid_min = mocker.sentinel.valid_min + self.valid_max = mocker.sentinel.valid_max def _make(self, names, attrs): coords = [DimCoord(i, long_name=name) for i, name in enumerate(names)] @@ -52,13 +47,13 @@ def _make(self, names, attrs): cf_group = {} for 
name, cf_attrs in zip(names, attrs): - cf_attrs_unused = mock.Mock(return_value=cf_attrs) - cf_group[name] = mock.Mock(cf_attrs_unused=cf_attrs_unused) - cf = mock.Mock(cf_group=cf_group) + cf_attrs_unused = self.mocker.Mock(return_value=cf_attrs) + cf_group[name] = self.mocker.Mock(cf_attrs_unused=cf_attrs_unused) + cf = self.mocker.Mock(cf_group=cf_group) - cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=shape) - cf_var = mock.MagicMock( + cf_data = self.mocker.Mock(_FillValue=None) + cf_data.chunking = self.mocker.MagicMock(return_value=shape) + cf_var = self.mocker.MagicMock( spec=iris.fileformats.cf.CFVariable, dtype=np.dtype("i4"), cf_data=cf_data, @@ -80,11 +75,11 @@ def test_flag_pass_thru(self): attrs = [[(attr, value)]] cf, cf_var = self._make(names, attrs) cube = _load_cube(self.engine, cf, cf_var, self.filename) - self.assertEqual(len(cube.coords(name)), 1) + assert len(cube.coords(name)) == 1 coord = cube.coord(name) - self.assertEqual(len(coord.attributes), 1) - self.assertEqual(list(coord.attributes.keys()), [attr]) - self.assertEqual(list(coord.attributes.values()), [value]) + assert len(coord.attributes) == 1 + assert list(coord.attributes.keys()) == [attr] + assert list(coord.attributes.values()) == [value] def test_flag_pass_thru_multi(self): names = ["masks", "meanings", "values"] @@ -101,8 +96,8 @@ def test_flag_pass_thru_multi(self): ] cf, cf_var = self._make(names, attrs) cube = _load_cube(self.engine, cf, cf_var, self.filename) - self.assertEqual(len(cube.coords()), 3) - self.assertEqual(set([c.name() for c in cube.coords()]), set(names)) + assert len(cube.coords()) == 3 + assert set([c.name() for c in cube.coords()]) == set(names) expected = [ attrs[0], [attrs[1][0]], @@ -113,7 +108,7 @@ def test_flag_pass_thru_multi(self): ] for name, expect in zip(names, expected): attributes = cube.coord(name).attributes - self.assertEqual(set(attributes.items()), set(expect)) + assert set(attributes.items()) == 
set(expect) def test_load_problems(self): key_and_val = (NoStr(), "wibble") @@ -121,43 +116,40 @@ def test_load_problems(self): cf, cf_var = self._make(["foo"], [[key_and_val]]) _ = _load_cube(self.engine, cf, cf_var, self.filename) load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "No string representation", "".join(load_problem.stack_trace.format()) - ) + assert "No string representation" in "".join(load_problem.stack_trace.format()) destination = load_problem.destination - self.assertIs(destination.iris_class, DimCoord) + assert destination.iris_class is DimCoord # Note: cannot test destination.identifier without large increase in # complexity. Rely on TestCubeAttributes.test_load_problems for this. -class TestCubeAttributes(tests.IrisTest): - def setUp(self): +class TestCubeAttributes(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): this = "iris.fileformats.netcdf.loader._assert_case_specific_facts" - patch = mock.patch(this) - patch.start() - self.addCleanup(patch.stop) - self.engine = mock.Mock() + _ = mocker.patch(this) + self.engine = mocker.Mock() self.cf = None self.filename = "DUMMY" - self.flag_masks = mock.sentinel.flag_masks - self.flag_meanings = mock.sentinel.flag_meanings - self.flag_values = mock.sentinel.flag_values - self.valid_range = mock.sentinel.valid_range - self.valid_min = mock.sentinel.valid_min - self.valid_max = mock.sentinel.valid_max + self.flag_masks = mocker.sentinel.flag_masks + self.flag_meanings = mocker.sentinel.flag_meanings + self.flag_values = mocker.sentinel.flag_values + self.valid_range = mocker.sentinel.valid_range + self.valid_min = mocker.sentinel.valid_min + self.valid_max = mocker.sentinel.valid_max def _make(self, attrs): shape = (1,) - cf_attrs_unused = mock.Mock(return_value=attrs) - cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=shape) - cf_var = mock.MagicMock( + cf_attrs_unused = self.mocker.Mock(return_value=attrs) + cf_data = 
self.mocker.Mock(_FillValue=None) + cf_data.chunking = self.mocker.MagicMock(return_value=shape) + cf_var = self.mocker.MagicMock( spec=iris.fileformats.cf.CFVariable, dtype=np.dtype("i4"), cf_data=cf_data, cf_name="DUMMY_VAR", filename="DUMMY", - cf_group=mock.Mock(), + cf_group=self.mocker.Mock(), cf_attrs_unused=cf_attrs_unused, shape=shape, size=np.prod(shape), @@ -173,9 +165,9 @@ def test_flag_pass_thru(self): for key, value in attrs: cf_var = self._make([(key, value)]) cube = _load_cube(self.engine, self.cf, cf_var, self.filename) - self.assertEqual(len(cube.attributes), 1) - self.assertEqual(list(cube.attributes.keys()), [key]) - self.assertEqual(list(cube.attributes.values()), [value]) + assert len(cube.attributes) == 1 + assert list(cube.attributes.keys()) == [key] + assert list(cube.attributes.values()) == [value] def test_flag_pass_thru_multi(self): attrs = [ @@ -195,8 +187,8 @@ def test_flag_pass_thru_multi(self): expected = set([attrs[ind] for ind in [0, 1, 2, 4, 6, 7, 8]]) cf_var = self._make(attrs) cube = _load_cube(self.engine, self.cf, cf_var, self.filename) - self.assertEqual(len(cube.attributes), len(expected)) - self.assertEqual(set(cube.attributes.items()), expected) + assert len(cube.attributes) == len(expected) + assert set(cube.attributes.items()) == expected def test_load_problems(self): key_and_val = (NoStr(), "wibble") @@ -204,13 +196,7 @@ def test_load_problems(self): cf_var = self._make([key_and_val]) _ = _load_cube(self.engine, self.cf, cf_var, self.filename) load_problem = LOAD_PROBLEMS.problems[-1] - self.assertIn( - "No string representation", "".join(load_problem.stack_trace.format()) - ) + assert "No string representation" in "".join(load_problem.stack_trace.format()) destination = load_problem.destination - self.assertIs(destination.iris_class, self.engine.cube.__class__) - self.assertEqual(destination.identifier, cf_var.cf_name) - - -if __name__ == "__main__": - tests.main() + assert destination.iris_class is 
self.engine.cube.__class__ + assert destination.identifier == cf_var.cf_name diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py index b95bbd0552..3f386238ea 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test__translate_constraints_to_var_callback.py @@ -7,28 +7,29 @@ """ -from unittest.mock import MagicMock +import pytest import iris from iris.fileformats.cf import CFDataVariable from iris.fileformats.netcdf.loader import _translate_constraints_to_var_callback - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests - - -class Test(tests.IrisTest): - data_variables = [ - CFDataVariable("var1", MagicMock(standard_name="x_wind")), - CFDataVariable("var2", MagicMock(standard_name="y_wind")), - CFDataVariable("var1", MagicMock(long_name="x component of wind")), - CFDataVariable( - "var1", - MagicMock(standard_name="x_wind", long_name="x component of wind"), - ), - CFDataVariable("var1", MagicMock()), - ] +from iris.tests import _shared_utils + + +class Test: + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.data_variables = [ + CFDataVariable("var1", mocker.MagicMock(standard_name="x_wind")), + CFDataVariable("var2", mocker.MagicMock(standard_name="y_wind")), + CFDataVariable("var1", mocker.MagicMock(long_name="x component of wind")), + CFDataVariable( + "var1", + mocker.MagicMock( + standard_name="x_wind", long_name="x component of wind" + ), + ), + CFDataVariable("var1", mocker.MagicMock()), + ] def test_multiple_constraints(self): constrs = [ @@ -37,7 +38,7 @@ def test_multiple_constraints(self): ] callback = _translate_constraints_to_var_callback(constrs) result = [callback(var) for var in self.data_variables] - 
self.assertArrayEqual(result, [True, True, False, True, False]) + _shared_utils.assert_array_equal(result, [True, True, False, True, False]) def test_multiple_constraints_invalid(self): constrs = [ @@ -45,9 +46,9 @@ def test_multiple_constraints_invalid(self): iris.NameConstraint(var_name="var1", STASH="m01s00i024"), ] result = _translate_constraints_to_var_callback(constrs) - self.assertIsNone(result) + assert result is None - def test_multiple_constraints__multiname(self): + def test_multiple_constraints__multiname(self, mocker): # Modify the first constraint to require BOTH var-name and std-name match constrs = [ iris.NameConstraint(standard_name="x_wind", var_name="var1"), @@ -56,51 +57,53 @@ def test_multiple_constraints__multiname(self): callback = _translate_constraints_to_var_callback(constrs) # Add 2 extra vars: one passes both name checks, and the other does not vars = self.data_variables + [ - CFDataVariable("var1", MagicMock(standard_name="x_wind")), - CFDataVariable("var1", MagicMock(standard_name="air_pressure")), + CFDataVariable("var1", mocker.MagicMock(standard_name="x_wind")), + CFDataVariable("var1", mocker.MagicMock(standard_name="air_pressure")), ] result = [callback(var) for var in vars] - self.assertArrayEqual(result, [True, True, False, True, False, True, False]) + _shared_utils.assert_array_equal( + result, [True, True, False, True, False, True, False] + ) - def test_non_NameConstraint(self): + def test_non_name_constraint(self): constr = iris.AttributeConstraint(STASH="m01s00i002") result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) + assert result is None def test_str_constraint(self): result = _translate_constraints_to_var_callback("x_wind") - self.assertIsNone(result) + assert result is None - def test_Constaint_with_name(self): + def test_constaint_with_name(self): constr = iris.Constraint(name="x_wind") result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) + assert result 
is None - def test_NameConstraint_standard_name(self): + def test_name_constraint_standard_name(self): constr = iris.NameConstraint(standard_name="x_wind") callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, False, True, False]) + _shared_utils.assert_array_equal(result, [True, False, False, True, False]) - def test_NameConstraint_long_name(self): + def test_name_constraint_long_name(self): constr = iris.NameConstraint(long_name="x component of wind") callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [False, False, True, True, False]) + _shared_utils.assert_array_equal(result, [False, False, True, True, False]) - def test_NameConstraint_var_name(self): + def test_name_constraint_var_name(self): constr = iris.NameConstraint(var_name="var1") callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, True, True, True]) + _shared_utils.assert_array_equal(result, [True, False, True, True, True]) - def test_NameConstraint_standard_name_var_name(self): + def test_name_constraint_standard_name_var_name(self): constr = iris.NameConstraint(standard_name="x_wind", var_name="var1") callback = _translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, False, True, False]) + _shared_utils.assert_array_equal(result, [True, False, False, True, False]) - def test_NameConstraint_standard_name_long_name_var_name(self): + def test_name_constraint_standard_name_long_name_var_name(self): constr = iris.NameConstraint( standard_name="x_wind", long_name="x component of wind", @@ -108,18 +111,14 @@ def test_NameConstraint_standard_name_long_name_var_name(self): ) callback = 
_translate_constraints_to_var_callback(constr) result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [False, False, False, True, False]) + _shared_utils.assert_array_equal(result, [False, False, False, True, False]) - def test_NameConstraint_with_STASH(self): + def test_name_constraint_with_stash(self): constr = iris.NameConstraint(standard_name="x_wind", STASH="m01s00i024") result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) + assert result is None def test_no_constraints(self): constrs = [] result = _translate_constraints_to_var_callback(constrs) - self.assertIsNone(result) - - -if __name__ == "__main__": - tests.main() + assert result is None diff --git a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py index 5fa37b18ef..5ebbbcc96c 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py +++ b/lib/iris/tests/unit/fileformats/netcdf/loader/test_load_cubes.py @@ -9,14 +9,6 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from pathlib import Path -from shutil import rmtree -import tempfile - from cf_units import as_unit import numpy as np import pytest @@ -27,17 +19,14 @@ from iris.fileformats.netcdf.loader import load_cubes from iris.loading import LOAD_PROBLEMS from iris.mesh import MeshCoord +from iris.tests import _shared_utils from iris.tests.stock.netcdf import ncgen_from_cdl -def setUpModule(): +@pytest.fixture(autouse=True, scope="module") +def _setup(tmp_path_factory): global TMP_DIR - TMP_DIR = Path(tempfile.mkdtemp()) - - -def tearDownModule(): - if TMP_DIR is not None: - rmtree(TMP_DIR) + TMP_DIR = tmp_path_factory.mktemp("temp") def cdl_to_nc(cdl): @@ -47,7 +36,7 @@ def cdl_to_nc(cdl): return str(nc_path) -class Tests(tests.IrisTest): +class Tests: def test_ancillary_variables(self): # Note: using a CDL string as a test data reference, rather than a # binary file. @@ -76,9 +65,9 @@ def test_ancillary_variables(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) + assert len(cubes) == 1 avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) + assert len(avs) == 1 expected = AncillaryVariable( np.ma.array([11.0, 12.0, 13.0]), long_name="refs", @@ -86,7 +75,7 @@ def test_ancillary_variables(self): units="1", attributes={"custom": "extra-attribute"}, ) - self.assertEqual(avs[0], expected) + assert avs[0] == expected def test_status_flags(self): # Note: using a CDL string as a test data reference, rather than a binary file. @@ -115,9 +104,9 @@ def test_status_flags(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) + assert len(cubes) == 1 avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) + assert len(avs) == 1 expected = AncillaryVariable( np.ma.array([1, 1, 2], dtype=np.int8), long_name="qq status_flag", @@ -128,7 +117,7 @@ def test_status_flags(self): "flag_meanings": "a b", }, ) - self.assertEqual(avs[0], expected) + assert avs[0] == expected def test_cell_measures(self): # Note: using a CDL string as a test data reference, rather than a binary file. @@ -162,9 +151,9 @@ def test_cell_measures(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) + assert len(cubes) == 1 cms = cubes[0].cell_measures() - self.assertEqual(len(cms), 1) + assert len(cms) == 1 expected = CellMeasure( np.ma.array([[110.0, 120.0, 130.0], [221.0, 231.0, 241.0]]), measure="area", @@ -173,7 +162,7 @@ def test_cell_measures(self): units="m2", attributes={"custom": "extra-attribute"}, ) - self.assertEqual(cms[0], expected) + assert cms[0] == expected def test_default_units(self): # Note: using a CDL string as a test data reference, rather than a binary file. @@ -206,17 +195,17 @@ def test_default_units(self): # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("y").units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("x").units, as_unit(1)) - self.assertEqual(cubes[0].ancillary_variable("refs").units, as_unit("unknown")) - self.assertEqual(cubes[0].cell_measure("areas").units, as_unit("unknown")) + assert len(cubes) == 1 + assert cubes[0].units == as_unit("unknown") + assert cubes[0].coord("y").units == as_unit("unknown") + assert cubes[0].coord("x").units == as_unit(1) + assert cubes[0].ancillary_variable("refs").units == as_unit("unknown") + assert cubes[0].cell_measure("areas").units == as_unit("unknown") -class TestsMesh(tests.IrisTest): +class TestsMesh: @classmethod - def setUpClass(cls): + def setup_class(cls): cls.ref_cdl = """ netcdf mesh_test { dimensions: @@ -266,14 +255,15 @@ def setUpClass(cls): cls.nc_path = cdl_to_nc(cls.ref_cdl) cls.mesh_cubes = list(load_cubes(cls.nc_path)) - def setUp(self): + @pytest.fixture(autouse=True) + def _setup(self): # Interim measure to allow pytest-style patching in the absence of # full-scale pytest conversion. 
self.monkeypatch = pytest.MonkeyPatch() def test_standard_dims(self): for cube in self.mesh_cubes: - self.assertIsNotNone(cube.coords("levels")) + assert cube.coords("levels") is not None def test_mesh_coord(self): cube = [cube for cube in self.mesh_cubes if cube.var_name == "face_data"][0] @@ -281,21 +271,21 @@ def test_mesh_coord(self): face_y = cube.coord("latitude") for coord in (face_x, face_y): - self.assertIsInstance(coord, MeshCoord) - self.assertEqual("face", coord.location) - self.assertArrayEqual(np.ma.array([0.5]), coord.points) + assert isinstance(coord, MeshCoord) + assert "face" == coord.location + _shared_utils.assert_array_equal(np.ma.array([0.5]), coord.points) - self.assertEqual("x", face_x.axis) - self.assertEqual("y", face_y.axis) - self.assertEqual(face_x.mesh, face_y.mesh) - self.assertArrayEqual(np.ma.array([[0.0, 2.0, 1.0]]), face_x.bounds) - self.assertArrayEqual(np.ma.array([[0.0, 0.0, 1.0]]), face_y.bounds) + assert "x" == face_x.axis + assert "y" == face_y.axis + assert face_x.mesh == face_y.mesh + _shared_utils.assert_array_equal(np.ma.array([[0.0, 2.0, 1.0]]), face_x.bounds) + _shared_utils.assert_array_equal(np.ma.array([[0.0, 0.0, 1.0]]), face_y.bounds) def test_shared_mesh(self): cube_meshes = [cube.coord("latitude").mesh for cube in self.mesh_cubes] - self.assertEqual(cube_meshes[0], cube_meshes[1]) + assert cube_meshes[0] == cube_meshes[1] - def test_missing_mesh(self): + def test_missing_mesh(self, caplog): ref_cdl = self.ref_cdl.replace( 'face_data:mesh = "mesh"', 'face_data:mesh = "mesh2"' ) @@ -305,7 +295,9 @@ def test_missing_mesh(self): _ = list(load_cubes(nc_path)) log_regex = r".*could not be found in file." 
- with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): + with _shared_utils.assert_logs( + caplog, logger=logger, level="DEBUG", msg_regex=log_regex + ): _ = list(load_cubes(nc_path)) def test_mesh_coord_not_built(self): diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py index 0905c3d2a9..b1d2a7f370 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver.py @@ -4,15 +4,9 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :class:`iris.fileformats.netcdf.Saver` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -from types import ModuleType - -import iris.tests as tests # isort:skip - import collections from contextlib import contextmanager -from unittest import mock +from types import ModuleType import numpy as np from numpy import ma @@ -36,11 +30,14 @@ from iris.coords import AncillaryVariable, AuxCoord, DimCoord from iris.cube import Cube from iris.fileformats.netcdf import Saver, _thread_safe_nc +from iris.fileformats.netcdf import _bytecoding_datasets as ds_wrappers +from iris.tests import _shared_utils from iris.tests._shared_utils import assert_CDL import iris.tests.stock as stock +from iris.tests.unit.fileformats import MockerMixin -class Test_write(tests.IrisTest): +class Test_write: # ------------------------------------------------------------------------- # It is not considered necessary to have integration tests for saving # EVERY coordinate system. A subset are tested below. @@ -119,64 +116,64 @@ def _stereo_cube(self, ellipsoid=None, scale_factor=None): cube.add_dim_coord(coord, 1) return cube - def test_transverse_mercator(self): + def test_transverse_mercator(self, request, tmp_path): # Create a Cube with a transverse Mercator coordinate system. 
ellipsoid = GeogCS(6377563.396, 6356256.909) cube = self._transverse_mercator_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_transverse_mercator_no_ellipsoid(self): + def test_transverse_mercator_no_ellipsoid(self, request, tmp_path): # Create a Cube with a transverse Mercator coordinate system. cube = self._transverse_mercator_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_mercator(self): + def test_mercator(self, request, tmp_path): # Create a Cube with a Mercator coordinate system. ellipsoid = GeogCS(6377563.396, 6356256.909) cube = self._mercator_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_stereographic(self): + def test_stereographic(self, request, tmp_path): # Create a Cube with a stereographic coordinate system. ellipsoid = GeogCS(6377563.396, 6356256.909) cube = self._stereo_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_mercator_no_ellipsoid(self): + def test_mercator_no_ellipsoid(self, request, tmp_path): # Create a Cube with a Mercator coordinate system. 
cube = self._mercator_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_stereographic_no_ellipsoid(self): + def test_stereographic_no_ellipsoid(self, request, tmp_path): # Create a Cube with a stereographic coordinate system. cube = self._stereo_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_stereographic_scale_factor(self): + def test_stereographic_scale_factor(self, request, tmp_path): # Create a Cube with a stereographic coordinate system. cube = self._stereo_cube(scale_factor=1.3) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) @staticmethod def _filter_compression_calls(patch, compression_kwargs, mismatch=False): @@ -199,27 +196,27 @@ def _simple_cube(self, dtype): cube.add_dim_coord(coord, 0) return cube - def test_little_endian(self): + def test_little_endian(self, request, tmp_path): # Create a Cube with little-endian data. 
cube = self._simple_cube("f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - result_path = self.result_path("endian", "cdl") - self.assertCDL(nc_path, result_path, flags="") + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + result_path = _shared_utils.result_path(request, "endian", "cdl") + _shared_utils.assert_CDL(request, nc_path, result_path, flags="") - def test_zlib(self): + def test_zlib(self, mocker): cube = self._simple_cube(">f4") - api = self.patch("iris.fileformats.netcdf.saver._thread_safe_nc") + api = mocker.patch("iris.fileformats.netcdf.saver.bytecoding_datasets") # Define mocked default fill values to prevent deprecation warning (#4374). api.default_fillvals = collections.defaultdict(lambda: -99.0) # Mock the apparent dtype of mocked variables, to avoid an error. @@ -230,8 +227,8 @@ def test_zlib(self): # a fill-value report on a non-compliant variable in a non-file (!) 
with Saver("/dummy/path", "NETCDF4", compute=False) as saver: saver.write(cube, zlib=True) - dataset = api.DatasetWrapper.return_value - create_var_call = mock.call( + dataset = api.EncodedDataset.return_value + create_var_call = mocker.call( "air_pressure_anomaly", np.dtype("float32"), ["dim0", "dim1"], @@ -245,9 +242,9 @@ def test_zlib(self): complevel=4, chunksizes=None, ) - self.assertIn(create_var_call, dataset.createVariable.call_args_list) + assert create_var_call in dataset.createVariable.call_args_list - def test_compression(self): + def test_compression(self, mocker, tmp_path): cube = self._simple_cube(">f4") data_dims, shape = range(cube.ndim), cube.shape @@ -261,9 +258,6 @@ def test_compression(self): ) cube.add_ancillary_variable(anc_coord, data_dims=data_dims) - patch = self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" - ) compression_kwargs = { "complevel": 9, "fletcher32": True, @@ -273,14 +267,24 @@ def test_compression(self): with self.temp_filename(suffix=".nc") as nc_path: with Saver(nc_path, "NETCDF4", compute=False) as saver: + tgt = ( + "iris.fileformats.netcdf.saver.bytecoding_datasets" + ".EncodedDataset.createVariable" + ) + patch = mocker.patch( + tgt, + # Use 'wraps' to allow the patched methods to function as normal + # - the patch object just acts as a 'spy' on its calls. 
+ wraps=saver._dataset.createVariable, + ) saver.write(cube, **compression_kwargs) - self.assertEqual(5, patch.call_count) + assert 5 == patch.call_count result = self._filter_compression_calls(patch, compression_kwargs) - self.assertEqual(3, len(result)) - self.assertEqual({cube.name(), aux_coord.name(), anc_coord.name()}, set(result)) + assert 3 == len(result) + assert {cube.name(), aux_coord.name(), anc_coord.name()} == set(result) - def test_non_compression__shape(self): + def test_non_compression__shape(self, mocker, tmp_path): cube = self._simple_cube(">f4") data_dims, shape = (0, 1), cube.shape @@ -294,9 +298,6 @@ def test_non_compression__shape(self): ) cube.add_ancillary_variable(anc_coord, data_dims=data_dims[1]) - patch = self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" - ) compression_kwargs = { "complevel": 9, "fletcher32": True, @@ -306,19 +307,29 @@ def test_non_compression__shape(self): with self.temp_filename(suffix=".nc") as nc_path: with Saver(nc_path, "NETCDF4", compute=False) as saver: + tgt = ( + "iris.fileformats.netcdf.saver.bytecoding_datasets" + ".EncodedDataset.createVariable" + ) + patch = mocker.patch( + tgt, + # Use 'wraps' to allow the patched methods to function as normal + # - the patch object just acts as a 'spy' on its calls. 
+ wraps=saver._dataset.createVariable, + ) saver.write(cube, **compression_kwargs) - self.assertEqual(5, patch.call_count) + assert 5 == patch.call_count result = self._filter_compression_calls( - patch, compression_kwargs, mismatch=True + createvar_spy, compression_kwargs, mismatch=True ) - self.assertEqual(4, len(result)) + assert 4 == len(result) # the aux coord and ancil variable are not compressed due to shape, and # the dim coord and its associated bounds are also not compressed expected = {aux_coord.name(), anc_coord.name(), "dim0", "dim0_bnds"} - self.assertEqual(expected, set(result)) + assert expected == set(result) - def test_non_compression__dtype(self): + def test_non_compression__dtype(self, mocker, tmp_path): cube = self._simple_cube(">f4") data_dims, shape = (0, 1), cube.shape @@ -327,10 +338,6 @@ def test_non_compression__dtype(self): aux_coord = AuxCoord(data, var_name="non_compress_aux", units="1") cube.add_aux_coord(aux_coord, data_dims=data_dims) - patch = self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable" - ) - patch.return_value = mock.MagicMock(dtype=np.dtype("S1")) compression_kwargs = { "complevel": 9, "fletcher32": True, @@ -340,19 +347,29 @@ def test_non_compression__dtype(self): with self.temp_filename(suffix=".nc") as nc_path: with Saver(nc_path, "NETCDF4", compute=False) as saver: + tgt = ( + "iris.fileformats.netcdf.saver.bytecoding_datasets" + ".EncodedDataset.createVariable" + ) + patch = self.patch( + tgt, + # Use 'wraps' to allow the patched methods to function as normal + # - the patch object just acts as a 'spy' on its calls. 
+ wraps=saver._dataset.createVariable, + ) saver.write(cube, **compression_kwargs) - self.assertEqual(4, patch.call_count) + assert 4 == patch.call_count result = self._filter_compression_calls( - patch, compression_kwargs, mismatch=True + createvar_spy, compression_kwargs, mismatch=True ) - self.assertEqual(3, len(result)) + assert 3 == len(result) # the aux coord is not compressed due to its string dtype, and # the dim coord and its associated bounds are also not compressed expected = {aux_coord.name(), "dim0", "dim0_bnds"} - self.assertEqual(expected, set(result)) + assert expected == set(result) - def test_least_significant_digit(self): + def test_least_significant_digit(self, tmp_path): cube = Cube( self.array_lib.array([1.23, 4.56, 7.89]), standard_name="surface_temperature", @@ -360,43 +377,44 @@ def test_least_significant_digit(self): var_name="temp", units="K", ) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, least_significant_digit=1) - cube_saved = iris.load_cube(nc_path) - self.assertEqual(cube_saved.attributes["least_significant_digit"], 1) - self.assertFalse(np.all(cube.data == cube_saved.data)) - self.assertArrayAllClose(cube.data, cube_saved.data, 0.1) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, least_significant_digit=1) + cube_saved = iris.load_cube(nc_path) + assert cube_saved.attributes["least_significant_digit"] == 1 + assert not np.all(cube.data == cube_saved.data) + _shared_utils.assert_array_all_close(cube.data, cube_saved.data, 0.1) - def test_default_unlimited_dimensions(self): + def test_default_unlimited_dimensions(self, tmp_path): # Default is no unlimited dimensions. 
cube = self._simple_cube(">f4") with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube) + ds = ds_wrappers.EncodedDataset(nc_path) ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertFalse(ds.dimensions["dim0"].isunlimited()) - self.assertFalse(ds.dimensions["dim1"].isunlimited()) + assert not ds.dimensions["dim0"].isunlimited() + assert not ds.dimensions["dim1"].isunlimited() ds.close() - def test_no_unlimited_dimensions(self): + def test_no_unlimited_dimensions(self, tmp_path): cube = self._simple_cube(">f4") with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=None) - ds = _thread_safe_nc.DatasetWrapper(nc_path) + ds = ds_wrappers.EncodedDataset(nc_path) for dim in ds.dimensions.values(): - self.assertFalse(dim.isunlimited()) + assert not dim.isunlimited() ds.close() - def test_invalid_unlimited_dimensions(self): + def test_invalid_unlimited_dimensions(self, tmp_path): cube = self._simple_cube(">f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - # should not raise an exception - saver.write(cube, unlimited_dimensions=["not_found"]) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + # should not raise an exception + saver.write(cube, unlimited_dimensions=["not_found"]) - def test_custom_unlimited_dimensions(self): + def test_custom_unlimited_dimensions(self, tmp_path): cube = self._transverse_mercator_cube() unlimited_dimensions = [ "projection_y_coordinate", @@ -406,39 +424,40 @@ def test_custom_unlimited_dimensions(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=unlimited_dimensions) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in unlimited_dimensions: - self.assertTrue(ds.dimensions[dim].isunlimited()) - ds.close() - # test coordinate arguments + ds = 
_thread_safe_nc.DatasetWrapper(nc_path) + for dim in unlimited_dimensions: + assert ds.dimensions[dim].isunlimited() + ds.close() + # test coordinate arguments with self.temp_filename(".nc") as nc_path: coords = [cube.coord(dim) for dim in unlimited_dimensions] with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=coords) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - for dim in unlimited_dimensions: - self.assertTrue(ds.dimensions[dim].isunlimited()) - ds.close() + ds = ds_wrappers.EncodedDataset(nc_path) + for dim in unlimited_dimensions: + assert ds.dimensions[dim].isunlimited() + ds.close() - def test_reserved_attributes(self): + + def test_reserved_attributes(self, tmp_path): cube = self._simple_cube(">f4") cube.attributes["dimensions"] = "something something_else" with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("dimensions") - ds.close() - self.assertEqual(res, "something something_else") + ds = ds_wrappers.EncodedDataset(nc_path) + res = ds.getncattr("dimensions") + ds.close() + assert res == "something something_else" - def test_with_climatology(self): + def test_with_climatology(self, request, tmp_path): cube = stock.climatology_3d() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube) + _shared_utils.assert_CDL(request, nc_path) - def test_dimensional_to_scalar(self): + def test_dimensional_to_scalar(self, tmp_path): # Bounds for 1 point are still in a 2D array. 
scalar_bounds = self.array_lib.arange(2).reshape(1, 2) scalar_point = scalar_bounds.mean() @@ -448,14 +467,14 @@ def test_dimensional_to_scalar(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - # Confirm that the only dimension is the one denoting the number - # of bounds - have successfully saved the 2D bounds array into 1D. - self.assertEqual(["bnds"], list(ds.dimensions.keys())) - ds.close() + ds = ds_wrappers.EncodedDataset(nc_path) + # Confirm that the only dimension is the one denoting the number + # of bounds - have successfully saved the 2D bounds array into 1D. + assert ["bnds"] == list(ds.dimensions.keys()) + ds.close() -class Test__create_cf_bounds(tests.IrisTest): +class Test__create_cf_bounds(MockerMixin): # Method is substituted in test_Saver__lazy. @staticmethod def climatology_3d(): @@ -478,34 +497,34 @@ def _check_bounds_setting(self, climatological=False): boundsvar_name = "time_" + varname_extra # Set up arguments for testing _create_cf_bounds. - saver = mock.MagicMock(spec=Saver) + saver = self.mocker.MagicMock(spec=Saver) # NOTE: 'saver' must have spec=Saver to fake isinstance(save, Saver), # so it can pass as 'self' in the call to _create_cf_cbounds. # Mock a '_dataset' property; not automatic because 'spec=Saver'. - saver._dataset = mock.MagicMock() + saver._dataset = self.mocker.MagicMock() # Mock the '_ensure_valid_dtype' method to return an object with a # suitable 'shape' and 'dtype'. - saver._ensure_valid_dtype.return_value = mock.Mock( + saver._ensure_valid_dtype.return_value = self.mocker.Mock( shape=coord.bounds.shape, dtype=coord.bounds.dtype ) - var = mock.MagicMock(spec=_thread_safe_nc.VariableWrapper) + var = self.mocker.MagicMock(spec=ds_wrappers.EncodedVariable) # Make the main call. Saver._create_cf_bounds(saver, coord, var, "time") # Test the call of _setncattr in _create_cf_bounds. 
- setncattr_call = mock.call( + setncattr_call = self.mocker.call( property_name, boundsvar_name.encode(encoding="ascii") ) - self.assertEqual(setncattr_call, var.setncattr.call_args) + assert setncattr_call == var.setncattr.call_args # Test the call of createVariable in _create_cf_bounds. dataset = saver._dataset expected_dimensions = var.dimensions + ("bnds",) - create_var_call = mock.call( + create_var_call = self.mocker.call( boundsvar_name, coord.bounds.dtype, expected_dimensions ) - self.assertEqual(create_var_call, dataset.createVariable.call_args) + assert create_var_call == dataset.createVariable.call_args def test_set_bounds_default(self): self._check_bounds_setting(climatological=False) @@ -514,58 +533,59 @@ def test_set_bounds_climatology(self): self._check_bounds_setting(climatological=True) -class Test_write__valid_x_cube_attributes(tests.IrisTest): +class Test_write__valid_x_cube_attributes: """Testing valid_range, valid_min and valid_max attributes.""" # Attribute is substituted in test_Saver__lazy. 
array_lib: ModuleType = np - def test_valid_range_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_range_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") vrange = self.array_lib.array([1, 2], dtype="int32") cube.attributes["valid_range"] = vrange - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.valid_range, vrange) - ds.close() + nc_path = tmp_path / "temp.nc" + with Saver(nc_path, "NETCDF4") as saver: + saver.write(cube, unlimited_dimensions=[]) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.valid_range, vrange) + ds.close() - def test_valid_min_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_min_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.attributes["valid_min"] = 1 with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.valid_min, 1) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.valid_min, 1) ds.close() - def test_valid_max_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_max_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.attributes["valid_max"] = 2 with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) + ds = ds_wrappers.EncodedDataset(nc_path) self.assertArrayEqual(ds.valid_max, 2) + _shared_utils.assert_array_equal(ds.valid_max, 2) ds.close() -class Test_write__valid_x_coord_attributes(tests.IrisTest): +class Test_write__valid_x_coord_attributes: """Testing valid_range, valid_min and 
valid_max attributes.""" # Attribute is substituted in test_Saver__lazy. array_lib: ModuleType = np - def test_valid_range_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_range_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") vrange = self.array_lib.array([1, 2], dtype="int32") @@ -573,36 +593,36 @@ def test_valid_range_saved(self): with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_range, vrange) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_range, vrange) ds.close() - def test_valid_min_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_min_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.coord(axis="x").attributes["valid_min"] = 1 with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_min, 1) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_min, 1) ds.close() - def test_valid_max_saved(self): - cube = tests.stock.lat_lon_cube() + def test_valid_max_saved(self, tmp_path): + cube = stock.lat_lon_cube() cube.data = cube.data.astype("int32") cube.coord(axis="x").attributes["valid_max"] = 2 with self.temp_filename(".nc") as nc_path: with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, unlimited_dimensions=[]) - ds = _thread_safe_nc.DatasetWrapper(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_max, 2) + ds = ds_wrappers.EncodedDataset(nc_path) + _shared_utils.assert_array_equal(ds.variables["longitude"].valid_max, 2) ds.close() -class 
Test_write_fill_value(tests.IrisTest): +class Test_write_fill_value: # Attribute is substituted in test_Saver__lazy. array_lib: ModuleType = np @@ -622,14 +642,16 @@ def _make_cube(self, dtype, masked_value=None, masked_index=None): dim_coords_and_dims=[(lat, 0), (lon, 1)], ) - @contextmanager - def _netCDF_var(self, cube, **kwargs): - # Get the netCDF4 Variable for a cube from a temp file - standard_name = cube.standard_name - with self.temp_filename(".nc") as nc_path: + @pytest.fixture + def _netCDF_var(self, tmp_path): + @contextmanager + def netCDF_var(cube, **kwargs): + # Get the netCDF4 Variable for a cube from a temp file + standard_name = cube.standard_name + nc_path = tmp_path / "temp.nc" with Saver(nc_path, "NETCDF4") as saver: saver.write(cube, **kwargs) - ds = _thread_safe_nc.DatasetWrapper(nc_path) + ds = ds_wrappers.EncodedDataset(nc_path) (var,) = [ var for var in ds.variables.values() @@ -637,88 +659,87 @@ def _netCDF_var(self, cube, **kwargs): ] yield var - def test_fill_value(self): + return netCDF_var + + def test_fill_value(self, _netCDF_var): # Test that a passed fill value is saved as a _FillValue attribute. cube = self._make_cube(">f4") fill_value = 12345.0 - with self._netCDF_var(cube, fill_value=fill_value) as var: - self.assertEqual(fill_value, var._FillValue) + with _netCDF_var(cube, fill_value=fill_value) as var: + assert fill_value == var._FillValue - def test_default_fill_value(self): + def test_default_fill_value(self, _netCDF_var): # Test that if no fill value is passed then there is no _FillValue. # attribute. cube = self._make_cube(">f4") - with self._netCDF_var(cube) as var: - self.assertNotIn("_FillValue", var.ncattrs()) + with _netCDF_var(cube) as var: + assert "_FillValue" not in var.ncattrs() - def test_mask_fill_value(self): + def test_mask_fill_value(self, _netCDF_var): # Test that masked data saves correctly when given a fill value. 
index = (1, 1) fill_value = 12345.0 cube = self._make_cube(">f4", masked_index=index) - with self._netCDF_var(cube, fill_value=fill_value) as var: - self.assertEqual(fill_value, var._FillValue) - self.assertTrue(var[index].mask) + with _netCDF_var(cube, fill_value=fill_value) as var: + assert fill_value == var._FillValue + assert var[index].mask - def test_mask_default_fill_value(self): + def test_mask_default_fill_value(self, _netCDF_var): # Test that masked data saves correctly using the default fill value. index = (1, 1) cube = self._make_cube(">f4", masked_index=index) - with self._netCDF_var(cube) as var: - self.assertNotIn("_FillValue", var.ncattrs()) - self.assertTrue(var[index].mask) + with _netCDF_var(cube) as var: + assert "_FillValue" not in var.ncattrs() + assert var[index].mask -class Test_cf_valid_var_name(tests.IrisTest): +class Test_cf_valid_var_name: def test_no_replacement(self): - self.assertEqual(Saver.cf_valid_var_name("valid_Nam3"), "valid_Nam3") + assert Saver.cf_valid_var_name("valid_Nam3") == "valid_Nam3" def test_special_chars(self): - self.assertEqual(Saver.cf_valid_var_name("inv?alid"), "inv_alid") + assert Saver.cf_valid_var_name("inv?alid") == "inv_alid" def test_leading_underscore(self): - self.assertEqual(Saver.cf_valid_var_name("_invalid"), "var__invalid") + assert Saver.cf_valid_var_name("_invalid") == "var__invalid" def test_leading_number(self): - self.assertEqual(Saver.cf_valid_var_name("2invalid"), "var_2invalid") + assert Saver.cf_valid_var_name("2invalid") == "var_2invalid" def test_leading_invalid(self): - self.assertEqual(Saver.cf_valid_var_name("?invalid"), "var__invalid") + assert Saver.cf_valid_var_name("?invalid") == "var__invalid" def test_no_hyphen(self): # CF explicitly prohibits hyphen, even though it is fine in NetCDF. 
- self.assertEqual(Saver.cf_valid_var_name("valid-netcdf"), "valid_netcdf") + assert Saver.cf_valid_var_name("valid-netcdf") == "valid_netcdf" class _Common__check_attribute_compliance: # Attribute is substituted in test_Saver__lazy. array_lib: ModuleType = np - def setUp(self): - self.container = mock.Mock(name="container", attributes={}) + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self.container = mocker.Mock(name="container", attributes={}) self.data_dtype = np.dtype("int32") # We need to create mock datasets which look like they are closed. - dataset_class = mock.Mock( - return_value=mock.Mock( + dataset_class = mocker.Mock( + return_value=mocker.Mock( # Mock dataset : the isopen() call should return 0. - isopen=mock.Mock(return_value=0) + isopen=mocker.Mock(return_value=0) ) ) - patch = mock.patch( - "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + _ = mocker.patch( + "iris.fileformats.netcdf._bytecoding_datasets.EncodedDataset", dataset_class, ) - _ = patch.start() - self.addCleanup(patch.stop) def set_attribute(self, value): self.container.attributes[self.attribute] = value - def assertAttribute(self, value): - self.assertEqual( - np.asarray(self.container.attributes[self.attribute]).dtype, value - ) + def assert_attribute(self, value): + assert np.asarray(self.container.attributes[self.attribute]).dtype == value def check_attribute_compliance_call(self, value, file_type="NETCDF4"): self.set_attribute(value) @@ -728,9 +749,7 @@ def check_attribute_compliance_call(self, value, file_type="NETCDF4"): saver.check_attribute_compliance(self.container, self.data_dtype) -class Test_check_attribute_compliance__valid_range( - _Common__check_attribute_compliance, tests.IrisTest -): +class Test_check_attribute_compliance__valid_range(_Common__check_attribute_compliance): @property def attribute(self): return "valid_range" @@ -738,18 +757,18 @@ def attribute(self): def test_valid_range_type_coerce(self): value = self.array_lib.array([1, 2], 
dtype="float") self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) + self.assert_attribute(self.data_dtype) def test_valid_range_unsigned_int8_data_signed_range(self): self.data_dtype = np.dtype("uint8") value = self.array_lib.array([1, 2], dtype="int8") self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) + self.assert_attribute(value.dtype) def test_valid_range_cannot_coerce(self): value = self.array_lib.array([1.5, 2.5], dtype="float64") msg = '"valid_range" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): self.check_attribute_compliance_call(value) def test_valid_range_not_numpy_array(self): @@ -757,18 +776,16 @@ def test_valid_range_not_numpy_array(self): self.data_dtype = np.dtype("int8") value = [1, 2] self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) + self.assert_attribute(np.int64) def test_uncastable_dtype(self): self.data_dtype = np.dtype("int64") value = [0, np.iinfo(self.data_dtype).max] - with self.assertRaisesRegex(ValueError, "cannot be safely cast"): + with pytest.raises(ValueError, match="cannot be safely cast"): self.check_attribute_compliance_call(value, file_type="NETCDF4_CLASSIC") -class Test_check_attribute_compliance__valid_min( - _Common__check_attribute_compliance, tests.IrisTest -): +class Test_check_attribute_compliance__valid_min(_Common__check_attribute_compliance): @property def attribute(self): return "valid_min" @@ -776,18 +793,18 @@ def attribute(self): def test_valid_range_type_coerce(self): value = self.array_lib.array(1, dtype="float") self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) + self.assert_attribute(self.data_dtype) def test_valid_range_unsigned_int8_data_signed_range(self): self.data_dtype = np.dtype("uint8") value = self.array_lib.array(1, dtype="int8") self.check_attribute_compliance_call(value) - 
self.assertAttribute(value.dtype) + self.assert_attribute(value.dtype) def test_valid_range_cannot_coerce(self): value = self.array_lib.array(1.5, dtype="float64") msg = '"valid_min" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): self.check_attribute_compliance_call(value) def test_valid_range_not_numpy_array(self): @@ -795,18 +812,16 @@ def test_valid_range_not_numpy_array(self): self.data_dtype = np.dtype("int8") value = 1 self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) + self.assert_attribute(np.int64) def test_uncastable_dtype(self): self.data_dtype = np.dtype("int64") value = np.iinfo(self.data_dtype).min - with self.assertRaisesRegex(ValueError, "cannot be safely cast"): + with pytest.raises(ValueError, match="cannot be safely cast"): self.check_attribute_compliance_call(value, file_type="NETCDF4_CLASSIC") -class Test_check_attribute_compliance__valid_max( - _Common__check_attribute_compliance, tests.IrisTest -): +class Test_check_attribute_compliance__valid_max(_Common__check_attribute_compliance): @property def attribute(self): return "valid_max" @@ -814,18 +829,18 @@ def attribute(self): def test_valid_range_type_coerce(self): value = self.array_lib.array(2, dtype="float") self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) + self.assert_attribute(self.data_dtype) def test_valid_range_unsigned_int8_data_signed_range(self): self.data_dtype = np.dtype("uint8") value = self.array_lib.array(2, dtype="int8") self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) + self.assert_attribute(value.dtype) def test_valid_range_cannot_coerce(self): value = self.array_lib.array(2.5, dtype="float64") msg = '"valid_max" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): self.check_attribute_compliance_call(value) def 
test_valid_range_not_numpy_array(self): @@ -833,17 +848,17 @@ def test_valid_range_not_numpy_array(self): self.data_dtype = np.dtype("int8") value = 2 self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) + self.assert_attribute(np.int64) def test_uncastable_dtype(self): self.data_dtype = np.dtype("int64") value = np.iinfo(self.data_dtype).max - with self.assertRaisesRegex(ValueError, "cannot be safely cast"): + with pytest.raises(ValueError, match="cannot be safely cast"): self.check_attribute_compliance_call(value, file_type="NETCDF4_CLASSIC") class Test_check_attribute_compliance__exception_handling( - _Common__check_attribute_compliance, tests.IrisTest + _Common__check_attribute_compliance ): def test_valid_range_and_valid_min_valid_max_provided(self): # Conflicting attributes should raise a suitable exception. @@ -852,17 +867,17 @@ def test_valid_range_and_valid_min_valid_max_provided(self): self.container.attributes["valid_min"] = [1] msg = 'Both "valid_range" and "valid_min"' with Saver("nonexistent test file", "NETCDF4") as saver: - with self.assertRaisesRegex(ValueError, msg): + with pytest.raises(ValueError, match=msg): saver.check_attribute_compliance(self.container, self.data_dtype) -class Test__cf_coord_identity(tests.IrisTest): +class Test__cf_coord_identity: def check_call(self, coord_name, coord_system, units, expected_units): coord = iris.coords.DimCoord( [30, 45], coord_name, units=units, coord_system=coord_system ) result = Saver._cf_coord_standardised_units(coord) - self.assertEqual(result, expected_units) + assert result == expected_units def test_geogcs_latitude(self): crs = iris.coord_systems.GeogCS(60, 30) @@ -1006,7 +1021,7 @@ def test_no_cs(self, transverse_mercator_cube_multi_cs, tmp_path, request): assert_CDL(request, nc_path) -class Test_create_cf_grid_mapping: +class Test_create_cf_grid_mapping(MockerMixin): """Tests correct generation of CF grid_mapping variable attributes. 
Note: The first 3 tests are run with the "extended grid" mapping @@ -1033,15 +1048,15 @@ def _grid_mapping_variable(self, coord_system): """ cube = self._cube_with_cs(coord_system) - class NCMock(mock.Mock): + class NCMock(self.mocker.Mock): def setncattr(self, name, attr): setattr(self, name, attr) # Calls the actual NetCDF saver with appropriate mocking, returning # the grid variable that gets created. grid_variable = NCMock(name="NetCDFVariable") - create_var_fn = mock.Mock(side_effect=[grid_variable]) - dataset = mock.Mock(variables=[], createVariable=create_var_fn) + create_var_fn = self.mocker.Mock(side_effect=[grid_variable]) + dataset = self.mocker.Mock(variables=[], createVariable=create_var_fn) variable = NCMock() saver = Saver(dataset, "NETCDF4", compute=False) @@ -1443,7 +1458,3 @@ def test_oblique_cs(self): def extended_grid_mapping(request): """Fixture for enabling/disabling extended grid mapping.""" return request.param - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py index 5b04b3b042..a8175da116 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy.py @@ -4,27 +4,33 @@ # See LICENSE in the root of the repository for full licensing details. """Mirror of :mod:`iris.tests.unit.fileformats.netcdf.test_Saver`, but with lazy arrays.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
from types import ModuleType -import iris.tests as tests # isort:skip - from dask import array as da +import pytest from iris.coords import AuxCoord from iris.fileformats.netcdf import Saver -from iris.tests import stock +from iris.tests import _shared_utils, stock from iris.tests.unit.fileformats.netcdf.saver import test_Saver -class LazyMixin(tests.IrisTest): +class LazyMixin: array_lib: ModuleType = da - def result_path(self, basename=None, ext=""): - # Precisely mirroring the tests in test_Saver, so use those CDL's. - original = super().result_path(basename, ext) - return original.replace("Saver__lazy", "Saver") + @pytest.fixture(autouse=True) + def _setup_lazy_mixin(self, monkeypatch): + rp = _shared_utils.result_path + + def _result_path(request, basename=None, ext=""): + # Precisely mirroring the tests in test_Saver, so use those CDL's. + original = rp(request, basename, ext) + return original.replace("Saver__lazy", "Saver") + + monkeypatch.setattr( + "iris.tests._shared_utils.result_path", # IMPORTANT: patch where it is USED + _result_path, + ) class Test_write(LazyMixin, test_Saver.Test_write): @@ -81,39 +87,40 @@ class Test_check_attribute_compliance__exception_handling( pass -class TestStreamed(tests.IrisTest): - def setUp(self): +class TestStreamed: + @pytest.fixture(autouse=True) + def _setup(self, mocker): self.cube = stock.simple_2d() - self.store_watch = self.patch("dask.array.store") + self.store_watch = mocker.patch("dask.array.store") - def save_common(self, cube_to_save): - with self.temp_filename(".nc") as nc_path: + @pytest.fixture + def save_common(self, tmp_path): + def _save_common(cube_to_save): + nc_path = tmp_path / "temp.nc" with Saver(nc_path, "NETCDF4") as saver: saver.write(cube_to_save) - def test_realised_not_streamed(self): - self.save_common(self.cube) - self.assertFalse(self.store_watch.called) + return _save_common - def test_lazy_streamed_data(self): + def test_realised_not_streamed(self, save_common): + save_common(self.cube) 
+ assert not self.store_watch.called + + def test_lazy_streamed_data(self, save_common): self.cube.data = self.cube.lazy_data() - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) + save_common(self.cube) + assert self.store_watch.called - def test_lazy_streamed_coord(self): + def test_lazy_streamed_coord(self, save_common): aux_coord = AuxCoord.from_coord(self.cube.coords()[0]) lazy_coord = aux_coord.copy(aux_coord.lazy_points(), aux_coord.lazy_bounds()) self.cube.replace_coord(lazy_coord) - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) + save_common(self.cube) + assert self.store_watch.called - def test_lazy_streamed_bounds(self): + def test_lazy_streamed_bounds(self, save_common): aux_coord = AuxCoord.from_coord(self.cube.coords()[0]) lazy_coord = aux_coord.copy(aux_coord.points, aux_coord.lazy_bounds()) self.cube.replace_coord(lazy_coord) - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) - - -if __name__ == "__main__": - tests.main() + save_common(self.cube) + assert self.store_watch.called diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py index 7c884e4c22..9cb84e81b5 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__lazy_stream_data.py @@ -11,7 +11,6 @@ """ from collections.abc import Iterator -from unittest import mock import dask.array as da import numpy as np @@ -24,19 +23,19 @@ class Test__lazy_stream_data: @staticmethod @pytest.fixture(autouse=True) - def saver_patch(): + def saver_patch(mocker): # Install patches, so we can create a Saver without opening a real output file. # Mock just enough of Dataset behaviour to allow a 'Saver.complete()' call. 
- mock_dataset = mock.MagicMock() - mock_dataset_class = mock.Mock(return_value=mock_dataset) + mock_dataset = mocker.MagicMock() + mock_dataset_class = mocker.Mock(return_value=mock_dataset) # Mock the wrapper within the netcdf saver - target1 = "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper" + target1 = "iris.fileformats.netcdf.saver.bytecoding_datasets.DatasetWrapper" # Mock the real netCDF4.Dataset within the threadsafe-nc module, as this is # used by NetCDFDataProxy and NetCDFWriteProxy. target2 = "iris.fileformats.netcdf._thread_safe_nc.netCDF4.Dataset" - with mock.patch(target1, mock_dataset_class): - with mock.patch(target2, mock_dataset_class): - yield + mocker.patch(target1, mock_dataset_class) + mocker.patch(target2, mock_dataset_class) + return # A fixture to parametrise tests over delayed and non-delayed Saver type. # NOTE: this only affects the saver context-exit, which we do not test here, so @@ -44,13 +43,13 @@ def saver_patch(): @staticmethod @pytest.fixture(params=[False, True], ids=["nocompute", "compute"]) def compute(request) -> Iterator[bool]: - yield request.param + return request.param # A fixture to parametrise tests over real and lazy-type data. @staticmethod @pytest.fixture(params=["realdata", "lazydata", "emulateddata"]) def data_form(request) -> Iterator[bool]: - yield request.param + return request.param @staticmethod def saver(compute) -> Saver: @@ -58,14 +57,14 @@ def saver(compute) -> Saver: return Saver(filename="", netcdf_format="NETCDF4", compute=compute) @staticmethod - def mock_var(shape, with_data_array): + def mock_var(shape, with_data_array, mocker): # Create a test cf_var object. # N.B. using 'spec=' so we can control whether it has a '_data_array' property. 
if with_data_array: - extra_properties = {"_data_array": mock.sentinel.initial_data_array} + extra_properties = {"_data_array": mocker.sentinel.initial_data_array} else: extra_properties = {} - mock_cfvar = mock.MagicMock( + mock_cfvar = mocker.MagicMock( spec=threadsafe_nc.VariableWrapper, shape=tuple(shape), dtype=np.dtype(np.float32), @@ -77,7 +76,7 @@ def mock_var(shape, with_data_array): mock_cfvar.name = "" return mock_cfvar - def test_data_save(self, compute, data_form): + def test_data_save(self, compute, data_form, mocker): """Real data is transferred immediately, lazy data creates a delayed write.""" saver = self.saver(compute=compute) @@ -86,7 +85,7 @@ def test_data_save(self, compute, data_form): data = da.from_array(data) cf_var = self.mock_var( - data.shape, with_data_array=(data_form == "emulateddata") + data.shape, with_data_array=(data_form == "emulateddata"), mocker=mocker ) saver._lazy_stream_data(data=data, cf_var=cf_var) if data_form == "lazydata": @@ -111,4 +110,4 @@ def test_data_save(self, compute, data_form): cf_var.__setitem__.assert_called_once_with(slice(None), data) else: assert data_form == "emulateddata" - cf_var._data_array == mock.sentinel.exact_data_array + cf_var._data_array == mocker.sentinel.exact_data_array diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py index 9494eabebf..2ace3f4f86 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_Saver__ugrid.py @@ -9,21 +9,17 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - from pathlib import Path -import shutil -import tempfile import numpy as np +import pytest from iris import save from iris.coords import AuxCoord from iris.cube import Cube, CubeList from iris.fileformats.netcdf import _thread_safe_nc from iris.mesh import Connectivity, MeshXY, save_mesh +from iris.tests import _shared_utils from iris.tests.stock import realistic_4d XY_LOCS = ("x", "y") @@ -358,38 +354,39 @@ def filter_compression_calls(patch, compression_kwargs, mismatch=False): return result -class TestSaveUgrid__cube(tests.IrisTest): +class TestSaveUgrid__cube: """Test for saving cubes which have meshes.""" - @classmethod - def setUpClass(cls): - cls.temp_dir = Path(tempfile.mkdtemp()) + @pytest.fixture(autouse=True, scope="class") + @staticmethod + def _setup(request, tmp_path_factory): + request.cls.temp_dir = tmp_path_factory.mktemp("test") - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temp_dir) + @pytest.fixture + def check_save_cubes(self, request): + def _check_save_cubes(cube_or_cubes, compression_kwargs=None): + """Write cubes to a new file in the common temporary directory. - def check_save_cubes(self, cube_or_cubes, compression_kwargs=None): - """Write cubes to a new file in the common temporary directory. + Use a name unique to this testcase, to avoid any clashes. - Use a name unique to this testcase, to avoid any clashes. + """ + # use 'result_path' to name the file after the test function + tempfile_path = _shared_utils.result_path(request, ext=".nc") + # Create a file of that name, but discard the result path and put it + # in the common temporary directory. + tempfile_path = self.temp_dir / Path(tempfile_path).name - """ - # use 'result_path' to name the file after the test function - tempfile_path = self.result_path(ext=".nc") - # Create a file of that name, but discard the result path and put it - # in the common temporary directory. 
- tempfile_path = self.temp_dir / Path(tempfile_path).name + if compression_kwargs is None: + compression_kwargs = {} - if compression_kwargs is None: - compression_kwargs = {} + # Save data to the file. + save(cube_or_cubes, tempfile_path, **compression_kwargs) - # Save data to the file. - save(cube_or_cubes, tempfile_path, **compression_kwargs) + return tempfile_path - return tempfile_path + return _check_save_cubes - def test_compression(self): + def test_compression(self, check_save_cubes, mocker): """Test NetCDF serialization of a cube with attached mesh using compression. NetCDF data compression keyword arguments include "complevel", @@ -400,13 +397,13 @@ def test_compression(self): # Note that the patch location is "_thread_safe_nc" when it is imported # into the iris.fileformats.netcdf.saver. Also we want to check that the # compression kwargs are passed into the NetCDF4 createVariable method - patch = self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + patch = mocker.patch( + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.createVariable", ) # No need to patch this NetCDF4 variable to compensate for the previous patch # on createVariable, which doesn't actually create the variable. - self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.variables" + mocker.patch( + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.variables" ) cube = make_cube(var_name=(var_name := "a")) compression_kwargs = { @@ -416,7 +413,7 @@ def test_compression(self): "zlib": True, } - _ = self.check_save_cubes(cube, compression_kwargs=compression_kwargs) + _ = check_save_cubes(cube, compression_kwargs=compression_kwargs) # The following mesh components and cube should be compressed on serialization. 
result = filter_compression_calls(patch, compression_kwargs) @@ -427,12 +424,12 @@ def test_compression(self): expected = {"Mesh2d"} assert result == expected - def test_basic_mesh(self): + def test_basic_mesh(self, request, check_save_cubes): # Save a small mesh example and check aspects of the resulting file. cube = make_cube() # A simple face-mapped data example. # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # There is exactly 1 mesh var. @@ -444,68 +441,64 @@ def test_basic_mesh(self): mesh_props = vars[mesh_name] # The mesh var links to the mesh, with location 'faces' - self.assertEqual(a_name, "unknown") - self.assertEqual(a_props["mesh"], mesh_name) - self.assertEqual(a_props["location"], "face") + assert a_name == "unknown" + assert a_props["mesh"] == mesh_name + assert a_props["location"] == "face" # There are 2 face coords == those listed in the mesh face_coords = mesh_props["face_coordinates"].split(" ") - self.assertEqual(len(face_coords), 2) + assert len(face_coords) == 2 # The face coords should both map that single dim. face_dim = vars_meshdim(vars, "face") - self.assertTrue(all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords)) + assert all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords) # The face coordinates should be referenced by the data variable. for coord in face_coords: - self.assertIn(coord, a_props["coordinates"]) + assert coord in a_props["coordinates"] # The dims of the datavar also == [] - self.assertEqual(a_props[_VAR_DIMS], [face_dim]) + assert a_props[_VAR_DIMS] == [face_dim] # There are 2 node coordinates == those listed in the mesh. node_coords = mesh_props["node_coordinates"].split(" ") - self.assertEqual(len(node_coords), 2) + assert len(node_coords) == 2 # These are the *only* ones using the 'nodes' dimension. 
node_dim = vars_meshdim(vars, "node") - self.assertEqual( - sorted(node_coords), sorted(vars_w_dims(vars, [node_dim]).keys()) - ) + assert sorted(node_coords) == sorted(vars_w_dims(vars, [node_dim]).keys()) # There are no edges. - self.assertNotIn("edge_node_connectivity", mesh_props) - self.assertEqual(len(vars_w_props(vars, cf_role="edge_node_connectivity")), 0) + assert "edge_node_connectivity" not in mesh_props + assert len(vars_w_props(vars, cf_role="edge_node_connectivity")) == 0 # The dims are precisely (nodes, faces, nodes-per-face), in that order. - self.assertEqual( - list(dims.keys()), - ["Mesh2d_nodes", "Mesh2d_faces", "Mesh2d_face_N_nodes"], - ) + assert list(dims.keys()) == [ + "Mesh2d_nodes", + "Mesh2d_faces", + "Mesh2d_face_N_nodes", + ] # The variables are exactly (mesh, 2*node-coords, 2*face-coords, # face-nodes, data) -- in that order - self.assertEqual( - list(vars.keys()), - [ - "Mesh2d", - "node_x", - "node_y", - "face_x", - "face_y", - "mesh2d_faces", - "unknown", - ], - ) + assert list(vars.keys()) == [ + "Mesh2d", + "node_x", + "node_y", + "face_x", + "face_y", + "mesh2d_faces", + "unknown", + ] # For completeness, also check against a full CDL snapshot - self.assertCDL(tempfile_path) + _shared_utils.assert_CDL(request, tempfile_path) - def test_multi_cubes_common_mesh(self): + def test_multi_cubes_common_mesh(self, check_save_cubes): cube1 = make_cube(var_name="a") cube2 = make_cube(var_name="b") # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there is exactly 1 mesh in the file @@ -513,19 +506,19 @@ def test_multi_cubes_common_mesh(self): # both the main variables reference the same mesh, and 'face' location v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_a["coordinates"], "face_x face_y") - 
self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "face") - self.assertEqual(v_b["coordinates"], "face_x face_y") - - def test_multi_cubes_different_locations(self): + assert v_a["mesh"] == mesh_name + assert v_a["location"] == "face" + assert v_a["coordinates"] == "face_x face_y" + assert v_b["mesh"] == mesh_name + assert v_b["location"] == "face" + assert v_b["coordinates"] == "face_x face_y" + + def test_multi_cubes_different_locations(self, check_save_cubes): cube1 = make_cube(var_name="a", location="face") cube2 = make_cube(var_name="b", location="node") # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there is exactly 1 mesh in the file @@ -533,20 +526,20 @@ def test_multi_cubes_different_locations(self): # the main variables reference the same mesh at different locations v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_a["coordinates"], "face_x face_y") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "node") - self.assertEqual(v_b["coordinates"], "node_x node_y") + assert v_a["mesh"] == mesh_name + assert v_a["location"] == "face" + assert v_a["coordinates"] == "face_x face_y" + assert v_b["mesh"] == mesh_name + assert v_b["location"] == "node" + assert v_b["coordinates"] == "node_x node_y" # the main variables map the face and node dimensions face_dim = vars_meshdim(vars, "face") node_dim = vars_meshdim(vars, "node") - self.assertEqual(v_a[_VAR_DIMS], [face_dim]) - self.assertEqual(v_b[_VAR_DIMS], [node_dim]) + assert v_a[_VAR_DIMS] == [face_dim] + assert v_b[_VAR_DIMS] == [node_dim] - def test_multi_cubes_equal_meshes(self): + def test_multi_cubes_equal_meshes(self, check_save_cubes): # Make 2 identical meshes # NOTE: *can't* name these explicitly, as it stops them being 
identical. mesh1 = make_mesh() @@ -555,49 +548,49 @@ def test_multi_cubes_equal_meshes(self): cube2 = make_cube(var_name="b", mesh=mesh2) # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there is exactly 1 mesh in the file mesh_names = vars_meshnames(vars) - self.assertEqual(sorted(mesh_names), ["Mesh2d"]) + assert sorted(mesh_names) == ["Mesh2d"] # same dimensions - self.assertEqual(vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes") - self.assertEqual(vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces") + assert vars_meshdim(vars, "node", mesh_name="Mesh2d") == "Mesh2d_nodes" + assert vars_meshdim(vars, "face", mesh_name="Mesh2d") == "Mesh2d_faces" # there are exactly two data-variables with a 'mesh' property mesh_datavars = vars_w_props(vars, mesh="*") - self.assertEqual(["a", "b"], list(mesh_datavars)) + assert ["a", "b"] == list(mesh_datavars) # the data variables reference the same mesh a_props, b_props = vars["a"], vars["b"] for props in a_props, b_props: - self.assertEqual(props["mesh"], "Mesh2d") - self.assertEqual(props["location"], "face") - self.assertEqual(props["coordinates"], "face_x face_y") + assert props["mesh"] == "Mesh2d" + assert props["location"] == "face" + assert props["coordinates"] == "face_x face_y" # the data variables map the appropriate node dimension - self.assertEqual(a_props[_VAR_DIMS], ["Mesh2d_faces"]) - self.assertEqual(b_props[_VAR_DIMS], ["Mesh2d_faces"]) + assert a_props[_VAR_DIMS] == ["Mesh2d_faces"] + assert b_props[_VAR_DIMS] == ["Mesh2d_faces"] - def test_multi_cubes_different_mesh(self): + def test_multi_cubes_different_mesh(self, check_save_cubes): # Check that we can correctly distinguish 2 different meshes. 
cube1 = make_cube(var_name="a") cube2 = make_cube(var_name="b", mesh=make_mesh(n_faces=4)) # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) + tempfile_path = check_save_cubes([cube1, cube2]) dims, vars = scan_dataset(tempfile_path) # there are 2 meshes in the file mesh_names = vars_meshnames(vars) - self.assertEqual(len(mesh_names), 2) + assert len(mesh_names) == 2 # there are two (data)variables with a 'mesh' property mesh_datavars = vars_w_props(vars, mesh="*") - self.assertEqual(2, len(mesh_datavars)) - self.assertEqual(["a", "b"], sorted(mesh_datavars.keys())) + assert 2 == len(mesh_datavars) + assert ["a", "b"] == sorted(mesh_datavars.keys()) def get_props_attrs(props: dict): return props["mesh"], props["location"], props["coordinates"] @@ -606,18 +599,18 @@ def get_props_attrs(props: dict): a_props, b_props = vars["a"], vars["b"] mesh_a, loc_a, coords_a = get_props_attrs(a_props) mesh_b, loc_b, coords_b = get_props_attrs(b_props) - self.assertNotEqual(mesh_a, mesh_b) - self.assertNotEqual(coords_a, coords_b) - self.assertEqual(loc_a, "face") - self.assertEqual(loc_b, "face") + assert mesh_a != mesh_b + assert coords_a != coords_b + assert loc_a == "face" + assert loc_b == "face" - def test_nonmesh_dim(self): + def test_nonmesh_dim(self, check_save_cubes): # Check where the data variable has a 'normal' dim and a mesh dim. cube = make_cube() cube = add_height_dim(cube) # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # have just 1 mesh, including a face and node coordinates. 
@@ -631,12 +624,12 @@ def test_nonmesh_dim(self): ((data_name, data_props),) = vars_w_props(vars, mesh="*").items() # data maps to the height + mesh dims - self.assertEqual(data_props[_VAR_DIMS], ["height", face_dim]) - self.assertEqual(data_props["mesh"], mesh_name) - self.assertEqual(data_props["location"], "face") + assert data_props[_VAR_DIMS] == ["height", face_dim] + assert data_props["mesh"] == mesh_name + assert data_props["location"] == "face" - @tests.skip_data - def test_nonmesh_hybrid_dim(self): + @_shared_utils.skip_data + def test_nonmesh_hybrid_dim(self, check_save_cubes): # Check a case with a hybrid non-mesh dimension cube = realistic_4d() # Strip off the time and longitude dims, to make it simpler. @@ -663,7 +656,7 @@ def test_nonmesh_hybrid_dim(self): cube.add_aux_coord(coord, (i_horizontal_dim,)) # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # have just 1 mesh, including face and node coordinates. @@ -672,29 +665,26 @@ def test_nonmesh_hybrid_dim(self): _ = vars_meshdim(vars, "node", mesh_name) # have hybrid vertical dimension, with all the usual term variables. 
- self.assertIn("model_level_number", dims) + assert "model_level_number" in dims vert_vars = list(vars_w_dims(vars, ["model_level_number"]).keys()) # The list of file variables mapping the vertical dimension: # = the data-var, plus all the height terms - self.assertEqual( - vert_vars, - [ - "air_potential_temperature", - "model_level_number", - "level_height", - "level_height_bnds", - "sigma", - "sigma_bnds", - ], - ) + assert vert_vars == [ + "air_potential_temperature", + "model_level_number", + "level_height", + "level_height_bnds", + "sigma", + "sigma_bnds", + ] # have just 1 data-variable, which maps to hybrid-height and mesh dims ((data_name, data_props),) = vars_w_props(vars, mesh="*").items() - self.assertEqual(data_props[_VAR_DIMS], ["model_level_number", face_dim]) - self.assertEqual(data_props["mesh"], mesh_name) - self.assertEqual(data_props["location"], "face") + assert data_props[_VAR_DIMS] == ["model_level_number", face_dim] + assert data_props["mesh"] == mesh_name + assert data_props["location"] == "face" - def test_alternate_cube_dim_order(self): + def test_alternate_cube_dim_order(self, check_save_cubes): # A cube transposed from the 'usual' order # Should work much the same as the "basic" case. 
cube_1 = make_cube(var_name="a") @@ -705,7 +695,7 @@ def test_alternate_cube_dim_order(self): cube_2.transpose() # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube_1, cube_2]) + tempfile_path = check_save_cubes([cube_1, cube_2]) dims, vars = scan_dataset(tempfile_path) # There is only 1 mesh @@ -713,16 +703,16 @@ def test_alternate_cube_dim_order(self): # both variables reference the same mesh v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "face") + assert v_a["mesh"] == mesh_name + assert v_a["location"] == "face" + assert v_b["mesh"] == mesh_name + assert v_b["location"] == "face" # Check the var dimensions - self.assertEqual(v_a[_VAR_DIMS], ["height", "Mesh2d_faces"]) - self.assertEqual(v_b[_VAR_DIMS], ["Mesh2d_faces", "height"]) + assert v_a[_VAR_DIMS] == ["height", "Mesh2d_faces"] + assert v_b[_VAR_DIMS] == ["Mesh2d_faces", "height"] - def test_mixed_aux_coords(self): + def test_mixed_aux_coords(self, check_save_cubes): """``coordinates`` attribute should include mesh location coords and 'normal' coords.""" cube = make_cube() mesh_dim = cube.mesh_dim() @@ -731,7 +721,7 @@ def test_mixed_aux_coords(self): cube.add_aux_coord(coord, mesh_dim) # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) + tempfile_path = check_save_cubes(cube) dims, vars = scan_dataset(tempfile_path) # There is exactly 1 mesh-linked (data)var @@ -742,41 +732,42 @@ def test_mixed_aux_coords(self): expected_coords.append(coord) expected_coord_names = [c.var_name for c in expected_coords] expected_coord_attr = " ".join(sorted(expected_coord_names)) - self.assertEqual(a_props["coordinates"], expected_coord_attr) + assert a_props["coordinates"] == expected_coord_attr -class TestSaveUgrid__mesh(tests.IrisTest): +class TestSaveUgrid__mesh: """Tests for saving meshes to a file.""" - @classmethod - 
def setUpClass(cls): - cls.temp_dir = Path(tempfile.mkdtemp()) + @pytest.fixture(autouse=True, scope="class") + @staticmethod + def _setup(request, tmp_path_factory): + request.cls.temp_dir = tmp_path_factory.mktemp("test") - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temp_dir) + @pytest.fixture + def check_save_mesh(self, request): + def _check_save_mesh(mesh, compression_kwargs=None): + """Write a mesh to a new file in the common temporary directory. - def check_save_mesh(self, mesh, compression_kwargs=None): - """Write a mesh to a new file in the common temporary directory. + Use a name unique to this testcase, to avoid any clashes. - Use a name unique to this testcase, to avoid any clashes. + """ + # use 'result_path' to name the file after the test function + tempfile_path = _shared_utils.result_path(request, ext=".nc") + # Create a file of that name, but discard the result path and put it + # in the common temporary directory. + tempfile_path = self.temp_dir / Path(tempfile_path).name - """ - # use 'result_path' to name the file after the test function - tempfile_path = self.result_path(ext=".nc") - # Create a file of that name, but discard the result path and put it - # in the common temporary directory. - tempfile_path = self.temp_dir / Path(tempfile_path).name + if compression_kwargs is None: + compression_kwargs = {} - if compression_kwargs is None: - compression_kwargs = {} + # Save data to the file. + save_mesh(mesh, tempfile_path, **compression_kwargs) - # Save data to the file. - save_mesh(mesh, tempfile_path, **compression_kwargs) + return tempfile_path - return tempfile_path + return _check_save_mesh - def test_compression(self): + def test_compression(self, check_save_mesh, mocker): """Test NetCDF serialization of a mesh using compression. NetCDF data compression keyword arguments include "complevel", @@ -784,11 +775,11 @@ def test_compression(self): are only applicable when "zlib=True". 
""" - patch = self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.createVariable", + patch = mocker.patch( + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.createVariable", ) - self.patch( - "iris.fileformats.netcdf.saver._thread_safe_nc.DatasetWrapper.variables" + mocker.patch( + "iris.fileformats.netcdf.saver.bytecoding_datasets.EncodedDataset.variables" ) mesh = make_mesh() compression_kwargs = { @@ -798,7 +789,7 @@ def test_compression(self): "zlib": True, } - _ = self.check_save_mesh(mesh, compression_kwargs=compression_kwargs) + _ = check_save_mesh(mesh, compression_kwargs=compression_kwargs) # The following mesh components should be compressed on serialization. result = filter_compression_calls(patch, compression_kwargs) @@ -809,7 +800,7 @@ def test_compression(self): expected = {"Mesh2d"} assert result == expected - def test_connectivity_dim_order(self): + def test_connectivity_dim_order(self, check_save_mesh): """Test a mesh with some connectivities in the 'other' order. This should also create a property with the dimension name. @@ -832,7 +823,7 @@ def test_connectivity_dim_order(self): ) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh2) + tempfile_path = check_save_mesh(mesh2) dims, vars = scan_dataset(tempfile_path) # Check shape and dimensions of the associated connectivity variables. 
@@ -842,24 +833,20 @@ def test_connectivity_dim_order(self): edgeconn_name = mesh_props["edge_node_connectivity"] faceconn_props = vars[faceconn_name] edgeconn_props = vars[edgeconn_name] - self.assertEqual( - faceconn_props[_VAR_DIMS], ["Mesh_2d_face_N_nodes", "Mesh2d_face"] - ) - self.assertEqual( - edgeconn_props[_VAR_DIMS], ["Mesh_2d_edge_N_nodes", "Mesh2d_edge"] - ) + assert faceconn_props[_VAR_DIMS] == ["Mesh_2d_face_N_nodes", "Mesh2d_face"] + assert edgeconn_props[_VAR_DIMS] == ["Mesh_2d_edge_N_nodes", "Mesh2d_edge"] # Check the dimension lengths are also as expected - self.assertEqual(dims["Mesh2d_face"], 2) - self.assertEqual(dims["Mesh_2d_face_N_nodes"], 4) - self.assertEqual(dims["Mesh2d_edge"], 7) - self.assertEqual(dims["Mesh_2d_edge_N_nodes"], 2) + assert dims["Mesh2d_face"] == 2 + assert dims["Mesh_2d_face_N_nodes"] == 4 + assert dims["Mesh2d_edge"] == 7 + assert dims["Mesh_2d_edge_N_nodes"] == 2 # the mesh has extra location-dimension properties - self.assertEqual(mesh_props["face_dimension"], "Mesh2d_face") - self.assertEqual(mesh_props["edge_dimension"], "Mesh2d_edge") + assert mesh_props["face_dimension"] == "Mesh2d_face" + assert mesh_props["edge_dimension"] == "Mesh2d_edge" - def test_connectivity_start_index(self): + def test_connectivity_start_index(self, check_save_mesh): """Test a mesh where some connectivities have start_index = 1.""" # Make a mesh with both faces *and* some edges mesh = make_mesh(n_edges=7) @@ -881,7 +868,7 @@ def test_connectivity_start_index(self): ) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh2) + tempfile_path = check_save_mesh(mesh2) dims, vars = scan_dataset(tempfile_path) # Check shape and dimensions of the associated connectivity variables. 
@@ -891,10 +878,10 @@ def test_connectivity_start_index(self): edgeconn_name = mesh_props["edge_node_connectivity"] faceconn_props = vars[faceconn_name] edgeconn_props = vars[edgeconn_name] - self.assertEqual(faceconn_props["start_index"], 0) - self.assertEqual(edgeconn_props["start_index"], 1) + assert faceconn_props["start_index"] == 0 + assert edgeconn_props["start_index"] == 1 - def test_nonuniform_connectivity(self): + def test_nonuniform_connectivity(self, check_save_mesh): # Check handling of connectivities with missing points. n_faces = 7 mesh = make_mesh(n_faces=n_faces) @@ -915,65 +902,66 @@ def test_nonuniform_connectivity(self): mesh.add_connectivities(conn) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) + tempfile_path = check_save_mesh(mesh) dims, vars = scan_dataset(tempfile_path) # Check that the mesh saved with the additional connectivity (mesh_name,) = vars_meshnames(vars) mesh_props = vars[mesh_name] - self.assertIn("face_face_connectivity", mesh_props) + assert "face_face_connectivity" in mesh_props ff_conn_name = mesh_props["face_face_connectivity"] # check that the connectivity has the corrects dims and fill-property ff_props = vars[ff_conn_name] - self.assertEqual(ff_props[_VAR_DIMS], ["Mesh2d_faces", "Mesh2d_face_N_faces"]) - self.assertIn("_FillValue", ff_props) - self.assertEqual(ff_props["_FillValue"], -1) + assert ff_props[_VAR_DIMS] == ["Mesh2d_faces", "Mesh2d_face_N_faces"] + assert "_FillValue" in ff_props + assert ff_props["_FillValue"] == -1 # Check that a 'normal' connectivity does *not* have a _FillValue fn_conn_name = mesh_props["face_node_connectivity"] fn_props = vars[fn_conn_name] - self.assertNotIn("_FillValue", fn_props) + assert "_FillValue" not in fn_props # For what it's worth, *also* check the actual data array in the file ds = _thread_safe_nc.DatasetWrapper(tempfile_path) conn_var = ds.variables[ff_conn_name] data = conn_var[:] ds.close() - self.assertIsInstance(data, np.ma.MaskedArray) - 
self.assertEqual(data.fill_value, -1) + assert isinstance(data, np.ma.MaskedArray) + assert data.fill_value == -1 # Compare raw values stored to indices, but with -1 at missing points raw_data = data.data filled_indices = indices.filled(-1) - self.assertArrayEqual(raw_data, filled_indices) + _shared_utils.assert_array_equal(raw_data, filled_indices) - def test_one_dimensional(self): + def test_one_dimensional(self, check_save_mesh): # Test a mesh with edges only. mesh = make_mesh(n_edges=5, n_faces=0, mesh_kwargs={"var_name": "Mesh1d"}) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) + tempfile_path = check_save_mesh(mesh) dims, vars = scan_dataset(tempfile_path) # there is a single mesh-var (mesh_name,) = vars_meshnames(vars) # the dims include edges but not faces - self.assertEqual( - list(dims.keys()), - ["Mesh1d_node", "Mesh1d_edge", "Mesh1d_edge_N_nodes"], - ) - self.assertEqual(vars_meshdim(vars, "node"), "Mesh1d_node") - self.assertEqual(vars_meshdim(vars, "edge"), "Mesh1d_edge") + assert list(dims.keys()) == [ + "Mesh1d_node", + "Mesh1d_edge", + "Mesh1d_edge_N_nodes", + ] + assert vars_meshdim(vars, "node") == "Mesh1d_node" + assert vars_meshdim(vars, "edge") == "Mesh1d_edge" # check suitable mesh properties - self.assertEqual(mesh_name, "Mesh1d") + assert mesh_name == "Mesh1d" mesh_props = vars[mesh_name] - self.assertEqual(mesh_props["topology_dimension"], 1) - self.assertIn("edge_node_connectivity", mesh_props) - self.assertNotIn("face_node_connectivity", mesh_props) + assert mesh_props["topology_dimension"] == 1 + assert "edge_node_connectivity" in mesh_props + assert "face_node_connectivity" not in mesh_props - def test_location_coord_units(self): + def test_location_coord_units(self, check_save_mesh): # Check that units on mesh locations are handled correctly. 
# NOTE: at present, the MeshXY class cannot handle coordinates that are # not recognised by 'guess_coord_axis' == suitable standard names @@ -1007,7 +995,7 @@ def test_location_coord_units(self): ) # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) + tempfile_path = check_save_mesh(mesh) dims, vars = scan_dataset(tempfile_path) # there is a single mesh-var @@ -1024,17 +1012,17 @@ def test_location_coord_units(self): # 2. non- (plain) lonlat are NOT converted # 3. other names remain as whatever was given # 4. no units on input --> none on output - self.assertEqual(node_x["units"], "degrees") - self.assertEqual(node_y["units"], "ms-1") - self.assertNotIn("units", face_x) - self.assertEqual(face_y["units"], "degrees_north") + assert node_x["units"] == "degrees" + assert node_y["units"] == "ms-1" + assert "units" not in face_x + assert face_y["units"] == "degrees_north" # Check also that we did not add 'axis' properties. # We should *only* do that for dim-coords. - self.assertNotIn("axis", node_x) - self.assertNotIn("axis", node_y) - self.assertNotIn("axis", face_x) - self.assertNotIn("axis", face_y) + assert "axis" not in node_x + assert "axis" not in node_y + assert "axis" not in face_x + assert "axis" not in face_y @staticmethod def _namestext(names): @@ -1044,7 +1032,7 @@ def _namestext(names): ] return f"({' '.join(name_texts)})" - def test_mesh_names(self): + def test_mesh_names(self, check_save_mesh): # Check the selection of mesh-variables names. # N.B. this is basically centralised in Saver._get_mesh_variable_name, # but we test in an implementation-neutral way (as it's fairly easy). 
@@ -1100,7 +1088,7 @@ def test_mesh_names(self): # Make a mesh, with the mesh names set for the testcase mesh = make_mesh(mesh_kwargs=mesh_name_kwargs) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1114,9 +1102,9 @@ def test_mesh_names(self): f"Unexpected resulting names {self._namestext(result_names)} " f"when saving mesh with {self._namestext(given_names)}" ) - self.assertEqual(expected_names, result_names, fail_msg) + assert expected_names == result_names, fail_msg - def test_location_coord_names(self): + def test_location_coord_names(self, check_save_mesh): # Check the selection of mesh-element coordinate names. # Check the selection of mesh-variables names. # N.B. this is basically centralised in Saver._get_mesh_variable_name, @@ -1165,7 +1153,7 @@ def test_location_coord_names(self): ): setattr(coord, key, name) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1181,9 +1169,9 @@ def test_location_coord_names(self): "when saving mesh coordinate " f"with {self._namestext(given_names)}" ) - self.assertEqual(expected_names, result_names, fail_msg) + assert expected_names == result_names, fail_msg - def test_mesh_dim_names(self): + def test_mesh_dim_names(self, check_save_mesh): # Check the selection of dimension names from the mesh. dim_names_tests = [ @@ -1194,7 +1182,7 @@ def test_mesh_dim_names(self): for given_name, expected_name in dim_names_tests: mesh = make_mesh(mesh_kwargs={"face_dimension": given_name}) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1204,9 +1192,9 @@ def test_mesh_dim_names(self): f'Unexpected resulting dimension name "{face_dim}" ' f'when saving mesh with dimension name of "{given_name}".' 
) - self.assertEqual(expected_name, face_dim, fail_msg) + assert expected_name == face_dim, fail_msg - def test_connectivity_names(self): + def test_connectivity_names(self, check_save_mesh): # Check the selection of connectivity names. conn_names_tests = [ # var_name only @@ -1258,7 +1246,7 @@ def test_connectivity_names(self): ): setattr(conn, key, name) - filepath = self.check_save_mesh(mesh) + filepath = check_save_mesh(mesh) dims, vars = scan_dataset(filepath) (mesh_name,) = vars_meshnames(vars) @@ -1275,111 +1263,98 @@ def test_connectivity_names(self): "when saving connectivity " f"with {self._namestext(given_names)}" ) - self.assertEqual(expected_names, result_names, fail_msg) + assert expected_names == result_names, fail_msg - def test_multiple_equal_mesh(self): + def test_multiple_equal_mesh(self, check_save_mesh): mesh1 = make_mesh() mesh2 = make_mesh() # Save and snapshot the result - tempfile_path = self.check_save_mesh([mesh1, mesh2]) + tempfile_path = check_save_mesh([mesh1, mesh2]) dims, vars = scan_dataset(tempfile_path) # In this case there should be only *one* mesh. mesh_names = vars_meshnames(vars) - self.assertEqual(1, len(mesh_names)) + assert 1 == len(mesh_names) # Check it has the correct number of coords + conns (no duplicates) # Should have 2 each X and Y coords (face+node): _no_ edge coords. coord_vars_x = vars_w_props(vars, standard_name="longitude") coord_vars_y = vars_w_props(vars, standard_name="latitude") - self.assertEqual(2, len(coord_vars_x)) - self.assertEqual(2, len(coord_vars_y)) + assert 2 == len(coord_vars_x) + assert 2 == len(coord_vars_y) # Check the connectivities are all present: _only_ 1 var of each type. 
for conn in mesh1.all_connectivities: if conn is not None: conn_vars = vars_w_props(vars, cf_role=conn.cf_role) - self.assertEqual(1, len(conn_vars)) + assert 1 == len(conn_vars) - def test_multiple_different_meshes(self): + def test_multiple_different_meshes(self, check_save_mesh): # Create 2 meshes with different faces, but same edges. # N.B. they should then share an edge dimension. mesh1 = make_mesh(n_faces=3, n_edges=2) mesh2 = make_mesh(n_faces=4, n_edges=2) # Save and snapshot the result - tempfile_path = self.check_save_mesh([mesh1, mesh2]) + tempfile_path = check_save_mesh([mesh1, mesh2]) dims, vars = scan_dataset(tempfile_path) # Check the dims are as expected - self.assertEqual(dims["Mesh2d_faces"], 3) - self.assertEqual(dims["Mesh2d_faces_0"], 4) + assert dims["Mesh2d_faces"] == 3 + assert dims["Mesh2d_faces_0"] == 4 # There are no 'second' edge and node dims - self.assertEqual(dims["Mesh2d_nodes"], 5) - self.assertEqual(dims["Mesh2d_edge"], 2) + assert dims["Mesh2d_nodes"] == 5 + assert dims["Mesh2d_edge"] == 2 # Check there are two independent meshes in the file... 
# there are exactly 2 meshes in the file mesh_names = vars_meshnames(vars) - self.assertEqual(sorted(mesh_names), ["Mesh2d", "Mesh2d_0"]) + assert sorted(mesh_names) == ["Mesh2d", "Mesh2d_0"] # they use different dimensions # mesh1 - self.assertEqual(vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes") - self.assertEqual(vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces") + assert vars_meshdim(vars, "node", mesh_name="Mesh2d") == "Mesh2d_nodes" + assert vars_meshdim(vars, "face", mesh_name="Mesh2d") == "Mesh2d_faces" if "edge_coordinates" in vars["Mesh2d"]: - self.assertEqual( - vars_meshdim(vars, "edge", mesh_name="Mesh2d"), "Mesh2d_edge" - ) + assert vars_meshdim(vars, "edge", mesh_name="Mesh2d") == "Mesh2d_edge" # mesh2 - self.assertEqual( - vars_meshdim(vars, "node", mesh_name="Mesh2d_0"), "Mesh2d_nodes" - ) - self.assertEqual( - vars_meshdim(vars, "face", mesh_name="Mesh2d_0"), "Mesh2d_faces_0" - ) + assert vars_meshdim(vars, "node", mesh_name="Mesh2d_0") == "Mesh2d_nodes" + assert vars_meshdim(vars, "face", mesh_name="Mesh2d_0") == "Mesh2d_faces_0" if "edge_coordinates" in vars["Mesh2d_0"]: - self.assertEqual( - vars_meshdim(vars, "edge", mesh_name="Mesh2d_0"), - "Mesh2d_edge", - ) + assert vars_meshdim(vars, "edge", mesh_name="Mesh2d_0") == "Mesh2d_edge" # the relevant coords + connectivities are also distinct # mesh1 - self.assertEqual(vars["node_x"][_VAR_DIMS], ["Mesh2d_nodes"]) - self.assertEqual(vars["face_x"][_VAR_DIMS], ["Mesh2d_faces"]) - self.assertEqual( - vars["mesh2d_faces"][_VAR_DIMS], - ["Mesh2d_faces", "Mesh2d_face_N_nodes"], - ) + assert vars["node_x"][_VAR_DIMS] == ["Mesh2d_nodes"] + assert vars["face_x"][_VAR_DIMS] == ["Mesh2d_faces"] + assert vars["mesh2d_faces"][_VAR_DIMS] == [ + "Mesh2d_faces", + "Mesh2d_face_N_nodes", + ] if "edge_coordinates" in vars["Mesh2d"]: - self.assertEqual(vars["longitude"][_VAR_DIMS], ["Mesh2d_edge"]) - self.assertEqual( - vars["mesh2d_edge"][_VAR_DIMS], - ["Mesh2d_edge", 
"Mesh2d_edge_N_nodes"], - ) + assert vars["longitude"][_VAR_DIMS] == ["Mesh2d_edge"] + assert vars["mesh2d_edge"][_VAR_DIMS] == [ + "Mesh2d_edge", + "Mesh2d_edge_N_nodes", + ] # mesh2 - self.assertEqual(vars["node_x_0"][_VAR_DIMS], ["Mesh2d_nodes"]) - self.assertEqual(vars["face_x_0"][_VAR_DIMS], ["Mesh2d_faces_0"]) - self.assertEqual( - vars["mesh2d_faces_0"][_VAR_DIMS], - ["Mesh2d_faces_0", "Mesh2d_0_face_N_nodes"], - ) + assert vars["node_x_0"][_VAR_DIMS] == ["Mesh2d_nodes"] + assert vars["face_x_0"][_VAR_DIMS] == ["Mesh2d_faces_0"] + assert vars["mesh2d_faces_0"][_VAR_DIMS] == [ + "Mesh2d_faces_0", + "Mesh2d_0_face_N_nodes", + ] if "edge_coordinates" in vars["Mesh2d_0"]: - self.assertEqual(vars["longitude_0"][_VAR_DIMS], ["Mesh2d_edge"]) - self.assertEqual( - vars["mesh2d_edge_0"][_VAR_DIMS], - ["Mesh2d_edge", "Mesh2d_0_edge_N_nodes"], - ) + assert vars["longitude_0"][_VAR_DIMS] == ["Mesh2d_edge"] + assert vars["mesh2d_edge_0"][_VAR_DIMS] == [ + "Mesh2d_edge", + "Mesh2d_0_edge_N_nodes", + ] # WHEN MODIFYING THIS MODULE, CHECK IF ANY CORRESPONDING CHANGES ARE NEEDED IN # :mod:`iris.tests.unit.fileformats.netcdf.test_Saver__lazy.` - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py index b4b06c8c33..b4ae37f29b 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py +++ b/lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py @@ -4,15 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the :func:`iris.fileformats.netcdf.save` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from pathlib import Path -from shutil import rmtree -from tempfile import mkdtemp -from unittest import mock - import numpy as np import pytest @@ -20,81 +11,83 @@ from iris.coords import AuxCoord, DimCoord from iris.cube import Cube, CubeList from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION, Saver, _thread_safe_nc, save +from iris.tests import _shared_utils from iris.tests.stock import lat_lon_cube from iris.tests.stock.mesh import sample_mesh_cube -class Test_conventions(tests.IrisTest): - def setUp(self): +class Test_conventions: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = Cube([0]) self.custom_conventions = "convention1 convention2" self.cube.attributes["Conventions"] = self.custom_conventions self.options = iris.config.netcdf - def test_custom_conventions__ignored(self): + def test_custom_conventions__ignored(self, tmp_path): # Ensure that we drop existing conventions attributes and replace with # CF convention. - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, CF_CONVENTIONS_VERSION) - - def test_custom_conventions__allowed(self): + nc_path = tmp_path / "dummy.nc" + save(self.cube, nc_path, "NETCDF4") + ds = _thread_safe_nc.DatasetWrapper(nc_path) + res = ds.getncattr("Conventions") + ds.close() + assert res == CF_CONVENTIONS_VERSION + + def test_custom_conventions__allowed(self, mocker, tmp_path): # Ensure that existing conventions attributes are passed through if the # relevant Iris option is set. 
- with mock.patch.object(self.options, "conventions_override", True): - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, self.custom_conventions) - - def test_custom_conventions__allowed__missing(self): + nc_path = tmp_path / "dummy.nc" + mocker.patch.object(self.options, "conventions_override", True) + save(self.cube, nc_path, "NETCDF4") + ds = _thread_safe_nc.DatasetWrapper(nc_path) + res = ds.getncattr("Conventions") + ds.close() + assert res == self.custom_conventions + + def test_custom_conventions__allowed__missing(self, mocker, tmp_path): # Ensure the default conventions attribute is set if the relevant Iris # option is set but there is no custom conventions attribute. del self.cube.attributes["Conventions"] - with mock.patch.object(self.options, "conventions_override", True): - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = _thread_safe_nc.DatasetWrapper(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, CF_CONVENTIONS_VERSION) + mocker.patch.object(self.options, "conventions_override", True) + nc_path = tmp_path / "dummy.nc" + save(self.cube, nc_path, "NETCDF4") + ds = _thread_safe_nc.DatasetWrapper(nc_path) + res = ds.getncattr("Conventions") + ds.close() + assert res == CF_CONVENTIONS_VERSION -class Test_attributes(tests.IrisTest): - def test_attributes_arrays(self): +class Test_attributes: + def test_attributes_arrays(self, tmp_path): # Ensure that attributes containing NumPy arrays can be equality # checked and their cubes saved as appropriate. 
c1 = Cube([1], attributes={"bar": np.arange(2)}) c2 = Cube([2], attributes={"bar": np.arange(2)}) - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - res = ds.getncattr("bar") - ds.close() - self.assertArrayEqual(res, np.arange(2)) + nc_out = tmp_path / "foo.nc" + save([c1, c2], nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + res = ds.getncattr("bar") + ds.close() + _shared_utils.assert_array_equal(res, np.arange(2)) - def test_attributes_arrays_incompatible_shapes(self): + def test_attributes_arrays_incompatible_shapes(self, tmp_path): # Ensure successful comparison without raising a broadcast error. c1 = Cube([1], attributes={"bar": np.arange(2)}) c2 = Cube([2], attributes={"bar": np.arange(3)}) - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - with pytest.raises(AttributeError): - _ = ds.getncattr("bar") - for var in ds.variables.values(): - res = var.getncattr("bar") - self.assertIsInstance(res, np.ndarray) - ds.close() - - def test_no_special_attribute_clash(self): + nc_out = tmp_path / "foo.nc" + save([c1, c2], nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + with pytest.raises(AttributeError): + _ = ds.getncattr("bar") + for var in ds.variables.values(): + res = var.getncattr("bar") + assert isinstance(res, np.ndarray) + ds.close() + + def test_no_special_attribute_clash(self, tmp_path): # Ensure that saving multiple cubes with netCDF4 protected attributes # works as expected. 
# Note that here we are testing variable attribute clashes only - by @@ -103,35 +96,36 @@ def test_no_special_attribute_clash(self): c1 = Cube([0], var_name="test", attributes={"name": "bar"}) c2 = Cube([0], var_name="test_1", attributes={"name": "bar_1"}) - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - res = ds.variables["test"].getncattr("name") - res_1 = ds.variables["test_1"].getncattr("name") - ds.close() - self.assertEqual(res, "bar") - self.assertEqual(res_1, "bar_1") + nc_out = tmp_path / "foo.nc" + save([c1, c2], nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + res = ds.variables["test"].getncattr("name") + res_1 = ds.variables["test_1"].getncattr("name") + ds.close() + assert res == "bar" + assert res_1 == "bar_1" -class Test_unlimited_dims(tests.IrisTest): - def test_no_unlimited_dims(self): +class Test_unlimited_dims: + def test_no_unlimited_dims(self, tmp_path): cube = lat_lon_cube() - with self.temp_filename("foo.nc") as nc_out: - save(cube, nc_out) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - self.assertFalse(ds.dimensions["latitude"].isunlimited()) + nc_out = tmp_path / "foo.nc" + save(cube, nc_out) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + assert not ds.dimensions["latitude"].isunlimited() - def test_unlimited_dim_latitude(self): + def test_unlimited_dim_latitude(self, tmp_path): cube = lat_lon_cube() unlim_dim_name = "latitude" - with self.temp_filename("foo.nc") as nc_out: - save(cube, nc_out, unlimited_dimensions=[unlim_dim_name]) - ds = _thread_safe_nc.DatasetWrapper(nc_out) - self.assertTrue(ds.dimensions[unlim_dim_name].isunlimited()) + nc_out = tmp_path / "foo.nc" + save(cube, nc_out, unlimited_dimensions=[unlim_dim_name]) + ds = _thread_safe_nc.DatasetWrapper(nc_out) + assert ds.dimensions[unlim_dim_name].isunlimited() -class Test_fill_value(tests.IrisTest): - def setUp(self): +class Test_fill_value: + @pytest.fixture(autouse=True) + def _setup(self): 
self.standard_names = [ "air_temperature", "air_potential_temperature", @@ -152,84 +146,85 @@ def _make_cubes(self): for name in self.standard_names ) - def test_None(self): + def test_none(self, mocker): # Test that when no fill_value argument is passed, the fill_value # argument to Saver.write is None or not present. cubes = self._make_cubes() - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cubes, "dummy.nc") + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + save(cubes, "dummy.nc") # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(3, write.call_count) + assert 3 == write.call_count for call in write.mock_calls: _, _, kwargs = call if "fill_value" in kwargs: - self.assertIs(None, kwargs["fill_value"]) + assert None is kwargs["fill_value"] - def test_single(self): + def test_single(self, mocker): # Test that when a single value is passed as the fill_value argument, # that value is passed to each call to Saver.write cubes = self._make_cubes() fill_value = 12345.0 - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cubes, "dummy.nc", fill_value=fill_value) + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + save(cubes, "dummy.nc", fill_value=fill_value) # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(3, write.call_count) + assert 3 == write.call_count for call in write.mock_calls: _, _, kwargs = call - self.assertEqual(fill_value, kwargs["fill_value"]) + assert fill_value == kwargs["fill_value"] - def test_multiple(self): + def test_multiple(self, mocker): # Test that when a list is passed as the fill_value argument, # each element is passed to separate calls to Saver.write cubes = self._make_cubes() fill_values = [123.0, 456.0, 789.0] - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cubes, "dummy.nc", fill_value=fill_values) + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + 
save(cubes, "dummy.nc", fill_value=fill_values) # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(3, write.call_count) + assert 3 == write.call_count for call, fill_value in zip(write.mock_calls, fill_values): _, _, kwargs = call - self.assertEqual(fill_value, kwargs["fill_value"]) + assert fill_value == kwargs["fill_value"] - def test_single_string(self): + def test_single_string(self, mocker): # Test that when a string is passed as the fill_value argument, # that value is passed to calls to Saver.write cube = Cube(["abc", "def", "hij"]) fill_value = "xyz" - with mock.patch("iris.fileformats.netcdf.saver.Saver") as Saver: - save(cube, "dummy.nc", fill_value=fill_value) + Saver = mocker.patch("iris.fileformats.netcdf.saver.Saver") + save(cube, "dummy.nc", fill_value=fill_value) # Get the Saver.write mock with Saver() as saver: write = saver.write - self.assertEqual(1, write.call_count) + assert 1 == write.call_count _, _, kwargs = write.mock_calls[0] - self.assertEqual(fill_value, kwargs["fill_value"]) + assert fill_value == kwargs["fill_value"] - def test_multi_wrong_length(self): + def test_multi_wrong_length(self, mocker): # Test that when a list of a different length to the number of cubes # is passed as the fill_value argument, an error is raised cubes = self._make_cubes() fill_values = [1.0, 2.0, 3.0, 4.0] - with mock.patch("iris.fileformats.netcdf.saver.Saver"): - with self.assertRaises(ValueError): + msg = "If fill_value is a list, it must have the same number of elements as the cube argument." + with mocker.patch("iris.fileformats.netcdf.saver.Saver"): + with pytest.raises(ValueError, match=msg): save(cubes, "dummy.nc", fill_value=fill_values) -class Test_HdfSaveBug(tests.IrisTest): +class Test_HdfSaveBug: """Check for a known problem with netcdf4. 
If you create dimension with the same name as an existing variable, there @@ -252,10 +247,10 @@ class Test_HdfSaveBug(tests.IrisTest): """ - def _check_save_and_reload(self, cubes): - tempdir = Path(mkdtemp()) - filepath = tempdir / "tmp.nc" - try: + @pytest.fixture + def _check_save_and_reload(self, tmp_path): + def check_save_and_reload(cubes): + filepath = tmp_path / "temp.nc" # Save the given cubes. save(cubes, filepath) @@ -263,7 +258,7 @@ def _check_save_and_reload(self, cubes): new_cubes = iris.load(str(filepath)) # There should definitely still be the same number of cubes. - self.assertEqual(len(new_cubes), len(cubes)) + assert len(new_cubes) == len(cubes) # Get results in the input order, matching by var_names. result = [new_cubes.extract_cube(cube.var_name) for cube in cubes] @@ -272,15 +267,14 @@ def _check_save_and_reload(self, cubes): # NB in this codeblock, before we destroy the temporary file. for cube_in, cube_out in zip(cubes, result): # Using special tolerant equivalence-check. - self.assertSameCubes(cube_in, cube_out) + self.assert_same_cubes(cube_in, cube_out) - finally: - rmtree(tempdir) + # Return result cubes for any additional checks. + return result - # Return result cubes for any additional checks. - return result + return check_save_and_reload - def assertSameCubes(self, cube1, cube2): + def assert_same_cubes(self, cube1, cube2): """A special tolerant cube compare. Ignore any 'Conventions' attributes. @@ -304,20 +298,20 @@ def clean_cube(cube): return cube - self.assertEqual(clean_cube(cube1), clean_cube(cube2)) + assert clean_cube(cube1) == clean_cube(cube2) - def test_dimcoord_varname_collision(self): + def test_dimcoord_varname_collision(self, _check_save_and_reload): cube_2 = Cube([0, 1], var_name="cube_2") x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") cube_2.add_dim_coord(x_dim, 0) # First cube has a varname which collides with the dimcoord. 
cube_1 = Cube([0, 1], long_name="cube_1", var_name="dimco_name") # Test save + loadback - reload_1, reload_2 = self._check_save_and_reload([cube_1, cube_2]) + reload_1, reload_2 = _check_save_and_reload([cube_1, cube_2]) # As re-loaded, the coord will have a different varname. - self.assertEqual(reload_2.coord("dim_x").var_name, "dimco_name_0") + assert reload_2.coord("dim_x").var_name == "dimco_name_0" - def test_anonymous_dim_varname_collision(self): + def test_anonymous_dim_varname_collision(self, _check_save_and_reload): # Second cube is going to name an anonymous dim. cube_2 = Cube([0, 1], var_name="cube_2") # First cube has a varname which collides with the dim-name. @@ -326,9 +320,9 @@ def test_anonymous_dim_varname_collision(self): x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") cube_1.add_dim_coord(x_dim, 0) # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_bounds_dim_varname_collision(self): + def test_bounds_dim_varname_collision(self, _check_save_and_reload): cube_2 = Cube([0, 1], var_name="cube_2") x_dim = DimCoord([0, 1], long_name="dim_x", var_name="dimco_name") x_dim.guess_bounds() @@ -336,9 +330,9 @@ def test_bounds_dim_varname_collision(self): # First cube has a varname which collides with the bounds dimension. cube_1 = Cube([0], long_name="cube_1", var_name="bnds") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_string_dim_varname_collision(self): + def test_string_dim_varname_collision(self, _check_save_and_reload): cube_2 = Cube([0, 1], var_name="cube_2") # NOTE: it *should* be possible for a cube with string data to cause # this collision, but cubes with string data are currently not working. 
@@ -347,21 +341,21 @@ def test_string_dim_varname_collision(self): cube_2.add_aux_coord(x_dim, 0) cube_1 = Cube([0], long_name="cube_1", var_name="string4") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_mesh_location_dim_varname_collision(self): + def test_mesh_location_dim_varname_collision(self, _check_save_and_reload): cube_2 = sample_mesh_cube() cube_2.var_name = "cube_2" # Make it identifiable cube_1 = Cube([0], long_name="cube_1", var_name="Mesh2d_node") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) - def test_connectivity_dim_varname_collision(self): + def test_connectivity_dim_varname_collision(self, _check_save_and_reload): cube_2 = sample_mesh_cube() cube_2.var_name = "cube_2" # Make it identifiable cube_1 = Cube([0], long_name="cube_1", var_name="Mesh_2d_face_N_nodes") # Test save + loadback - self._check_save_and_reload([cube_1, cube_2]) + _check_save_and_reload([cube_1, cube_2]) class Test_compute_usage: @@ -374,13 +368,13 @@ class Test_compute_usage: # A fixture to mock out Saver object creation in a 'save' call. @staticmethod @pytest.fixture - def mock_saver_creation(): + def mock_saver_creation(mocker): # A mock for a Saver object. - mock_saver = mock.MagicMock(spec=Saver) + mock_saver = mocker.MagicMock(spec=Saver) # make an __enter__ call return the object itself (as the real Saver does). - mock_saver.__enter__ = mock.Mock(return_value=mock_saver) + mock_saver.__enter__ = mocker.Mock(return_value=mock_saver) # A mock for the Saver() constructor call. - mock_new_saver_call = mock.Mock(return_value=mock_saver) + mock_new_saver_call = mocker.Mock(return_value=mock_saver) # Replace the whole Saver class with a simple function, which thereby emulates # the constructor call. 
This avoids complications due to the fact that Mock @@ -389,23 +383,23 @@ def mock_saver_class_create(*args, **kwargs): return mock_new_saver_call(*args, **kwargs) # Patch the Saver() creation to return our mock Saver object. - with mock.patch("iris.fileformats.netcdf.saver.Saver", mock_saver_class_create): - # Return mocks for both constructor call, and Saver object. - yield mock_new_saver_call, mock_saver + mocker.patch("iris.fileformats.netcdf.saver.Saver", mock_saver_class_create) + # Return mocks for both constructor call, and Saver object. + return mock_new_saver_call, mock_saver # A fixture to provide some mock args for 'Saver' creation. @staticmethod @pytest.fixture - def mock_saver_args(): + def mock_saver_args(mocker): from collections import namedtuple # A special object for the cube, since cube.attributes must be indexable - mock_cube = mock.MagicMock() + mock_cube = mocker.MagicMock() args = namedtuple("saver_args", ["cube", "filename", "format", "compute"])( cube=mock_cube, - filename=mock.sentinel.filepath, - format=mock.sentinel.netcdf4, - compute=mock.sentinel.compute, + filename=mocker.sentinel.filepath, + format=mocker.sentinel.netcdf4, + compute=mocker.sentinel.compute, ) return args @@ -454,7 +448,3 @@ def test_compute_false_result_delayed(self, mock_saver_creation, mock_saver_args assert mock_saver.delayed_completion.call_count == 1 # .. and should return the result of that. assert result is mock_saver.delayed_completion.return_value - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py new file mode 100644 index 0000000000..f16097bef3 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/test_bytecoding_datasets.py @@ -0,0 +1,454 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. 
+# See LICENSE in the root of the repository for full licensing details. +"""Unit tests for :class:`iris.fileformats.netcdf._bytecoding_datasets` module.""" + +from pathlib import Path + +import numpy as np +import pytest + +from iris.exceptions import TranslationError +from iris.fileformats.netcdf._bytecoding_datasets import ( + DECODE_TO_STRINGS_ON_READ, + EncodedDataset, +) +from iris.fileformats.netcdf._thread_safe_nc import DatasetWrapper +from iris.warnings import IrisCfLoadWarning, IrisCfSaveWarning + +encoding_options = [None, "ascii", "utf-8", "utf-32"] + +samples_3_ascii = np.array( + ["one", "", "seven"], # N.B. include empty! +) +samples_3_nonascii = np.array(["two", "", "épéé"]) + + +def strings_maxbytes(strings, encoding): + return max(len(string.encode(encoding)) for string in strings) + + +@pytest.fixture(params=encoding_options) +def encoding(request): + return request.param + + +@pytest.fixture(scope="module") +def tempdir(tmp_path_factory): + path = tmp_path_factory.mktemp("netcdf") + return path + + +def make_encoded_dataset( + path: Path, strlen: int, encoding: str | None = None +) -> EncodedDataset: + """Create a test EncodedDataset linked to an actual file. + + * strlen becomes the string dimension (i.e. a number of *bytes*) + * a variable "vxs" is created + * If 'encoding' is given, the "vxs::_Encoding" attribute is created with this value + """ + ds = EncodedDataset(path, "w") + ds.createDimension("x", 3) + ds.createDimension("strlen", strlen) + v = ds.createVariable("vxs", "S1", ("x", "strlen")) + if encoding is not None: + v.setncattr("_Encoding", encoding) + return ds + + +def fetch_undecoded_var(path, varname): + # Open a path as a "normal" dataset, and return a given variable. + ds_normal = DatasetWrapper(path) + ds_normal._contained_instance.set_auto_chartostring(False) + v = ds_normal.variables[varname] + # Return a variable, rather than its data, so we can check attributes etc. 
+ return v + + +def check_array_matching(arr1, arr2): + """Check for arrays matching shape, dtype and content.""" + assert ( + arr1.shape == arr2.shape and arr1.dtype == arr2.dtype and np.all(arr1 == arr2) + ) + + +def check_raw_content(path, varname, expected_byte_array): + v = fetch_undecoded_var(path, varname) + bytes_result = v[:] + check_array_matching(bytes_result, expected_byte_array) + + +def _make_bytearray_inner(data, bytewidth, encoding): + # Convert to a (list of [lists of..]) strings or bytes to a + # (list of [lists of..]) length-1 bytes with an extra dimension. + if isinstance(data, str): + # Convert input strings to bytes + data = data.encode(encoding) + if isinstance(data, bytes): + # iterate over bytes to get a sequence of length-1 bytes (what np.array wants) + result = [data[i : i + 1] for i in range(len(data))] + # pad or truncate everything to the required bytewidth + result = (result + [b"\0"] * bytewidth)[:bytewidth] + else: + # If not string/bytes, expect the input to be a list. + # N.B. the recursion is inefficient, but we don't care about that here + result = [_make_bytearray_inner(part, bytewidth, encoding) for part in data] + return result + + +def make_bytearray(data, bytewidth, encoding="ascii"): + """Convert bytes or lists of bytes into a numpy byte array. + + This is largely to avoid using "encode_stringarray_as_bytearray", since we don't + want to depend on that when we should be testing it. + So, it mostly replicates the function of that, but it does also support bytes in the + input. + """ + # First, Convert to a (list of [lists of]..) length-1 bytes objects + data = _make_bytearray_inner(data, bytewidth, encoding) + # We should now be able to create an array of single bytes. + result = np.array(data) + assert result.dtype == "S1" + return result + + +class TestWriteStrings: + """Test how string data is saved to a file. 
+ + Mostly, we read back data as a "normal" dataset to avoid relying on the read code, + which is separately tested -- see 'TestReadStrings'. + """ + + def test_encodings(self, encoding, tempdir): + # Create a dataset with the variable + path = tempdir / f"test_writestrings_encoding_{encoding!s}.nc" + + if encoding in [None, "ascii"]: + writedata = samples_3_ascii + write_encoding = "ascii" + else: + writedata = samples_3_nonascii + write_encoding = encoding + + writedata = writedata.copy() # just for safety? + strlen = strings_maxbytes(writedata, write_encoding) + + ds_encoded = make_encoded_dataset(path, strlen, encoding) + v = ds_encoded.variables["vxs"] + + # Effectively, checks that we *can* write strings + v[:] = writedata + + # Close, re-open as an "ordinary" dataset, and check the raw content. + ds_encoded.close() + expected_bytes = make_bytearray(writedata, strlen, write_encoding) + check_raw_content(path, "vxs", expected_bytes) + + # Check also that the "_Encoding" property is as expected + v = fetch_undecoded_var(path, "vxs") + result_attr = v.getncattr("_Encoding") if "_Encoding" in v.ncattrs() else None + assert result_attr == encoding + + def test_scalar(self, tempdir): + # Like 'test_write_strings', but the variable has *only* the string dimension. + path = tempdir / "test_writestrings_scalar.nc" + + strlen = 5 + ds_encoded = make_encoded_dataset(path, strlen=strlen) + v = ds_encoded.createVariable("v0_scalar", "S1", ("strlen",)) + + # Checks that we *can* write a string + v[:] = np.array("stuff", dtype=str) + + # Close, re-open as an "ordinary" dataset, and check the raw content. + ds_encoded.close() + expected_bytes = make_bytearray(b"stuff", strlen) + check_raw_content(path, "v0_scalar", expected_bytes) + + def test_multidim(self, tempdir): + # Like 'test_write_strings', but the variable has additional dimensions. 
+ path = tempdir / "test_writestrings_multidim.nc" + + strlen = 5 + ds_encoded = make_encoded_dataset(path, strlen=strlen) + ds_encoded.createDimension("y", 2) + v = ds_encoded.createVariable( + "vyxn", + "S1", + ( + "y", + "x", + "strlen", + ), + ) + + # Check that we *can* write a multidimensional string array + test_data = [ + ["one", "n", ""], + ["two", "xxxxx", "four"], + ] + v[:] = test_data + + # Close, re-open as an "ordinary" dataset, and check the raw content. + ds_encoded.close() + expected_bytes = make_bytearray(test_data, strlen) + check_raw_content(path, "vyxn", expected_bytes) + + @pytest.mark.parametrize("encoding", [None, "ascii"]) + def test_write_encoding_failure(self, tempdir, encoding): + path = tempdir / f"test_writestrings_encoding_{encoding}_fail.nc" + ds = make_encoded_dataset(path, strlen=5, encoding=encoding) + v = ds.variables["vxs"] + encoding_name = encoding + if encoding_name == None: + encoding_name = "ascii" + msg = ( + "String data written to netcdf character variable 'vxs'.*" + f" could not be represented in encoding '{encoding_name}'. " + ) + with pytest.raises(ValueError, match=msg): + v[:] = samples_3_nonascii + + def test_write_badencoding_ignore(self, tempdir): + path = tempdir / "test_writestrings_badencoding_ignore.nc" + ds = make_encoded_dataset(path, strlen=5, encoding="unknown") + v = ds.variables["vxs"] + msg = r"Ignoring unknown encoding for variable 'vxs': _Encoding = 'unknown'\." + with pytest.warns(IrisCfSaveWarning, match=msg): + v[:] = samples_3_ascii # will work OK + + def test_overlength(self, tempdir): + # Check expected behaviour with over-length data + path = tempdir / "test_writestrings_overlength.nc" + strlen = 5 + ds = make_encoded_dataset(path, strlen=strlen, encoding="ascii") + v = ds.variables["vxs"] + msg = r"String .* written to netcdf exceeds string dimension .* : [0-9]* > 5\." 
+ with pytest.raises(TranslationError, match=msg): + v[:] = ["1", "123456789", "two"] + + def test_overlength_splitcoding(self, tempdir): + # Check expected behaviour when non-ascii multibyte coding gets truncated + path = tempdir / "test_writestrings_overlength_splitcoding.nc" + strlen = 5 + ds = make_encoded_dataset(path, strlen=strlen, encoding="utf-8") + v = ds.variables["vxs"] + # Note: we must do the assignment as a single byte array, to avoid hitting the + # safety check for this exact problem : see previous check. + byte_arrays = [ + string.encode("utf-8")[:strlen] for string in ("1", "1234ü", "two") + ] + nd_bytes_array = np.array( + [ + [bytes[i : i + 1] if i < len(bytes) else b"\0" for i in range(strlen)] + for bytes in byte_arrays + ] + ) + v[:] = nd_bytes_array + # This creates a problem: it won't read back + msg = ( + "Character data in variable 'vxs' could not be decoded " + "with the 'utf-8' encoding." + ) + with pytest.raises(ValueError, match=msg): + v[:] + + # Check also that we *can* read the raw content. + ds.close() + expected_bytes = [ + b"1", + b"1234\xc3", # NOTE: truncated encoding + b"two", + ] + expected_bytearray = make_bytearray(expected_bytes, strlen) + check_raw_content(path, "vxs", expected_bytearray) + + +class TestWriteChars: + @pytest.mark.parametrize("write_form", ["strings", "bytes"]) + def test_write_chars(self, tempdir, write_form): + encoding = "utf-8" + write_strings = samples_3_nonascii + strlen = strings_maxbytes(write_strings, encoding) + write_bytes = make_bytearray(write_strings, strlen, encoding=encoding) + # NOTE: 'flexi' form util decides the width needs to be 7 !! + path = tempdir / f"test_writechars_{write_form}.nc" + ds = make_encoded_dataset(path, encoding=encoding, strlen=strlen) + v = ds.variables["vxs"] + + # assign in *either* way.. + if write_form == "strings": + v[:] = write_strings + else: + v[:] = write_bytes + + # .. 
the result should be the same + ds.close() + check_raw_content(path, "vxs", write_bytes) + + +class TestRead: + """Test how character data is read and converted to strings. + + N.B. many testcases here parallel the 'TestWriteStrings' : we are creating test + datafiles with 'make_dataset' and assigning raw bytes, as-per 'TestWriteChars'. + + We are mostly checking here that reading back produces string arrays as expected. + However, it is simple + convenient to also check the 'DECODE_TO_STRINGS_ON_READ' + function here, i.e. "raw" bytes reads. So that is also done in this class. + """ + + @pytest.fixture(params=["strings", "bytes"]) + def readmode(self, request): + return request.param + + def undecoded_testvar(self, ds_encoded, varname: str): + path = ds_encoded.filepath() + ds_encoded.close() + ds = DatasetWrapper(path) + v = ds.variables[varname] + v.set_auto_chartostring(False) + return v + + def test_encodings(self, encoding, tempdir, readmode): + # Create a dataset with the variable + path = tempdir / f"test_read_encodings_{encoding!s}_{readmode}.nc" + + if encoding in [None, "ascii"]: + write_strings = samples_3_ascii + write_encoding = "ascii" + else: + write_strings = samples_3_nonascii + write_encoding = encoding + + write_strings = write_strings.copy() # just for safety? + strlen = strings_maxbytes(write_strings, write_encoding) + write_bytes = make_bytearray(write_strings, strlen, encoding=write_encoding) + + ds_encoded = make_encoded_dataset(path, strlen, encoding) + v = ds_encoded.variables["vxs"] + v[:] = write_bytes + + if readmode == "strings": + # Test "normal" read --> string array + result = v[:] + expected = write_strings + if encoding == "utf-8": + # In this case, with the given non-ascii sample data, the + # "default minimum string length" is overestimated. 
+ assert strlen == 7 and result.dtype == "U7" + # correct the result dtype to pass the write_strings comparison below + truncated_result = result.astype("U4") + # Also check that content is the same (i.e. not actually truncated) + assert np.all(truncated_result == result) + result = truncated_result + else: + # Close and re-open as "regular" dataset -- just to check the raw content + v = self.undecoded_testvar(ds_encoded, "vxs") + result = v[:] + expected = write_bytes + + check_array_matching(result, expected) + + def test_scalar(self, tempdir, readmode): + # Like 'test_write_strings', but the variable has *only* the string dimension. + path = tempdir / f"test_read_scalar_{readmode}.nc" + + strlen = 5 + ds_encoded = make_encoded_dataset(path, strlen=strlen) + v = ds_encoded.createVariable("v0_scalar", "S1", ("strlen",)) + + data_string = "stuff" + data_bytes = make_bytearray(data_string, 5) + + # Checks that we *can* write a string + v[:] = data_bytes + + if readmode == "strings": + # Test "normal" read --> string array + result = v[:] + expected = np.array(data_string) + else: + # Test "raw" read --> byte array + v = self.undecoded_testvar(ds_encoded, "v0_scalar") + result = v[:] + expected = data_bytes + + check_array_matching(result, expected) + + def test_multidim(self, tempdir, readmode): + # Like 'test_write_strings', but the variable has additional dimensions. 
+ path = tempdir / f"test_read_multidim_{readmode}.nc" + + strlen = 5 + ds_encoded = make_encoded_dataset(path, strlen=strlen) + ds_encoded.createDimension("y", 2) + v = ds_encoded.createVariable( + "vyxn", + "S1", + ( + "y", + "x", + "strlen", + ), + ) + + # Check that we *can* write a multidimensional string array + test_strings = [ + ["one", "n", ""], + ["two", "xxxxx", "four"], + ] + test_bytes = make_bytearray(test_strings, strlen) + v[:] = test_bytes + + if readmode == "strings": + # Test "normal" read --> string array + result = v[:] + expected = np.array(test_strings) + else: + # Test "raw" read --> byte array + v = self.undecoded_testvar(ds_encoded, "vyxn") + result = v[:] + expected = test_bytes + + check_array_matching(result, expected) + + def test_read_encoding_failure(self, tempdir, readmode): + path = tempdir / f"test_read_encoding_failure_{readmode}.nc" + strlen = 10 + ds_encoded = make_encoded_dataset(path, strlen=strlen, encoding="ascii") + v = ds_encoded.variables["vxs"] + test_utf8_bytes = make_bytearray( + samples_3_nonascii, bytewidth=strlen, encoding="utf-8" + ) + v[:] = test_utf8_bytes + + if readmode == "strings": + msg = ( + "Character data in variable 'vxs' could not be decoded " + "with the 'ascii' encoding." + ) + with pytest.raises(ValueError, match=msg): + v[:] + else: + v = self.undecoded_testvar(ds_encoded, "vxs") + result = v[:] # this ought to be ok! + + assert np.all(result == test_utf8_bytes) + + def test_read_badencoding_ignore(self, tempdir): + path = tempdir / f"test_read_badencoding_ignore.nc" + strlen = 10 + ds = make_encoded_dataset(path, strlen=strlen, encoding="unknown") + v = ds.variables["vxs"] + test_utf8_bytes = make_bytearray( + samples_3_nonascii, bytewidth=strlen, encoding="utf-8" + ) + v[:] = test_utf8_bytes + + msg = r"Ignoring unknown encoding for variable 'vxs': _Encoding = 'unknown'\." 
+ with pytest.warns(IrisCfLoadWarning, match=msg): + # raises warning but succeeds, due to default read encoding of 'utf-8' + v[:] diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py index 4d963e7f08..cddc6ccad4 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py @@ -4,33 +4,23 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.PPDataProxy` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.pp import PPDataProxy, SplittableInt -class Test_lbpack(tests.IrisTest): - def test_lbpack_SplittableInt(self): - lbpack = mock.Mock(spec_set=SplittableInt) +class Test_lbpack: + def test_lbpack_splittable_int(self, mocker): + lbpack = mocker.Mock(spec_set=SplittableInt) proxy = PPDataProxy(None, None, None, None, None, lbpack, None, None) - self.assertEqual(proxy.lbpack, lbpack) - self.assertIs(proxy.lbpack, lbpack) + assert proxy.lbpack == lbpack + assert proxy.lbpack is lbpack def test_lbpack_raw(self): lbpack = 4321 proxy = PPDataProxy(None, None, None, None, None, lbpack, None, None) - self.assertEqual(proxy.lbpack, lbpack) - self.assertIsNot(proxy.lbpack, lbpack) - self.assertIsInstance(proxy.lbpack, SplittableInt) - self.assertEqual(proxy.lbpack.n1, lbpack % 10) - self.assertEqual(proxy.lbpack.n2, lbpack // 10 % 10) - self.assertEqual(proxy.lbpack.n3, lbpack // 100 % 10) - self.assertEqual(proxy.lbpack.n4, lbpack // 1000 % 10) - - -if __name__ == "__main__": - tests.main() + assert proxy.lbpack == lbpack + assert proxy.lbpack is not lbpack + assert isinstance(proxy.lbpack, SplittableInt) + assert proxy.lbpack.n1 == lbpack % 10 + assert proxy.lbpack.n2 == lbpack // 10 % 10 + assert proxy.lbpack.n3 == lbpack 
// 100 % 10 + assert proxy.lbpack.n4 == lbpack // 1000 % 10 diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py index e3d782b156..5a63f6a8cd 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ b/lib/iris/tests/unit/fileformats/pp/test_PPField.py @@ -4,16 +4,13 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.PPField` class.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest import iris.fileformats.pp as pp from iris.fileformats.pp import PPField, SplittableInt +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin from iris.warnings import IrisDefaultingWarning, IrisMaskValueMatchWarning # The PPField class is abstract, so to test we define a minimal, @@ -75,51 +72,51 @@ def t2(self): return None -class Test_save(tests.IrisTest): - def test_float64(self): +class Test_save: + def test_float64(self, tmp_path): # Tests down-casting of >f8 data to >f4. def field_checksum(data): field = DummyPPField()._ready_for_save() field.data = data - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) - checksum = self.file_checksum(temp_filename) + temp_filename = tmp_path / "temp.pp" + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) + checksum = _shared_utils.file_checksum(temp_filename) return checksum data_64 = np.linspace(0, 1, num=10, endpoint=False).reshape(2, 5) checksum_32 = field_checksum(data_64.astype(">f4")) msg = "Downcasting array precision from float64 to float32 for save." 
- with self.assertWarnsRegex(IrisDefaultingWarning, msg): + with pytest.warns(IrisDefaultingWarning, match=msg): checksum_64 = field_checksum(data_64.astype(">f8")) - self.assertEqual(checksum_32, checksum_64) + assert checksum_32 == checksum_64 - def test_masked_mdi_value_warning(self): + def test_masked_mdi_value_warning(self, tmp_path): # Check that an unmasked MDI value raises a warning. field = DummyPPField()._ready_for_save() # Make float32 data, as float64 default produces an extra warning. field.bmdi = np.float32(-123.4) field.data = np.ma.masked_array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) + temp_filename = tmp_path / "temp.pp" + with pytest.warns(IrisMaskValueMatchWarning, match=msg): + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) - def test_unmasked_mdi_value_warning(self): + def test_unmasked_mdi_value_warning(self, tmp_path): # Check that MDI in *unmasked* data raises a warning. field = DummyPPField()._ready_for_save() field.bmdi = -123.4 # Make float32 data, as float64 default produces an extra warning. field.data = np.array([1.0, field.bmdi, 3.0], dtype=np.float32) msg = "PPField data contains unmasked points" - with self.assertWarnsRegex(IrisMaskValueMatchWarning, msg): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) + temp_filename = tmp_path / "temp.pp" + with pytest.warns(IrisMaskValueMatchWarning, match=msg): + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) - def test_mdi_masked_value_nowarning(self): + def test_mdi_masked_value_nowarning(self, tmp_path): # Check that a *masked* MDI value does not raise a warning. 
field = DummyPPField()._ready_for_save() field.bmdi = -123.4 @@ -129,49 +126,48 @@ def test_mdi_masked_value_nowarning(self): ) # Set underlying data value at masked point to BMDI value. field.data.data[1] = field.bmdi - self.assertArrayAllClose(field.data.data[1], field.bmdi) - with self.assertNoWarningsRegexp(r"\(mask\|fill\)"): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) + _shared_utils.assert_array_all_close(field.data.data[1], field.bmdi) + with _shared_utils.assert_no_warnings_regexp(r"\(mask\|fill\)"): + temp_filename = tmp_path / "temp.pp" + with open(temp_filename, "wb") as pp_file: + field.save(pp_file) -class Test_calendar(tests.IrisTest): +class Test_calendar: def test_greg(self): field = DummyPPField() field.lbtim = SplittableInt(1, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "standard") + assert field.calendar == "standard" def test_360(self): field = DummyPPField() field.lbtim = SplittableInt(2, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "360_day") + assert field.calendar == "360_day" def test_365(self): field = DummyPPField() field.lbtim = SplittableInt(4, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "365_day") + assert field.calendar == "365_day" -class Test_coord_system(tests.IrisTest): +class Test_coord_system(MockerMixin): def _check_cs(self, bplat, bplon, rotated): field = DummyPPField() field.bplat = bplat field.bplon = bplon - with mock.patch("iris.fileformats.pp.iris.coord_systems") as mock_cs_mod: - result = field.coord_system() + mock_cs_mod = self.mocker.patch("iris.fileformats.pp.iris.coord_systems") + result = field.coord_system() if not rotated: # It should return a standard unrotated CS. 
- self.assertTrue(mock_cs_mod.GeogCS.call_count == 1) - self.assertEqual(result, mock_cs_mod.GeogCS()) + assert mock_cs_mod.GeogCS.call_count == 1 + assert result == mock_cs_mod.GeogCS() else: # It should return a rotated CS with the correct makeup. - self.assertTrue(mock_cs_mod.GeogCS.call_count == 1) - self.assertTrue(mock_cs_mod.RotatedGeogCS.call_count == 1) - self.assertEqual(result, mock_cs_mod.RotatedGeogCS()) - self.assertEqual( - mock_cs_mod.RotatedGeogCS.call_args_list[0], - mock.call(bplat, bplon, ellipsoid=mock_cs_mod.GeogCS()), + assert mock_cs_mod.GeogCS.call_count == 1 + assert mock_cs_mod.RotatedGeogCS.call_count == 1 + assert result == mock_cs_mod.RotatedGeogCS() + assert mock_cs_mod.RotatedGeogCS.call_args_list[0] == self.mocker.call( + bplat, bplon, ellipsoid=mock_cs_mod.GeogCS() ) def test_normal_unrotated(self): @@ -191,41 +187,43 @@ def test_odd_bplon_rotated(self): self._check_cs(bplat=90, bplon=123.45, rotated=True) -class Test__init__(tests.IrisTest): - def setUp(self): +class Test__init__: + @pytest.fixture(autouse=True) + def _setup(self): header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) self.header = list(header_longs) + list(header_floats) def test_no_headers(self): field = DummyPPField() - self.assertIsNone(field._raw_header) - self.assertIsNone(field.raw_lbtim) - self.assertIsNone(field.raw_lbpack) + assert field._raw_header is None + assert field.raw_lbtim is None + assert field.raw_lbpack is None def test_lbtim_lookup(self): - self.assertEqual(DummyPPField.HEADER_DICT["lbtim"], (12,)) + assert DummyPPField.HEADER_DICT["lbtim"] == (12,) def test_lbpack_lookup(self): - self.assertEqual(DummyPPField.HEADER_DICT["lbpack"], (20,)) + assert DummyPPField.HEADER_DICT["lbpack"] == (20,) def test_raw_lbtim(self): raw_lbtim = 4321 (loc,) = DummyPPField.HEADER_DICT["lbtim"] self.header[loc] = raw_lbtim field = DummyPPField(header=self.header) - 
self.assertEqual(field.raw_lbtim, raw_lbtim) + assert field.raw_lbtim == raw_lbtim def test_raw_lbpack(self): raw_lbpack = 4321 (loc,) = DummyPPField.HEADER_DICT["lbpack"] self.header[loc] = raw_lbpack field = DummyPPField(header=self.header) - self.assertEqual(field.raw_lbpack, raw_lbpack) + assert field.raw_lbpack == raw_lbpack -class Test__getattr__(tests.IrisTest): - def setUp(self): +class Test__getattr__: + @pytest.fixture(autouse=True) + def _setup(self): header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) self.header = list(header_longs) + list(header_floats) @@ -235,21 +233,21 @@ def test_attr_singular_long(self): (loc,) = DummyPPField.HEADER_DICT["lbrow"] self.header[loc] = lbrow field = DummyPPField(header=self.header) - self.assertEqual(field.lbrow, lbrow) + assert field.lbrow == lbrow def test_attr_multi_long(self): lbuser = (100, 101, 102, 103, 104, 105, 106) loc = DummyPPField.HEADER_DICT["lbuser"] self.header[loc[0] : loc[-1] + 1] = lbuser field = DummyPPField(header=self.header) - self.assertEqual(field.lbuser, lbuser) + assert field.lbuser == lbuser def test_attr_singular_float(self): bdatum = 1234 (loc,) = DummyPPField.HEADER_DICT["bdatum"] self.header[loc] = bdatum field = DummyPPField(header=self.header) - self.assertEqual(field.bdatum, bdatum) + assert field.bdatum == bdatum def test_attr_multi_float(self): brsvd = (100, 101, 102, 103) @@ -258,7 +256,7 @@ def test_attr_multi_float(self): stop = loc[-1] + 1 self.header[start:stop] = brsvd field = DummyPPField(header=self.header) - self.assertEqual(field.brsvd, brsvd) + assert field.brsvd == brsvd def test_attr_lbtim(self): raw_lbtim = 4321 @@ -266,11 +264,11 @@ def test_attr_lbtim(self): self.header[loc] = raw_lbtim field = DummyPPField(header=self.header) result = field.lbtim - self.assertEqual(result, raw_lbtim) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbtim + assert isinstance(result, 
SplittableInt) result = field._lbtim - self.assertEqual(result, raw_lbtim) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbtim + assert isinstance(result, SplittableInt) def test_attr_lbpack(self): raw_lbpack = 4321 @@ -278,55 +276,57 @@ def test_attr_lbpack(self): self.header[loc] = raw_lbpack field = DummyPPField(header=self.header) result = field.lbpack - self.assertEqual(result, raw_lbpack) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbpack + assert isinstance(result, SplittableInt) result = field._lbpack - self.assertEqual(result, raw_lbpack) - self.assertIsInstance(result, SplittableInt) + assert result == raw_lbpack + assert isinstance(result, SplittableInt) def test_attr_raw_lbtim_assign(self): field = DummyPPField(header=self.header) - self.assertEqual(field.raw_lbpack, 0) - self.assertEqual(field.lbtim, 0) + assert field.raw_lbpack == 0 + assert field.lbtim == 0 raw_lbtim = 4321 field.lbtim = raw_lbtim - self.assertEqual(field.raw_lbtim, raw_lbtim) - self.assertNotIsInstance(field.raw_lbtim, SplittableInt) + assert field.raw_lbtim == raw_lbtim + assert not isinstance(field.raw_lbtim, SplittableInt) def test_attr_raw_lbpack_assign(self): field = DummyPPField(header=self.header) - self.assertEqual(field.raw_lbpack, 0) - self.assertEqual(field.lbpack, 0) + assert field.raw_lbpack == 0 + assert field.lbpack == 0 raw_lbpack = 4321 field.lbpack = raw_lbpack - self.assertEqual(field.raw_lbpack, raw_lbpack) - self.assertNotIsInstance(field.raw_lbpack, SplittableInt) + assert field.raw_lbpack == raw_lbpack + assert not isinstance(field.raw_lbpack, SplittableInt) def test_attr_unknown(self): - with self.assertRaises(AttributeError): + with pytest.raises( + AttributeError, match="'DummyPPField' object has no attribute 'x'" + ): DummyPPField().x -class Test_lbtim(tests.IrisTest): +class Test_lbtim: def test_get_splittable(self): headers = [0] * 64 headers[12] = 12345 field = DummyPPField(headers) - 
self.assertIsInstance(field.lbtim, SplittableInt) - self.assertEqual(field.lbtim.ia, 123) - self.assertEqual(field.lbtim.ib, 4) - self.assertEqual(field.lbtim.ic, 5) + assert isinstance(field.lbtim, SplittableInt) + assert field.lbtim.ia == 123 + assert field.lbtim.ib == 4 + assert field.lbtim.ic == 5 def test_set_int(self): headers = [0] * 64 headers[12] = 12345 field = DummyPPField(headers) field.lbtim = 34567 - self.assertIsInstance(field.lbtim, SplittableInt) - self.assertEqual(field.lbtim.ia, 345) - self.assertEqual(field.lbtim.ib, 6) - self.assertEqual(field.lbtim.ic, 7) - self.assertEqual(field.raw_lbtim, 34567) + assert isinstance(field.lbtim, SplittableInt) + assert field.lbtim.ia == 345 + assert field.lbtim.ib == 6 + assert field.lbtim.ic == 7 + assert field.raw_lbtim == 34567 def test_set_splittable(self): # Check that assigning a SplittableInt to lbtim uses the integer @@ -337,14 +337,12 @@ def test_set_splittable(self): field = DummyPPField(headers) si = SplittableInt(34567, {"foo": 0}) field.lbtim = si - self.assertIsInstance(field.lbtim, SplittableInt) - with self.assertRaises(AttributeError): + assert isinstance(field.lbtim, SplittableInt) + with pytest.raises( + AttributeError, match="'SplittableInt' object has no attribute 'foo'" + ): field.lbtim.foo - self.assertEqual(field.lbtim.ia, 345) - self.assertEqual(field.lbtim.ib, 6) - self.assertEqual(field.lbtim.ic, 7) - self.assertEqual(field.raw_lbtim, 34567) - - -if __name__ == "__main__": - tests.main() + assert field.lbtim.ia == 345 + assert field.lbtim.ib == 6 + assert field.lbtim.ic == 7 + assert field.raw_lbtim == 34567 diff --git a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py index 51b5c5732f..2e88a8378d 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py +++ b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py @@ -4,68 +4,62 @@ # See LICENSE in the root of the repository 
for full licensing details. """Unit tests for the `iris.fileformats.pp.load` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import iris from iris.fileformats.pp import STASH, _convert_constraints -class Test_convert_constraints(tests.IrisTest): +class Test_convert_constraints: def _single_stash(self): constraint = iris.AttributeConstraint(STASH="m01s03i236") return _convert_constraints(constraint) - def test_single_stash(self): + def test_single_stash(self, mocker): pp_filter = self._single_stash() - stcube = mock.Mock(stash=STASH.from_msi("m01s03i236")) - self.assertTrue(pp_filter(stcube)) + stcube = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + assert pp_filter(stcube) - def test_stash_object(self): + def test_stash_object(self, mocker): constraint = iris.AttributeConstraint(STASH=STASH.from_msi("m01s03i236")) pp_filter = _convert_constraints(constraint) - stcube = mock.Mock(stash=STASH.from_msi("m01s03i236")) - self.assertTrue(pp_filter(stcube)) + stcube = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + assert pp_filter(stcube) - def test_surface_altitude(self): + def test_surface_altitude(self, mocker): # Ensure that surface altitude fields are not filtered. pp_filter = self._single_stash() - orography_cube = mock.Mock(stash=STASH.from_msi("m01s00i033")) - self.assertTrue(pp_filter(orography_cube)) + orography_cube = mocker.Mock(stash=STASH.from_msi("m01s00i033")) + assert pp_filter(orography_cube) - def test_surface_pressure(self): + def test_surface_pressure(self, mocker): # Ensure that surface pressure fields are not filtered. 
pp_filter = self._single_stash() - pressure_cube = mock.Mock(stash=STASH.from_msi("m01s00i001")) - self.assertTrue(pp_filter(pressure_cube)) + pressure_cube = mocker.Mock(stash=STASH.from_msi("m01s00i001")) + assert pp_filter(pressure_cube) - def test_double_stash(self): - stcube236 = mock.Mock(stash=STASH.from_msi("m01s03i236")) - stcube4 = mock.Mock(stash=STASH.from_msi("m01s00i004")) - stcube7 = mock.Mock(stash=STASH.from_msi("m01s00i007")) + def test_double_stash(self, mocker): + stcube236 = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + stcube4 = mocker.Mock(stash=STASH.from_msi("m01s00i004")) + stcube7 = mocker.Mock(stash=STASH.from_msi("m01s00i007")) constraints = [ iris.AttributeConstraint(STASH="m01s03i236"), iris.AttributeConstraint(STASH="m01s00i004"), ] pp_filter = _convert_constraints(constraints) - self.assertTrue(pp_filter(stcube236)) - self.assertTrue(pp_filter(stcube4)) - self.assertFalse(pp_filter(stcube7)) - - def test_callable_stash(self): - stcube236 = mock.Mock(stash=STASH.from_msi("m01s03i236")) - stcube4 = mock.Mock(stash=STASH.from_msi("m01s00i004")) - stcube7 = mock.Mock(stash=STASH.from_msi("m01s00i007")) + assert pp_filter(stcube236) + assert pp_filter(stcube4) + assert not pp_filter(stcube7) + + def test_callable_stash(self, mocker): + stcube236 = mocker.Mock(stash=STASH.from_msi("m01s03i236")) + stcube4 = mocker.Mock(stash=STASH.from_msi("m01s00i004")) + stcube7 = mocker.Mock(stash=STASH.from_msi("m01s00i007")) con1 = iris.AttributeConstraint(STASH=lambda s: s.endswith("004")) con2 = iris.AttributeConstraint(STASH=lambda s: s == "m01s00i007") constraints = [con1, con2] pp_filter = _convert_constraints(constraints) - self.assertFalse(pp_filter(stcube236)) - self.assertTrue(pp_filter(stcube4)) - self.assertTrue(pp_filter(stcube7)) + assert not pp_filter(stcube236) + assert pp_filter(stcube4) + assert pp_filter(stcube7) def test_multiple_with_stash(self): constraints = [ @@ -73,7 +67,7 @@ def test_multiple_with_stash(self): 
iris.AttributeConstraint(STASH="m01s00i004"), ] pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) + assert pp_filter is None def test_no_stash(self): constraints = [ @@ -81,13 +75,9 @@ def test_no_stash(self): iris.AttributeConstraint(source="asource"), ] pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) + assert pp_filter is None def test_no_constraint(self): constraints = [] pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) - - -if __name__ == "__main__": - tests.main() + assert pp_filter is None diff --git a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py index 266502253a..a0a8fa3a5b 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py +++ b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py @@ -4,33 +4,25 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._create_field_data` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np import iris.fileformats.pp as pp -class Test__create_field_data(tests.IrisTest): - def test_loaded_bytes(self): +class Test__create_field_data: + def test_loaded_bytes(self, mocker): # Check that a field with LoadedArrayBytes in core_data gets the # result of a suitable call to _data_bytes_to_shaped_array(). 
- mock_loaded_bytes = mock.Mock(spec=pp.LoadedArrayBytes) - core_data = mock.MagicMock(return_value=mock_loaded_bytes) - field = mock.Mock(core_data=core_data) - data_shape = mock.Mock() - land_mask = mock.Mock() - with mock.patch( - "iris.fileformats.pp._data_bytes_to_shaped_array" - ) as convert_bytes: - convert_bytes.return_value = mock.sentinel.array - pp._create_field_data(field, data_shape, land_mask) + mock_loaded_bytes = mocker.Mock(spec=pp.LoadedArrayBytes) + core_data = mocker.MagicMock(return_value=mock_loaded_bytes) + field = mocker.Mock(core_data=core_data) + data_shape = mocker.Mock() + land_mask = mocker.Mock() + convert_bytes = mocker.patch("iris.fileformats.pp._data_bytes_to_shaped_array") + convert_bytes.return_value = mocker.sentinel.array + pp._create_field_data(field, data_shape, land_mask) - self.assertIs(field.data, mock.sentinel.array) + assert field.data is mocker.sentinel.array convert_bytes.assert_called_once_with( mock_loaded_bytes.bytes, field.lbpack, @@ -41,19 +33,19 @@ def test_loaded_bytes(self): land_mask, ) - def test_deferred_bytes(self): + def test_deferred_bytes(self, mocker): # Check that a field with deferred array bytes in core_data gets a # dask array. 
- fname = mock.sentinel.fname - position = mock.sentinel.position - n_bytes = mock.sentinel.n_bytes - newbyteorder = mock.Mock(return_value=mock.sentinel.dtype) - dtype = mock.Mock(newbyteorder=newbyteorder) + fname = mocker.sentinel.fname + position = mocker.sentinel.position + n_bytes = mocker.sentinel.n_bytes + newbyteorder = mocker.Mock(return_value=mocker.sentinel.dtype) + dtype = mocker.Mock(newbyteorder=newbyteorder) deferred_bytes = (fname, position, n_bytes, dtype) - core_data = mock.MagicMock(return_value=deferred_bytes) - field = mock.Mock(core_data=core_data) + core_data = mocker.MagicMock(return_value=deferred_bytes) + field = mocker.Mock(core_data=core_data) data_shape = (100, 120) - proxy = mock.Mock( + proxy = mocker.Mock( dtype=np.dtype("f4"), dask_meta=np.empty((0,) * len(data_shape), dtype=np.dtype("f4")), shape=data_shape, @@ -63,13 +55,13 @@ def test_deferred_bytes(self): # We can't directly inspect the concrete data source underlying # the dask array, so instead we patch the proxy creation and check it's # being created and invoked correctly. - with mock.patch("iris.fileformats.pp.PPDataProxy") as PPDataProxy: - PPDataProxy.return_value = proxy - pp._create_field_data(field, data_shape, land_mask_field=None) + PPDataProxy = mocker.patch("iris.fileformats.pp.PPDataProxy") + PPDataProxy.return_value = proxy + pp._create_field_data(field, data_shape, land_mask_field=None) # The data should be assigned via field.data. As this is a mock object # we can check the attribute directly. - self.assertEqual(field.data.shape, data_shape) - self.assertEqual(field.data.dtype, np.dtype("f4")) + assert field.data.shape == data_shape + assert field.data.dtype == np.dtype("f4") # Is it making use of a correctly configured proxy? # NB. We know it's *using* the result of this call because # that's where the dtype came from above. 
@@ -83,7 +75,3 @@ def test_deferred_bytes(self): field.boundary_packing, field.bmdi, ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py index 3bd8fcb8d7..205c910dbc 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py @@ -4,18 +4,15 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import io -from unittest import mock import numpy as np import numpy.ma as ma import pytest import iris.fileformats.pp as pp +from iris.tests import _shared_utils +from iris.tests.unit.fileformats import MockerMixin @pytest.mark.parametrize("data_shape", [(2, 3)]) @@ -54,8 +51,9 @@ def test_data_padding__no_compression(data_shape, expected_shape, data_type): _ = pp._data_bytes_to_shaped_array(*args) -class Test__data_bytes_to_shaped_array__lateral_boundary_compression(tests.IrisTest): - def setUp(self): +class Test__data_bytes_to_shaped_array__lateral_boundary_compression: + @pytest.fixture(autouse=True) + def _setup(self): self.data_shape = 30, 40 y_halo, x_halo, rim = 2, 3, 4 @@ -84,9 +82,9 @@ def setUp(self): buf.seek(0) self.data_payload_bytes = buf.read() - def test_boundary_decompression(self): - boundary_packing = mock.Mock(rim_width=4, x_halo=3, y_halo=2) - lbpack = mock.Mock(n1=0) + def test_boundary_decompression(self, mocker): + boundary_packing = mocker.Mock(rim_width=4, x_halo=3, y_halo=2) + lbpack = mocker.Mock(n1=0) r = pp._data_bytes_to_shaped_array( self.data_payload_bytes, lbpack, @@ -96,11 +94,12 @@ def test_boundary_decompression(self): -9223372036854775808, ) 
r = ma.masked_array(r, np.isnan(r), fill_value=-9223372036854775808) - self.assertMaskedArrayEqual(r, self.decompressed) + _shared_utils.assert_masked_array_equal(r, self.decompressed) -class Test__data_bytes_to_shaped_array__land_packed(tests.IrisTest): - def setUp(self): +class Test__data_bytes_to_shaped_array__land_packed(MockerMixin): + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Sets up some useful arrays for use with the land/sea mask # decompression. self.land = np.array( @@ -123,7 +122,7 @@ def setUp(self): dtype=np.float64, ) - self.land_mask = mock.Mock( + self.land_mask = mocker.Mock( data=self.land, lbrow=self.land.shape[0], lbnpt=self.land.shape[1] ) @@ -131,11 +130,11 @@ def create_lbpack(self, value): name_mapping = dict(n5=slice(4, None), n4=3, n3=2, n2=1, n1=0) return pp.SplittableInt(value, name_mapping) - def test_no_land_mask(self): + def test_no_land_mask(self, mocker): # Check that without a mask, it returns the raw (compressed) data. - with mock.patch("numpy.frombuffer", return_value=np.arange(3)): + with mocker.patch("numpy.frombuffer", return_value=np.arange(3)): result = pp._data_bytes_to_shaped_array( - mock.Mock(), + mocker.Mock(), self.create_lbpack(120), None, (3, 4), @@ -143,44 +142,44 @@ def test_no_land_mask(self): -999, mask=None, ) - self.assertArrayAllClose(result, np.arange(3)) + _shared_utils.assert_array_all_close(result, np.arange(3)) def test_land_mask(self): # Check basic land unpacking. field_data = self.land_masked_data result = self.check_read_data(field_data, 120, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_land_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_land_data) def test_land_masked_data_too_long(self): # Check land unpacking with field data that is larger than the mask. 
field_data = np.tile(self.land_masked_data, 2) result = self.check_read_data(field_data, 120, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_land_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_land_data) def test_sea_mask(self): # Check basic land unpacking. field_data = self.sea_masked_data result = self.check_read_data(field_data, 220, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_sea_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_sea_data) def test_sea_masked_data_too_long(self): # Check sea unpacking with field data that is larger than the mask. field_data = np.tile(self.sea_masked_data, 2) result = self.check_read_data(field_data, 220, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_sea_data) + _shared_utils.assert_masked_array_equal(result, self.decomp_sea_data) def test_bad_lbpack(self): # Check basic land unpacking. field_data = self.sea_masked_data - with self.assertRaises(ValueError): + with pytest.raises(ValueError, match="Unsupported mask compression."): self.check_read_data(field_data, 320, self.land_mask) def check_read_data(self, field_data, lbpack, mask): # Calls pp._data_bytes_to_shaped_array with the necessary mocked # items, an lbpack instance, the correct data shape and mask instance. 
- with mock.patch("numpy.frombuffer", return_value=field_data): + with self.mocker.patch("numpy.frombuffer", return_value=field_data): data = pp._data_bytes_to_shaped_array( - mock.Mock(), + self.mocker.Mock(), self.create_lbpack(lbpack), None, mask.shape, @@ -189,7 +188,3 @@ def check_read_data(self, field_data, lbpack, mask): mask=mask, ) return ma.masked_array(data, np.isnan(data), fill_value=-999) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py index f1018d8df4..6618c79a38 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py +++ b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py @@ -4,75 +4,70 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._field_gen` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import contextlib import io -from unittest import mock -import warnings import numpy as np +import pytest import iris.fileformats.pp as pp -class Test(tests.IrisTest): - @contextlib.contextmanager - def mock_for_field_gen(self, fields): - side_effect_fields = list(fields)[:] +class Test: + @pytest.fixture + def mock_for_field_gen(self, mocker): + @contextlib.contextmanager + def _mock_for_field_gen(fields): + side_effect_fields = list(fields)[:] - def make_pp_field_override(*args): - # Iterates over the fields passed to this context manager, - # until there are no more, upon which the np.fromfile - # returns an empty list and the while loop in load() is - # broken. 
- result = side_effect_fields.pop(0) - if not side_effect_fields: - np.fromfile.return_value = [] - return result + def make_pp_field_override(*args): + # Iterates over the fields passed to this context manager, + # until there are no more, upon which the np.fromfile + # returns an empty list and the while loop in load() is + # broken. + result = side_effect_fields.pop(0) + if not side_effect_fields: + np.fromfile.return_value = [] + return result - open_func = "builtins.open" - with ( - mock.patch("numpy.fromfile", return_value=[0]), - mock.patch(open_func), - mock.patch("struct.unpack_from", return_value=[4]), - mock.patch( + open_func = "builtins.open" + mocker.patch("numpy.fromfile", return_value=[0]) + mocker.patch(open_func) + mocker.patch("struct.unpack_from", return_value=[4]) + mocker.patch( "iris.fileformats.pp.make_pp_field", side_effect=make_pp_field_override, - ), - ): + ) yield - def gen_fields(self, fields): - with self.mock_for_field_gen(fields): + return _mock_for_field_gen + + def gen_fields(self, fields, mock_for_field_gen): + with mock_for_field_gen(fields): return list(pp._field_gen("mocked", "mocked")) - def test_lblrec_invalid(self): - pp_field = mock.Mock(lblrec=2, lbext=0) - with warnings.catch_warnings(record=True) as warn: - warnings.simplefilter("always") - self.gen_fields([pp_field]) - self.assertEqual(len(warn), 1) + def test_lblrec_invalid(self, mocker, mock_for_field_gen): + pp_field = mocker.Mock(lblrec=2, lbext=0) wmsg = ( "LBLREC has a different value to the .* the header in the " r"file \(8 and 4\)\. Skipping .*" ) - self.assertRegex(str(warn[0].message), wmsg) + with pytest.warns(UserWarning, match=wmsg) as warn: + self.gen_fields([pp_field], mock_for_field_gen) + assert len(warn) == 1 - def test_read_headers_call(self): + def test_read_headers_call(self, mocker, mock_for_field_gen): # Checks that the two calls to np.fromfile are called in the # expected way. 
- pp_field = mock.Mock(lblrec=1, lbext=0, lbuser=[0]) - with self.mock_for_field_gen([pp_field]): - open_fh = mock.MagicMock(spec=io.RawIOBase) + pp_field = mocker.Mock(lblrec=1, lbext=0, lbuser=[0]) + with mock_for_field_gen([pp_field]): + open_fh = mocker.MagicMock(spec=io.RawIOBase) open.return_value = open_fh next(pp._field_gen("mocked", read_data_bytes=False)) with open_fh as open_fh_ctx: calls = [ - mock.call(open_fh_ctx, count=45, dtype=">i4"), - mock.call(open_fh_ctx, count=19, dtype=">f4"), + mocker.call(open_fh_ctx, count=45, dtype=">i4"), + mocker.call(open_fh_ctx, count=19, dtype=">f4"), ] np.fromfile.assert_has_calls(calls) with open_fh as open_fh_ctx: @@ -82,33 +77,30 @@ def test_read_headers_call(self): 4, np.dtype(">f4"), ) - self.assertEqual(pp_field.data, expected_deferred_bytes) + assert pp_field.data == expected_deferred_bytes - def test_read_data_call(self): + def test_read_data_call(self, mocker, mock_for_field_gen): # Checks that data is read if read_data is True. - pp_field = mock.Mock(lblrec=1, lbext=0, lbuser=[0]) - with self.mock_for_field_gen([pp_field]): - open_fh = mock.MagicMock(spec=io.RawIOBase) + pp_field = mocker.Mock(lblrec=1, lbext=0, lbuser=[0]) + with mock_for_field_gen([pp_field]): + open_fh = mocker.MagicMock(spec=io.RawIOBase) open.return_value = open_fh next(pp._field_gen("mocked", read_data_bytes=True)) with open_fh as open_fh_ctx: expected_loaded_bytes = pp.LoadedArrayBytes( open_fh_ctx.read(), np.dtype(">f4") ) - self.assertEqual(pp_field.data, expected_loaded_bytes) + assert pp_field.data == expected_loaded_bytes - def test_invalid_header_release(self): + def test_invalid_header_release(self, tmp_path): # Check that an unknown LBREL value just results in a warning # and the end of the file iteration instead of raising an error. 
- with self.temp_filename() as temp_path: - np.zeros(65, dtype="i4").tofile(temp_path) - generator = pp._field_gen(temp_path, False) - with mock.patch("warnings.warn") as warn: - with self.assertRaises(StopIteration): - next(generator) - self.assertEqual(warn.call_count, 1) - self.assertIn("header release number", warn.call_args[0][0]) - - -if __name__ == "__main__": - tests.main() + temp_path = tmp_path / "temp" + np.zeros(65, dtype="i4").tofile(temp_path) + generator = pp._field_gen(temp_path, False) + with pytest.warns( + pp._WarnComboIgnoringLoad, match="header release number" + ) as warn: + with pytest.raises(StopIteration): + next(generator) + assert len(warn) == 1 diff --git a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py index 68520300b6..f0042593c3 100644 --- a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py +++ b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py @@ -4,36 +4,34 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp._interpret_field` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - from copy import deepcopy -from unittest import mock import numpy as np +import pytest import iris.fileformats.pp as pp +from iris.warnings import IrisLoadWarning -class Test__interpret_fields__land_packed_fields(tests.IrisTest): - def setUp(self): +class Test__interpret_fields__land_packed_fields: + @pytest.fixture(autouse=True) + def _setup(self, mocker): return_value = ("dummy", 0, 0, np.dtype("f4")) - core_data = mock.MagicMock(return_value=return_value) + core_data = mocker.MagicMock(return_value=return_value) # A field packed using a land/sea mask. 
- self.pp_field = mock.Mock( + self.pp_field = mocker.Mock( lblrec=1, lbext=0, lbuser=[0] * 7, lbrow=0, lbnpt=0, raw_lbpack=21, - lbpack=mock.Mock(n1=0, n2=2, n3=1), + lbpack=mocker.Mock(n1=0, n2=2, n3=1), core_data=core_data, ) # The field specifying the land/seamask. lbuser = [None, None, None, 30, None, None, 1] # m01s00i030 - self.land_mask_field = mock.Mock( + self.land_mask_field = mocker.Mock( lblrec=1, lbext=0, lbuser=lbuser, @@ -47,33 +45,29 @@ def test_non_deferred_fix_lbrow_lbnpt(self): # Checks the fix_lbrow_lbnpt is applied to fields which are not # deferred. f1, mask = self.pp_field, self.land_mask_field - self.assertEqual(f1.lbrow, 0) - self.assertEqual(f1.lbnpt, 0) + assert f1.lbrow == 0 + assert f1.lbnpt == 0 list(pp._interpret_fields([mask, f1])) - self.assertEqual(f1.lbrow, 3) - self.assertEqual(f1.lbnpt, 4) + assert f1.lbrow == 3 + assert f1.lbnpt == 4 # Check the data's shape has been updated too. - self.assertEqual(f1.data.shape, (3, 4)) + assert f1.data.shape == (3, 4) def test_fix_lbrow_lbnpt_no_mask_available(self): # Check a warning is issued when loading a land masked field # without a land mask. - with mock.patch("warnings.warn") as warn: + with pytest.warns( + IrisLoadWarning, + match="Landmask compressed fields existed without a landmask", + ) as warn: list(pp._interpret_fields([self.pp_field])) - self.assertEqual(warn.call_count, 1) - warn_msg = warn.call_args[0][0] - self.assertTrue( - warn_msg.startswith( - "Landmask compressed fields existed without a landmask" - ), - "Unexpected warning message: {!r}".format(warn_msg), - ) + assert len(warn) == 1 def test_deferred_mask_field(self): # Check that the order of the load is yielded last if the mask # hasn't yet been seen. 
result = list(pp._interpret_fields([self.pp_field, self.land_mask_field])) - self.assertEqual(result, [self.land_mask_field, self.pp_field]) + assert result == [self.land_mask_field, self.pp_field] def test_not_deferred_mask_field(self): # Check that the order of the load is unchanged if a land mask @@ -81,15 +75,11 @@ def test_not_deferred_mask_field(self): f1, mask = self.pp_field, self.land_mask_field mask2 = deepcopy(mask) result = list(pp._interpret_fields([mask, f1, mask2])) - self.assertEqual(result, [mask, f1, mask2]) + assert result == [mask, f1, mask2] def test_deferred_fix_lbrow_lbnpt(self): # Check the fix is also applied to fields which are deferred. f1, mask = self.pp_field, self.land_mask_field list(pp._interpret_fields([f1, mask])) - self.assertEqual(f1.lbrow, 3) - self.assertEqual(f1.lbnpt, 4) - - -if __name__ == "__main__": - tests.main() + assert f1.lbrow == 3 + assert f1.lbnpt == 4 diff --git a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py index 213eb6c9c4..091ca11b7a 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py @@ -4,30 +4,25 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.as_fields` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip +import pytest import iris.fileformats.pp as pp import iris.tests.stock as stock -class TestAsFields(tests.IrisTest): - def setUp(self): +class TestAsFields: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.realistic_3d() def test_cube_only(self): fields = pp.as_fields(self.cube) for field in fields: - self.assertEqual(field.lbcode, 101) + assert field.lbcode == 101 def test_field_coords(self): fields = pp.as_fields( self.cube, field_coords=["grid_longitude", "grid_latitude"] ) for field in fields: - self.assertEqual(field.lbcode, 101) - - -if __name__ == "__main__": - tests.main() + assert field.lbcode == 101 diff --git a/lib/iris/tests/unit/fileformats/pp/test_load.py b/lib/iris/tests/unit/fileformats/pp/test_load.py index e802b36c0e..19e21fe077 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_load.py +++ b/lib/iris/tests/unit/fileformats/pp/test_load.py @@ -4,38 +4,27 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.load` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - import iris.fileformats.pp as pp -class Test_load(tests.IrisTest): - def test_call_structure(self): +class Test_load: + def test_call_structure(self, mocker): # Check that the load function calls the two necessary utility # functions. 
- extract_result = mock.Mock() - interpret_patch = mock.patch( + extract_result = mocker.Mock() + interpret_patch = mocker.patch( "iris.fileformats.pp._interpret_fields", autospec=True, return_value=iter([]), ) - field_gen_patch = mock.patch( + field_gen_patch = mocker.patch( "iris.fileformats.pp._field_gen", autospec=True, return_value=extract_result, ) - with interpret_patch as interpret, field_gen_patch as field_gen: - pp.load("mock", read_data=True) + pp.load("mock", read_data=True) - interpret.assert_called_once_with(extract_result) - field_gen.assert_called_once_with( + interpret_patch.assert_called_once_with(extract_result) + field_gen_patch.assert_called_once_with( "mock", read_data_bytes=True, little_ended=False ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py index bd48ade809..32c7b36e40 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save.py @@ -21,7 +21,7 @@ @pytest.mark.parametrize( - "unit,modulus", + ("unit", "modulus"), [ (cf_units.Unit("radians"), 2 * np.pi), (cf_units.Unit("degrees"), 360.0), @@ -75,6 +75,7 @@ def test_bad_stash_string(mocker): def _pp_save_ppfield_values(cube): """Emulate saving a cube as PP, and capture the resulting PP field values.""" # Create a test object to stand in for a real PPField. + # todo: Still uses unittest.mock, it causes a lot of issues making this a fixture pp_field = mock.MagicMock(spec=pp.PPField3) # Add minimal content required by the pp.save operation. 
pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN @@ -88,7 +89,8 @@ def _pp_save_ppfield_values(cube): class TestVertical: - def setup_method(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.lat_lon_cube() def test_pseudo_level(self): @@ -124,7 +126,8 @@ def test_soil_depth(self): class TestLbfcProduction: - def setup_method(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.lat_lon_cube() def check_cube_stash_yields_lbfc(self, stash, lbfc_expected): @@ -168,7 +171,8 @@ def test_bad_name_units_to_lbfc_0(self): class TestLbsrceProduction: - def setup_method(self): + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.lat_lon_cube() def check_cube_um_source_yields_lbsrce( @@ -212,7 +216,7 @@ class Test_Save__LbprocProduction: @pytest.fixture(autouse=True) def _setup(self, mocker): self.cube = stock.realistic_3d() - self.pp_field = mock.MagicMock(spec=pp.PPField3) + self.pp_field = mocker.MagicMock(spec=pp.PPField3) self.pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN mocker.patch("iris.fileformats.pp.PPField3", return_value=self.pp_field) @@ -427,7 +431,7 @@ def single_mean_time_cube(single_time_cube): return single_time_cube -@pytest.fixture() +@pytest.fixture def global_cube(): x_coord = DimCoord( np.arange(0, 360, 10), standard_name="longitude", units="degrees", circular=True diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py index 2eaebc0059..0deb8f6f0c 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py @@ -4,13 +4,8 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.save_fields` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import numpy as np +import pytest import iris.fileformats.pp as pp @@ -19,31 +14,28 @@ def asave(afilehandle): afilehandle.write("saved") -class TestSaveFields(tests.IrisTest): - def setUp(self): +class TestSaveFields: + @pytest.fixture(autouse=True) + def _setup(self, mocker): # Create a test object to stand in for a real PPField. - self.pp_field = mock.MagicMock(spec=pp.PPField3) + self.pp_field = mocker.MagicMock(spec=pp.PPField3) # Add minimal content required by the pp.save operation. self.pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN self.pp_field.data = np.zeros((1, 1)) self.pp_field.save = asave - def test_save(self): + def test_save(self, mocker): open_func = "builtins.open" - m = mock.mock_open() - with mock.patch(open_func, m, create=True): - pp.save_fields([self.pp_field], "foo.pp") - self.assertTrue(mock.call("foo.pp", "wb") in m.mock_calls) - self.assertTrue(mock.call().write("saved") in m.mock_calls) + m = mocker.mock_open() + mocker.patch(open_func, m, create=True) + pp.save_fields([self.pp_field], "foo.pp") + assert mocker.call("foo.pp", "wb") in m.mock_calls + assert mocker.call().write("saved") in m.mock_calls - def test_save_append(self): + def test_save_append(self, mocker): open_func = "builtins.open" - m = mock.mock_open() - with mock.patch(open_func, m, create=True): - pp.save_fields([self.pp_field], "foo.pp", append=True) - self.assertTrue(mock.call("foo.pp", "ab") in m.mock_calls) - self.assertTrue(mock.call().write("saved") in m.mock_calls) - - -if __name__ == "__main__": - tests.main() + m = mocker.mock_open() + mocker.patch(open_func, m, create=True) + pp.save_fields([self.pp_field], "foo.pp", append=True) + assert mocker.call("foo.pp", "ab") in m.mock_calls + assert mocker.call().write("saved") in m.mock_calls diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py index 
5ab3f7c480..7d697781aa 100644 --- a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py +++ b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py @@ -4,31 +4,30 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.fileformats.pp.save_pairs_from_cube` function.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip +import pytest from iris.fileformats.pp import save_pairs_from_cube import iris.tests.stock as stock -class TestSaveFields(tests.IrisTest): - def setUp(self): +class TestSaveFields: + @pytest.fixture(autouse=True) + def _setup(self): self.cube = stock.realistic_3d() def test_cube_only(self): slices_and_fields = save_pairs_from_cube(self.cube) for aslice, field in slices_and_fields: - self.assertEqual(aslice.shape, (9, 11)) - self.assertEqual(field.lbcode, 101) + assert aslice.shape == (9, 11) + assert field.lbcode == 101 def test_field_coords(self): slices_and_fields = save_pairs_from_cube( self.cube, field_coords=["grid_longitude", "grid_latitude"] ) for aslice, field in slices_and_fields: - self.assertEqual(aslice.shape, (11, 9)) - self.assertEqual(field.lbcode, 101) + assert aslice.shape == (11, 9) + assert field.lbcode == 101 def test_lazy_data(self): cube = self.cube.copy() @@ -37,13 +36,9 @@ def test_lazy_data(self): # Check that lazy data is preserved in save-pairs generation. 
slices_and_fields = save_pairs_from_cube(cube) for aslice, _ in slices_and_fields: - self.assertTrue(aslice.has_lazy_data()) + assert aslice.has_lazy_data() def test_default_bmdi(self): slices_and_fields = save_pairs_from_cube(self.cube) _, field = next(slices_and_fields) - self.assertEqual(field.bmdi, -1e30) - - -if __name__ == "__main__": - tests.main() + assert field.bmdi == -1e30 diff --git a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py index c14a5df705..75d68cdbee 100644 --- a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py +++ b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.rules._make_cube`.""" -from unittest import mock - import numpy as np import pytest @@ -13,7 +11,7 @@ class Test: - def test_invalid_units(self): + def test_invalid_units(self, mocker): # Mock converter() function that returns an invalid # units string amongst the collection of other elements. 
factories = None @@ -36,10 +34,10 @@ def test_invalid_units(self): dim_coords_and_dims, aux_coords_and_dims, ) - converter = mock.Mock(return_value=metadata) + converter = mocker.Mock(return_value=metadata) data = np.arange(3.0) - field = mock.Mock( + field = mocker.Mock( core_data=lambda: data, bmdi=9999.0, realised_dtype=data.dtype ) diff --git a/lib/iris/tests/unit/fileformats/rules/test_rules.py b/lib/iris/tests/unit/fileformats/rules/test_rules.py index df3c769a70..423725221f 100644 --- a/lib/iris/tests/unit/fileformats/rules/test_rules.py +++ b/lib/iris/tests/unit/fileformats/rules/test_rules.py @@ -110,8 +110,8 @@ def field_generator(filename): aux_factory = mock.Mock() factory = mock.Mock() factory.args = [{"name": "foo"}] - factory.factory_class = ( - lambda *args: setattr(aux_factory, "fake_args", args) or aux_factory + factory.factory_class = lambda *args: ( + setattr(aux_factory, "fake_args", args) or aux_factory ) def converter(field): diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py index bc461f84ee..dcc5d59a00 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py @@ -125,7 +125,8 @@ def test_not_an_array(self): assert ArrayStructure.from_array([1, 2, 3]) == ArrayStructure(1, [1, 2, 3]) def test_multi_dim_array(self): - with pytest.raises(ValueError): + msg = "The given array must be 1D." 
+ with pytest.raises(ValueError, match=msg): ArrayStructure.from_array(np.arange(12).reshape(3, 4)) def test_eq_incompatible_shapes(self): diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py index ef2d1d2e75..f4c6fa8a44 100644 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py @@ -7,10 +7,6 @@ """ -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - import numpy as np import pytest @@ -32,7 +28,7 @@ def regular_array_structures(shape, names="abcdefg"): return array_structures -class TestGroupStructure_from_component_arrays(tests.IrisTest): +class TestGroupStructure_from_component_arrays: def test_different_sizes(self): arrays = {"a": np.arange(6), "b": np.arange(5)} msg = "All array elements must have the same size." @@ -51,7 +47,7 @@ def test_structure_creation(self): assert grp._cmpt_structure == expected_structure -class TestGroupStructure_possible_structures(tests.IrisTest): +class TestGroupStructure_possible_structures: def test_simple_3d_structure(self): # Construct a structure representing a (3, 2, 4) group and assert # that the result is of the expected form. 
@@ -114,7 +110,7 @@ def test_completely_unstructured_element(self): self.assert_potentials(24, array_structures, [["a", "b", "c"]]) -class TestGroupStructure_build_arrays(tests.IrisTest): +class TestGroupStructure_build_arrays: def assert_built_array(self, name, result, expected): ex_arr, ex_dims = expected re_arr, re_dims = result[name] diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py index 35da9fab47..04efd4accc 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py @@ -11,23 +11,19 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - import numpy as np +import pytest import iris +from iris.tests import _shared_utils from iris.tests.integration.fast_load.test_fast_load import Mixin_FieldTest -class TestFastCallbackLocationInfo(Mixin_FieldTest, tests.IrisTest): +class TestFastCallbackLocationInfo(Mixin_FieldTest): do_fast_loads = True - def setUp(self): - # Call parent setup. - super().setUp() - + @pytest.fixture(autouse=True) + def _setup(self): # Create a basic load test case. 
self.callback_collations = [] self.callback_filepaths = [] @@ -41,21 +37,17 @@ def fast_load_callback(cube, collation, filename): iris.load(self.test_filepath, callback=fast_load_callback) def test_callback_collations_filepaths(self): - self.assertEqual(len(self.callback_collations), 2) - self.assertEqual(self.callback_collations[0].data_filepath, self.test_filepath) - self.assertEqual(self.callback_collations[1].data_filepath, self.test_filepath) + assert len(self.callback_collations) == 2 + assert self.callback_collations[0].data_filepath == self.test_filepath + assert self.callback_collations[1].data_filepath == self.test_filepath def test_callback_collations_field_indices(self): - self.assertEqual(self.callback_collations[0].data_field_indices.dtype, np.int64) - self.assertArrayEqual( + assert self.callback_collations[0].data_field_indices.dtype == np.int64 + _shared_utils.assert_array_equal( self.callback_collations[0].data_field_indices, [[1, 3], [5, 7]] ) - self.assertEqual(self.callback_collations[1].data_field_indices.dtype, np.int64) - self.assertArrayEqual( + assert self.callback_collations[1].data_field_indices.dtype == np.int64 + _shared_utils.assert_array_equal( self.callback_collations[1].data_field_indices, [[0, 2], [4, 6]] ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py index 6d2c95eaff..52961badea 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py @@ -4,12 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for :func:`iris.fileformats.um._fast_load._convert_collation`.""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - import cf_units import cftime import numpy as np @@ -34,7 +28,7 @@ ) -class Test(tests.IrisTest): +class Test: def _field(self): # Create PP field for X wind on a regular lat-lon grid. header = [0] * 64 @@ -55,28 +49,28 @@ def _field(self): def _check_phenomenon(self, metadata, factory=None): if factory is None: - self.assertEqual(metadata.factories, []) + assert metadata.factories == [] else: - self.assertEqual(metadata.factories, [factory]) - self.assertEqual(metadata.references, []) - self.assertEqual(metadata.standard_name, "x_wind") - self.assertIsNone(metadata.long_name) - self.assertEqual(metadata.units, cf_units.Unit("m s-1")) - self.assertEqual(metadata.attributes, {"STASH": (1, 0, 2)}) - self.assertEqual(metadata.cell_methods, []) + assert metadata.factories == [factory] + assert metadata.references == [] + assert metadata.standard_name == "x_wind" + assert metadata.long_name is None + assert metadata.units == cf_units.Unit("m s-1") + assert metadata.attributes == {"STASH": (1, 0, 2)} + assert metadata.cell_methods == [] - def test_all_scalar(self): + def test_all_scalar(self, mocker): field = self._field() field.lbtim = 11 field.t1 = cftime.datetime(1970, 1, 1, 18) field.t2 = cftime.datetime(1970, 1, 1, 12) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(), element_arrays_and_dims={} ) metadata = convert_collation(collation) self._check_phenomenon(metadata) coords_and_dims = [(LONGITUDE, 1), (LATITUDE, 0)] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord(18, "time", units="hours since epoch"), @@ -90,9 +84,9 @@ def test_all_scalar(self): ), (iris.coords.DimCoord(6, "forecast_period", units="hours"), None), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - 
def test_vector_t1(self): + def test_vector_t1(self, mocker): field = self._field() field.lbtim = 11 field.t2 = cftime.datetime(1970, 1, 1, 12) @@ -104,7 +98,7 @@ def test_vector_t1(self): ], [0], ) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"t1": t1}, @@ -119,7 +113,7 @@ def test_vector_t1(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord( @@ -132,9 +126,9 @@ def test_vector_t1(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vector_t2(self): + def test_vector_t2(self, mocker): field = self._field() field.lbtim = 11 field.t1 = cftime.datetime(1970, 1, 1, 18) @@ -146,7 +140,7 @@ def test_vector_t2(self): ], [0], ) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"t2": t2}, @@ -165,7 +159,7 @@ def test_vector_t2(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord(18, "time", units="hours since epoch"), @@ -176,15 +170,15 @@ def test_vector_t2(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vector_lbft(self): + def test_vector_lbft(self, mocker): field = self._field() field.lbtim = 21 field.t1 = cftime.datetime(1970, 1, 1, 12) field.t2 = cftime.datetime(1970, 1, 1, 18) lbft = ([18, 15, 12], [0]) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"lbft": lbft}, @@ -220,9 +214,9 @@ def test_vector_lbft(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert 
metadata.aux_coords_and_dims == coords_and_dims - def test_vector_t1_and_t2(self): + def test_vector_t1_and_t2(self, mocker): field = self._field() field.lbtim = 11 t1 = ( @@ -237,7 +231,7 @@ def test_vector_t1_and_t2(self): [cftime.datetime(1970, 1, 1, 12), cftime.datetime(1970, 1, 2, 0)], [0], ) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(2, 3), element_arrays_and_dims={"t1": t1, "t2": t2}, @@ -260,7 +254,7 @@ def test_vector_t1_and_t2(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.AuxCoord( @@ -271,14 +265,14 @@ def test_vector_t1_and_t2(self): (0, 1), ) ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vertical_pressure(self): + def test_vertical_pressure(self, mocker): field = self._field() field.lbvc = 8 blev = ([1000, 850, 700], (0,)) lblev = ([1000, 850, 700], (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={"blev": blev, "lblev": lblev}, @@ -295,11 +289,11 @@ def test_vertical_pressure(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_soil_level(self): + def test_soil_level(self, mocker): field = self._field() field.lbvc = 6 points = [10, 20, 30] @@ -308,7 +302,7 @@ def test_soil_level(self): lblev = (points, (0,)) brsvd1 = (lower, (0,)) brlev = (upper, (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={ @@ -326,11 +320,11 @@ def test_soil_level(self): units="1", ) coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1), 
(level, (0,))] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_soil_depth(self): + def test_soil_depth(self, mocker): field = self._field() field.lbvc = 6 points = [10, 20, 30] @@ -339,7 +333,7 @@ def test_soil_depth(self): blev = (points, (0,)) brsvd1 = (lower, (0,)) brlev = (upper, (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={ @@ -358,11 +352,11 @@ def test_soil_depth(self): attributes={"positive": "down"}, ) coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1), (depth, (0,))] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) + assert metadata.aux_coords_and_dims == coords_and_dims - def test_vertical_hybrid_height(self): + def test_vertical_hybrid_height(self, mocker): field = self._field() field.lbvc = 65 blev = ([5, 18, 38], (0,)) @@ -373,7 +367,7 @@ def test_vertical_hybrid_height(self): bhrlev = ([1, 0.9989, 0.9970], (0,)) lblev = ([1, 2, 3], (0,)) bhlev = ([0.9994, 0.9979, 0.9957], (0,)) - collation = mock.Mock( + collation = mocker.Mock( fields=[field], vector_dims_shape=(3,), element_arrays_and_dims={ @@ -410,7 +404,7 @@ def test_vertical_hybrid_height(self): (0,), ), ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) + assert metadata.dim_coords_and_dims == coords_and_dims coords_and_dims = [ ( iris.coords.DimCoord( @@ -432,8 +426,4 @@ def test_vertical_hybrid_height(self): (0,), ), ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - -if __name__ == "__main__": - tests.main() + assert metadata.aux_coords_and_dims == coords_and_dims diff --git 
a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py index a07672e43a..0b497d39d7 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py @@ -7,29 +7,27 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - from cftime import datetime import numpy as np +import pytest from iris._lazy_data import as_lazy_data import iris.fileformats.pp from iris.fileformats.um._fast_load_structured_fields import BasicFieldCollation +from iris.tests import _shared_utils -class Test___init__(tests.IrisTest): +class Test___init__: def test_no_fields(self): - with self.assertRaises(AssertionError): + with pytest.raises(AssertionError): BasicFieldCollation([]) -class Test_fields(tests.IrisTest): +class Test_fields: def test_preserve_members(self): fields = ("foo", "bar", "wibble") collation = BasicFieldCollation(fields) - self.assertEqual(collation.fields, fields) + assert collation.fields == fields def _make_field(lbyr=None, lbyrd=None, lbft=None, blev=None, bhlev=None, data=None): @@ -60,7 +58,7 @@ def _make_data(fill_value): return as_lazy_data(np.ones(shape) * fill_value) -class Test_data(tests.IrisTest): +class Test_data: # Test order of the data attribute when fastest-varying element is changed. 
def test_t1_varies_faster(self): collation = BasicFieldCollation( @@ -75,7 +73,7 @@ def test_t1_varies_faster(self): ) result = collation.data[:, :, 0, 0] expected = [[0, 1, 2], [3, 4, 5]] - self.assertArrayEqual(result, expected) + _shared_utils.assert_array_equal(result, expected) def test_t2_varies_faster(self): collation = BasicFieldCollation( @@ -90,24 +88,26 @@ def test_t2_varies_faster(self): ) result = collation.data[:, :, 0, 0] expected = [[0, 1, 2], [3, 4, 5]] - self.assertArrayEqual(result, expected) + _shared_utils.assert_array_equal(result, expected) -class Test_element_arrays_and_dims(tests.IrisTest): +class Test_element_arrays_and_dims: def test_single_field(self): field = _make_field(2013) collation = BasicFieldCollation([field]) - self.assertEqual(collation.element_arrays_and_dims, {}) + assert collation.element_arrays_and_dims == {} def test_t1(self): collation = BasicFieldCollation( [_make_field(lbyr=2013), _make_field(lbyr=2014)] ) result = collation.element_arrays_and_dims - self.assertEqual(list(result.keys()), ["t1"]) + assert list(result.keys()) == ["t1"] values, dims = result["t1"] - self.assertArrayEqual(values, [datetime(2013, 1, 1), datetime(2014, 1, 1)]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal( + values, [datetime(2013, 1, 1), datetime(2014, 1, 1)] + ) + assert dims == (0,) def test_t1_and_t2(self): collation = BasicFieldCollation( @@ -118,19 +118,19 @@ def test_t1_and_t2(self): ] ) result = collation.element_arrays_and_dims - self.assertEqual(set(result.keys()), set(["t1", "t2"])) + assert set(result.keys()) == set(["t1", "t2"]) values, dims = result["t1"] - self.assertArrayEqual( + _shared_utils.assert_array_equal( values, [datetime(2013, 1, 1), datetime(2014, 1, 1), datetime(2015, 1, 1)], ) - self.assertEqual(dims, (0,)) + assert dims == (0,) values, dims = result["t2"] - self.assertArrayEqual( + _shared_utils.assert_array_equal( values, [datetime(2000, 1, 1), datetime(2001, 1, 1), datetime(2002, 1, 1)], ) - 
self.assertEqual(dims, (0,)) + assert dims == (0,) def test_t1_and_t2_and_lbft(self): collation = BasicFieldCollation( @@ -142,31 +142,33 @@ def test_t1_and_t2_and_lbft(self): ] ) result = collation.element_arrays_and_dims - self.assertEqual(set(result.keys()), set(["t1", "t2", "lbft"])) + assert set(result.keys()) == set(["t1", "t2", "lbft"]) values, dims = result["t1"] - self.assertArrayEqual(values, [datetime(1, 1, 1), datetime(11, 1, 1)]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal( + values, [datetime(1, 1, 1), datetime(11, 1, 1)] + ) + assert dims == (0,) values, dims = result["t2"] - self.assertArrayEqual( + _shared_utils.assert_array_equal( values, [ [datetime(15, 1, 1), datetime(16, 1, 1)], [datetime(25, 1, 1), datetime(26, 1, 1)], ], ) - self.assertEqual(dims, (0, 1)) + assert dims == (0, 1) values, dims = result["lbft"] - self.assertArrayEqual(values, [6, 9]) - self.assertEqual(dims, (1,)) + _shared_utils.assert_array_equal(values, [6, 9]) + assert dims == (1,) def test_blev(self): collation = BasicFieldCollation([_make_field(blev=1), _make_field(blev=2)]) result = collation.element_arrays_and_dims keys = set(["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"]) - self.assertEqual(set(result.keys()), keys) + assert set(result.keys()) == keys values, dims = result["blev"] - self.assertArrayEqual(values, [1, 2]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal(values, [1, 2]) + assert dims == (0,) def test_bhlev(self): collation = BasicFieldCollation( @@ -174,13 +176,13 @@ def test_bhlev(self): ) result = collation.element_arrays_and_dims keys = set(["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"]) - self.assertEqual(set(result.keys()), keys) + assert set(result.keys()) == keys values, dims = result["bhlev"] - self.assertArrayEqual(values, [1, 2]) - self.assertEqual(dims, (0,)) + _shared_utils.assert_array_equal(values, [1, 2]) + assert dims == (0,) -class 
Test__time_comparable_int(tests.IrisTest): +class Test__time_comparable_int: def test(self): # Define a list of date-time tuples, which should remain both all # distinct and in ascending order when converted... @@ -208,10 +210,6 @@ def test(self): for test_tuple in test_date_tuples ] # Check all values are distinct. - self.assertEqual(len(test_date_ints), len(set(test_date_ints))) + assert len(test_date_ints) == len(set(test_date_ints)) # Check all values are in order. - self.assertEqual(test_date_ints, sorted(test_date_ints)) - - -if __name__ == "__main__": - tests.main() + assert test_date_ints == sorted(test_date_ints) diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py index 7c1a9113b4..ac9d4495aa 100644 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py +++ b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py @@ -7,13 +7,8 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.um._fast_load_structured_fields import group_structured_fields +from iris.tests.unit.fileformats import MockerMixin def _convert_to_vector(value, length, default): @@ -33,7 +28,7 @@ def _convert_to_vector(value, length, default): return value -class Test__grouping(tests.IrisTest): +class Test__grouping(MockerMixin): def _dummy_fields_iter(self, stashes=None, models=None, lbprocs=None): # Make a group of test fields, and return an iterator over it. 
a_vec = [vec for vec in (stashes, models, lbprocs) if vec is not None] @@ -42,7 +37,7 @@ def _dummy_fields_iter(self, stashes=None, models=None, lbprocs=None): models = _convert_to_vector(models, number, default=71) lbprocs = _convert_to_vector(lbprocs, number, default=91) self.test_fields = [ - mock.MagicMock( + self.mocker.MagicMock( lbuser=[0, 0, 0, x_stash, 0, 0, x_model], lbproc=x_lbproc, i_field=ind + 1001, @@ -69,53 +64,46 @@ def _test_fields(self, item): def test_none(self): null_iter = (x for x in []) result = self._group_result(null_iter) - self.assertEqual(result, []) + assert result == [] def test_one(self): fields_iter = self._dummy_fields_iter(stashes=[1]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001,)])) + assert result == self._test_fields([(1001,)]) def test_allsame(self): fields_iter = self._dummy_fields_iter(stashes=[1, 1, 1]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1002, 1003)])) + assert result == self._test_fields([(1001, 1002, 1003)]) def test_stashes_different(self): fields_iter = self._dummy_fields_iter(stashes=[1, 1, 22, 1, 22, 333]) result = self._group_result(fields_iter) - self.assertEqual( - result, - self._test_fields([(1001, 1002, 1004), (1003, 1005), (1006,)]), - ) + assert result == self._test_fields([(1001, 1002, 1004), (1003, 1005), (1006,)]) def test_models_different(self): fields_iter = self._dummy_fields_iter(models=[10, 21, 10]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1003), (1002,)])) + assert result == self._test_fields([(1001, 1003), (1002,)]) def test_lbprocs_different(self): fields_iter = self._dummy_fields_iter(lbprocs=[991, 995, 991]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1003), (1002,)])) + assert result == self._test_fields([(1001, 1003), (1002,)]) def test_2d_combines(self): fields_iter = 
self._dummy_fields_iter( stashes=[11, 11, 15, 11], lbprocs=[31, 42, 31, 42] ) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001,), (1002, 1004), (1003,)])) + assert result == self._test_fields([(1001,), (1002, 1004), (1003,)]) def test_sortorder(self): fields_iter = self._dummy_fields_iter(stashes=[11, 7, 12]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1002,), (1001,), (1003,)])) + assert result == self._test_fields([(1002,), (1001,), (1003,)]) def test_sortorder_2d(self): fields_iter = self._dummy_fields_iter(stashes=[11, 11, 12], lbprocs=[31, 9, 1]) result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1002,), (1001,), (1003,)])) - - -if __name__ == "__main__": - tests.main() + assert result == self._test_fields([(1002,), (1001,), (1003,)]) diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py index e6e9359c26..d4d98c59cd 100644 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py @@ -7,33 +7,29 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - import numpy as np +import pytest from iris.fileformats.um._optimal_array_structuring import optimal_array_structure +from iris.tests import _shared_utils class Test__optimal_dimensioning_structure: pass -class Test_optimal_array_structure(tests.IrisTest): +class Test_optimal_array_structure: def _check_arrays_and_dims(self, result, spec): - self.assertEqual(set(result.keys()), set(spec.keys())) + assert set(result.keys()) == set(spec.keys()) for keyname in spec.keys(): result_array, result_dims = result[keyname] spec_array, spec_dims = spec[keyname] - self.assertEqual( - result_dims, - spec_dims, + assert result_dims == spec_dims, ( 'element dims differ for "{}": result={!r}, expected {!r}'.format( keyname, result_dims, spec_dims - ), + ) ) - self.assertArrayEqual( + _shared_utils.assert_array_equal( result_array, spec_array, 'element arrays differ for "{}": result={!r}, expected {!r}'.format( @@ -42,22 +38,22 @@ def _check_arrays_and_dims(self, result, spec): ) def test_none(self): - with self.assertRaises(IndexError): + with pytest.raises(IndexError, match="index 0 is out of bounds"): _ = optimal_array_structure([], []) def test_one(self): # A single value does not make a dimension (no length-1 dims). 
elements = [("a", np.array([1]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, ()) - self.assertEqual(primaries, set()) - self.assertEqual(elems_and_dims, {}) + assert shape == () + assert primaries == set() + assert elems_and_dims == {} def test_1d(self): elements = [("a", np.array([1, 2, 4]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) + assert shape == (3,) + assert primaries == set("a") self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([1, 2, 4]), (0,))}) def test_1d_actuals(self): @@ -67,14 +63,14 @@ def test_1d_actuals(self): shape, primaries, elems_and_dims = optimal_array_structure( elements, actual_values ) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) + assert shape == (3,) + assert primaries == set("a") self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([7, 3, 9]), (0,))}) def test_actuals_mismatch_fail(self): elements = [("a", np.array([1, 2, 4]))] actual_values = [("b", np.array([7, 3, 9]))] - with self.assertRaisesRegex(ValueError, "Names.* do not match.*"): + with pytest.raises(ValueError, match="Names.* do not match.*"): shape, primaries, elems_and_dims = optimal_array_structure( elements, actual_values ) @@ -85,8 +81,8 @@ def test_2d(self): ("b", np.array([7, 8, 9, 7, 8, 9])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) + assert shape == (2, 3) + assert primaries == set(["a", "b"]) self._check_arrays_and_dims( elems_and_dims, {"a": (np.array([2, 3]), (0,)), "b": (np.array([7, 8, 9]), (1,))}, @@ -105,8 +101,8 @@ def test_2d_with_element_values(self): shape, primaries, elems_and_dims = optimal_array_structure( elements, elements_values ) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) + assert shape == (2, 3) + assert 
primaries == set(["a", "b"]) self._check_arrays_and_dims( elems_and_dims, {"a": (np.array([6, 8]), (0,)), "b": (np.array([3, 4, 5]), (1,))}, @@ -119,8 +115,8 @@ def test_non_2d(self): ("b", np.array([7, 8, 9, 7, 8])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (5,)) - self.assertEqual(primaries, set()) + assert shape == (5,) + assert primaries == set() self._check_arrays_and_dims( elems_and_dims, { @@ -133,16 +129,16 @@ def test_degenerate(self): # A all-same vector does not appear in the output. elements = [("a", np.array([1, 2, 3])), ("b", np.array([4, 4, 4]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set(["a"])) + assert shape == (3,) + assert primaries == set(["a"]) self._check_arrays_and_dims(elems_and_dims, {"a": (np.array([1, 2, 3]), (0,))}) def test_1d_duplicates(self): # When two have the same structure, the first is 'the dimension'. elements = [("a", np.array([1, 3, 4])), ("b", np.array([6, 7, 9]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) + assert shape == (3,) + assert primaries == set("a") self._check_arrays_and_dims( elems_and_dims, { @@ -155,9 +151,9 @@ def test_1d_duplicates_order(self): # Same as previous but reverse passed order of elements 'a' and 'b'. 
elements = [("b", np.array([6, 7, 9])), ("a", np.array([1, 3, 4]))] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) + assert shape == (3,) # The only difference is the one chosen as 'principal' - self.assertEqual(primaries, set("b")) + assert primaries == set("b") self._check_arrays_and_dims( elems_and_dims, { @@ -173,8 +169,8 @@ def test_3_way(self): ("period", np.array([9, 8, 7])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set(["t1"])) + assert shape == (3,) + assert primaries == set(["t1"]) self._check_arrays_and_dims( elems_and_dims, { @@ -191,8 +187,8 @@ def test_mixed_dims(self): ("ft", np.array([15, 16, 15, 16])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 2)) - self.assertEqual(primaries, set(["t1", "ft"])) + assert shape == (2, 2) + assert primaries == set(["t1", "ft"]) self._check_arrays_and_dims( elems_and_dims, { @@ -209,10 +205,10 @@ def test_missing_dim(self): ("t2", np.array([15, 16, 25, 26])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (4,)) + assert shape == (4,) # The potential 2d nature can not be recognised. # 't1' is auxiliary, as it has duplicate values over the dimension. 
- self.assertEqual(primaries, set(["t2"])) + assert primaries == set(["t2"]) self._check_arrays_and_dims( elems_and_dims, { @@ -232,8 +228,8 @@ def test_optimal_structure_decision(self): ("d", np.array([10, 10, 10, 10, 10, 10])), ] shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) + assert shape == (2, 3) + assert primaries == set(["a", "b"]) self._check_arrays_and_dims( elems_and_dims, { @@ -242,7 +238,3 @@ def test_optimal_structure_decision(self): "b": (np.array([0, 1, 2]), (1,)), }, ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py index 05c2749f40..0581ac5ed4 100644 --- a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py +++ b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py @@ -7,44 +7,33 @@ """ -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - from iris.fileformats.um import um_to_pp -class Test_call(tests.IrisTest): - def test__call(self): +class Test_call: + def test__call(self, mocker): # Check that the function creates an FF2PP and returns the result # of iterating over it. # Make a real (test) iterator object, as otherwise iter() complains... mock_iterator = (1 for x in ()) # Make a mock for the iter() call of an FF2PP object. - mock_iter_call = mock.MagicMock(return_value=mock_iterator) + mock_iter_call = mocker.MagicMock(return_value=mock_iterator) # Make a mock FF2PP object instance. - mock_ff2pp_instance = mock.MagicMock(__iter__=mock_iter_call) + mock_ff2pp_instance = mocker.MagicMock(__iter__=mock_iter_call) # Make the mock FF2PP class. 
- mock_ff2pp_class = mock.MagicMock(return_value=mock_ff2pp_instance) + mock_ff2pp_class = mocker.MagicMock(return_value=mock_ff2pp_instance) # Call um_to_pp while patching the um._ff_replacement.FF2PP class. test_path = "/any/old/file.name" - with mock.patch("iris.fileformats.um._ff_replacement.FF2PP", mock_ff2pp_class): - result = um_to_pp(test_path) + _ = mocker.patch("iris.fileformats.um._ff_replacement.FF2PP", mock_ff2pp_class) + result = um_to_pp(test_path) # Check that it called FF2PP in the expected way. - self.assertEqual( - mock_ff2pp_class.call_args_list, - [mock.call("/any/old/file.name", read_data=False)], - ) - self.assertEqual(mock_ff2pp_instance.__iter__.call_args_list, [mock.call()]) + assert mock_ff2pp_class.call_args_list == [ + mocker.call("/any/old/file.name", read_data=False) + ] + assert mock_ff2pp_instance.__iter__.call_args_list == [mocker.call()] # Check that it returned the expected result. - self.assertIs(result, mock_iterator) - - -if __name__ == "__main__": - tests.main() + assert result is mock_iterator diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py index 6466ab0ea2..3106d11182 100644 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py +++ b/lib/iris/tests/unit/lazy_data/test_is_lazy_masked_data.py @@ -19,7 +19,7 @@ @pytest.mark.parametrize( - "arr, expected", zip(real_arrays + lazy_arrays, [False] * 4 + [True] * 2) + ("arr", "expected"), zip(real_arrays + lazy_arrays, [False] * 4 + [True] * 2) ) def test_is_lazy_masked_data(arr, expected): result = is_lazy_masked_data(arr) diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index f83004a31b..5fed3ee956 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -18,6 +18,7 @@ import iris from iris._deprecation import IrisDeprecation from iris.tests import _shared_utils +from iris.warnings import 
IrisUserWarning # Importing pandas has the side-effect of messing with the formatters # used by matplotlib for handling dates. @@ -122,24 +123,6 @@ def test_copy_true(self): series[0] = 99 assert cube.data[0] == 0 - def test_copy_int32_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - assert cube.data[0] == 99 - - def test_copy_int64_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int64), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - assert cube.data[0] == 99 - - def test_copy_float_false(self): - cube = Cube(np.array([0, 1, 2, 3.3, 4]), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - assert cube.data[0] == 99 - def test_copy_masked_true(self): data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) cube = Cube(data, long_name="foo") @@ -147,12 +130,6 @@ def test_copy_masked_true(self): series[0] = 99 assert cube.data[0] == 0 - def test_copy_masked_false(self): - data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) - cube = Cube(data, long_name="foo") - with pytest.raises(ValueError, match="Masked arrays must always be copied"): - _ = iris.pandas.as_series(cube, copy=False) - @skip_pandas @pytest.mark.filterwarnings( @@ -270,33 +247,9 @@ def test_time_360(self): def test_copy_true(self): cube = Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo") data_frame = iris.pandas.as_data_frame(cube) - data_frame[0][0] = 99 + data_frame.iloc[0, 0] = 99 assert cube.data[0, 0] == 0 - def test_copy_int32_false(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int32), - long_name="foo", - ) - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - - def test_copy_int64_false(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int64), - 
long_name="foo", - ) - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - - def test_copy_float_false(self): - cube = Cube(np.array([[0, 1, 2, 3, 4.4], [5, 6, 7, 8, 9]]), long_name="foo") - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - def test_copy_masked_true(self): data = np.ma.MaskedArray( [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], @@ -304,25 +257,9 @@ def test_copy_masked_true(self): ) cube = Cube(data, long_name="foo") data_frame = iris.pandas.as_data_frame(cube) - data_frame[0][0] = 99 + data_frame.iloc[0, 0] = 99 assert cube.data[0, 0] == 0 - def test_copy_masked_false(self): - data = np.ma.MaskedArray( - [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], - mask=[[0, 1, 0, 1, 0], [1, 0, 1, 0, 1]], - ) - cube = Cube(data, long_name="foo") - with pytest.raises(ValueError, match="Masked arrays must always be copied"): - _ = iris.pandas.as_data_frame(cube, copy=False) - - def test_copy_false_with_cube_view(self): - data = np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) - cube = Cube(data[:], long_name="foo") - data_frame = iris.pandas.as_data_frame(cube, copy=False) - data_frame[0][0] = 99 - assert cube.data[0, 0] == 99 - @skip_pandas class TestAsDataFrameNDim: @@ -450,17 +387,11 @@ def test_simple3_d(self): data_frame.index.get_level_values("kid"), expected_kid ) - def test_copy_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - data_frame = iris.pandas.as_data_frame(cube, copy=False) - cube.data[2] = 99 - assert cube.data[2] == data_frame.foo[2] - - def test_copy_true(self): + def test_implicit_copy_true(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - data_frame = iris.pandas.as_data_frame(cube, copy=True) + data_frame = iris.pandas.as_data_frame(cube) cube.data[2] = 99 - assert cube.data[2] != data_frame.foo[2] + assert cube.data[2] != data_frame.loc[2, "foo"].values def test_time_standard(self): cube = 
Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts") @@ -709,18 +640,12 @@ def test_series_cftime_360(self, request): ), ) - def test_copy_true(self): + def test_implicit_copy_true(self): series = pd.Series([0, 1, 2, 3, 4], index=[5, 6, 7, 8, 9]) cube = iris.pandas.as_cube(series) cube.data[0] = 99 assert series[5] == 0 - def test_copy_false(self): - series = pd.Series([0, 1, 2, 3, 4], index=[5, 6, 7, 8, 9]) - cube = iris.pandas.as_cube(series, copy=False) - cube.data[0] = 99 - assert series[5] == 99 - @skip_pandas @pytest.mark.filterwarnings( @@ -818,17 +743,11 @@ def test_data_frame_datetime_standard(self, request): ), ) - def test_copy_true(self): + def test_implicit_copy_true(self): data_frame = pd.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) cube = iris.pandas.as_cube(data_frame) cube.data[0, 0] = 99 - assert data_frame[0][0] == 0 - - def test_copy_false(self): - data_frame = pd.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) - cube = iris.pandas.as_cube(data_frame, copy=False) - cube.data[0, 0] = 99 - assert data_frame[0][0] == 99 + assert data_frame.iloc[0, 0] == 0 @skip_pandas @@ -856,6 +775,49 @@ def test_as_dataframe_no_future_warning(self, activate_pandas_ndim): warnings.simplefilter("error", FutureWarning) _ = iris.pandas.as_data_frame(cube) + @pytest.mark.parametrize( + ("test_function", "test_input"), + [ + (iris.pandas.as_cube, pd.DataFrame()), + (iris.pandas.as_cubes, pd.DataFrame()), + ( + iris.pandas.as_series, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"), + ), + ( + iris.pandas.as_data_frame, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"), + ), + ], + ) + def test_explicit_copy_true_error(self, test_function, test_input): + with pytest.warns( + IrisDeprecation, + match=f"The `copy` parameter in `{test_function.__name__}` is deprecated", + ): + _ = test_function(test_input, copy=True) + + @pytest.mark.parametrize( + ("test_function", "test_input"), + [ + (iris.pandas.as_cube, 
pd.DataFrame()), + (iris.pandas.as_cubes, pd.DataFrame()), + ( + iris.pandas.as_series, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"), + ), + ( + iris.pandas.as_data_frame, + Cube(np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo"), + ), + ], + ) + def test_explicit_copy_false_error(self, test_function, test_input): + with pytest.warns( + IrisUserWarning, match="Pandas v3 behaviour defaults to copy=True." + ): + _ = test_function(test_input, copy=False) + @skip_pandas class TestPandasAsCubes: @@ -948,26 +910,24 @@ def test_3d_series(self): def test_non_unique_index(self): df = self._create_pandas(index_levels=1) - new_index = df.index.values + new_index = df.index.values.copy() new_index[1] = new_index[0] - df.set_index(new_index) + df.set_index(new_index, inplace=True) with pytest.raises(ValueError, match="not unique per row"): _ = iris.pandas.as_cubes(df) def test_non_monotonic_index(self): df = self._create_pandas(index_levels=1) - new_index = df.index.values + new_index = df.index.values.copy() new_index[:2] = new_index[1::-1] - df.set_index(new_index) - + df.set_index(new_index, inplace=True) with pytest.raises(ValueError, match="not monotonic"): _ = iris.pandas.as_cubes(df) def test_missing_rows(self): df = self._create_pandas(index_levels=2) df = df[:-1] - with pytest.raises( ValueError, match="Not all index values have a corresponding row" ): @@ -1186,39 +1146,15 @@ def test_series_with_col_args(self): with pytest.warns(Warning, match="is a Series; ignoring"): _ = iris.pandas.as_cubes(series, aux_coord_cols=["some_column"]) - def test_phenom_view(self): - df = self._create_pandas() - result = iris.pandas.as_cubes(df, copy=False) - - # Modify AFTER creating the Cube(s). - df[0][0] += 1 - - (result_cube,) = result - assert result_cube.data[0] == df[0][0] - def test_phenom_copy(self): df = self._create_pandas() result = iris.pandas.as_cubes(df) # Modify AFTER creating the Cube(s). 
- df[0][0] += 1 - - (result_cube,) = result - assert result_cube.data[0] != df[0][0] - - def test_coord_never_view(self): - # Using AuxCoord - DimCoords and Pandas indices are immutable. - df = self._create_pandas() - coord_name = "foo" - df[coord_name] = df.index.values - result = iris.pandas.as_cubes(df, copy=False, aux_coord_cols=[coord_name]) - - # Modify AFTER creating the Cube(s). - df[coord_name][0] += 1 + df.iloc[0, 0] += 1 (result_cube,) = result - result_coord = result_cube.coord(coord_name) - assert result_coord.points[0] != df[coord_name][0] + assert result_cube.data[0] != df.iloc[0, 0] def _test_dates_common(self, mode=None, alt_calendar=False): df = self._create_pandas() diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py index ec568ed13d..4323755606 100644 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py @@ -220,15 +220,24 @@ def test_ancillary_variable(self): def test_attributes(self): cube = self.cube - cube.attributes = {"a": 1, "b": "two", "c": " this \n that\tand."} + cube.attributes = { + "a": 1, + "b": "two", + "c": " this \n that\tand.", + "d": np.array([1, 2]), + "e": np.float32(123.456), + } rep = CubeSummary(cube) attribute_section = rep.scalar_sections["Attributes:"] attribute_contents = attribute_section.contents + print(attribute_contents) expected_contents = [ "a: 1", "b: 'two'", "c: ' this \\n that\\tand.'", + "d: array([1, 2])", + "e: 123.456", ] # Note: a string with \n or \t in it gets "repr-d". # Other strings don't (though in coord 'extra' lines, they do.) 
diff --git a/lib/iris/tests/unit/util/test_array_equal.py b/lib/iris/tests/unit/util/test_array_equal.py index eafe123aed..82f1473f03 100644 --- a/lib/iris/tests/unit/util/test_array_equal.py +++ b/lib/iris/tests/unit/util/test_array_equal.py @@ -180,7 +180,7 @@ @pytest.mark.parametrize("lazy", [False, True]) -@pytest.mark.parametrize("array_a,array_b,withnans,eq", TEST_CASES) +@pytest.mark.parametrize(("array_a", "array_b", "withnans", "eq"), TEST_CASES) def test_array_equal(array_a, array_b, withnans, eq, lazy): if lazy: identical = array_a is array_b diff --git a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py index 2e06a75fc7..91305dda87 100644 --- a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py +++ b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py @@ -4,8 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.demote_dim_coord_to_aux_coord`.""" -import unittest - import pytest import iris @@ -65,7 +63,3 @@ def test_trying_to_demote_a_scalar_coord(self): cube_b = cube_a.copy() demote_dim_coord_to_aux_coord(cube_b, "an_other") assert cube_a == cube_b - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py index bba3f1fe37..fa63c2aaaa 100644 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ b/lib/iris/tests/unit/util/test_file_is_newer_than.py @@ -89,21 +89,24 @@ def test_wild_fail(self): self._test(False, "example_result", ["older_sour*", "newer_sour*"]) def test_error_missing_result(self): - with pytest.raises(OSError) as error_trap: + msg = r"\[Errno 2\] No such file or directory:.*" + with pytest.raises(OSError, match=msg) as error_trap: self._test(False, "non_exist", ["older_sour*"]) error = error_trap.value assert error.strerror == "No such file or directory" assert 
error.filename == self._name2path("non_exist") def test_error_missing_source(self): - with pytest.raises(IOError) as error_trap: + msg = "One or more of the files specified did not exist:.*" + with pytest.raises(IOError, match=msg) as error_trap: self._test(False, "example_result", ["older_sour*", "non_exist"]) assert ( "One or more of the files specified did not exist" in error_trap.exconly() ) def test_error_missing_wild(self): - with pytest.raises(IOError) as error_trap: + msg = "One or more of the files specified did not exist:.*" + with pytest.raises(IOError, match=msg) as error_trap: self._test(False, "example_result", ["older_sour*", "unknown_*"]) assert ( "One or more of the files specified did not exist" in error_trap.exconly() diff --git a/lib/iris/tests/unit/util/test_make_gridcube.py b/lib/iris/tests/unit/util/test_make_gridcube.py index 24bd1ad72a..8a876211ec 100644 --- a/lib/iris/tests/unit/util/test_make_gridcube.py +++ b/lib/iris/tests/unit/util/test_make_gridcube.py @@ -137,7 +137,7 @@ def test_regular_badlims__fail(self, lims): @pytest.fixture(params=["int", "float", "i2", "i4", "i8", "f2", "f4", "f8"]) def arg_dtype(self, request): """Check all valid numeric argument types.""" - yield request.param + return request.param @staticmethod def f4_promoted_dtype(typename): diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shape.py b/lib/iris/tests/unit/util/test_mask_cube_from_shape.py index 93e7921509..ef7b658589 100644 --- a/lib/iris/tests/unit/util/test_mask_cube_from_shape.py +++ b/lib/iris/tests/unit/util/test_mask_cube_from_shape.py @@ -68,7 +68,7 @@ def test_mask_cube_from_shape_not_inplace(mock_cube, square_polygon): @pytest.mark.parametrize( - "minimum_weight, expected_output", + ("minimum_weight", "expected_output"), [ ( 0.0, diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py index a12e2b146a..845867ebae 100644 --- 
a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py +++ b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py @@ -57,7 +57,7 @@ def test_mask_cube_from_shapefile_not_inplace(mock_cube): @pytest.mark.parametrize( - "minimum_weight, expected_output", + ("minimum_weight", "expected_output"), [ ( 0.0, diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py index 4ade2eb61c..578d1e0b8d 100644 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ b/lib/iris/tests/unit/util/test_new_axis.py @@ -18,7 +18,7 @@ class Test: - @pytest.fixture() + @pytest.fixture def stock_cube(self): cube = stock.simple_2d_w_cell_measure_ancil_var() time = iris.coords.DimCoord([1], standard_name="time") diff --git a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py index bceffe700d..492309b60c 100644 --- a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py +++ b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py @@ -4,7 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. """Test function :func:`iris.util.promote_aux_coord_to_dim_coord`.""" -import unittest +import re import pytest @@ -67,7 +67,10 @@ def test_coord_does_not_exist(self): cube_a = stock.simple_2d_w_multidim_and_scalars() coord = cube_a.coord("dim1").copy() coord.rename("new") - with pytest.raises(ValueError): + msg = re.escape( + "Attempting to promote an AuxCoord (new) which does not exist in the cube." + ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, coord) def test_argument_is_wrong_type(self): @@ -77,19 +80,25 @@ def test_argument_is_wrong_type(self): def test_trying_to_promote_a_multidim_coord(self): cube_a = stock.simple_2d_w_multidim_coords() - with pytest.raises(ValueError): + msg = re.escape( + "Attempting to promote an AuxCoord (bar) which is associated with 2 dimensions." 
+ ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, "bar") def test_trying_to_promote_a_scalar_coord(self): cube_a = stock.simple_2d_w_multidim_and_scalars() - with pytest.raises(ValueError): + msg = re.escape( + "Attempting to promote an AuxCoord (an_other) which is associated with 0 dimensions." + ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, "an_other") def test_trying_to_promote_a_nonmonotonic_coord(self): cube_a = stock.hybrid_height() - with pytest.raises(ValueError): + msg = re.escape( + "Attempt to promote an AuxCoord (surface_altitude) fails when attempting to create a DimCoord " + "from the AuxCoord because: The 'surface_altitude' DimCoord points array must be strictly monotonic." + ) + with pytest.raises(ValueError, match=msg): promote_aux_coord_to_dim_coord(cube_a, "surface_altitude") - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_rolling_window.py b/lib/iris/tests/unit/util/test_rolling_window.py index c2e5bdbb6c..da7a9b6ae0 100644 --- a/lib/iris/tests/unit/util/test_rolling_window.py +++ b/lib/iris/tests/unit/util/test_rolling_window.py @@ -100,18 +100,21 @@ def test_step(self): def test_window_too_short(self): # raise an error if the window length is less than 1 a = np.empty([5]) - with pytest.raises(ValueError): + msg = "`window` must be at least 1." + with pytest.raises(ValueError, match=msg): rolling_window(a, window=0) def test_window_too_long(self): # raise an error if the window length is longer than the # corresponding array dimension a = np.empty([7, 5]) - with pytest.raises(ValueError): + msg = "`window` is too long." + with pytest.raises(ValueError, match=msg): rolling_window(a, window=6, axis=1) def test_invalid_step(self): # raise an error if the step between windows is less than 1 a = np.empty([5]) - with pytest.raises(ValueError): + msg = "`step` must be at least 1." 
+ with pytest.raises(ValueError, match=msg): rolling_window(a, step=0) diff --git a/lib/iris/time.py b/lib/iris/time.py index f2bc4a08ce..6a9629b9ce 100644 --- a/lib/iris/time.py +++ b/lib/iris/time.py @@ -3,7 +3,13 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Time handling.""" +"""Time handling. + +.. z_reference:: iris.time + :tags: topic_data_model + + API reference +""" import functools diff --git a/lib/iris/util.py b/lib/iris/util.py index 2c413d2822..551b5aeb68 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -2,7 +2,13 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Miscellaneous utility functions.""" +"""Miscellaneous utility functions. + +.. z_reference:: iris.util + :tags: topic_data_model;topic_slice_combine + + API reference +""" from __future__ import annotations @@ -92,7 +98,7 @@ def broadcast_to_shape(array, shape, dim_map, chunks=None): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if isinstance(array, da.Array): @@ -175,7 +181,7 @@ def delta(ndarray, dimension, circular=False): .. note:: This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if circular is not False: @@ -217,7 +223,7 @@ def describe_diff(cube_a, cube_b, output_file=None): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. 
note:: @@ -294,7 +300,7 @@ def guess_coord_axis(coord) -> Axis | None: Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. The ``guess_coord_axis`` behaviour can be skipped by setting the :attr:`~iris.coords.Coord.ignore_axis` property on `coord` to ``False``. @@ -371,7 +377,7 @@ def rolling_window( Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if window < 1: @@ -491,7 +497,7 @@ def array_equal(array1, array2, withnans: bool = False) -> bool: additional support for arrays of strings and NaN-tolerant operation. This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ def normalise_array(array): @@ -540,7 +546,7 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. .. deprecated:: 3.2.0 @@ -600,7 +606,7 @@ def between(lh, rh, lh_inclusive=True, rh_inclusive=True): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if lh_inclusive and rh_inclusive: @@ -660,7 +666,7 @@ def reverse(cube_or_array, coords_or_dims): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" from iris.cube import Cube @@ -732,7 +738,7 @@ def monotonic(array, strict=False, return_direction=False): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if array.ndim != 1 or len(array) <= 1: @@ -789,7 +795,7 @@ def column_slices_generator(full_slice, ndims): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ list_of_slices = [] @@ -1193,7 +1199,7 @@ def clip_string(the_str, clip_length=70, rider="..."): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if clip_length >= len(the_str) or clip_length <= 0: @@ -1228,7 +1234,7 @@ def format_array(arr, edgeitems=3): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ max_line_len = 50 @@ -1282,7 +1288,7 @@ def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ @@ -1401,7 +1407,7 @@ def squeeze(cube): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" slices = [0 if cube.shape[dim] == 1 else slice(None) for dim in range(cube.ndim)] @@ -1478,7 +1484,7 @@ def is_regular(coord): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ try: regular_step(coord) @@ -1495,7 +1501,7 @@ def regular_step(coord): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ if coord.ndim != 1: @@ -1529,7 +1535,7 @@ def regular_points(zeroth, step, count): Notes ----- This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ def make_steps(dtype: np.dtype): @@ -1561,7 +1567,7 @@ def points_step(points): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # Calculations only make sense with multiple points points = np.asanyarray(points) @@ -1596,7 +1602,7 @@ def unify_time_units(cubes): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ epochs = {} @@ -1738,7 +1744,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. 
""" from iris.coords import Coord, DimCoord @@ -1857,7 +1863,7 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ from iris.coords import Coord @@ -1951,7 +1957,7 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): Notes ----- This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ lats_and_lons = [ @@ -2085,7 +2091,7 @@ def mask_cube(cube, points_to_mask, in_place=False, dim=None): If either ``cube`` or ``points_to_mask`` is lazy, the result will be lazy. This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ @@ -2136,7 +2142,7 @@ def equalise_attributes(cubes): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ # deferred import to avoid circularity problem @@ -2212,7 +2218,7 @@ def is_masked(array): Notes ----- This function maintains laziness when called; it does not realise data. - See more at :doc:`/userguide/real_and_lazy_data`. + See more at :doc:`/user_manual/explanation/real_and_lazy_data`. """ diff --git a/lib/iris/warnings.py b/lib/iris/warnings.py index 1a885f60a3..d59ecf7885 100644 --- a/lib/iris/warnings.py +++ b/lib/iris/warnings.py @@ -4,6 +4,11 @@ # See LICENSE in the root of the repository for full licensing details. """Warnings specific to the :mod:`iris` package. +.. z_reference:: iris.warnings + :tags: topic_troubleshooting + + API reference + PLEASE NAMESPACE ALL WARNING CLASSES (i.e. 
prefix with Iris...). """ diff --git a/noxfile.py b/noxfile.py index 415e4fc3d5..4d733fa2d4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -15,7 +15,7 @@ nox.options.reuse_existing_virtualenvs = True #: Python versions we can run sessions under -_PY_VERSIONS_ALL = ["3.11", "3.12", "3.13"] +_PY_VERSIONS_ALL = ["3.12", "3.13", "3.14"] _PY_VERSION_LATEST = _PY_VERSIONS_ALL[-1] #: One specific python version for docs builds diff --git a/pyproject.toml b/pyproject.toml index e2b87467a5..88ae1c3bcc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,9 +21,9 @@ classifiers = [ "Operating System :: Unix", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Atmospheric Science", @@ -50,7 +50,7 @@ keywords = [ license = "BSD-3-Clause" license-files = ["LICENSE"] name = "scitools-iris" -requires-python = ">=3.11" +requires-python = ">=3.12" [project.urls] Code = "https://github.com/SciTools/iris" @@ -64,7 +64,7 @@ Issues = "https://github.com/SciTools/iris/issues" extend-exclude = [ "_ff_cross_references.py", "um_cf_map.py", - "docs/src/sphinxext", + "docs/src/sphinxext/api_rst_formatting.py", "tools", ] line-length = 88 @@ -112,6 +112,11 @@ known-first-party = ["iris"] "D401", # 1 First line of docstring should be in imperative mood ] +# Deprecated unittest tests + +"lib/iris/tests/__init__.py" = ["PT"] +"lib/iris/tests/unit/tests/test_IrisTest.py" = ["PT"] + [tool.ruff.lint.pydocstyle] convention = "numpy" diff --git a/requirements/iris.yml b/requirements/iris.yml index 331a25f10d..053725666f 120000 --- a/requirements/iris.yml +++ b/requirements/iris.yml @@ -1 +1 @@ -py312.yml \ No newline at end of file +py314.yml \ No newline at end of file 
diff --git a/requirements/locks/py312-linux-64.lock b/requirements/locks/py312-linux-64.lock index 938dcba4ca..61922cfa50 100644 --- a/requirements/locks/py312-linux-64.lock +++ b/requirements/locks/py312-linux-64.lock @@ -1,53 +1,52 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: c1404b61114d4a2769a5146aba7f4917724cd621278e6d44175768c07bf5a6b7 +# input_hash: ca8b16b5beee5e7904ebf63d5ce008b9de4a9434ad4f7a3cfe3ae09a5479ef0c @EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2#bbf6f174dcd3254e19a2f5d2295ce808 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_3.conda#129e404c5b001f3ef5581316971e3ea0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1.conda#16c2a0e9c4a166e53632cfca4f68d020 https://conda.anaconda.org/conda-forge/noarch/python_abi-3.12-8_cp312.conda#c3efd25ac4d74b1584d2f7a57195ddf1 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#99884244028fe76046e3914f90d4ad05 -https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 
+https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda#4492fd26db29495f0ba23f146cd5638d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda#26c46f90d0e727e95c6c9498a33a09f3 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.2-hb03c661_0.conda#ada39f5726bc5481e9dce293709dfabc +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda#0aa00f03f9e39fb9876085dee11a85d4 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda#d2ffd7602c02f2b316fd921d39876885 
https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.16-hb03c661_0.conda#f9f81ea472684d75b9dd8d0b328cf655 https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda#b38117a3c920364aff79f870c984b4a3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda#35f29eec58405aaf55e01cb470d8c26a -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda#e7f7ce06ec24cfcfb9e36d28cf82ba57 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda#d5e96b1ed75ca01906b3d2469b4ce493 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda#646855f357199a12f02a87382d429b75 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a -https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda#1a580f7796c7bf6393fddb8bbbde58dc 
+https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda#68f68355000ec3f1d6f26ea13e8f525f +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda#1b08cd684f34175e4514474793d44bcb https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h280c20c_1002.conda#45161d96307e3a447cc3eb5896cf6f8c https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.0-h26f9b46_0.conda#9ee58d5c534af06558933af3c845a780 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda#f61eb8cd60ff9057122a3d338b99c00f https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda#b2895afaf55bf96a8c8282a2e47a5de0 @@ -56,7 +55,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxshmfence-1.3.3-hb9d3cd8 https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2025.1-hb03c661_0.conda#aa8d21be4b461ce612d8f5fb791decae https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.3-hb47aa4a_0.conda#607e13a8caac17f9a664bcab5302ce06 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda#a77f85f77be52ff59391544bfe73390a -https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.0-h54a6638_0.conda#ddf9fed4661bace13f33f08fe38a5f45 +https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.2-h54a6638_0.conda#83dae3dfadcfec9b37a9fbff6f7f7378 https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.4.0-hecca717_0.conda#dbe3ec0f120af456b3477743ffd99b74 https://conda.anaconda.org/conda-forge/linux-64/fmt-12.1.0-hff5e90c_0.conda#f7d7a4104082b39e3b3473fbd4a38229 https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda#4d4efd0645cd556fab54617c4ad477ef @@ -65,18 +64,18 @@ https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.cond https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda#186a18e3ba246eccfc7cff00cd19a870 https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda#9344155d33912347b37f0ae6c410a835 -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.4-h3f801dc_0.conda#01ba04e414e47f95c03d6ddd81fd37be +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda#86f7414544ae606282352fa1e116b41f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.2.0-hb03c661_1.conda#366b40a69f0ad6072561c1d09301c886 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.conda#4ffbb341c8b616aa2494b6afb26a0c5f https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 
https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda#9063115da5bc35fdc3e1002e69b9ef6e https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.53-h421ea60_0.conda#00d4e66b1f746cb14944cad23fffb405 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda#6235adb93d064ecdf3d44faee6f468de https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda#b4ecbefe517ed0157c37f8182768271c https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 @@ -89,28 +88,28 @@ https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda#c https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.15-h3f63f65_0.conda#b11a4c6bf6f6f44e5e143f759ffa2087 https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda#d7d95fc8287ea7bf33e0e7116d2b95ec 
https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda#98b6c9dc80eb87b2519b97bcf7e578dd -https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.4-hb700be7_0.conda#aace50912e0f7361d0d223e7f7cfa6e5 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_ha0e22de_103.conda#86bc20552bf46075e3d92b67f089172d +https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.5-hb700be7_0.conda#058d5f16eaa3018be91aa3508df00d7c +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda#cffd3bdd58090148f4cfcd831f4b26ab https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda#6a0eb48e58684cca4d7acc8b7a0fd3c7 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 -https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.2-hceb46e0_1.conda#40feea2979654ed579f1cda7c63ccb94 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.3-hceb46e0_1.conda#2aadb0d17215603a82a2a6b0afd9a4cb https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13eeac0b5c8e5b8ab496e6c4ddd829 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 
-https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_5.conda#82954a6f42e3fba59628741dca105c98 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h46dd2a8_20.conda#df81fd57eacf341588d728c97920e86d -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.1-hf4e2dac_1.conda#b1f35e70f047918b49fb4b181e40300e +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda#da5be73701eecd0e8454423fd6ffcf30 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.1-h9d88235_1.conda#cd5a90476766d53e901500df9215e927 
https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda#3fdd8d99683da9fe279c2f4cecd1e048 @@ -123,29 +122,29 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda#66a1db55ecdb7377d2b91f54cd56eafa -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda#db038ce880f100acc74dba10302b5630 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 -https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 +https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hac629b4_1.conda#af491aae930edc096b58466c51c4126c https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.conda#b52b769cd13f7adaa6ccdc68ef801709 
https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda#000e85703f0fd9594c81710dd5066471 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h7a8fb5f_6.conda#49c553b47ff679a6a1e9fc80b9c5a2d4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-hcf29cc6_1.conda#1707cdd636af2ff697b53186572c9f77 https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.1-ha770c72_0.conda#f4084e4e6577797150f9b04a4560ceb0 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.12.12-hd63d673_1_cpython.conda#5c00c8cea14ee8d02941cab9121dce41 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.1-h04a0ce9_1.conda#941ee610ebf7a8047140831091dcb1f7 +https://conda.anaconda.org/conda-forge/linux-64/python-3.12.12-hd63d673_2_cpython.conda#c4540d3de3fa228d9fa95e31f8e97f89 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda#b56e0c8432b56decafae7e78c5f29ba5 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda#8c4061f499edec6b8ac7000f6d586829 @@ -154,29 +153,33 @@ https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/attrs-25.4.0-pyhcf101f3_1.conda#537296d57ea995666c68c821b00e360b https://conda.anaconda.org/conda-forge/linux-64/backports.zstd-1.3.0-py312h90b7ffd_0.conda#5b8c55fed2e576dde4b0b33693a4fdb1 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.2.0-py312hdb49522_1.conda#64088dffd7413a2dd557ce837b4cbbdb -https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda#eacc711330cd46939f66cd401ff9c44b +https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda#765c4d97e877cdbbb88ff33152b86125 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda#381bd45fb7aa032691f3063aff47e3a1 https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.4-pyhd8ed1ab_0.conda#a22d1fd9bf98827e280a02875d9a007a https://conda.anaconda.org/conda-forge/noarch/click-8.3.1-pyh8f84b5b_1.conda#ea8a6c3256897cc31263de9f455e25d9 
https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda#61b8078a0905b12529abc622406cb62c https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a +https://conda.anaconda.org/conda-forge/noarch/cpython-3.12.12-py312hd8ed1ab_2.conda#ef3e093ecfd4533eee992cdaa155b47e https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py312h68e6be4_0.conda#14f638dad5953c83443a2c4f011f1c9e https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 +https://conda.anaconda.org/conda-forge/noarch/docstring_parser-0.17.0-pyhd8ed1ab_0.conda#ce49d3e5a7d20be2ba57a2c670bdd82e https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.2-pyhd8ed1ab_0.conda#7e7cf4d6c2be6991e6ae2b3f4331701c +https://conda.anaconda.org/conda-forge/noarch/filelock-3.24.3-pyhd8ed1ab_0.conda#9dbb20eec24beb026291c20a35ce1ff9 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda#867127763fbe935bab59815b6e0b7b5c https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py312h447239a_0.conda#63e20cf7b7460019b423fc06abb96c60 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_104.conda#0857f4d157820dcd5625f61fdfefb780 
+https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda#9614359868482abba1bd15ce465e3c42 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.2-pyhd8ed1ab_0.conda#895f6625dd8a246fece9279fcc12c1de +https://conda.anaconda.org/conda-forge/linux-64/jsonschema-rs-0.37.4-py312h121d7ae_0.conda#1a2a2624770f712a536b8c5758c3387d https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py312h0a2e395_2.conda#3a3004fddd39e3bb1a631b08d7045156 https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.8.5-gpl_hc2c16d8_100.conda#5fdaa8b856683a5598459dead3976578 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-5_h0358290_openblas.conda#6636a2b6f1a87572df2970d3ebc87cc0 @@ -185,159 +188,171 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-devel-1.7.0-ha4b6fd6_2.co https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.12.2-default_hafda6a7_1000.conda#0ed3aa3e3e6bc85050d38881673a692f https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 https://conda.anaconda.org/conda-forge/linux-64/libllvm20-20.1.8-hf7376ad_1.conda#eafa8fd1dfc9a107fe62f7f12cabbc9c -https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda#1a2708a460884d6861425b7f9a7bef99 
+https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.0-hf7376ad_0.conda#213f51bbcce2964ff2ec00d0fdd38541 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/noarch/loguru-0.7.3-pyh707e725_0.conda#49647ac1de4d1e4b49124aedf3934e02 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py312h8a5da7c_0.conda#f775a43412f7f3d7ed218113ad233869 +https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py312hd9148b4_1.conda#2e489969e38f0b428c39492619b5e6e5 -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py312h8a5da7c_0.conda#9fe4c848dd01cde9b8d0073744d4eef8 +https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.1-py312h8a5da7c_0.conda#17c77acc59407701b54404cfd3639cac https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 -https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda#58335b26c38bf4a20f399384c33cbcf9 -https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py312h50c33e8_0.conda#923b06ad75b7acc888fa20a22dc397cd -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-hbde042b_1.conda#3c40a106eadf7c14c6236ceddb267893 +https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 
+https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py312h50c33e8_0.conda#c5eff3ada1a829f0bdb780dc4b62bbae +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.2-pyhcf101f3_0.conda#4fefefb892ce9cc1539405bec2f1a6cd https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-h99ae125_0.conda#8bbc19a6e87fbe8b97796e9a42a47a30 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py312h178313f_0.conda#0cf580c1b73146bb9ff1bbdb4d4c8cf9 -https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.1-py312h5253ce2_0.conda#ff09ba570ce66446db523ea21c12b765 +https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py312h5253ce2_0.conda#dd94c506b119130aef5a9382aed648e7 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.1-pyhcf101f3_0.conda#d837065e4e0de4962c3462079c23f969 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py312h0d868a3_1.conda#1cfb9b04c827219597def32c22fb9ca2 https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 
-https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py312h8a5da7c_0.conda#fba10c2007c8b06f77c5a23ce3a635ad +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py312h8a5da7c_1.conda#15878599a87992e44c059731771591cb https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda#4de79c071274a53dcaf2a8c749d1499e +https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda#1d00d46c634177fc8ede8b99d6089239 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.3-pyhd8ed1ab_0.conda#18de09b20462742fe093ba39185d9bac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e -https://conda.anaconda.org/conda-forge/noarch/tomli-2.3.0-pyhcf101f3_0.conda#d2732eb636c264dc9aa4cbee404b1a53 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py312h4c3975b_0.conda#e03a4bf52d2170d64c816b2a52972097 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d 
-https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.0-py312h4c3975b_1.conda#a0b8efbe73c90f810a171a6c746be087 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.1-pyhd8ed1ab_1.conda#75cb7132eb58d97896e173ef12ac9986 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.1-py312h4c3975b_0.conda#0b6c506ec1f272b685240e70a29261b8 https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda#4d1fc190b99912ed557a8236e958c559 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda#5e2eb9bf77394fc2e5918beefec9f9ab -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.6-hecca717_0.conda#93f5d4b5c17c8540479ad65f206fea51 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda#e192019153591938acf7322b6459d36e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda#665d152b9c6e78da404086088077c844 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e 
https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.4.0-pyhd8ed1ab_0.conda#421a865222cd0c9d83ff08bc78bf3a61 -https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a01c169f0ab0f91b26e77a3301fbfe4 +https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py312h460c074_1.conda#648ee28dcd4e07a1940a17da62eccd40 https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.1-py312h8a5da7c_0.conda#eafe0b486a7910e4a6973029c80d437f -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_1.conda#693cda60b9223f55d0836c885621611b +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py312h8a5da7c_0.conda#a8df7f0812ac4fa6bbc7135556d3e2c4 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py312h4c3975b_2.conda#29fd0bdf551881ab3d2801f7deaba528 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.61.1-py312h8a5da7c_0.conda#3bf8fb959dc598c67dac0430b4aff57a 
https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea943ca4f0d01d6eec6a60d24415dc5 https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_1.conda#e933f92cedca212eb2916f24823cf90b -https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_1.conda#e00afd65b88a3258212661b32c1469cb +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp22.1-22.1.0-default_h99862b1_0.conda#d966a23335e090a5410cc4f0dec8d00a +https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.0-default_h746c552_0.conda#140459a7413d8f6884eb68205ce39a0d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 -https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 +https://conda.anaconda.org/conda-forge/linux-64/libpq-18.3-h9abb657_0.conda#405ec206d230d9d37ad7c2636114cbf4 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f -https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda#372a62464d47d9e966b630ffae3abe73 
+https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 +https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda#5b5203189eb668f042ac2b0826244964 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.0-py312h33ff503_0.conda#4ba148299453b88d8fa9b6351eaa0df8 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py312h33ff503_1.conda#3569a8fca2dd3202e4ab08f42499f6d3 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh8b19718_0.conda#c55515ca43c6444d2572e0f0d93cb6b9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py312h9b6a7d9_2.conda#573b9a879a3a42990f9c51d7376dce6b https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 +https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.1.0-pyhcf101f3_0.conda#ee4e2cad073411bdfa8598f599537498 +https://conda.anaconda.org/conda-forge/noarch/python-gil-3.12.12-hd8ed1ab_2.conda#d41b6b394546ee6e1c423e28a581fc71 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.0-pyhd8ed1ab_0.conda#c9a9b6e144b880308f5eedc905fe503d +https://conda.anaconda.org/conda-forge/noarch/wheel-0.46.3-pyhd8ed1ab_0.conda#bdbd7385b4a67025ac2dba4ef8cb6a8f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f 
https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py312h8a5da7c_0.conda#6a3fd177315aaafd4366930d440e4430 +https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda#aaa2a381ccc56eac91d63b6c1240312f https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py312h5d8c7f2_0.conda#7ee12bbdb2e989618c080c7c611048db https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py312h4f23490_0.conda#acb46785d4866cec0a88b4d6e991c33f +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py312h4f23490_1.conda#84bf349fad55056ed326fc550671b65c https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312hd9148b4_3.conda#86cf7a7d861b79d38e3f0e5097e4965b -https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.12.0-pyhcf101f3_1.conda#cc7b371edd70319942c802c7d828a428 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py312h0a2e395_4.conda#43c2bc96af3ae5ed9e8a10ded942aa50 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 
-https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.conda#6ce4ad29c3ae0b74df813409433457ff +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py312h4f23490_2.conda#cec5bc5f7d374f8f8095f8e28e31f6cb https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py312hf79963d_1.conda#e597b3e812d9613f659b7d87ad252d18 -https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 +https://conda.anaconda.org/conda-forge/linux-64/pandas-3.0.1-py312h8ecdadd_0.conda#c15e7f8dd2e407188a8b7c0790211206 +https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh8b19718_0.conda#67bdec43082fd8a9cffb9484420b39a2 https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py312h4f23490_2.conda#1af24b0eb99b1158c0d8c64a4d6c5d79 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py312h4f23490_2.conda#ab856c36638ab1acf90e70349c525cf9 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.3-py312h54fa4ab_2.conda#e82683871cbc4bb257b7694f31a91327 +https://conda.anaconda.org/conda-forge/noarch/rich-14.3.3-pyhcf101f3_0.conda#7a6289c50631d620652f5045a63eb573 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.1-py312h54fa4ab_0.conda#3e38daeb1fb05a95656ff5af089d2e4c https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py312h383787d_2.conda#69e400d3deca12ee7afd4b73a5596905 
https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312hd9148b4_6.conda#f30ece80e76f9cc96e30cc5c71d2818e +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py312hd9148b4_0.conda#55fd03988b1b1bc6faabbfb5b481ecd7 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 -https://conda.anaconda.org/conda-forge/linux-64/viskores-1.0.0-hca82ae8_3.conda#efbc53222863d0f89c123cc3f9ccdc01 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.0.0-pyhcf101f3_0.conda#0d574419484a88ee7e753cc0c80ee2c1 +https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312h4f23490_1.conda#ac0a1a874ce9e3f8940a3a908ff74da9 -https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1.conda#613cea9275c4773d0b53c879838ac0ad +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py312h4f23490_0.conda#6aef45ba3c0123547eb7b0f15852cac9 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_1.conda#3c155e2914169b807ebb4027a8c0999c https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.15-pyhd8ed1ab_0.conda#25f954b7dae6dd7b0dc004dab74f1ce9 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 
https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py312he3d6523_0.conda#b8dc157bbbb69c1407478feede8b7b42 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.3-nompi_py312hf6400b3_100.conda#ed7ab4073fe4c48d0f9d3a80b6a17f74 +https://conda.anaconda.org/conda-forge/linux-64/minijinja-2.16.0-py310h6de7dc8_0.conda#ff98a47d5488a9f504866f46ccae8e7c +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py312h25f8dc5_102.conda#99217b58c029977345b72bb36a1f6596 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 +https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py312h4c3975b_2.conda#1d14b28fa4825ee30fd08e46bbcb5d63 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py312h4c3975b_3.conda#b0610b4174af97290f5f466a72583071 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-h17e89b9_5.conda#6c4f73c9a7e9b51f3a8e321c3e867bb6 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py312hcedc861_0.conda#f0d110978a87b200a06412b56b26407c https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py312hf79963d_1.conda#6c913a686cb4060cbd7639a36fa144f0 https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e +https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.4-pyhcf101f3_0.conda#11e433386dd008eca6e25204894e8f94 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 +https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-ha5ea40c_7.conda#f605332e1e4d9ff5c599933ae81db57d +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda#d62da3d560992bfa2feb611d7be813b8 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_3.conda#d2bbbd293097e664ffb01fc4cdaf5729 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.1-hb82b983_4.conda#f4dfd61ec958d420bebdcefeb805d658 -https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf 
-https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py312h6fba518_6.conda#24f269a608a9032faf6a1bcaea8d9e21 +https://conda.anaconda.org/conda-forge/noarch/requests-file-2.1.0-pyhd8ed1ab_1.conda#69be0472744969b97324d5d3b21845a9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py312h6fba518_7.conda#2edca3790f2a372db44ff1aa159769fc https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.4-pyhd8ed1ab_0.conda#89f80194003ce06e6bdf25fba539d9b1 +https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.1-pyhd8ed1ab_0.conda#470eec436327b4ba57068baf83d57ed4 https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.0-h8b86629_0.conda#39dcf8bb370df27fd81dbe41d4cb605e -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-data-viewer-0.1.5-pyhd8ed1ab_1.conda#fbe6437c6899c54e52f9258518035aca +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 
https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd +https://conda.anaconda.org/conda-forge/noarch/sphinx-reredirects-1.1.0-pyhd8ed1ab_0.conda#8392493ce6fbb8ab72a101af8e8a5d03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda#403185829255321ea427333f7773dd1f +https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.conda#f1b94f4ad598a4548fd08dbd46a73480 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/locks/py313-linux-64.lock b/requirements/locks/py313-linux-64.lock index fdd605632c..023aa62b41 100644 --- a/requirements/locks/py313-linux-64.lock +++ b/requirements/locks/py313-linux-64.lock @@ -1,54 +1,53 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: aa998005b5dacd37fb319f00d41becfc0c4eee198d70c2eaba80542b72968540 +# input_hash: 166090584da1a695f5de5cca145a0d4f98f482456d21a57920f19d9a23cd9bba @EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_2.tar.bz2#bbf6f174dcd3254e19a2f5d2295ce808 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_3.conda#129e404c5b001f3ef5581316971e3ea0 https://conda.anaconda.org/conda-forge/linux-64/nlohmann_json-3.12.0-h54a6638_1.conda#16c2a0e9c4a166e53632cfca4f68d020 https://conda.anaconda.org/conda-forge/noarch/python_abi-3.13-8_cp313.conda#94305520c52a4aa3f6c2b1ff6008d9f8 https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 https://conda.anaconda.org/conda-forge/linux-64/utfcpp-4.09-ha770c72_0.conda#99884244028fe76046e3914f90d4ad05 -https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.1.4-hbd8a1cb_0.conda#bddacf101bb4dd0e51811cb69c7790e2 +https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda#4492fd26db29495f0ba23f146cd5638d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 
-https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_16.conda#26c46f90d0e727e95c6c9498a33a09f3 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libopengl-1.7.0-ha4b6fd6_2.conda#7df50d44d4a14d6c31a2c54f2cd92157 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_16.conda#6d0363467e6ed84f11435eb309f2ff06 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.2-hb03c661_0.conda#ada39f5726bc5481e9dce293709dfabc +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda#0aa00f03f9e39fb9876085dee11a85d4 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.15.3-hb03c661_0.conda#dcdc58c15961dbf17a0621312b01f5cb https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_8.conda#51a19bba1b8ebfb60df25cde030b7ebc +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda#d2ffd7602c02f2b316fd921d39876885 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.16-hb03c661_0.conda#f9f81ea472684d75b9dd8d0b328cf655 https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f 
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda#b38117a3c920364aff79f870c984b4a3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.3-hecca717_0.conda#8b09ae86839581147ef2e5c5e229d164 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h9ec8514_0.conda#35f29eec58405aaf55e01cb470d8c26a -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_16.conda#5a68259fac2da8f2ee6f7bfe49c9eb8b -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_16.conda#39183d4e0c05609fd65f130633194e37 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda#e7f7ce06ec24cfcfb9e36d28cf82ba57 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda#d5e96b1ed75ca01906b3d2469b4ce493 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda#646855f357199a12f02a87382d429b75 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a -https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.1-hb9d3cd8_2.conda#1a580f7796c7bf6393fddb8bbbde58dc -https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb9d3cd8_0.conda#c7e925f37e3b40d893459e625f6a53f1 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 +https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda#2c21e66f50753a083cbe6b80f38268fa 
https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.8-hb9d3cd8_0.conda#7c7927b404672409d9917d49bff5f2d6 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.5-hd0c01bc_1.conda#68e52064ed3897463c0e958ab5c8f91b https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_16.conda#68f68355000ec3f1d6f26ea13e8f525f +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda#1b08cd684f34175e4514474793d44bcb https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h280c20c_1002.conda#45161d96307e3a447cc3eb5896cf6f8c https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.0-h26f9b46_0.conda#9ee58d5c534af06558933af3c845a780 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda#f61eb8cd60ff9057122a3d338b99c00f https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda#b2895afaf55bf96a8c8282a2e47a5de0 @@ -57,7 +56,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxshmfence-1.3.3-hb9d3cd8 
https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2025.1-hb03c661_0.conda#aa8d21be4b461ce612d8f5fb791decae https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.3-hb47aa4a_0.conda#607e13a8caac17f9a664bcab5302ce06 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda#a77f85f77be52ff59391544bfe73390a -https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.0-h54a6638_0.conda#ddf9fed4661bace13f33f08fe38a5f45 +https://conda.anaconda.org/conda-forge/linux-64/cli11-2.6.2-h54a6638_0.conda#83dae3dfadcfec9b37a9fbff6f7f7378 https://conda.anaconda.org/conda-forge/linux-64/double-conversion-3.4.0-hecca717_0.conda#dbe3ec0f120af456b3477743ffd99b74 https://conda.anaconda.org/conda-forge/linux-64/fmt-12.1.0-hff5e90c_0.conda#f7d7a4104082b39e3b3473fbd4a38229 https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda#4d4efd0645cd556fab54617c4ad477ef @@ -66,18 +65,18 @@ https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.cond https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda#186a18e3ba246eccfc7cff00cd19a870 https://conda.anaconda.org/conda-forge/linux-64/jsoncpp-1.9.6-hf42df4d_1.conda#7bdc5e2cc11cb0a0f795bdad9732b0f2 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda#9344155d33912347b37f0ae6c410a835 -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.4-h3f801dc_0.conda#01ba04e414e47f95c03d6ddd81fd37be +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda#86f7414544ae606282352fa1e116b41f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.2.0-hb03c661_1.conda#366b40a69f0ad6072561c1d09301c886 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.conda#4ffbb341c8b616aa2494b6afb26a0c5f https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 
https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_16.conda#40d9b534410403c821ff64f00d0adc22 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda#9063115da5bc35fdc3e1002e69b9ef6e https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h5888daf_1.conda#8422fcc9e5e172c91e99aef703b3ce65 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.53-h421ea60_0.conda#00d4e66b1f746cb14944cad23fffb405 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_16.conda#1b3152694d236cf233b76b8c56bf0eae +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda#6235adb93d064ecdf3d44faee6f468de https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h54a6638_2.conda#b4ecbefe517ed0157c37f8182768271c https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 @@ -90,28 +89,28 @@ https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda#c https://conda.anaconda.org/conda-forge/linux-64/pugixml-1.15-h3f63f65_0.conda#b11a4c6bf6f6f44e5e143f759ffa2087 https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda#d7d95fc8287ea7bf33e0e7116d2b95ec 
https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda#98b6c9dc80eb87b2519b97bcf7e578dd -https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.4-hb700be7_0.conda#aace50912e0f7361d0d223e7f7cfa6e5 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_ha0e22de_103.conda#86bc20552bf46075e3d92b67f089172d +https://conda.anaconda.org/conda-forge/linux-64/spirv-tools-2025.5-hb700be7_0.conda#058d5f16eaa3018be91aa3508df00d7c +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda#cffd3bdd58090148f4cfcd831f4b26ab https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h909a3a2_5.conda#6a0eb48e58684cca4d7acc8b7a0fd3c7 https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 -https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.2-hceb46e0_1.conda#40feea2979654ed579f1cda7c63ccb94 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.3-hceb46e0_1.conda#2aadb0d17215603a82a2a6b0afd9a4cb https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13eeac0b5c8e5b8ab496e6c4ddd829 https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 https://conda.anaconda.org/conda-forge/linux-64/gl2ps-1.4.2-hae5d5c5_1.conda#00e642ec191a19bf806a3915800e9524 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 
-https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45-default_hbd61a6d_105.conda#3ec0aa5037d39b06554109a01e6fb0c6 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.3-h6548e54_0.conda#034bea55a4feef51c98e8449938e9cee -https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.1-hf08fa70_5.conda#82954a6f42e3fba59628741dca105c98 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h46dd2a8_20.conda#df81fd57eacf341588d728c97920e86d -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.1-hf4e2dac_1.conda#b1f35e70f047918b49fb4b181e40300e +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda#da5be73701eecd0e8454423fd6ffcf30 https://conda.anaconda.org/conda-forge/linux-64/libtheora-1.1.1-h4ab18f5_1006.conda#553281a034e9cf8693c9df49f6c78ea1 https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.1-h9d88235_1.conda#cd5a90476766d53e901500df9215e927 
https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda#3fdd8d99683da9fe279c2f4cecd1e048 @@ -124,29 +123,29 @@ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.1-hb711507_ https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.10-hb711507_0.conda#0e0cbe0564d03a99afd5fd7b362feecd https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.2-hb711507_0.conda#608e0ef8256b81d04456e8d211eee3e8 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda#66a1db55ecdb7377d2b91f54cd56eafa -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.12-h4f16b4b_0.conda#db038ce880f100acc74dba10302b5630 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 -https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hd9c7081_0.conda#cae723309a49399d2949362f4ab5c9e4 +https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.28-hac629b4_1.conda#af491aae930edc096b58466c51c4126c https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.4-h2b0a6b4_0.conda#c379d67c686fb83475c1a6ed41cc41ff -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.3-hf516916_0.conda#fd6acbf37b40cbe919450fa58309fbe1 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.conda#b52b769cd13f7adaa6ccdc68ef801709 
https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.17-h717163a_0.conda#000e85703f0fd9594c81710dd5066471 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 -https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-hb8b1518_5.conda#d4a250da4737ee127fb1fa6452a9002e -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-h4e3cde8_0.conda#0a5563efed19ca4461cf927419b6eb73 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h7a8fb5f_6.conda#49c553b47ff679a6a1e9fc80b9c5a2d4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-hcf29cc6_1.conda#1707cdd636af2ff697b53186572c9f77 https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.1-ha770c72_0.conda#f4084e4e6577797150f9b04a4560ceb0 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd -https://conda.anaconda.org/conda-forge/linux-64/python-3.13.11-hc97d973_100_cp313.conda#0cbb0010f1d8ecb64a428a8d4214609e -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.1-h04a0ce9_1.conda#941ee610ebf7a8047140831091dcb1f7 +https://conda.anaconda.org/conda-forge/linux-64/python-3.13.12-hc97d973_100_cp313.conda#4c875ed0e78c2d407ec55eadffb8cf3d +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 
https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-hb711507_2.conda#a0901183f08b6c7107aab109733a3c91 -https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.46-hb03c661_0.conda#71ae752a748962161b4740eaff510258 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda#b56e0c8432b56decafae7e78c5f29ba5 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda#8c4061f499edec6b8ac7000f6d586829 @@ -155,29 +154,33 @@ https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/attrs-25.4.0-pyhcf101f3_1.conda#537296d57ea995666c68c821b00e360b https://conda.anaconda.org/conda-forge/linux-64/backports.zstd-1.3.0-py313h18e8e13_0.conda#d9e90792551a527200637e23a915dd79 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.2.0-py313hf159716_1.conda#6c4d3597cf43f3439a51b2b13e29a4ba -https://conda.anaconda.org/conda-forge/noarch/certifi-2026.1.4-pyhd8ed1ab_0.conda#eacc711330cd46939f66cd401ff9c44b +https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda#765c4d97e877cdbbb88ff33152b86125 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda#381bd45fb7aa032691f3063aff47e3a1 https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.4-pyhd8ed1ab_0.conda#a22d1fd9bf98827e280a02875d9a007a https://conda.anaconda.org/conda-forge/noarch/click-8.3.1-pyh8f84b5b_1.conda#ea8a6c3256897cc31263de9f455e25d9 
https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda#61b8078a0905b12529abc622406cb62c https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/colorcet-3.1.0-pyhd8ed1ab_1.conda#91d7152c744dc0f18ef8beb3cbc9980a +https://conda.anaconda.org/conda-forge/noarch/cpython-3.13.12-py313hd8ed1ab_100.conda#9a4b8a37303b933b847c14a310f0557b https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py313hc80a56d_0.conda#4a08e7dd57fdc0a13dc699c4c6d76c3a https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 +https://conda.anaconda.org/conda-forge/noarch/docstring_parser-0.17.0-pyhd8ed1ab_0.conda#ce49d3e5a7d20be2ba57a2c670bdd82e https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.20.2-pyhd8ed1ab_0.conda#7e7cf4d6c2be6991e6ae2b3f4331701c +https://conda.anaconda.org/conda-forge/noarch/filelock-3.24.3-pyhd8ed1ab_0.conda#9dbb20eec24beb026291c20a35ce1ff9 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda#867127763fbe935bab59815b6e0b7b5c https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.7.0-py313h6b9daa2_0.conda#3a0be7abedcbc2aee92ea228efea8eba -https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.1.0-pyhd8ed1ab_0.conda#1daaf94a304a27ba3446a306235a37ea -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h1b119a7_104.conda#0857f4d157820dcd5625f61fdfefb780 
+https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda#9614359868482abba1bd15ce465e3c42 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.2-pyhd8ed1ab_0.conda#895f6625dd8a246fece9279fcc12c1de +https://conda.anaconda.org/conda-forge/linux-64/jsonschema-rs-0.37.4-py313hdeb11d6_0.conda#e736b02c4a0905ad7636d798d48f3bed https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py313hc8edb43_2.conda#3e0e65595330e26515e31b7fc6d933c7 https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.8.5-gpl_hc2c16d8_100.conda#5fdaa8b856683a5598459dead3976578 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-5_h0358290_openblas.conda#6636a2b6f1a87572df2970d3ebc87cc0 @@ -186,157 +189,169 @@ https://conda.anaconda.org/conda-forge/linux-64/libglx-devel-1.7.0-ha4b6fd6_2.co https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.12.2-default_hafda6a7_1000.conda#0ed3aa3e3e6bc85050d38881673a692f https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 https://conda.anaconda.org/conda-forge/linux-64/libllvm20-20.1.8-hf7376ad_1.conda#eafa8fd1dfc9a107fe62f7f12cabbc9c -https://conda.anaconda.org/conda-forge/linux-64/libllvm21-21.1.8-hf7376ad_0.conda#1a2708a460884d6861425b7f9a7bef99 
+https://conda.anaconda.org/conda-forge/linux-64/libllvm22-22.1.0-hf7376ad_0.conda#213f51bbcce2964ff2ec00d0fdd38541 https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/noarch/loguru-0.7.3-pyh707e725_0.conda#49647ac1de4d1e4b49124aedf3934e02 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.3-py313h3dea7bd_0.conda#c14389156310b8ed3520d84f854be1ee +https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py313h7037e92_1.conda#cd1cfde0ea3bca6c805c73ffa988b12a -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.0-py313h3dea7bd_0.conda#d182804a222acc8f2c7e215f344d229f +https://conda.anaconda.org/conda-forge/linux-64/multidict-6.7.1-py313h3dea7bd_0.conda#4f3e7bf5a9fc60a7d39047ba9e84c84c https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-he970967_0.conda#2e5bf4f1da39c0b32778561c3c4e5878 -https://conda.anaconda.org/conda-forge/noarch/packaging-25.0-pyh29332c3_1.conda#58335b26c38bf4a20f399384c33cbcf9 -https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.0-py313h80991f8_0.conda#183fe6b9e99e5c2b464c1573ec78eac8 -https://conda.anaconda.org/conda-forge/noarch/pip-25.3-pyh145f28c_0.conda#bf47878473e5ab9fdb4115735230e191 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.5.1-pyhcf101f3_0.conda#1bd2e65c8c7ef24f4639ae6e850dacc2 +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.10-hbde042b_1.conda#3c40a106eadf7c14c6236ceddb267893 
+https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 +https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py313h80991f8_0.conda#2d5ee4938cdde91a8967f3eea686c546 +https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.2-pyhcf101f3_0.conda#4fefefb892ce9cc1539405bec2f1a6cd https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e -https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-h99ae125_0.conda#8bbc19a6e87fbe8b97796e9a42a47a30 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 https://conda.anaconda.org/conda-forge/linux-64/propcache-0.3.1-py313h8060acc_0.conda#b62867739241368f43f164889b45701b -https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.1-py313h54dd161_0.conda#d362949a1ed1ad4693b3928ad1d32c93 +https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py313h54dd161_0.conda#25fe6e02c2083497b3239e21b49d8093 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.1-pyhcf101f3_0.conda#d837065e4e0de4962c3462079c23f969 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2025.3-pyhd8ed1ab_0.conda#7ead57407430ba33f681738905278d03 
https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py313heab5758_1.conda#82df5d372f2796c389fcbe5104664f5a https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py313h3dea7bd_0.conda#4794ea0adaebd9f844414e594b142cb2 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py313h3dea7bd_1.conda#f256753e840c3cd3766488c9437a8f8b https://conda.anaconda.org/conda-forge/noarch/scooby-0.11.0-pyhd8ed1ab_0.conda#2d707ed62f63d72f4a0141b818e9c7b6 -https://conda.anaconda.org/conda-forge/noarch/setuptools-80.9.0-pyhff2d567_0.conda#4de79c071274a53dcaf2a8c749d1499e +https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda#1d00d46c634177fc8ede8b99d6089239 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.1-pyhd8ed1ab_0.conda#7de28c27fe620a4f7dbfaea137c6232b +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.3-pyhd8ed1ab_0.conda#18de09b20462742fe093ba39185d9bac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e -https://conda.anaconda.org/conda-forge/noarch/tomli-2.3.0-pyhcf101f3_0.conda#d2732eb636c264dc9aa4cbee404b1a53 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 
https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py313h07c4f96_0.conda#82da2dcf1ea3e298f2557b50459809e0 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d https://conda.anaconda.org/conda-forge/linux-64/xcb-util-cursor-0.1.6-hb03c661_0.conda#4d1fc190b99912ed557a8236e958c559 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.6-hb9d3cd8_2.conda#d3c295b50f092ab525ffe3c2aa4b7413 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.5-h5888daf_1.conda#5e2eb9bf77394fc2e5918beefec9f9ab -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.4-hb9d3cd8_0.conda#2de7f99d6581a4a7adbff607b5c278ca -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.6-hecca717_0.conda#93f5d4b5c17c8540479ad65f206fea51 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda#e192019153591938acf7322b6459d36e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda#665d152b9c6e78da404086088077c844 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 
https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.4.0-pyhd8ed1ab_0.conda#421a865222cd0c9d83ff08bc78bf3a61 -https://conda.anaconda.org/conda-forge/noarch/babel-2.17.0-pyhd8ed1ab_0.conda#0a01c169f0ab0f91b26e77a3301fbfe4 +https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py313hf46b229_1.conda#d0616e7935acab407d1543b28c446f6f https://conda.anaconda.org/conda-forge/noarch/click-default-group-1.2.4-pyhd8ed1ab_1.conda#7cd83dd6831b61ad9624a694e4afd7dc https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.1-py313h3dea7bd_0.conda#82315acb438e857f809f556e2dcdb822 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_1.conda#bcca9afd203fe05d9582249ac12762da +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py313h3dea7bd_0.conda#77e1fc7133e03ccd62070f2405c82ea9 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py313h07c4f96_2.conda#7e7e3c5a8a28c6b8eb430183e0554adf https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.61.1-py313h3dea7bd_0.conda#c0f36dfbb130da4f6ce2df31f6b25ea8 https://conda.anaconda.org/conda-forge/linux-64/glew-2.2.0-h3abd4de_0.conda#6ea943ca4f0d01d6eec6a60d24415dc5 
https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d -https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp21.1-21.1.8-default_h99862b1_1.conda#e933f92cedca212eb2916f24823cf90b -https://conda.anaconda.org/conda-forge/linux-64/libclang13-21.1.8-default_h746c552_1.conda#e00afd65b88a3258212661b32c1469cb +https://conda.anaconda.org/conda-forge/linux-64/libclang-cpp22.1-22.1.0-default_h99862b1_0.conda#d966a23335e090a5410cc4f0dec8d00a +https://conda.anaconda.org/conda-forge/linux-64/libclang13-22.1.0-default_h746c552_0.conda#140459a7413d8f6884eb68205ce39a0d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_h11f7409_103.conda#3ccff1066c05a1e6c221356eecc40581 -https://conda.anaconda.org/conda-forge/linux-64/libpq-18.1-hb80d175_3.conda#c39da2ad0e7dd600d1eb3146783b057d +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 +https://conda.anaconda.org/conda-forge/linux-64/libpq-18.3-h9abb657_0.conda#405ec206d230d9d37ad7c2636114cbf4 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f -https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.328.1-h5279c79_0.conda#372a62464d47d9e966b630ffae3abe73 +https://conda.anaconda.org/conda-forge/linux-64/libvulkan-loader-1.4.341.0-h5279c79_0.conda#31ad065eda3c2d88f8215b1289df9c89 
+https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-4.0.0-pyhd8ed1ab_0.conda#5b5203189eb668f042ac2b0826244964 https://conda.anaconda.org/conda-forge/linux-64/mesalib-25.0.5-h57bcd07_2.conda#9b6b685b123906eb4ef270b50cbe826c https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.0-py313hf6604e3_0.conda#07963f5dbb5351201035e1f8815ed8da +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py313hf6604e3_1.conda#ca9c6ba4beac38cb3d0a85afde27f94c https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py313h77f6078_2.conda#42d11c7d1ac21ae2085f58353641e71c https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 +https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.1.0-pyhcf101f3_0.conda#ee4e2cad073411bdfa8598f599537498 +https://conda.anaconda.org/conda-forge/noarch/python-gil-3.13.12-h4df99d1_100.conda#3d92938d5b83c49162ade038aab58a59 https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.3.0-hb700be7_2.conda#8f7278ca5f7456a974992a8b34284737 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.36.0-pyhd8ed1ab_0.conda#c9a9b6e144b880308f5eedc905fe503d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f https://conda.anaconda.org/conda-forge/linux-64/yarl-1.22.0-py313h3dea7bd_0.conda#e9415b0f7b43d2e32a3f24fd889c9e70 +https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda#aaa2a381ccc56eac91d63b6c1240312f 
https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.13.3-py313hd6074c6_0.conda#684fb9c78db5024b939a1ed0a107f464 https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py313h29aa505_0.conda#14dc0f64f2e83f7bc2be5153e2ef730b +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py313h29aa505_1.conda#c63d5f9d63fe2f48b0ad75005fcae7ba https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyhecae5ae_1.conda#04151bb8e351c6209caad045e4b1f4bd -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py313h7037e92_3.conda#6186382cb34a9953bf2a18fc763dc346 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.12.0-pyhcf101f3_1.conda#cc7b371edd70319942c802c7d828a428 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py313hc8edb43_4.conda#33639459bc29437315d4bff9ed5bc7a7 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_2.conda#d10d9393680734a8febc4b362a4c94f2 https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.1-hf05ffb4_0.conda#6ce4ad29c3ae0b74df813409433457ff 
+https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py313h29aa505_2.conda#ad53894d278895bf15c8fc324727d224 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.3.3-py313h08cd8bf_2.conda#8a69ea71fdd37bfe42a28f0967dbb75a +https://conda.anaconda.org/conda-forge/linux-64/pandas-3.0.1-py313hbfd7664_0.conda#1c8807728f0333228766dee685394e16 https://conda.anaconda.org/conda-forge/linux-64/pykdtree-1.4.3-py313h29aa505_2.conda#e3a598d20bf2fa7b03a771e9e4471be9 https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py313h29aa505_2.conda#60f5d1c039da10fe89a530cc93ea50ac -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.16.3-py313h4b8bb8b_2.conda#0be9bd58abfb3e8f97260bd0176d5331 +https://conda.anaconda.org/conda-forge/noarch/rich-14.3.3-pyhcf101f3_0.conda#7a6289c50631d620652f5045a63eb573 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.1-py313h4b8bb8b_0.conda#ec81bc03787968decae6765c7f61b7cf https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py313had47c43_2.conda#6e550dd748e9ac9b2925411684e076a1 https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py313h7037e92_6.conda#1fa8d662361896873a165b051322073e +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py313h7037e92_0.conda#cb423e0853b3dde2b3738db4dedf5ba2 
https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 -https://conda.anaconda.org/conda-forge/linux-64/viskores-1.0.0-hca82ae8_3.conda#efbc53222863d0f89c123cc3f9ccdc01 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.0.0-pyhcf101f3_0.conda#0d574419484a88ee7e753cc0c80ee2c1 +https://conda.anaconda.org/conda-forge/linux-64/viskores-1.1.0-hca82ae8_0.conda#5b4d69a15107ebad71ee9aaf76c4b09e https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py313h29aa505_1.conda#36a704169c6a0b4ce8335d160103e218 -https://conda.anaconda.org/conda-forge/noarch/distributed-2025.12.0-pyhcf101f3_1.conda#613cea9275c4773d0b53c879838ac0ad +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py313h29aa505_0.conda#3942b6a86fe92d0888b3373f2c1e1676 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_1.conda#3c155e2914169b807ebb4027a8c0999c https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.0-h6083320_0.conda#1ea5ed29aea252072b975a232b195146 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.15-pyhd8ed1ab_0.conda#25f954b7dae6dd7b0dc004dab74f1ce9 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_2.conda#bb0230917e2473c77d615104dbe8a49d https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py313h683a580_0.conda#ffe67570e1a9192d2f4c189b27f75f89 
-https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.3-nompi_py313hfae5b86_100.conda#d5247c4087289475a8c324bbe03a71ce +https://conda.anaconda.org/conda-forge/linux-64/minijinja-2.16.0-py310h6de7dc8_0.conda#ff98a47d5488a9f504866f46ccae8e7c +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py313h16051e2_102.conda#20ae46c5e9c7106bdb2cac6b44b7d845 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py313h07c4f96_2.conda#424535b78f522124143393ec02f6318c +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py313h07c4f96_3.conda#b7810803a3481e22968022a94107ed93 +https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.2-h17e89b9_5.conda#6c4f73c9a7e9b51f3a8e321c3e867bb6 https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py313h2005660_0.conda#d551bd1d2fcfac36674dbe2be4b0a410 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 +https://conda.anaconda.org/conda-forge/noarch/rich-rst-1.3.2-pyhd8ed1ab_0.conda#cbd84dbdb3f5a7d762b5fb2b0d49e7cd https://conda.anaconda.org/conda-forge/noarch/wslink-2.5.0-pyhd8ed1ab_0.conda#8fa415e696acd9af59ce0a4425fd1b38 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py313h08cd8bf_1.conda#a0d8dc5c90850d9f1a79f69c98aef0ff https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_1.conda#dd71e4ec2fbffe38c0359976505f816e 
+https://conda.anaconda.org/conda-forge/noarch/cyclopts-4.5.4-pyhcf101f3_0.conda#11e433386dd008eca6e25204894e8f94 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 +https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-ha5ea40c_7.conda#f605332e1e4d9ff5c599933ae81db57d +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda#d62da3d560992bfa2feb611d7be813b8 https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_3.conda#d2bbbd293097e664ffb01fc4cdaf5729 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.9.0-pyhd8ed1ab_0.conda#dd4b6337bf8886855db6905b336db3c8 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 -https://conda.anaconda.org/conda-forge/linux-64/qt6-main-6.10.1-hb82b983_4.conda#f4dfd61ec958d420bebdcefeb805d658 -https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-h993cebd_6.conda#f9f33c65b20e6a61f21714785e3613ec -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.0-h61e6d4b_0.conda#91e6d4d684e237fba31b9815c4b40edf -https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py313hbb97348_6.conda#9f13c027bf4668c4f8a76a7bf10bd63e +https://conda.anaconda.org/conda-forge/noarch/requests-file-2.1.0-pyhd8ed1ab_1.conda#69be0472744969b97324d5d3b21845a9 +https://conda.anaconda.org/conda-forge/linux-64/vtk-base-9.5.2-py313hbb97348_7.conda#03c6ddd039b6877278b5c4df20b61f29 https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e -https://conda.anaconda.org/conda-forge/noarch/pyvista-0.46.4-pyhd8ed1ab_0.conda#89f80194003ce06e6bdf25fba539d9b1 
+https://conda.anaconda.org/conda-forge/noarch/pyvista-0.47.1-pyhd8ed1ab_0.conda#470eec436327b4ba57068baf83d57ed4 https://conda.anaconda.org/conda-forge/noarch/geovista-0.5.3-pyhd8ed1ab_1.conda#64348d05eedb1b1b5676f63101d004f2 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.0-h8b86629_0.conda#39dcf8bb370df27fd81dbe41d4cb605e -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 -https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.6.1-pyhd8ed1ab_2.conda#3e6c15d914b03f83fc96344f917e0838 +https://conda.anaconda.org/conda-forge/noarch/sphinx-data-viewer-0.1.5-pyhd8ed1ab_1.conda#fbe6437c6899c54e52f9258518035aca +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd +https://conda.anaconda.org/conda-forge/noarch/sphinx-reredirects-1.1.0-pyhd8ed1ab_0.conda#8392493ce6fbb8ab72a101af8e8a5d03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda#403185829255321ea427333f7773dd1f 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.conda#f1b94f4ad598a4548fd08dbd46a73480 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/locks/py314-linux-64.lock b/requirements/locks/py314-linux-64.lock new file mode 100644 index 0000000000..faef16acc7 --- /dev/null +++ b/requirements/locks/py314-linux-64.lock @@ -0,0 +1,292 @@ +# Generated by conda-lock. +# platform: linux-64 +# input_hash: cb29f99d14bcb9e02d4cad9b06609f6e0bd28b089ec3613bf13282c0636a7c94 +@EXPLICIT +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 +https://conda.anaconda.org/conda-forge/linux-64/hicolor-icon-theme-0.17-ha770c72_3.conda#129e404c5b001f3ef5581316971e3ea0 +https://conda.anaconda.org/conda-forge/noarch/python_abi-3.14-8_cp314.conda#0539938c55b6b1a59b560e843ad864a4 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2025c-hc9c84f9_1.conda#ad659d0a2b3e47e38d829aa8cad2d610 +https://conda.anaconda.org/conda-forge/noarch/ca-certificates-2026.2.25-hbd8a1cb_0.conda#4492fd26db29495f0ba23f146cd5638d +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-hc364b38_1.conda#a7970cd949a077b7cb9696379d338681 
+https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-15.2.0-he0feb66_18.conda#239c5e9546c38a1e884d69effcf4c882 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-20_gnu.conda#a9f577daf3de00bca7c3c76c0ecbd1de +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-15.2.0-he0feb66_18.conda#0aa00f03f9e39fb9876085dee11a85d4 +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.2-h39aace5_0.conda#791365c5f65975051e4e017b5da3abf5 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hda65f42_9.conda#d2ffd7602c02f2b316fd921d39876885 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.6-hb03c661_0.conda#920bb03579f15389b9e512095ad995b7 +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.16-hb03c661_0.conda#f9f81ea472684d75b9dd8d0b328cf655 +https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f +https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.3-hb9d3cd8_0.conda#b38117a3c920364aff79f870c984b4a3 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.2.0-hb03c661_1.conda#72c8fd1af66bd67bf580645b426513ed +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.25-h17f619e_0.conda#6c77a605a7a689d17d4819c0f8ac9a00 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.7.4-hecca717_0.conda#e7f7ce06ec24cfcfb9e36d28cf82ba57 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.5.2-h3435931_0.conda#a360c33a5abe61c07959e449fa1453eb +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-15.2.0-h69a702a_18.conda#d5e96b1ed75ca01906b3d2469b4ce493 
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-15.2.0-h68bc16d_18.conda#646855f357199a12f02a87382d429b75 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.18-h3b78370_2.conda#915f5995e94f60e9a4826e0b0920ee88 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.1.2-hb03c661_0.conda#8397539e3a0bbd1695584fb4f927485a +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.8.2-hb03c661_0.conda#c7c83eecbb72d88b940c249af56c8b17 +https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-hb03c661_1.conda#2c21e66f50753a083cbe6b80f38268fa +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hb9d3cd8_1.conda#d864d34357c3b65a4b731f78c0801dc4 +https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hb9d3cd8_0.conda#70e3400cbbfa03e96dcde7fc13e38c7b +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-15.2.0-h934c35e_18.conda#1b08cd684f34175e4514474793d44bcb +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.41.3-h5347b49_0.conda#db409b7c1720428638e7c0d509d3e1b5 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.6.0-hd42ef1d_0.conda#aea31d2e5b1091feca96fcfe945c3cf9 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 +https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h280c20c_1002.conda#45161d96307e3a447cc3eb5896cf6f8c +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_3.conda#47e340acb35de30501a76c7c799c41d7 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.6.1-h35e630c_1.conda#f61eb8cd60ff9057122a3d338b99c00f +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb03c661_1.conda#b2895afaf55bf96a8c8282a2e47a5de0 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb03c661_1.conda#1dafce8548e38671bea82e3f5c6ce22f +https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2025.1-hb03c661_0.conda#aa8d21be4b461ce612d8f5fb791decae +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.3-hb47aa4a_0.conda#607e13a8caac17f9a664bcab5302ce06 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h280c20c_3.conda#a77f85f77be52ff59391544bfe73390a +https://conda.anaconda.org/conda-forge/linux-64/geos-3.14.1-h480dda7_0.conda#4d4efd0645cd556fab54617c4ad477ef +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3bf7b9fd5a7136126e0234db4b87c8b6 +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.14-hecca717_2.conda#2cd94587f3a401ae05e03a6caf09539d +https://conda.anaconda.org/conda-forge/linux-64/icu-78.2-h33c6efd_0.conda#186a18e3ba246eccfc7cff00cd19a870 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h0aef613_1.conda#9344155d33912347b37f0ae6c410a835 +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.5-h088129d_0.conda#86f7414544ae606282352fa1e116b41f +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.2.0-hb03c661_1.conda#366b40a69f0ad6072561c1d09301c886 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.2.0-hb03c661_1.conda#4ffbb341c8b616aa2494b6afb26a0c5f +https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.125-hb03c661_1.conda#9314bc5a1fe7d1044dc9dfd3ef400535 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20250104-pl5321h7949ede_0.conda#c277e0a4d549b03ac1e9d6cbbe3d017b +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-15.2.0-h69a702a_18.conda#9063115da5bc35fdc3e1002e69b9ef6e +https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.3.0-h4c17acf_1.conda#c2a0c1d0120520e979685034e0b79859 
+https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.55-h421ea60_0.conda#5f13ffc7d30ffec87864e678df9957b4 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hcf80075_0.conda#eecce068c7e4eddeb169591baac20ac4 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-15.2.0-hdf11a46_18.conda#6235adb93d064ecdf3d44faee6f468de +https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 +https://conda.anaconda.org/conda-forge/linux-64/muparser-2.3.5-h5888daf_0.conda#ab3e3db511033340e75e7002e80ce8c0 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.47-haa7fec5_0.conda#7a3bff861a6583f1889021facefc08b1 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.46.4-h54a6638_1.conda#c01af13bdc553d1a8fbfff6e8db075f0 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.3-h853b02a_0.conda#d7d95fc8287ea7bf33e0e7116d2b95ec +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.2-h03e3b7b_1.conda#98b6c9dc80eb87b2519b97bcf7e578dd +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h366c992_103.conda#cffd3bdd58090148f4cfcd831f4b26ab +https://conda.anaconda.org/conda-forge/linux-64/wayland-1.24.0-hd6090a7_1.conda#035da2e4f5770f036ff704fa17aace24 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.6-he73a12e_0.conda#1c74ff8c35dcadf952a16f752ca5aa49 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.3.3-hceb46e0_1.conda#2aadb0d17215603a82a2a6b0afd9a4cb 
+https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.7-hb78ec9c_6.conda#4a13eeac0b5c8e5b8ab496e6c4ddd829 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.2.0-hb03c661_1.conda#af39b9a8711d4a8d437b52c1d78eb6a1 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.22.2-ha1258a1_0.conda#fb53fb07ce46a575c5d004bbc96032c2 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.45.1-default_hbd61a6d_101.conda#12bd9a3f089ee6c9266a37dab82afabd +https://conda.anaconda.org/conda-forge/linux-64/libfreetype6-2.14.1-h73754d4_0.conda#8e7251989bca326a28f4a5ffbd74557a +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.86.4-h6548e54_1.conda#bb26456332b07f68bf3b7622ed71c0da +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.2-ha09017c_0.conda#1df8c1b1d6665642107883685db6cf37 +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.67.0-had1ee68_0.conda#b499ce4b026493a13774bcf0f4c33849 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.30-pthreads_h94d23a6_4.conda#be43915efc66345cccb3c310b6ed0374 +https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h46dd2a8_20.conda#df81fd57eacf341588d728c97920e86d +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.51.2-hf4e2dac_0.conda#da5be73701eecd0e8454423fd6ffcf30 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.1-h9d88235_1.conda#cd5a90476766d53e901500df9215e927 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-16-2.15.1-hca6bf5a_1.conda#3fdd8d99683da9fe279c2f4cecd1e048 +https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.10-h05a5f5f_0.conda#da01bb40572e689bd1535a5cee6b1d68 
+https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 +https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 +https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.3.0-hd9031aa_1.conda#66a1db55ecdb7377d2b91f54cd56eafa +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.13-he1eb515_0.conda#861fb6ccbc677bb9a9fb2468430b9c6a +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.2.0-hed03a55_1.conda#8ccf913aaba749a5496c17629d859ed1 +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.16.2-h24cb091_1.conda#ce96f2f470d39bd96ce03945af92e280 +https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.44.5-h2b0a6b4_1.conda#7eb4977dd6f60b3aaab0715a0ea76f11 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.86.4-hf516916_1.conda#b52b769cd13f7adaa6ccdc68ef801709 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.18-h0c24ade_0.conda#6f2e2c8f58160147c4d1c6f4c14cbac4 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.11.0-5_h4a7cf45_openblas.conda#c160954f7418d7b6e87eaf05a8913fa9 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h7a8fb5f_6.conda#49c553b47ff679a6a1e9fc80b9c5a2d4 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.18.0-hcf29cc6_1.conda#1707cdd636af2ff697b53186572c9f77 +https://conda.anaconda.org/conda-forge/linux-64/libfreetype-2.14.1-ha770c72_0.conda#f4084e4e6577797150f9b04a4560ceb0 
+https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c +https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-haa4a5bd_1022.conda#00f0f4a9d2eb174015931b1a234d61ca +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.15.1-he237659_1.conda#417955234eccd8f252b86a265ccdab7f +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.4-h55fea9a_0.conda#11b3379b191f63139e29c0d19dee24cd +https://conda.anaconda.org/conda-forge/linux-64/python-3.14.3-h32b2ec7_101_cp314.conda#c014ad06e60441661737121d3eae8a60 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.51.2-h04a0ce9_0.conda#bb88d9335d09e54c7e6b5529d1856917 +https://conda.anaconda.org/conda-forge/linux-64/xkeyboard-config-2.47-hb03c661_0.conda#b56e0c8432b56decafae7e78c5f29ba5 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.7-hb03c661_0.conda#34e54f03dfea3e7a2dcf1453a85f1085 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.2-hb03c661_0.conda#ba231da7fccf9ea1e768caf5c7099b84 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e +https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_1.conda#8c4061f499edec6b8ac7000f6d586829 +https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a +https://conda.anaconda.org/conda-forge/noarch/attrs-25.4.0-pyhcf101f3_1.conda#537296d57ea995666c68c821b00e360b +https://conda.anaconda.org/conda-forge/noarch/backports.zstd-1.3.0-py314h680f03e_0.conda#a2ac7763a9ac75055b68f325d3255265 +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.2.0-py314h3de4e8d_1.conda#8910d2c46f7e7b519129f486e0fe927a +https://conda.anaconda.org/conda-forge/noarch/certifi-2026.2.25-pyhd8ed1ab_0.conda#765c4d97e877cdbbb88ff33152b86125 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.5.0-pyhd8ed1ab_0.conda#381bd45fb7aa032691f3063aff47e3a1 
+https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.4-pyhd8ed1ab_0.conda#a22d1fd9bf98827e280a02875d9a007a +https://conda.anaconda.org/conda-forge/noarch/click-8.3.1-pyh8f84b5b_1.conda#ea8a6c3256897cc31263de9f455e25d9 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.2-pyhcf101f3_1.conda#61b8078a0905b12529abc622406cb62c +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 +https://conda.anaconda.org/conda-forge/noarch/cpython-3.14.3-py314hd8ed1ab_101.conda#3bb89e4f795e5414addaa531d6b1500a +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhcf101f3_2.conda#4c2a8fef270f6c69591889b93f9f55c1 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.2.4-py314h1807b08_0.conda#866fd3d25b767bccb4adc8476f4035cd +https://conda.anaconda.org/conda-forge/noarch/distlib-0.4.0-pyhd8ed1ab_0.conda#003b8ba0a94e2f1e117d0bd46aebc901 +https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc +https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.2-pyhd8ed1ab_0.conda#a57b4be42619213a94f31d2c69c5dda7 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.24.3-pyhd8ed1ab_0.conda#9dbb20eec24beb026291c20a35ce1ff9 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.17.1-h27c8c51_0.conda#867127763fbe935bab59815b6e0b7b5c +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.14.1-ha770c72_0.conda#4afc585cd97ba8a23809406cd8a9eda8 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2026.2.0-pyhd8ed1ab_0.conda#496c6c9411a6284addf55c898d6ed8d7 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.6-nompi_h19486de_106.conda#c223ee1429ba538f3e48cfb4a0b97357 +https://conda.anaconda.org/conda-forge/noarch/hpack-4.1.0-pyhd8ed1ab_0.conda#0a802cb9888dd14eeefc611f05c40b6e +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.1.0-pyhd8ed1ab_0.conda#8e6923fc12f1fe8f8c4e5c9f343256ac 
+https://conda.anaconda.org/conda-forge/noarch/idna-3.11-pyhd8ed1ab_0.conda#53abe63df7e10a6ba605dc5f9f961d36 +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.3.0-pyhd8ed1ab_0.conda#9614359868482abba1bd15ce465e3c42 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.5.2-pyhd8ed1ab_0.conda#895f6625dd8a246fece9279fcc12c1de +https://conda.anaconda.org/conda-forge/linux-64/jsonschema-rs-0.37.4-py314h5059d10_0.conda#bbdf9e1de411fd55c05751bea6e2361c +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.9-py314h97ea11e_2.conda#57f1ce4f7ba6bcd460be8f83c8f04c69 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.8.5-gpl_hc2c16d8_100.conda#5fdaa8b856683a5598459dead3976578 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.11.0-5_h0358290_openblas.conda#6636a2b6f1a87572df2970d3ebc87cc0 +https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a +https://conda.anaconda.org/conda-forge/linux-64/libglx-devel-1.7.0-ha4b6fd6_2.conda#27ac5ae872a21375d980bd4a6f99edf3 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.11.0-5_h47877c9_openblas.conda#b38076eb5c8e40d0106beda6f95d7609 +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.13.1-hca5e8e5_0.conda#2bca1fbb221d9c3c8e3a155784bbc2e9 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-devel-2.15.1-he237659_1.conda#644b2a3a92ba0bb8e2aa671dd831e793 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/noarch/markupsafe-3.0.3-pyh7db6752_0.conda#fab1be106a50e20f10fe5228fd1d1651 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.2-py314h9891dd4_1.conda#c6752022dcdbf4b9ef94163de1ab7f03 
+https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyhd8ed1ab_1.conda#37293a85a0f4f77bbd9cf7aaefc62609 +https://conda.anaconda.org/conda-forge/noarch/packaging-26.0-pyhcf101f3_0.conda#b76541e68fea4d511b1ac46a28dcd2c6 +https://conda.anaconda.org/conda-forge/linux-64/pillow-12.1.1-py314h8ec4b1a_0.conda#79678378ae235e24b3aa83cee1b38207 +https://conda.anaconda.org/conda-forge/noarch/pip-26.0.1-pyh145f28c_0.conda#09a970fbf75e8ed1aa633827ded6aa4f +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.9.2-pyhcf101f3_0.conda#4fefefb892ce9cc1539405bec2f1a6cd +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.6.0-pyhf9edf01_1.conda#d7585b6550ad04c8c5e21097ada2888e +https://conda.anaconda.org/conda-forge/linux-64/proj-9.7.1-he0df7b0_3.conda#031e33ae075b336c0ce92b14efa886c5 +https://conda.anaconda.org/conda-forge/linux-64/psutil-7.2.2-py314h0f05182_0.conda#4f225a966cfee267a79c5cb6382bd121 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef +https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.2-pyhd8ed1ab_0.conda#6b6ece66ebcae2d5f326c77ef2c5a066 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.3.2-pyhcf101f3_0.conda#3687cc0b82a8b4c17e1f0eb7e47163d5 +https://conda.anaconda.org/conda-forge/noarch/pyshp-3.0.3-pyhd8ed1ab_0.conda#c138c7aaa6a10b5762dcd92247864aff +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.6.0-py314he82b845_1.conda#21dce7c80bbdb9785633011ad348e530 +https://conda.anaconda.org/conda-forge/noarch/pytz-2025.2-pyhd8ed1ab_0.conda#bc8e3267d44011051f2eb14d22fb0960 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.3-py314h67df5f8_1.conda#2035f68f96be30dc60a5dfd7452c7941 +https://conda.anaconda.org/conda-forge/noarch/setuptools-82.0.0-pyh332efcf_0.conda#1d00d46c634177fc8ede8b99d6089239 
+https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhe01879c_1.conda#3339e3b65d58accf4ca4fb8748ab16b3 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-3.0.1-pyhd8ed1ab_0.conda#755cf22df8693aa0d1aec1c123fa5863 +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_1.conda#0401a17ae845fa72c7210e206ec5647d +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.8.3-pyhd8ed1ab_0.conda#18de09b20462742fe093ba39185d9bac +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb +https://conda.anaconda.org/conda-forge/noarch/tblib-3.2.2-pyhcf101f3_0.conda#f88bb644823094f436792f80fba3207e +https://conda.anaconda.org/conda-forge/noarch/tomli-2.4.0-pyhcf101f3_0.conda#72e780e9aa2d0a3295f59b1874e3768b +https://conda.anaconda.org/conda-forge/noarch/toolz-1.1.0-pyhd8ed1ab_1.conda#c07a6153f8306e45794774cf9b13bd32 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.5.3-py314h5bd0f2a_0.conda#e35f08043f54d26a1be93fdbf90d30c3 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.15.0-pyhcf101f3_0.conda#0caa1af407ecff61170c9437a808404d +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-17.0.1-py314h5bd0f2a_0.conda#494fdf358c152f9fdd0673c128c2f3dd +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcomposite-0.4.7-hb03c661_0.conda#f2ba4192d38b6cef2bb2c25029071d90 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxcursor-1.2.3-hb9d3cd8_0.conda#2ccd714aa2242315acaf0a67faea780b +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxinerama-1.1.6-hecca717_0.conda#93f5d4b5c17c8540479ad65f206fea51 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrandr-1.5.5-hb03c661_0.conda#e192019153591938acf7322b6459d36e +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.7-hb03c661_0.conda#665d152b9c6e78da404086088077c844 +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e +https://conda.anaconda.org/conda-forge/noarch/zipp-3.23.0-pyhcf101f3_1.conda#30cd29cb87d819caead4d55184c1d115 +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 +https://conda.anaconda.org/conda-forge/noarch/babel-2.18.0-pyhcf101f3_0.conda#ea5be9abc2939c8431893b4e123a2065 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.4-he90730b_1.conda#bb6c4808bfa69d6f7f6b07e5846ced37 +https://conda.anaconda.org/conda-forge/linux-64/cffi-2.0.0-py314h4a8dc5f_1.conda#cf45f4278afd6f4e6d03eda0f435d527 +https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1.2-pyhd8ed1ab_0.conda#e9b05deb91c013e5224672a4ba9cf8d1 +https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.13.4-py314h67df5f8_0.conda#6c7efc167cee337d9c41200506d022b8 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.1.0-py314h5bd0f2a_2.conda#a6a32cab83d59c7812ddbb03220057e3 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.3.1-pyhd8ed1ab_0.conda#8e662bd460bda79b1ea39194e3c4c9ab +https://conda.anaconda.org/conda-forge/noarch/fonttools-4.61.1-pyh7db6752_0.conda#d5da976e963e70364b9e3ff270842b9f +https://conda.anaconda.org/conda-forge/noarch/h2-4.3.0-pyhcf101f3_0.conda#164fc43f0b53b6e3a7bc7dce5e4f1dc9 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.7.0-pyhe01879c_1.conda#63ccfdc3a3ce25b027b8767eb722fca8 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.6-pyhcf101f3_1.conda#04558c96691bed63104678757beb4f8d 
+https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5fbf134_12.conda#88c1c66987cd52a712eea89c27104be6 +https://conda.anaconda.org/conda-forge/linux-64/libgl-devel-1.7.0-ha4b6fd6_2.conda#53e7cbb2beb03d69a478631e23e340e9 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.3-nompi_hbf2fc22_104.conda#a2956b63b1851e9d5eb9f882d02fa3a9 +https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-gpl_h2abfd87_119.conda#887245164c408c289d0cb45bd508ce5f +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.10.0-pyhd8ed1ab_0.conda#eb52d14a901e23c39e9e7b4a1a5c015f +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.4.2-py314h2b28147_1.conda#4ea6b620fdf24a1a0bc4f1c7134dfafb +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 +https://conda.anaconda.org/conda-forge/noarch/pbr-7.0.3-pyhd8ed1ab_0.conda#d312c4472944752588d76e119e6dd8f9 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.2-py314h24aeaa0_2.conda#b46a7e6a2b8c064488576c3e42d85df0 +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhe01879c_2.conda#5b8d21249ff20967101ffa321cab24e8 +https://conda.anaconda.org/conda-forge/noarch/python-discovery-1.1.0-pyhcf101f3_0.conda#ee4e2cad073411bdfa8598f599537498 +https://conda.anaconda.org/conda-forge/noarch/python-gil-3.14.3-h4df99d1_101.conda#235765e4ea0d0301c75965985163b5a1 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.15.0-h396c80c_0.conda#edd329d7d3a4ab45dcf905899a7a6115 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxtst-1.2.5-hb9d3cd8_3.conda#7bbe9a0cc0df0ac5f5a8ad6d6a11af2f +https://conda.anaconda.org/conda-forge/noarch/_python_abi3_support-1.0-hd8ed1ab_2.conda#aaa2a381ccc56eac91d63b6c1240312f +https://conda.anaconda.org/conda-forge/noarch/asv_runner-0.2.1-pyhd8ed1ab_0.conda#fdcbeb072c80c805a2ededaa5f91cd79 
+https://conda.anaconda.org/conda-forge/linux-64/at-spi2-core-2.40.3-h0630a04_0.tar.bz2#8cb2fc4cd6cc63f1369cfa318f581cc3 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.14.3-pyha770c72_0.conda#5267bef8efea4127aacd1f4e1f149b6e +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.5-py314hc02f841_1.conda#552b5d9d8a2a4be882e1c638953e7281 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.3-py314h97ea11e_4.conda#95bede9cdb7a30a4b611223d52a01aa4 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2026.1.2-pyhcf101f3_0.conda#b20e7ce9afd59036ab194f3d1e27edf5 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-12.3.2-h6083320_0.conda#d170a70fc1d5c605fcebdf16851bd54a +https://conda.anaconda.org/conda-forge/linux-64/libegl-devel-1.7.0-ha4b6fd6_2.conda#b513eb83b3137eca1192c34bf4f013a7 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.12.2-hc2c0581_1.conda#c8ca404aeab8e32c9d6d82a82aeb6511 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.3.1-py314hc02f841_2.conda#55ac6d85f5dd8ec5e9919e7762fcb31a +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.2-nompi_h90de81b_102.conda#f4c67a50ac3008a578530e7fc32f3d98 +https://conda.anaconda.org/conda-forge/linux-64/pandas-3.0.1-py314hb4ffadd_0.conda#23fc526360815090f6bfcd7c6c8e4954 +https://conda.anaconda.org/conda-forge/noarch/pytest-9.0.2-pyhcf101f3_0.conda#2b694bad8a50dc2f712f5368de866480 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.9.0-py314hc02f841_2.conda#5be92985870940eac3f3b8cda57002cc +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.17.1-py314hf07bd8e_0.conda#d0510124f87c75403090e220db1e9d41 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-9.2.2-pyhd8ed1ab_0.conda#e2e4d7094d0580ccd62e2a41947444f3 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.1.2-py314hbe3edd8_2.conda#5963e6ee81772d450a35e6bc95522761 
+https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_2.conda#9aa358575bbd4be126eaa5e0039f835c +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-apidoc-0.3.0-py_1.tar.bz2#855b087883443abb10f5faf6eef40860 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.1.0-py314h9891dd4_0.conda#5d3c008e54c7f49592fca9c32896a76f +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.6.3-pyhd8ed1ab_0.conda#9272daa869e03efe68833e3dc7a02130 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-21.0.0-pyhcf101f3_0.conda#0d574419484a88ee7e753cc0c80ee2c1 +https://conda.anaconda.org/conda-forge/linux-64/at-spi2-atk-2.38.0-h0630a04_3.tar.bz2#6b889f174df1e0f816276ae69281af4d +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.1-py314hc02f841_0.conda#de50a60eab348de04809a33e180b4b01 +https://conda.anaconda.org/conda-forge/noarch/distributed-2026.1.2-pyhcf101f3_1.conda#3c155e2914169b807ebb4027a8c0999c +https://conda.anaconda.org/conda-forge/linux-64/epoxy-1.5.10-hb03c661_2.conda#057083b06ccf1c2778344b6dabace38b +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.9.1-nompi_h8d4c64c_0.conda#f9f36d9d61c0c643308f9f6a842e5834 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.16-pyhd8ed1ab_0.conda#8bc5851c415865334882157127e75799 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.2-pyhd8ed1ab_0.conda#efbc812363856906a80344b496389d2e +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.8-py314h1194b4b_0.conda#b8683e6068099b69c10dbfcf7204203f +https://conda.anaconda.org/conda-forge/linux-64/minijinja-2.16.0-py310h6de7dc8_0.conda#ff98a47d5488a9f504866f46ccae8e7c +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.4-nompi_py314h4ae7121_102.conda#cf495d9fc5e01a2ee10e0867ce957a44 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.4-hadf4263_0.conda#79f71230c069a287efe3a8614069ddf1 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-7.0.0-pyhcf101f3_1.conda#6891acad5e136cb62a8c2ed2679d6528 
+https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.15.1-pyhd8ed1ab_0.conda#0511afbe860b1a653125d77c719ece53 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.8.0-pyhd8ed1ab_0.conda#8375cfbda7c57fbceeda18229be10417 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.4.0-py314h5bd0f2a_3.conda#b55fcaf9e1ad884241180b9c3c94384e +https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.5.0-py314ha1f92a4_0.conda#15b1e205270451c078c79d0480438e8e +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.5-pyhcf101f3_1.conda#c65df89a0b2e321045a9e01d1337b182 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.25.0-py314ha0b5721_1.conda#fe89c5fa422f215b0d75046ecd4667de +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.9.1-pyhdfbf58e_0.conda#0155d4a55415cda7f7f09bc0b7760f51 +https://conda.anaconda.org/conda-forge/linux-64/gtk3-3.24.43-ha5ea40c_7.conda#f605332e1e4d9ff5c599933ae81db57d +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.60.2-h61e6d4b_0.conda#d62da3d560992bfa2feb611d7be813b8 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.5.1-pyha770c72_0.conda#7f3ac694319c7eaf81a0325d6405e974 +https://conda.anaconda.org/conda-forge/noarch/requests-file-2.1.0-pyhd8ed1ab_1.conda#69be0472744969b97324d5d3b21845a9 +https://conda.anaconda.org/conda-forge/noarch/adwaita-icon-theme-49.0-unix_0.conda#b3f0179590f3c0637b7eb5309898f79e +https://conda.anaconda.org/conda-forge/linux-64/graphviz-14.1.2-h8b86629_0.conda#341fc61cfe8efa5c72d24db56c776f44 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.2-pyhd8ed1ab_1.conda#bf22cb9c439572760316ce0748af3713 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-data-viewer-0.1.5-pyhd8ed1ab_1.conda#fbe6437c6899c54e52f9258518035aca +https://conda.anaconda.org/conda-forge/noarch/sphinx-design-0.7.0-pyhd8ed1ab_0.conda#28eddfb8b9ecdd044a6f609f985398a7 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.20.0-pyhd8ed1ab_0.conda#4cae490c8d142824fb80d9aed672fddd +https://conda.anaconda.org/conda-forge/noarch/sphinx-reredirects-1.1.0-pyhd8ed1ab_0.conda#8392493ce6fbb8ab72a101af8e8a5d03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jquery-4.1-pyhd8ed1ab_1.conda#403185829255321ea427333f7773dd1f +https://conda.anaconda.org/conda-forge/noarch/sphinx-needs-7.0.0-pyh2c0ff20_1.conda#f1b94f4ad598a4548fd08dbd46a73480 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 diff --git a/requirements/py312.yml b/requirements/py312.yml index fbb1e8aea5..a9a6bcdc3d 100644 --- a/requirements/py312.yml +++ b/requirements/py312.yml @@ -56,7 +56,10 @@ dependencies: - sphinx-copybutton - sphinx-gallery >=0.11.0 - sphinx-design - - pydata-sphinx-theme >=0.13.0 + # Pinned reason: https://github.com/SciTools/iris/issues/6885 + - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 + - sphinx-needs + - sphinx-reredirects # Temporary minimum pins. 
# See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/py313.yml b/requirements/py313.yml index a76d819e75..ab771263e1 100644 --- a/requirements/py313.yml +++ b/requirements/py313.yml @@ -56,7 +56,10 @@ dependencies: - sphinx-copybutton - sphinx-gallery >=0.11.0 - sphinx-design - - pydata-sphinx-theme >=0.13.0 + # Pinned reason: https://github.com/SciTools/iris/issues/6885 + - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 + - sphinx-needs + - sphinx-reredirects # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/requirements/py311.yml b/requirements/py314.yml similarity index 79% rename from requirements/py311.yml rename to requirements/py314.yml index a128b77eee..fa6d8b275b 100644 --- a/requirements/py311.yml +++ b/requirements/py314.yml @@ -4,7 +4,7 @@ channels: - conda-forge dependencies: - - python =3.11 + - python =3.14 # Setup dependencies. - setuptools >=77.0.3 @@ -26,7 +26,7 @@ dependencies: # Optional dependencies. - esmpy >=7.0 - - geovista +# - geovista # Temporarily removed until pyvista is py3.14 compatible; see #6902 - graphviz - iris-sample-data >=2.4.0 - mo_pack @@ -56,7 +56,10 @@ dependencies: - sphinx-copybutton - sphinx-gallery >=0.11.0 - sphinx-design - - pydata-sphinx-theme >=0.13.0 + # Pinned reason: https://github.com/SciTools/iris/issues/6885 + - pydata-sphinx-theme !=0.16.0,!=0.16.1,<0.16.2 + - sphinx-needs + - sphinx-reredirects # Temporary minimum pins. # See https://github.com/SciTools/iris/pull/5051 diff --git a/tools/generate_std_names.py b/tools/generate_std_names.py index 85372b7cc7..1e491ad5f7 100644 --- a/tools/generate_std_names.py +++ b/tools/generate_std_names.py @@ -33,6 +33,11 @@ to another dictionary of other standard name attributes. Currently only the `canonical_unit` exists in these attribute dictionaries. +.. z_reference:: iris.std_names + :tags: topic_load_save;topic_data_model + + API reference + This file is automatically generated. Do not edit this file by hand. 
Generated from CF standard-name table version : {table_version}