diff --git a/.cirrus.yml b/.cirrus.yml deleted file mode 100644 index 92b8d788e6..0000000000 --- a/.cirrus.yml +++ /dev/null @@ -1,198 +0,0 @@ -# Reference: -# - https://cirrus-ci.org/guide/writing-tasks/ -# - https://cirrus-ci.org/guide/writing-tasks/#environment-variables -# - https://cirrus-ci.org/guide/tips-and-tricks/#sharing-configuration-between-tasks -# - https://cirrus-ci.org/guide/linux/ -# - https://hub.docker.com/_/gcc/ -# - https://hub.docker.com/_/python/ - -# -# Global defaults. -# -container: - image: gcc:latest - cpu: 2 - memory: 4G - - -env: - # Skip specific tasks by name. Set to a non-empty string to skip. - SKIP_LINT_TASK: "" - SKIP_TEST_TASK: "" - SKIP_DOCTEST_TASK: "" - SKIP_LINKCHECK_TASK: "" - # Skip task groups by type. Set to a non-empty string to skip. - SKIP_ALL_DOC_TASKS: "" - # Maximum cache period (in weeks) before forcing a new cache upload. - CACHE_PERIOD: "2" - # Increment the build number to force new cartopy cache upload. - CARTOPY_CACHE_BUILD: "3" - # Increment the build number to force new conda cache upload. - CONDA_CACHE_BUILD: "0" - # Increment the build number to force new nox cache upload. - NOX_CACHE_BUILD: "0" - # Increment the build number to force new pip cache upload. - PIP_CACHE_BUILD: "0" - # Pip packages to be upgraded/installed. - PIP_CACHE_PACKAGES: "nox pip pyyaml setuptools wheel" - # Conda packages to be installed. - CONDA_CACHE_PACKAGES: "nox pip" - # Git commit hash for iris test data. - IRIS_TEST_DATA_VERSION: "2.5" - # Base directory for the iris-test-data. - IRIS_TEST_DATA_DIR: ${HOME}/iris-test-data - - -# -# YAML alias for common linux test infra-structure. 
-# -linux_task_template: &LINUX_TASK_TEMPLATE - auto_cancellation: true - env: - PATH: ${HOME}/miniconda/bin:${PATH} - SITE_CFG: ${CIRRUS_WORKING_DIR}/lib/iris/etc/site.cfg - conda_cache: - folder: ${HOME}/miniconda - fingerprint_script: - - wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh - - echo "${CIRRUS_OS} $(sha256sum miniconda.sh)" - - echo "${CONDA_CACHE_PACKAGES}" - - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CONDA_CACHE_BUILD}" - - uname -r - populate_script: - - bash miniconda.sh -b -p ${HOME}/miniconda - - conda config --set always_yes yes --set changeps1 no - - conda config --set show_channel_urls True - - conda config --add channels conda-forge - - conda update --quiet --name base conda - - conda install --quiet --name base ${CONDA_CACHE_PACKAGES} - cartopy_cache: - folder: ${HOME}/.local/share/cartopy - fingerprint_script: - - echo "${CIRRUS_OS}" - - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${CARTOPY_CACHE_BUILD}" - populate_script: - - conda create --quiet --name cartopy-cache cartopy - - source ${HOME}/miniconda/etc/profile.d/conda.sh >/dev/null 2>&1 - - conda activate cartopy-cache >/dev/null 2>&1 - - cd $(mktemp -d) - - wget --quiet https://raw.githubusercontent.com/SciTools/cartopy/v0.20.0/tools/cartopy_feature_download.py - - python cartopy_feature_download.py physical --output ${HOME}/.local/share/cartopy --no-warn - - conda deactivate >/dev/null 2>&1 - nox_cache: - folder: ${CIRRUS_WORKING_DIR}/.nox - reupload_on_changes: true - fingerprint_script: - - echo "${CIRRUS_TASK_NAME}" - - echo "${NOX_CACHE_BUILD}" - - -# -# YAML alias for compute credits. -# -compute_credits_template: &CREDITS_TEMPLATE - # Restrict where compute credits are used. - use_compute_credits: ${CIRRUS_REPO_FULL_NAME} == "SciTools/iris" && ${CIRRUS_USER_COLLABORATOR} == "true" && ${CIRRUS_PR_DRAFT} == "false" && ${CIRRUS_PR} != "" - - -# -# YAML alias for the iris-test-data cache. 
-# -iris_test_data_template: &IRIS_TEST_DATA_TEMPLATE - data_cache: - folder: ${IRIS_TEST_DATA_DIR} - fingerprint_script: - - echo "iris-test-data v${IRIS_TEST_DATA_VERSION}" - populate_script: - - wget --quiet https://github.com/SciTools/iris-test-data/archive/v${IRIS_TEST_DATA_VERSION}.zip -O iris-test-data.zip - - unzip -q iris-test-data.zip - - mv iris-test-data-${IRIS_TEST_DATA_VERSION} ${IRIS_TEST_DATA_DIR} - - -# -# Linting -# -task: - only_if: ${SKIP_LINT_TASK} == "" - << : *CREDITS_TEMPLATE - auto_cancellation: true - container: - image: python:3.8 - cpu: 2 - memory: 4G - name: "${CIRRUS_OS}: pre-commit hooks" - pip_cache: - folder: ~/.cache/pip - fingerprint_script: - - echo "${CIRRUS_TASK_NAME} py${PYTHON_VERSION}" - - echo "$(date +%Y).$(expr $(date +%U) / ${CACHE_PERIOD}):${PIP_CACHE_BUILD} ${PIP_CACHE_PACKAGES}" - precommit_script: - - pip list - - python -m pip install --retries 3 --upgrade ${PIP_CACHE_PACKAGES} - - pip list - - nox --session precommit - - -# -# Testing (Linux) -# -task: - only_if: ${SKIP_TEST_TASK} == "" - << : *CREDITS_TEMPLATE - matrix: - env: - PY_VER: 3.8 - name: "${CIRRUS_OS}: py${PY_VER} tests" - container: - image: gcc:latest - cpu: 6 - memory: 8G - << : *IRIS_TEST_DATA_TEMPLATE - << : *LINUX_TASK_TEMPLATE - tests_script: - - echo "[Resources]" > ${SITE_CFG} - - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> ${SITE_CFG} - - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} - - nox --session tests -- --verbose - - -# -# Documentation Testing and Gallery (Linux) -# -task: - only_if: ${SKIP_DOCTEST_TASK} == "" && ${SKIP_ALL_DOC_TASKS} == "" - << : *CREDITS_TEMPLATE - env: - PY_VER: 3.8 - MPL_RC_DIR: ${HOME}/.config/matplotlib - MPL_RC_FILE: ${HOME}/.config/matplotlib/matplotlibrc - name: "${CIRRUS_OS}: py${PY_VER} doctests and gallery" - << : *IRIS_TEST_DATA_TEMPLATE - << : *LINUX_TASK_TEMPLATE - tests_script: - - echo "[Resources]" > ${SITE_CFG} - - echo "test_data_dir = ${IRIS_TEST_DATA_DIR}/test_data" >> 
${SITE_CFG} - - echo "doc_dir = ${CIRRUS_WORKING_DIR}/docs" >> ${SITE_CFG} - - mkdir -p ${MPL_RC_DIR} - - echo "backend : agg" > ${MPL_RC_FILE} - - echo "image.cmap : viridis" >> ${MPL_RC_FILE} - - nox --session doctest -- --verbose - - -# -# Documentation Link Check (Linux) -# -task: - only_if: ${SKIP_LINKCHECK_TASK} == "" && ${SKIP_ALL_DOC_TASKS} == "" - << : *CREDITS_TEMPLATE - env: - PY_VER: 3.8 - MPL_RC_DIR: ${HOME}/.config/matplotlib - MPL_RC_FILE: ${HOME}/.config/matplotlib/matplotlibrc - name: "${CIRRUS_OS}: py${PY_VER} link check" - << : *LINUX_TASK_TEMPLATE - tests_script: - - mkdir -p ${MPL_RC_DIR} - - echo "backend : agg" > ${MPL_RC_FILE} - - echo "image.cmap : viridis" >> ${MPL_RC_FILE} - - nox --session linkcheck -- --verbose diff --git a/.git_archival.txt b/.git_archival.txt new file mode 100644 index 0000000000..3994ec0a83 --- /dev/null +++ b/.git_archival.txt @@ -0,0 +1,4 @@ +node: $Format:%H$ +node-date: $Format:%cI$ +describe-name: $Format:%(describe:tags=true)$ +ref-names: $Format:%D$ diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..82bf71c1c5 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +.git_archival.txt export-subst \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md index 5f65470c82..134b6ff8da 100644 --- a/.github/ISSUE_TEMPLATE/bug-report.md +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -2,7 +2,7 @@ name: "\U0001F41B Bug Report" about: Submit a bug report to help us improve Iris title: '' -labels: 'New: Issue, Type: Bug' +labels: 'Type: Bug' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 500a2183d2..84af305034 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -4,3 +4,6 @@ contact_links: - name: 💬 Iris GitHub Discussions url: https://github.com/SciTools/iris/discussions about: Engage with the Iris community to discuss your issue + - name: ❓ 
Usage Question + url: https://github.com/SciTools/iris/discussions/categories/q-a + about: Raise a question about how to use Iris in the Q&A section of Discussions diff --git a/.github/ISSUE_TEMPLATE/documentation.md b/.github/ISSUE_TEMPLATE/documentation.md index 8caa62a1c7..01eb2a6734 100644 --- a/.github/ISSUE_TEMPLATE/documentation.md +++ b/.github/ISSUE_TEMPLATE/documentation.md @@ -2,7 +2,7 @@ name: "\U0001F4DA Documentation" about: Report an issue with the Iris documentation title: '' -labels: 'New: Documentation, Type: Documentation' +labels: 'Type: Documentation' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/feature-request.md b/.github/ISSUE_TEMPLATE/feature-request.md index b17b6066e4..2f66321405 100644 --- a/.github/ISSUE_TEMPLATE/feature-request.md +++ b/.github/ISSUE_TEMPLATE/feature-request.md @@ -2,7 +2,6 @@ name: "✨ Feature Request" about: Submit a request for a new feature in Iris title: '' -labels: 'New: Feature' assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/issue.md b/.github/ISSUE_TEMPLATE/issue.md index e66042609c..63de163743 100644 --- a/.github/ISSUE_TEMPLATE/issue.md +++ b/.github/ISSUE_TEMPLATE/issue.md @@ -2,7 +2,6 @@ name: "\U0001F4F0 Custom Issue" about: Submit a generic issue to help us improve Iris title: '' -labels: 'New: Issue' assignees: '' --- diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000000..e9b45d116a --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,15 @@ +# Reference: +# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/keeping-your-actions-up-to-date-with-dependabot +# - https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates + +version: 2 +updates: + + - package-ecosystem: "github-actions" + directory: "/" + schedule: + # Check for updates to GitHub Actions every weekday + interval: "daily" + 
labels: + - "New: Pull Request" + - "Bot" diff --git a/.github/deploy_key.scitools-docs.enc b/.github/deploy_key.scitools-docs.enc deleted file mode 100644 index 165a7c1970..0000000000 --- a/.github/deploy_key.scitools-docs.enc +++ /dev/null @@ -1 +0,0 @@ -gAAAAABZSMeGIlHxHu4oCV_h8shbCRf1qJYoLO9Z0q9uKRDTlytoigzlvfxhN-9WMjc3Js1f1Zg55PfEpTOpL82p6QHF-gqW0k0qGjanO3lnQzM6EzIu3KyJPrVrL-O6edwoPMYKqwsNO3VQHNuEspsFKY0TbjnTPHc45SPU5LjEGX4c_SADSDcLDJm2rbrU2eVkT-gFHy_-ZzK0Di83WlDc79YzIkVe5BAn5PbWv3O9BROR4fJzecbjmWRT_rp1cqI_gaUpVcwTdRK3II9YnazBtW4h2WbCeTcySLD7N4o9K0P71SR6gG_XFbpML3Haf5IUdRi0qPBuvJ_4YVnnuJo6mhiIOJfUEcNj_bbLOYVzPmKyQMHvrPf_lK5JhdX6MUvqluhqHuc0i_z_j1O2y32lB7b1iiY6eE_BsNlXJHlOX1GiXkX0nZLI48p-D22jya44WshWSnVcoalcCDkdbvdFbpOscwXDR3nB-PCOmRUF_d1BlMbp1if-VP0yt3tJ_5yyCrqSRWwFusaibQTF6yoImetl7Am95hh2FjFDNkalHqtarnUv86w-26v1ukcTIjJ0iHzNbCK1m0VMkvE6uDeqRgIZnVKON5cesmM3YbulRrHpaOiSly_sMhLhfg5jTxAuOa319AQGoHEOcRLRUYdw2TQkDEiHGiUh_U4-nC7GTGDGcXyeBIa4ciuC2Qi0QXf9qyEGoIRcU8BP34LDNdtovJoZOBDzhr5Ajnu7yA3GB3TD_kiZrgm6agFuu7a51OMfjezhwGzUJ4X-empPctwm9woOJmPCTFqCvxB2VwVV0L6yngsTooyAHCi5st_AG-p5FIT3VZGx7EgCd68ze9XlRoACoe9XOdSFklbaSMGRbJlvKCPAA0zj4__PfIhlD8Cxwwjq_VXlSr_QxygIGZJlhkT46P9TroolgdipaBp1aQ3_PKHfgw5Y9ZqBKCZF5DOJejqUbfVKUp2JdqoX3yQBD0ByQFdfCuLvoiYcM2ofKdIMvel3Jwn0Nx4NYR2qg3h7FYti0jdrNlC89gnL4tKsf0DAGxZ1UYmqQMWJ3-GKCKrlKyeaHYB2djPRGP8VeoRZh_UorSNHU56KSztK_hTP6P0nFymRJRUSRBMKTaTfJf1aBlk9zJHSe9hOKwxyUNkwcTftGn5P0WNcnaTk3ecTVe-1QJKbPWwMBDzqQtTCsCizgN4UdQsmy4iMYq-LT2TC-JXXo0CPTNDybUj92wSa7KeKTvKnbN8DMZbGRdgy5BOSGw4hMIoIFSB-6tnBIvTntNfMT9ac9e9jKm47Q4qXpaeF3AsvBqxkMRQLaYVppPng6cA49VjJQDZ0gTdPKSSKZkApfeeQL0LLCGwzQ4C52TWK2NJSQ3pvRYI1F0taDQWopIiwFfox-OSYnOJECHkHjxaxhHQzVb3w47xKKZNXbLb-LV7QI-kGuKLfoqO1lq94cw1H-EVrXaGJcDDLjK2jRgdVfDyPsHMcW1oUDJqu8gQ6fCXYPbqJzdmFNFsc1hywHWCU7crV61D2QubwzbLRnP8053MvsMnbdhWtwocTlvvdG-qW6CiEA9Eanfpf0RW1W9oh6yQJ__0vS9UWswqq5ahkkpHY9LTE0US4L3xbFOrq7HgbA2jelTdPVfxo3BfUHuL8oKpFDTzgZi07gNmkhIZfpuXj2KFnm9XM31AsY6V2rXL0xSx-9rvi4FP0LK6V5vQ8OKI8aRPCDyzLUv2xnayMW4yaYg3
GHD5yo7pIOswKc6GOEmetPnay3j0dVN3hfpkpfJWhss3vjZ2Zl0NmjJ7OuS25tjUGLy82A1yFSpL8mKRkHZJuMDZbd_Or6gaPVoVT_Otbkh-6pMZuDeOHOUfgey0Z374jCjRpyQ9k-Fpw8ykow8iIIQ088kC5CeQy6jRhD7mO3iR4-U1XKDJQNlNg1z_JYyDrwykp7FFN2sQn7RRYHIXx2iMrEDXdrdTrujMFN6omC13yDuXJukAgZb6zBBUTlonxRUBjUJWt2P-1sRRTsG8mr9EaE5K-xhR5Ust_37L3svNQ0vwLtPLIpWGZHhD8P_dYNR2RL4679xyzI8A7wLY82wFBHrcghAd4UtLJH9ul6IuS_CaVo-gbfowNRaQ0Zw7WHZGIXpZWEx1_zck6qDEaCY8TpQeciBWpH5uJDSYqdLdMwigdQEGzAJ1DHSWsyTrmOR7Lhwi9WqOzfWe4ahxAkAUH_Jdr_i-nGfl_x3OgQdHM7jWVMXDcXEmR0bkw-s0EKXCn20q2bxDkm5SUWkYtWAZ2aZRgo4wHOqGBcP99xZ25mq9uxtNOkLBF81lnVbn_4BAZBNnnKwwj4SafeIW4KR1ZOpnEI47sGUR6NhEk9VtJsv0zeZIv8VjRbNLh3QCxkNMue60SjJ48kjotZSX1RQJN0xwPftiABBf8MX9tyZe8emQvPeIcdQTSQPnYEUx22xZGeeJTNrZ9soQyP6mrkkRihp6o9tG7HT9QEVLGM19wAigwAAMMXGqdGzWwpar30JtJU94gAmIlwFUJqeO_fdJKFspnUyJ6gt5_oHsKNEV7Uz5EJwGpa94tlPJXjvZpu-wWQfu8U0trTU2mTCA0bmZIDID-Xk4vCW_SD4OVnsvWyga4QHSg3AqVTjnjlapAjsYcFjiOo2C_U3besloprpyuAwpTdn7zdfMHIJO0ckBFnXlk8XB3kT0YGrCpBvW6gYMXlnePVcr3wJehCvMg1Q9Dc5fVQUqt65zcjbgiudfzFGtTe9T4f1IttoAtrJgTN4W1mtbZzSK864I_ngaX5YWgZSinjkbocCCFEJDcbiXMnV7OWOZefqW6VZu4BZKEKlN9k2kH3UCECCK3uRAQIPn_48DgaVnAff2-fMADltiosSPJ_a3057acJP0cf-1QsJuV7r3zdzL3shgrMRjpSsSTCYdMhZ6disFGcJg7hJJvtH1FieZ76jps5FYi5lE8Ua9yBKlG4dCGuUBnikvpfy2FLMLFNn-iXLflu2oiBbcLvn_ReZUnFIR6KgGRN8xKEBaXATQVtb2E678GtQptK8PHP2DoAtbsIXUDn60YH04D9pEck8NnmWYAz7sWbiL6OKdaO7jQep4mt3CgkyFC0NCKP9zCbVNtmfHRVmHtckjgfHF-tK_v59KeAuwWPtm7ow2BjynAK42IGR9nWtQFRUZIboaND8UF76YGKFF7kOf_XTvoNrVTCRkD6b8KJy2IFfdoHP6WET9QLvwDSXgYLPlCX9z7aQ_lc57u5d_dGO-7NZ_Qbs69ByyIvQoztVBjw6fa7EzSwccqPfMQL_fiecNCng-r4gHaH6TlgSbfqQOISHxTtvmbym1no560ZsHfnQfuL6BCI8s6OoygxhOnQhaDqyOUVBut_x3VR_DKFMyUazXYNgLbRsdITaAvR-0gIx5TAX9n3A4HwHuiBZCtwRYaiJnW8FX9lk1Y_g5UHL2OC3rsNFui3aBLzAFhx58lALxnxhlUItuHHK9BgexnR2yCj2nOWLoWQzfFaf2_fpjEh_QBHTqUxdQZ8ighg_8lh6hmLbW4PcUxKX71RFmikLyS3-idlzsiEomNlPNaVllRF21vE6dR-nZ6xsxzTvNB4wumP2irQ9mFBTN1WpiLMyNoEEucA2I848YHUfkZrjTG_dcCQNp7H_2gKdIsZ135lUEG6lYfhLMHTmP5uYxxx3Pipjp6wF2GFCsZPIlIPsgrhbSxqkWg1EOViHtpw6ypFKn7wQHHfnrnHkF
WnrKbMARVBjJUB-FhK4b6qLU_k_MTMipemneMUFXlj3EkEhKM18MIHGkIOkwG5QtPYcjUAf_2sZlxSMVnh6sQ8kVwF6lfk_l8jhoO93HUTntZUSv7GrE3s80yJgII4Qw37AdgcJiAkoPn1-17HfSsAy6uRh5-OvrCtkDqQxfuJSyn_4pRMh6hZT7N9pI5limMXXn2nHnxU93UT3qU-smA8q0ECfvK3JwoaYy_llSx0wSBvpmxjLQ302sFYM5FVZ9zRbHuLCCZShVopiyMDLHVJe_1g9Ou1KL-h6RVZgg3Ttyb5m2KDfoHEVLeZkW81YLCsyo7uNb6SVRM-615TIVGT6Eq7oJ6wO2LMDKjEpHKFiOFpY2fpR8noM81UqgLddYfl_lei7RVjaNO98otqE4iSNtpgJgyhAx4CdYm__yQRSXhckR4K7yAhM9Kh5BLbQQnf2_0WS1sWTmNMZZNMfOSqmTCRVwcYvg4TDGOA-vZARbZW1M7npVMldV_SbvgcEZD6InY9c40eheRqS0YD2W2HEZIiNeLRw0y5WBcYuJIpXhI3ViTXx-frJnv0Mo9uwmuLbJmWFcn6RdIVcU68_oPZZlZD4Vm7SjikbuZKF1BF3lXamTTDIBcWiDLwuNDv2lUkURDCWa5WJsfUCfTAJ6PTe8= \ No newline at end of file diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index b489eba036..9ae3534c76 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -1,10 +1,17 @@ -# This is a basic workflow to help you get started with Actions +# Use ASV to check for performance regressions in the last 24 hours' commits. name: benchmark-check on: - # Triggers the workflow on push or pull request events but only for the master branch - pull_request: + schedule: + # Runs every day at 23:00. + - cron: "0 23 * * *" + workflow_dispatch: + inputs: + first_commit: + description: "Argument to be passed to the overnight benchmark script." 
+ required: false + type: string jobs: benchmark: @@ -14,41 +21,35 @@ jobs: env: IRIS_TEST_DATA_LOC_PATH: benchmarks IRIS_TEST_DATA_PATH: benchmarks/iris-test-data - IRIS_TEST_DATA_VERSION: "2.5" + IRIS_TEST_DATA_VERSION: "2.18" # Lets us manually bump the cache to rebuild + ENV_CACHE_BUILD: "0" TEST_DATA_CACHE_BUILD: "2" + PY_VER: 3.8 steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - - uses: actions/checkout@v2 - - - name: Fetch the PR base branch too - run: | - git fetch --depth=1 origin ${{ github.event.pull_request.base.ref }} - git branch _base FETCH_HEAD - echo PR_BASE_SHA=$(git rev-parse _base) >> $GITHUB_ENV + - uses: actions/checkout@v3 + with: + fetch-depth: 0 - name: Install Nox run: | pip install nox - - name: Cache .nox and .asv/env directories + - name: Cache environment directories id: cache-env-dir - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | .nox benchmarks/.asv/env - # Make sure GHA never gets an exact cache match by using the unique - # github.sha. This means it will always store this run as a new - # cache (Nox may have made relevant changes during run). Cache - # restoration still succeeds via the partial restore-key match. 
- key: ${{ runner.os }}-${{ github.sha }} - restore-keys: ${{ runner.os }} + $CONDA/pkgs + key: ${{ runner.os }}-${{ hashFiles('requirements/') }}-${{ env.ENV_CACHE_BUILD }} - name: Cache test data directory id: cache-test-data - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: | ${{ env.IRIS_TEST_DATA_PATH }} @@ -62,22 +63,74 @@ jobs: unzip -q iris-test-data.zip mkdir --parents ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_LOC_PATH} mv iris-test-data-${IRIS_TEST_DATA_VERSION} ${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH} - + - name: Set test data var run: | echo "OVERRIDE_TEST_DATA_REPOSITORY=${GITHUB_WORKSPACE}/${IRIS_TEST_DATA_PATH}/test_data" >> $GITHUB_ENV - - name: Run CI benchmarks + - name: Run overnight benchmarks + run: | + first_commit=${{ inputs.first_commit }} + if [ "$first_commit" == "" ] + then + first_commit=$(git log --after="$(date -d "1 day ago" +"%Y-%m-%d") 23:00:00" --pretty=format:"%h" | tail -n 1) + fi + + if [ "$first_commit" != "" ] + then + nox --session="benchmarks(overnight)" -- $first_commit + fi + + - name: Create issues for performance shifts + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - mkdir --parents benchmarks/.asv - set -o pipefail - nox --session="benchmarks(ci compare)" | tee benchmarks/.asv/ci_compare.txt + if [ -d benchmarks/.asv/performance-shifts ] + then + cd benchmarks/.asv/performance-shifts + for commit_file in * + do + commit="${commit_file%.*}" + pr_number=$(git log "$commit"^! --oneline | grep -o "#[0-9]*" | tail -1 | cut -c 2-) + author=$(gh pr view $pr_number --json author -q '.["author"]["login"]' --repo $GITHUB_REPOSITORY) + merger=$(gh pr view $pr_number --json mergedBy -q '.["mergedBy"]["login"]' --repo $GITHUB_REPOSITORY) + # Find a valid assignee from author/merger/nothing. 
+ if curl -s https://api.github.com/users/$author | grep -q '"type": "User"'; then + assignee=$author + elif curl -s https://api.github.com/users/$merger | grep -q '"type": "User"'; then + assignee=$merger + else + assignee="" + fi + title="Performance Shift(s): \`$commit\`" + body=" + Benchmark comparison has identified performance shifts at + + * commit $commit (#$pr_number). + + Please review the report below and \ + take corrective/congratulatory action as appropriate \ + :slightly_smiling_face: + +
+ Performance shift report + + \`\`\` + $(cat $commit_file) + \`\`\` + +
+ + Generated by GHA run [\`${{github.run_id}}\`](https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}) + " + gh issue create --title "$title" --body "$body" --assignee $assignee --label "Bot" --label "Type: Performance" --repo $GITHUB_REPOSITORY + done + fi - name: Archive asv results if: ${{ always() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: asv-report path: | benchmarks/.asv/results - benchmarks/.asv/ci_compare.txt diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml new file mode 100644 index 0000000000..81f5132ccf --- /dev/null +++ b/.github/workflows/ci-tests.yml @@ -0,0 +1,144 @@ +# reference: +# - https://github.com/actions/cache +# - https://github.com/actions/checkout +# - https://github.com/marketplace/actions/setup-miniconda + +name: ci-tests + +on: + push: + branches: + - "main" + - "v*x" + tags: + - "v*" + pull_request: + branches: + - "*" + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + tests: + name: "${{ matrix.session }} (py${{ matrix.python-version }} ${{ matrix.os }})" + + runs-on: ${{ matrix.os }} + + defaults: + run: + shell: bash -l {0} + + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest"] + python-version: ["3.10"] + session: ["doctest", "gallery", "linkcheck"] + include: + - os: "ubuntu-latest" + python-version: "3.10" + session: "tests" + coverage: "--coverage" + - os: "ubuntu-latest" + python-version: "3.9" + session: "tests" + - os: "ubuntu-latest" + python-version: "3.8" + session: "tests" + + env: + IRIS_TEST_DATA_VERSION: "2.18" + ENV_NAME: "ci-tests" + + steps: + - name: "checkout" + uses: actions/checkout@v3 + + - name: "environment configure" + env: + # Maximum cache period (in weeks) before forcing a cache refresh. 
+ CACHE_WEEKS: 2 + run: | + echo "CACHE_PERIOD=$(date +%Y).$(expr $(date +%U) / ${CACHE_WEEKS})" >> ${GITHUB_ENV} + echo "LOCK_FILE=requirements/ci/nox.lock/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} + + - name: "data cache" + uses: ./.github/workflows/composite/iris-data-cache + with: + cache_build: 0 + env_name: ${{ env.ENV_NAME }} + version: ${{ env.IRIS_TEST_DATA_VERSION }} + + - name: "conda package cache" + uses: ./.github/workflows/composite/conda-pkg-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + + - name: "conda install" + uses: conda-incubator/setup-miniconda@v2 + with: + miniforge-version: latest + channels: conda-forge,defaults + activate-environment: ${{ env.ENV_NAME }} + auto-update-conda: false + use-only-tar-bz2: true + + - name: "conda environment cache" + uses: ./.github/workflows/composite/conda-env-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + install_packages: "cartopy nox pip" + + - name: "conda info" + run: | + conda info + conda list + + - name: "cartopy cache" + uses: ./.github/workflows/composite/cartopy-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + + - name: "nox cache" + uses: ./.github/workflows/composite/nox-cache + with: + cache_build: 0 + env_name: ${{ env.ENV_NAME }} + lock_file: ${{ env.LOCK_FILE }} + + # TODO: drop use of site.cfg and explicit use of mplrc + - name: "iris configure" + env: + SITE_CFG: lib/iris/etc/site.cfg + MPL_RC: ${HOME}/.config/matplotlib/matplotlibrc + run: | + mkdir -p $(dirname ${SITE_CFG}) + echo ${SITE_CFG} + echo "[Resources]" >> ${SITE_CFG} + echo "test_data_dir = ${HOME}/iris-test-data/test_data" >> ${SITE_CFG} + echo "doc_dir = ${GITHUB_WORKSPACE}/docs" >> ${SITE_CFG} + cat ${SITE_CFG} + mkdir -p $(dirname ${MPL_RC}) + echo ${MPL_RC} + echo "backend : agg" >> ${MPL_RC} + echo "image.cmap : 
viridis" >> ${MPL_RC} + cat ${MPL_RC} + + - name: "iris ${{ matrix.session }}" + env: + PY_VER: ${{ matrix.python-version }} + run: | + nox --session ${{ matrix.session }} -- --verbose ${{ matrix.coverage }} + + - name: Upload coverage report + uses: codecov/codecov-action@v3 + if: ${{ matrix.coverage }} diff --git a/.github/workflows/ci-wheels.yml b/.github/workflows/ci-wheels.yml new file mode 100644 index 0000000000..a00833b118 --- /dev/null +++ b/.github/workflows/ci-wheels.yml @@ -0,0 +1,166 @@ +# Reference: +# - https://github.com/actions/checkout +# - https://github.com/actions/download-artifact +# - https://github.com/actions/upload-artifact +# - https://github.com/pypa/build +# - https://github.com/pypa/gh-action-pypi-publish +# - https://test.pypi.org/help/#apitoken + +name: ci-wheels + +on: + pull_request: + + push: + tags: + - "v*" + branches-ignore: + - "auto-update-lockfiles" + - "pre-commit-ci-update-config" + - "dependabot/*" + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + build: + name: "build sdist & wheel" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: "building" + shell: bash + run: | + # require build with explicit --sdist and --wheel in order to + # get correct version associated with sdist and bdist artifacts + pipx run build --sdist --wheel + + - uses: actions/upload-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist/* + + test-wheel: + needs: build + name: "test wheel (py${{ matrix.python-version }})" + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10"] + session: ["wheel"] + env: + ENV_NAME: "ci-wheels" + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - name: "environment configure" + 
env: + # Maximum cache period (in weeks) before forcing a cache refresh. + CACHE_WEEKS: 2 + run: | + echo "CACHE_PERIOD=$(date +%Y).$(expr $(date +%U) / ${CACHE_WEEKS})" >> ${GITHUB_ENV} + echo "LOCK_FILE=requirements/ci/nox.lock/py$(echo ${{ matrix.python-version }} | tr -d '.')-linux-64.lock" >> ${GITHUB_ENV} + + - name: "conda package cache" + uses: ./.github/workflows/composite/conda-pkg-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + + - name: "conda install" + uses: conda-incubator/setup-miniconda@v2 + with: + miniforge-version: latest + channels: conda-forge,defaults + activate-environment: ${{ env.ENV_NAME }} + auto-update-conda: false + use-only-tar-bz2: true + + - name: "conda environment cache" + uses: ./.github/workflows/composite/conda-env-cache + with: + cache_build: 0 + cache_period: ${{ env.CACHE_PERIOD }} + env_name: ${{ env.ENV_NAME }} + install_packages: "nox pip" + + - name: "nox cache" + uses: ./.github/workflows/composite/nox-cache + with: + cache_build: 0 + env_name: ${{ env.ENV_NAME }} + lock_file: ${{ env.LOCK_FILE }} + + - name: "nox install and test wheel" + env: + PY_VER: ${{ matrix.python-version }} + run: | + nox --session ${{ matrix.session }} -- --verbose + + show-artifacts: + needs: build + name: "show artifacts" + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - shell: bash + run: | + ls -l ${{ github.workspace }}/dist + + publish-artifacts-test-pypi: + needs: test-wheel + name: "publish to test.pypi" + runs-on: ubuntu-latest + # upload to Test PyPI for every commit on main branch + if: github.event_name == 'push' && github.event.ref == 'refs/heads/main' + steps: + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ 
secrets.TEST_PYPI_API_TOKEN }} + repository_url: https://test.pypi.org/legacy/ + skip_existing: true + print_hash: true + + publish-artifacts-pypi: + needs: test-wheel + name: "publish to pypi" + runs-on: ubuntu-latest + # upload to PyPI for every tag starting with 'v' + if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + steps: + - uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: ${{ github.workspace }}/dist + + - uses: pypa/gh-action-pypi-publish@release/v1 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + print_hash: true diff --git a/.github/workflows/composite/cartopy-cache/action.yml b/.github/workflows/composite/cartopy-cache/action.yml new file mode 100644 index 0000000000..e805cbacc1 --- /dev/null +++ b/.github/workflows/composite/cartopy-cache/action.yml @@ -0,0 +1,41 @@ +name: "cartopy-cache" +description: "create and cache cartopy assets" + +# +# Assumes the environment contains the following variables: +# - CONDA +# +inputs: + cache_build: + description: "conda environment cache build number" + required: false + default: "0" + cache_period: + description: "conda environment cache timestamp" + required: true + env_name: + description: "environment name" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + id: cartopy-cache + with: + path: ~/.local/share/cartopy + key: ${{ runner.os }}-cartopy-${{ inputs.env_name }}-p${{ inputs.cache_period }}-b${{ inputs.cache_build }} + + - if: steps.cartopy-cache.outputs.cache-hit != 'true' + env: + CARTOPY_SHARE_DIR: ~/.local/share/cartopy + CARTOPY_FEATURE: https://raw.githubusercontent.com/SciTools/cartopy/v0.20.0/tools/cartopy_feature_download.py + shell: bash + run: | + # Require to explicitly activate the environment within the composite action. 
+ source ${{ env.CONDA }}/etc/profile.d/conda.sh >/dev/null 2>&1 + conda activate ${{ inputs.env_name }} + wget --quiet ${CARTOPY_FEATURE} + mkdir -p ${CARTOPY_SHARE_DIR} + # Requires a pre-installed version of cartopy within the environment. + python cartopy_feature_download.py physical --output ${CARTOPY_SHARE_DIR} --no-warn diff --git a/.github/workflows/composite/conda-env-cache/action.yml b/.github/workflows/composite/conda-env-cache/action.yml new file mode 100644 index 0000000000..6bfd6fff90 --- /dev/null +++ b/.github/workflows/composite/conda-env-cache/action.yml @@ -0,0 +1,35 @@ +name: "conda-env-cache" +description: "create and cache the conda environment" + +# +# Assumes the environment contains the following variables: +# - CONDA +# +inputs: + cache_build: + description: "conda environment cache build number" + required: false + default: "0" + cache_period: + description: "conda environment cache timestamp" + required: true + env_name: + description: "environment name" + required: true + install_packages: + description: "conda packages to install into environment" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + id: conda-env-cache + with: + path: ${{ env.CONDA }}/envs/${{ inputs.env_name }} + key: ${{ runner.os }}-conda-env-${{ inputs.env_name }}-p${{ inputs.cache_period }}-b${{ inputs.cache_build }} + + - if: steps.conda-env-cache.outputs.cache-hit != 'true' + shell: bash + run: | + conda install --quiet --name ${{ inputs.env_name }} ${{ inputs.install_packages }} diff --git a/.github/workflows/composite/conda-pkg-cache/action.yml b/.github/workflows/composite/conda-pkg-cache/action.yml new file mode 100644 index 0000000000..4472d7e415 --- /dev/null +++ b/.github/workflows/composite/conda-pkg-cache/action.yml @@ -0,0 +1,22 @@ +name: "conda-pkg-cache" +description: "cache the conda environment packages" + +inputs: + cache_build: + description: "conda environment cache build number" + required: false + default: "0" + 
cache_period: + description: "conda environment cache timestamp" + required: true + env_name: + description: "environment name" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + with: + path: ~/conda_pkgs_dir + key: ${{ runner.os }}-conda-pkgs-${{ inputs.env_name }}-p${{ inputs.cache_period }}-b${{ inputs.cache_build }} diff --git a/.github/workflows/composite/iris-data-cache/action.yml b/.github/workflows/composite/iris-data-cache/action.yml new file mode 100644 index 0000000000..7bf72fae8b --- /dev/null +++ b/.github/workflows/composite/iris-data-cache/action.yml @@ -0,0 +1,30 @@ +name: "iris-data-cache" +description: "create and cache the iris test data" + +inputs: + cache_build: + description: "data cache build number" + required: false + default: "0" + env_name: + description: "environment name" + required: true + version: + description: "iris test data version" + required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + id: data-cache + with: + path: ~/iris-test-data + key: ${{ runner.os }}-iris-test-data-${{ inputs.env_name }}-v${{ inputs.version }}-b${{ inputs.cache_build }} + + - if: steps.data-cache.outputs.cache-hit != 'true' + shell: bash + run: | + wget --quiet https://github.com/SciTools/iris-test-data/archive/v${{ inputs.version }}.zip -O iris-test-data.zip + unzip -q iris-test-data.zip + mv iris-test-data-${{ inputs.version }} ~/iris-test-data diff --git a/.github/workflows/composite/nox-cache/action.yml b/.github/workflows/composite/nox-cache/action.yml new file mode 100644 index 0000000000..468dd22d81 --- /dev/null +++ b/.github/workflows/composite/nox-cache/action.yml @@ -0,0 +1,22 @@ +name: "nox cache" +description: "cache the nox test environments" + +inputs: + cache_build: + description: "nox cache build number" + required: false + default: "0" + env_name: + description: "environment name" + required: true + lock_file: + description: "conda-lock environment requirements filename" + 
required: true + +runs: + using: "composite" + steps: + - uses: actions/cache@v3 + with: + path: ${{ github.workspace }}/.nox + key: ${{ runner.os }}-nox-${{ inputs.env_name }}-s${{ matrix.session }}-py${{ matrix.python-version }}-b${{ inputs.cache_build }}-${{ hashFiles(inputs.lock_file) }} diff --git a/.github/workflows/refresh-lockfiles.yml b/.github/workflows/refresh-lockfiles.yml old mode 100755 new mode 100644 index 3106d94a67..94c20aedb9 --- a/.github/workflows/refresh-lockfiles.yml +++ b/.github/workflows/refresh-lockfiles.yml @@ -1,120 +1,18 @@ -# This workflow periodically creates new environment lock files based on the newest -# available packages and dependencies. -# -# Environment specifications are given as conda environment.yml files found in -# `requirements/ci/py**.yml`. These state the pacakges required, the conda channels -# that the packages will be pulled from, and any versions of packages that need to be -# pinned at specific versions. -# -# For environments that have changed, a pull request will be made and submitted -# to the main branch +# Updates the environment lock files. See the called workflow in the +# scitools/workflows repo for more details. name: Refresh Lockfiles on: workflow_dispatch: - inputs: - clobber: - description: | - Force the workflow to run, potentially clobbering any commits already made to the branch. - Enter "yes" or "true" to run. - default: "no" schedule: # Run once a week on a Saturday night - - cron: 1 0 * * 6 - + # N.B. "should" be quoted, according to + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule + - cron: "1 0 * * 6" jobs: - - no_clobber: - if: "github.repository == 'SciTools/iris'" - runs-on: ubuntu-latest - steps: - # check if the auto-update-lockfiles branch exists. If it does, and someone other than - # the lockfile bot has made the head commit, abort the workflow. 
- # This job can be manually overridden by running directly from the github actions panel - # (known as a "workflow_dispatch") and setting the `clobber` input to "yes". - - uses: actions/script@v4 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - if (context.eventName == "workflow_dispatch") { - const clobber = context.payload.inputs.clobber || "no"; - if (["yes", "true", "y"].includes(clobber.trim().toLowerCase())) { - core.info("Manual override, continuing workflow, potentially overwriting previous commits to auto-update-lockfiles"); - return - } - } - github.repos.getBranch({...context.repo, branch: "auto-update-lockfiles"}).then(res => { - const committer = res.data.commit.commit.committer; - if (committer && committer.name === "Lockfile bot") { - core.info("Lockfile bot was the last to push to auto-update-lockfiles. Continue."); - } else { - core.setFailed("New commits to auto-update-lockfiles since bot last ran. Abort!"); - } - }).catch(err => { - if (err.status === 404) { - core.info("auto-update-lockfiles branch not found, continue"); - } - }) - - gen_lockfiles: - # this is a matrix job: it splits to create new lockfiles for each - # of the CI test python versions. 
- # this list below should be changed when covering more python versions - # TODO: generate this matrix automatically from the list of available py**.yml files - # ref: https://tomasvotruba.com/blog/2020/11/16/how-to-make-dynamic-matrix-in-github-actions/ - if: "github.repository == 'SciTools/iris'" - runs-on: ubuntu-latest - needs: no_clobber - - strategy: - matrix: - python: ['38'] - - steps: - - uses: actions/checkout@v2 - - name: install conda-lock - run: | - source $CONDA/bin/activate base - conda install -y -c conda-forge conda-lock - - name: generate lockfile - run: | - $CONDA/bin/conda-lock lock -p linux-64 -f requirements/ci/py${{matrix.python}}.yml - mv conda-linux-64.lock py${{matrix.python}}-linux-64.lock - - name: output lockfile - uses: actions/upload-artifact@v2 - with: - path: py${{matrix.python}}-linux-64.lock - - create_pr: - # once the matrix job has completed all the lock files will have been uploaded as artifacts. - # Download the artifacts, add them to the repo, and create a PR. - if: "github.repository == 'SciTools/iris'" - runs-on: ubuntu-latest - needs: gen_lockfiles - - steps: - - uses: actions/checkout@v2 - - name: get artifacts - uses: actions/download-artifact@v2 - with: - path: artifacts - - - name: Update lock files in repo - run: | - cp artifacts/artifact/*.lock requirements/ci/nox.lock - rm -r artifacts - - - name: Create Pull Request - uses: peter-evans/create-pull-request@052fc72b4198ba9fbc81b818c6e1859f747d49a8 - with: - commit-message: Updated environment lockfiles - committer: "Lockfile bot " - author: "Lockfile bot " - delete-branch: true - branch: auto-update-lockfiles - title: Update CI environment lockfiles - body: | - Lockfiles updated to the latest resolvable environment. 
+ refresh_lockfiles: + uses: scitools/workflows/.github/workflows/refresh-lockfiles.yml@main + secrets: inherit diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index a38a03637e..44b77e5c7d 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -1,16 +1,20 @@ # See https://github.com/actions/stale name: Stale issues and pull-requests + on: schedule: - - cron: 0 0 * * * + # Run once a day + # N.B. "should" be quoted, according to + # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule + - cron: "0 0 * * *" jobs: stale: if: "github.repository == 'SciTools/iris'" runs-on: ubuntu-latest steps: - - uses: actions/stale@v4.0.0 + - uses: actions/stale@v7 with: repo-token: ${{ secrets.GITHUB_TOKEN }} @@ -59,11 +63,11 @@ jobs: stale-pr-label: Stale # Labels on issues exempted from stale. - exempt-issue-labels: | + exempt-issue-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Labels on prs exempted from stale. - exempt-pr-labels: | + exempt-pr-labels: "Status: Blocked,Status: Decision Required,Peloton 🚴‍♂️,Good First Issue" # Max number of operations per run. 
diff --git a/.gitignore b/.gitignore index b9fa92139d..512fbab231 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,8 @@ *.py[co] +# setuptools-scm +_version.py + # Environment file which should be autogenerated *conda_requirements.txt* @@ -26,8 +29,9 @@ pip-cache .tox .pytest_cache -# asv environments, results +# asv data, environments, results .asv +benchmarks/.data #Translations *.mo @@ -54,6 +58,9 @@ lib/iris/tests/results/imagerepo.lock /.idea *.cover +# vscode files +.vscode + # Auto generated documentation files docs/src/_build/* docs/src/generated diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 97dff666cf..7c95eeaca3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ minimum_pre_commit_version: 1.21.0 repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.1.0 + rev: v4.4.0 hooks: # Prevent giant files from being committed. - id: check-added-large-files @@ -29,35 +29,34 @@ repos: - id: no-commit-to-branch - repo: https://github.com/psf/black - rev: 21.12b0 + rev: 23.1.0 hooks: - id: black pass_filenames: false args: [--config=./pyproject.toml, .] 
- repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + rev: 6.0.0 hooks: - id: flake8 types: [file, python] args: [--config=./setup.cfg] - repo: https://github.com/pycqa/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort types: [file, python] args: [--filter-files] - repo: https://github.com/asottile/blacken-docs - rev: v1.12.0 + rev: 1.13.0 hooks: - id: blacken-docs types: [file, rst] - additional_dependencies: [black==21.6b0] - repo: https://github.com/aio-libs/sort-all - rev: v1.1.0 + rev: v1.2.0 hooks: - id: sort-all types: [file, python] diff --git a/.readthedocs.yml b/.readthedocs.yml index 63c4798050..95f828a873 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -4,6 +4,19 @@ build: os: ubuntu-20.04 tools: python: mambaforge-4.10 + jobs: + post_checkout: + # The SciTools/iris repository is shallow i.e., has a .git/shallow, + # therefore complete the repository with a full history in order + # to allow setuptools-scm to correctly auto-discover the version. + - git fetch --unshallow + - git fetch --all + # Need to stash the local changes that Read the Docs makes so that + # setuptools_scm can generate the correct Iris version. 
+ pre_install: + - git stash + post_install: + - git stash pop conda: environment: requirements/ci/readthedocs.yml diff --git a/MANIFEST.in b/MANIFEST.in index 52492b17b2..ad28df9c7c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,15 +1,13 @@ # Top-level files include CHANGES COPYING COPYING.LESSER +prune .github +exclude .gitignore -# Files from setup.py package_data that are not automatically added to source distributions -recursive-include lib/iris/tests/results *.cml *.cdl *.txt *.xml *.json -recursive-include lib/iris/etc * -include lib/iris/tests/stock/file_headers/* - +# Files required for conda package management recursive-include requirements * -# File required to build docs -recursive-include docs Makefile *.js *.png *.py *.rst +# Files required to build docs +recursive-include docs * prune docs/src/_build prune docs/src/generated prune docs/gallery_tests @@ -18,6 +16,5 @@ prune docs/gallery_tests include tools/generate_std_names.py include etc/cf-standard-name-table.xml -global-exclude *.pyc -global-exclude __pycache__ -global-exclude iris_image_test_output +global-exclude *.py[cod] +global-exclude __pycache__ \ No newline at end of file diff --git a/README.md b/README.md index 1b2a7b496f..cdf4b2b043 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@

- Iris
+ Iris

@@ -10,15 +10,18 @@

- -Cirrus-CI + +ci-tests Documentation Status pre-commit.ci status +

+ +

conda-forge @@ -51,3 +54,24 @@ For documentation see the developer version or the most recent released stable version.

+ +## [#ShowYourStripes](https://showyourstripes.info/s/globe) + +

+ + #showyourstripes Global 1850-2021 +

+ +**Graphics and Lead Scientist**: [Ed Hawkins](http://www.met.reading.ac.uk/~ed/home/index.php), National Centre for Atmospheric Science, University of Reading. + +**Data**: Berkeley Earth, NOAA, UK Met Office, MeteoSwiss, DWD, SMHI, UoR, Meteo France & ZAMG. + +

+#ShowYourStripes is distributed under a +Creative Commons Attribution 4.0 International License + + creative-commons-by +

+ diff --git a/benchmarks/README.md b/benchmarks/README.md new file mode 100644 index 0000000000..8dffd473f3 --- /dev/null +++ b/benchmarks/README.md @@ -0,0 +1,99 @@ +# Iris Performance Benchmarking + +Iris uses an [Airspeed Velocity](https://github.com/airspeed-velocity/asv) +(ASV) setup to benchmark performance. This is primarily designed to check for +performance shifts between commits using statistical analysis, but can also +be easily repurposed for manual comparative and scalability analyses. + +The benchmarks are automatically run overnight +[by a GitHub Action](../.github/workflows/benchmark.yml), with any notable +shifts in performance being flagged in a new GitHub issue. + +## Running benchmarks + +`asv ...` commands must be run from this directory. You will need to have ASV +installed, as well as Nox (see +[Benchmark environments](#benchmark-environments)). + +[Iris' noxfile](../noxfile.py) includes a `benchmarks` session that provides +conveniences for setting up before benchmarking, and can also replicate the +automated overnight run locally. See the session docstring for detail. + +### Environment variables + +* `OVERRIDE_TEST_DATA_REPOSITORY` - required - some benchmarks use +`iris-test-data` content, and your local `site.cfg` is not available for +benchmark scripts. +* `DATA_GEN_PYTHON` - required - path to a Python executable that can be +used to generate benchmark test objects/files; see +[Data generation](#data-generation). The Nox session sets this automatically, +but will defer to any value already set in the shell. +* `BENCHMARK_DATA` - optional - path to a directory for benchmark synthetic +test data, which the benchmark scripts will create if it doesn't already +exist. Defaults to `/benchmarks/.data/` if not set. Note that some of +the generated files, especially in the 'SPerf' suite, are many GB in size so +plan accordingly. 
+* `ON_DEMAND_BENCHMARKS` - optional - when set (to any value): benchmarks +decorated with `@on_demand_benchmark` are included in the ASV run. Usually +coupled with the ASV `--bench` argument to only run the benchmark(s) of +interest. Is set during the Nox `cperf` and `sperf` sessions. + +## Writing benchmarks + +[See the ASV docs](https://asv.readthedocs.io/) for full detail. + +### Data generation +**Important:** be sure not to use the benchmarking environment to generate any +test objects/files, as this environment changes with each commit being +benchmarked, creating inconsistent benchmark 'conditions'. The +[generate_data](./benchmarks/generate_data/__init__.py) module offers a +solution; read more detail there. + +### ASV re-run behaviour + +Note that ASV re-runs a benchmark multiple times between its `setup()` routine. +This is a problem for benchmarking certain Iris operations such as data +realisation, since the data will no longer be lazy after the first run. +Consider writing extra steps to restore objects' original state _within_ the +benchmark itself. + +If adding steps to the benchmark will skew the result too much then re-running +can be disabled by setting an attribute on the benchmark: `number = 1`. To +maintain result accuracy this should be accompanied by increasing the number of +repeats _between_ `setup()` calls using the `repeat` attribute. +`warmup_time = 0` is also advisable since ASV performs independent re-runs to +estimate run-time, and these will still be subject to the original problem. + +### Scaling / non-Scaling Performance Differences + +When comparing performance between commits/file-type/whatever it can be helpful +to know if the differences exist in scaling or non-scaling parts of the Iris +functionality in question. This can be done using a size parameter, setting +one value to be as small as possible (e.g. a scalar `Cube`), and the other to +be significantly larger (e.g. a 1000x1000 `Cube`). 
Performance differences +might only be seen for the larger value, or the smaller, or both, getting you +closer to the root cause. + +### On-demand benchmarks + +Some benchmarks provide useful insight but are inappropriate to be included in +a benchmark run by default, e.g. those with long run-times or requiring a local +file. These benchmarks should be decorated with `@on_demand_benchmark` +(see [benchmarks init](./benchmarks/__init__.py)), which +sets the benchmark to only be included in a run when the `ON_DEMAND_BENCHMARKS` +environment variable is set. Examples include the CPerf and SPerf benchmark +suites for the UK Met Office NG-VAT project. + +## Benchmark environments + +We have disabled ASV's standard environment management, instead using an +environment built using the same Nox scripts as Iris' test environments. This +is done using ASV's plugin architecture - see +[asv_delegated_conda.py](asv_delegated_conda.py) and the extra config items in +[asv.conf.json](asv.conf.json). + +(ASV is written to control the environment(s) that benchmarks are run in - +minimising external factors and also allowing it to compare between a matrix +of dependencies (each in a separate environment). We have chosen to sacrifice +these features in favour of testing each commit with its intended dependencies, +controlled by Nox + lock-files). 
diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json index 9ea1cdb101..7337eaa8c7 100644 --- a/benchmarks/asv.conf.json +++ b/benchmarks/asv.conf.json @@ -3,18 +3,26 @@ "project": "scitools-iris", "project_url": "https://github.com/SciTools/iris", "repo": "..", - "environment_type": "nox-conda", + "environment_type": "conda-delegated", "show_commit_url": "http://github.com/scitools/iris/commit/", + "branches": ["upstream/main"], "benchmark_dir": "./benchmarks", "env_dir": ".asv/env", "results_dir": ".asv/results", "html_dir": ".asv/html", - "plugins": [".nox_asv_plugin"], - // The commit to checkout to first run Nox to set up the environment. - "nox_setup_commit": "HEAD", - // The path of the noxfile's location relative to the project root. - "noxfile_rel_path": "noxfile.py", - // The ``--session`` arg to be used with ``--install-only`` to prep an environment. - "nox_session_name": "tests" + "plugins": [".asv_delegated_conda"], + + // The command(s) that create/update an environment correctly for the + // checked-out commit. + // Interpreted the same as build_command, with following exceptions: + // * No build-time environment variables. + // * Is run in the same environment as the ASV install itself. + "delegated_env_commands": [ + "sed -i 's/_PY_VERSIONS_ALL/_PY_VERSION_LATEST/g' noxfile.py", + "nox --envdir={conf_dir}/.asv/env/nox01 --session=tests --install-only --no-error-on-external-run --verbose" + ], + // The parent directory of the above environment. + // The most recently modified environment in the directory will be used. + "delegated_env_parent": "{conf_dir}/.asv/env/nox01" } diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py new file mode 100644 index 0000000000..250a4e032d --- /dev/null +++ b/benchmarks/asv_delegated_conda.py @@ -0,0 +1,208 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +ASV plug-in providing an alternative :class:`asv.plugins.conda.Conda` +subclass that manages the Conda environment via custom user scripts. + +""" + +from os import environ +from os.path import getmtime +from pathlib import Path +from shutil import copy2, copytree, rmtree +from tempfile import TemporaryDirectory + +from asv import util as asv_util +from asv.config import Config +from asv.console import log +from asv.plugins.conda import Conda +from asv.repo import Repo + + +class CondaDelegated(Conda): + """ + Manage a Conda environment using custom user scripts, run at each commit. + + Ignores user input variations - ``matrix`` / ``pythons`` / + ``conda_environment_file``, since environment is being managed outside ASV. + + Original environment creation behaviour is inherited, but upon checking out + a commit the custom script(s) are run and the original environment is + replaced with a symlink to the custom environment. This arrangement is then + re-used in subsequent runs. + + """ + + tool_name = "conda-delegated" + + def __init__( + self, + conf: Config, + python: str, + requirements: dict, + tagged_env_vars: dict, + ) -> None: + """ + Parameters + ---------- + conf : Config instance + + python : str + Version of Python. Must be of the form "MAJOR.MINOR". + + requirements : dict + Dictionary mapping a PyPI package name to a version + identifier string. + + tagged_env_vars : dict + Environment variables, tagged for build vs. non-build + + """ + ignored = ["`python`"] + if requirements: + ignored.append("`requirements`") + if tagged_env_vars: + ignored.append("`tagged_env_vars`") + if conf.conda_environment_file: + ignored.append("`conda_environment_file`") + message = ( + f"Ignoring ASV setting(s): {', '.join(ignored)}. Benchmark " + "environment management is delegated to third party script(s)." 
+ ) + log.warning(message) + requirements = {} + tagged_env_vars = {} + conf.conda_environment_file = None + + super().__init__(conf, python, requirements, tagged_env_vars) + self._update_info() + + self._env_commands = self._interpolate_commands( + conf.delegated_env_commands + ) + # Again using _interpolate_commands to get env parent path - allows use + # of the same ASV env variables. + env_parent_interpolated = self._interpolate_commands( + conf.delegated_env_parent + ) + # Returns list of tuples, we just want the first. + env_parent_first = env_parent_interpolated[0] + # The 'command' is the first item in the returned tuple. + env_parent_string = " ".join(env_parent_first[0]) + self._delegated_env_parent = Path(env_parent_string).resolve() + + @property + def name(self): + """Get a name to uniquely identify this environment.""" + return asv_util.sanitize_filename(self.tool_name) + + def _update_info(self) -> None: + """Make sure class properties reflect the actual environment being used.""" + # Follow symlink if it has been created. + actual_path = Path(self._path).resolve() + self._path = str(actual_path) + + # Get custom environment's Python version if it exists yet. + try: + get_version = ( + "from sys import version_info; " + "print(f'{version_info.major}.{version_info.minor}')" + ) + actual_python = self.run(["-c", get_version]) + self._python = actual_python + except OSError: + pass + + def _prep_env(self) -> None: + """Run the custom environment script(s) and switch to using that environment.""" + message = f"Running delegated environment management for: {self.name}" + log.info(message) + env_path = Path(self._path) + + def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: + """For copying between self._path and a temporary cache.""" + asv_files = list(src_parent.glob("asv*")) + # build_root_path.name usually == "project" . 
+ asv_files += [src_parent / Path(self._build_root).name] + for src_path in asv_files: + dst_path = dst_parent / src_path.name + if not dst_path.exists(): + # Only caching in case the environment has been rebuilt. + # If the dst_path already exists: rebuilding hasn't + # happened. Also a non-issue when copying in the reverse + # direction because the cache dir is temporary. + if src_path.is_dir(): + func = copytree + else: + func = copy2 + func(src_path, dst_path) + + with TemporaryDirectory(prefix="delegated_asv_cache_") as asv_cache: + asv_cache_path = Path(asv_cache) + # Cache all of ASV's files as delegated command may remove and + # re-build the environment. + copy_asv_files(env_path.resolve(), asv_cache_path) + + # Adapt the build_dir to the cache location. + build_root_path = Path(self._build_root) + build_dir_original = build_root_path / self._repo_subdir + build_dir_subpath = build_dir_original.relative_to( + build_root_path.parent + ) + build_dir = asv_cache_path / build_dir_subpath + + # Run the script(s) for delegated environment creation/updating. + # (An adaptation of self._interpolate_and_run_commands). + for command, env, return_codes, cwd in self._env_commands: + local_envs = dict(environ) + local_envs.update(env) + if cwd is None: + cwd = str(build_dir) + _ = asv_util.check_output( + command, + timeout=self._install_timeout, + cwd=cwd, + env=local_envs, + valid_return_codes=return_codes, + ) + + # Replace the env that ASV created with a symlink to the env + # created/updated by the custom script. + delegated_env_path = sorted( + self._delegated_env_parent.glob("*"), + key=getmtime, + reverse=True, + )[0] + if env_path.resolve() != delegated_env_path: + try: + env_path.unlink(missing_ok=True) + except IsADirectoryError: + rmtree(env_path) + env_path.symlink_to( + delegated_env_path, target_is_directory=True + ) + + # Check that environment exists. 
+ try: + env_path.resolve(strict=True) + except FileNotFoundError: + message = f"Path does not resolve to environment: {env_path}" + log.error(message) + raise RuntimeError(message) + + # Restore ASV's files from the cache (if necessary). + copy_asv_files(asv_cache_path, env_path.resolve()) + + # Record new environment information in properties. + self._update_info() + + def checkout_project(self, repo: Repo, commit_hash: str) -> None: + """Check out the working tree of the project at given commit hash.""" + super().checkout_project(repo, commit_hash) + self._prep_env() + log.info( + f"Environment {self.name} updated to spec at {commit_hash[:8]}" + ) diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 2e741c3da0..c86682ca4a 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -4,46 +4,121 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """Common code for benchmarks.""" +from os import environ +import resource -import os -from pathlib import Path +ARTIFICIAL_DIM_SIZE = int(10e3) # For all artificial cubes, coords etc. -# Environment variable names -_ASVDIR_VARNAME = "ASV_DIR" # As set in nightly script "asv_nightly/asv.sh" -_DATADIR_VARNAME = "BENCHMARK_DATA" # For local runs -ARTIFICIAL_DIM_SIZE = int(10e3) # For all artificial cubes, coords etc. +def disable_repeat_between_setup(benchmark_object): + """ + Decorator for benchmarks where object persistence would be inappropriate. + + E.g: + * Benchmarking data realisation + * Benchmarking Cube coord addition + + Can be applied to benchmark classes/methods/functions. + + https://asv.readthedocs.io/en/stable/benchmarks.html#timing-benchmarks + + """ + # Prevent repeat runs between setup() runs - object(s) will persist after 1st. + benchmark_object.number = 1 + # Compensate for reduced certainty by increasing number of repeats. + # (setup() is run between each repeat). 
+ # Minimum 5 repeats, run up to 30 repeats / 20 secs whichever comes first. + benchmark_object.repeat = (5, 30, 20.0) + # ASV uses warmup to estimate benchmark time before planning the real run. + # Prevent this, since object(s) will persist after first warmup run, + # which would give ASV misleading info (warmups ignore ``number``). + benchmark_object.warmup_time = 0.0 + + return benchmark_object + + +class TrackAddedMemoryAllocation: + """ + Context manager which measures by how much process resident memory grew, + during execution of its enclosed code block. + + Obviously limited as to what it actually measures : Relies on the current + process not having significant unused (de-allocated) memory when the + tested codeblock runs, and only reliable when the code allocates a + significant amount of new memory. + + Example: + with TrackAddedMemoryAllocation() as mb: + initial_call() + other_call() + result = mb.addedmem_mb() + + Attributes + ---------- + RESULT_MINIMUM_MB : float + The smallest result that should ever be returned, in Mb. Results + fluctuate from run to run (usually within 1Mb) so if a result is + sufficiently small this noise will produce a before-after ratio over + AVD's detection threshold and be treated as 'signal'. Results + smaller than this value will therefore be returned as equal to this + value, ensuring fractionally small noise / no noise at all. + + """ + + RESULT_MINIMUM_MB = 5.0 + + @staticmethod + def process_resident_memory_mb(): + return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024.0 + + def __enter__(self): + self.mb_before = self.process_resident_memory_mb() + return self + + def __exit__(self, *_): + self.mb_after = self.process_resident_memory_mb() + + def addedmem_mb(self): + """Return measured memory growth, in Mb.""" + result = self.mb_after - self.mb_before + # Small results are too vulnerable to noise being interpreted as signal. 
+ result = max(self.RESULT_MINIMUM_MB, result) + return result + + @staticmethod + def decorator(decorated_func): + """ + Decorates this benchmark to track growth in resident memory during execution. + + Intended for use on ASV ``track_`` benchmarks. Applies the + :class:`TrackAddedMemoryAllocation` context manager to the benchmark + code, sets the benchmark ``unit`` attribute to ``Mb``. + + """ + + def _wrapper(*args, **kwargs): + assert decorated_func.__name__[:6] == "track_" + # Run the decorated benchmark within the added memory context + # manager. + with TrackAddedMemoryAllocation() as mb: + decorated_func(*args, **kwargs) + return mb.addedmem_mb() + + decorated_func.unit = "Mb" + return _wrapper + + +def on_demand_benchmark(benchmark_object): + """ + Decorator. Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. -# Work out where the benchmark data dir is. -asv_dir = os.environ.get("ASV_DIR", None) -if asv_dir: - # For an overnight run, this comes from the 'ASV_DIR' setting. - benchmark_data_dir = Path(asv_dir) / "data" -else: - # For a local run, you set 'BENCHMARK_DATA'. - benchmark_data_dir = os.environ.get(_DATADIR_VARNAME, None) - if benchmark_data_dir is not None: - benchmark_data_dir = Path(benchmark_data_dir) + For benchmarks that, for whatever reason, should not be run by default. + E.g: + * Require a local file + * Used for scalability analysis instead of commit monitoring. + Can be applied to benchmark classes/methods/functions. -def testdata_path(*path_names): """ - Return the path of a benchmark test data file. - - These are based from a test-data location dir, which is either - ${}/data (for overnight tests), or ${} for local testing. - - If neither of these were set, an error is raised. - - """.format( - _ASVDIR_VARNAME, _DATADIR_VARNAME - ) - if benchmark_data_dir is None: - msg = ( - "Benchmark data dir is not defined : " - 'Either "${}" or "${}" must be set.' 
- ) - raise (ValueError(msg.format(_ASVDIR_VARNAME, _DATADIR_VARNAME))) - path = benchmark_data_dir.joinpath(*path_names) - path = str(path) # Because Iris doesn't understand Path objects yet. - return path + if "ON_DEMAND_BENCHMARKS" in environ: + return benchmark_object diff --git a/benchmarks/benchmarks/aux_factory.py b/benchmarks/benchmarks/aux_factory.py index 270119da71..4cc4f6c70a 100644 --- a/benchmarks/benchmarks/aux_factory.py +++ b/benchmarks/benchmarks/aux_factory.py @@ -10,9 +10,10 @@ import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import aux_factory, coords +from . import ARTIFICIAL_DIM_SIZE + class FactoryCommon: # TODO: once https://github.com/airspeed-velocity/asv/pull/828 is released: @@ -43,10 +44,6 @@ def time_create(self): specified in the subclass.""" self.create() - def time_return(self): - """Return an instance of the benchmarked factory.""" - self.factory - class HybridHeightFactory(FactoryCommon): def setup(self): diff --git a/benchmarks/benchmarks/coords.py b/benchmarks/benchmarks/coords.py index fce7318d49..3107dcf077 100644 --- a/benchmarks/benchmarks/coords.py +++ b/benchmarks/benchmarks/coords.py @@ -10,9 +10,10 @@ import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import coords +from . 
class SingleDiagnosticMixin:
    """For use in any benchmark classes that work on a single diagnostic file."""

    # ASV parameter grid: file format x optional height dim x optional time dim.
    params = [
        ["LFRic", "UM", "UM_lbpack0", "UM_netcdf"],
        [False, True],
        [False, True],
    ]
    param_names = ["file type", "height dim (len 71)", "time dim (len 3)"]

    def setup(self, file_type, three_d, three_times):
        """
        Resolve (creating if necessary) the benchmark's input file.

        LFRic files are synthesised on demand; UM-family files must already
        exist locally (available from the UK Met Office - see module
        docstring). Sets ``self.file_path`` and ``self.file_type`` for use
        by :meth:`load`.
        """
        if file_type == "LFRic":
            # Generate an appropriate synthetic LFRic file.
            if three_times:
                n_times = 3
            else:
                n_times = 1

            # Use a cubesphere size ~equivalent to our UM test data.
            cells_per_panel_edge = _N_CUBESPHERE_UM_EQUIVALENT
            create_kwargs = dict(c_size=cells_per_panel_edge, n_times=n_times)

            if three_d:
                create_kwargs["n_levels"] = 71

            # Will re-use a file if already present.
            file_path = make_cubesphere_testfile(**create_kwargs)

        else:
            # Locate the appropriate UM file.
            if three_times:
                # pa/pb003 files
                numeric = "003"
            else:
                # pa/pb000 files
                numeric = "000"

            if three_d:
                # theta diagnostic, N1280 file w/ 71 levels (1920, 2560, 71)
                file_name = f"umglaa_pb{numeric}-theta"
            else:
                # surface_temp diagnostic, N1280 file (1920, 2560)
                file_name = f"umglaa_pa{numeric}-surfacetemp"

            # Map the UM-family parameter values onto file-name suffices.
            file_suffices = {
                "UM": "",  # packed FF (WGDOS lbpack = 1)
                "UM_lbpack0": ".uncompressed",  # unpacked FF (lbpack = 0)
                "UM_netcdf": ".nc",  # UM file -> Iris -> NetCDF file
            }
            suffix = file_suffices[file_type]

            file_path = (BENCHMARK_DATA / file_name).with_suffix(suffix)
            if not file_path.exists():
                message = "\n".join(
                    [
                        f"Expected local file not found: {file_path}",
                        "Available from the UK Met Office.",
                    ]
                )
                raise FileNotFoundError(message)

        self.file_path = file_path
        self.file_type = file_type

    def load(self):
        """Load the single diagnostic file as a single Iris cube."""
        # UGRID parsing context needed for the LFRic (mesh) files; harmless
        # for the UM files.
        with PARSE_UGRID_ON_LOAD.context():
            return load_cube(str(self.file_path))
+ params = [["LFRic", "UM"]] + + def setup(self, file_type, three_d=False, three_times=False): + super().setup(file_type, three_d, three_times) + self.cube = self.load() + self.other_cube = self.load() + + +@on_demand_benchmark +class CubeEquality(EqualityMixin): + """ + Benchmark time and memory costs of comparing LFRic and UM + :class:`~iris.cube.Cube`\\ s. + """ + + def _comparison(self): + _ = self.cube == self.other_cube + + def peakmem_eq(self, file_type): + self._comparison() + + def time_eq(self, file_type): + self._comparison() + + +@on_demand_benchmark +class MeshEquality(EqualityMixin): + """Provides extra context for :class:`CubeEquality`.""" + + params = [["LFRic"]] + + def _comparison(self): + _ = self.cube.mesh == self.other_cube.mesh + + def peakmem_eq(self, file_type): + self._comparison() + + def time_eq(self, file_type): + self._comparison() diff --git a/benchmarks/benchmarks/cperf/load.py b/benchmarks/benchmarks/cperf/load.py new file mode 100644 index 0000000000..04bb7e1a61 --- /dev/null +++ b/benchmarks/benchmarks/cperf/load.py @@ -0,0 +1,57 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmarks for the CPerf scheme of the UK Met Office's NG-VAT project. +""" +from . import SingleDiagnosticMixin +from .. import on_demand_benchmark + + +@on_demand_benchmark +class SingleDiagnosticLoad(SingleDiagnosticMixin): + def time_load(self, _, __, ___): + """ + The 'real world comparison' + * UM coords are always realised (DimCoords). + * LFRic coords are not realised by default (MeshCoords). + + """ + cube = self.load() + assert cube.has_lazy_data() + # UM files load lon/lat as DimCoords, which are always realised. 
@on_demand_benchmark
class SingleDiagnosticRealise(SingleDiagnosticMixin):
    """Benchmark realising (computing) the lazy data of a single diagnostic file."""

    # The larger files take a long time to realise.
    timeout = 600.0

    def setup(self, file_type, three_d, three_times):
        # Load once in setup so only the realisation itself is timed.
        super().setup(file_type, three_d, three_times)
        self.loaded_cube = self.load()

    def time_realise(self, _, __, ___):
        # Don't touch loaded_cube.data - permanent realisation plays badly with
        # ASV's re-run strategy.
        assert self.loaded_cube.has_lazy_data()
        self.loaded_cube.core_data().compute()
@on_demand_benchmark
class NetcdfSave:
    """
    Benchmark the time and memory cost of saving ~large-ish data cubes to
    NetCDF, parametrised over the source-data format.
    """

    params = ["LFRic", "UM"]
    param_names = ["data type"]

    def setup(self, data_type):
        # Build a synthetic cube of a size comparable to the real
        # diagnostics: mesh-backed cubesphere for LFRic, gridded for UM.
        if data_type == "LFRic":
            source_cube = make_cube_like_2d_cubesphere(
                n_cube=_N_CUBESPHERE_UM_EQUIVALENT, with_mesh=True
            )
        else:
            source_cube = make_cube_like_umfield(_UM_DIMS_YX)
        self.cube = source_cube

    def _save_data(self, cube):
        # Shared save operation for both benchmark flavours below.
        save(cube, "tmp.nc")

    def time_save_data_netcdf(self, data_type):
        self._save_data(self.cube)

    @TrackAddedMemoryAllocation.decorator
    def track_addedmem_save_data_netcdf(self, data_type):
        self._save_data(self.cube)
class MeshCoord:
    """Benchmark creating/adding/removing a MeshCoord on a Cube."""

    params = [
        6,  # minimal cube-sphere
        int(1e6),  # realistic cube-sphere size
        ARTIFICIAL_DIM_SIZE,  # To match size in :class:`AuxCoord`
    ]
    param_names = ["number of faces"]

    def setup(self, n_faces):
        # Mesh sizes follow the face count; node/edge counts are the minimal
        # consistent companions used throughout these benchmarks.
        mesh_kwargs = dict(
            n_nodes=n_faces + 2, n_edges=n_faces * 2, n_faces=n_faces
        )

        self.mesh_coord = sample_meshcoord(sample_mesh_kwargs=mesh_kwargs)
        self.data = np.zeros(n_faces)
        self.cube_blank = cube.Cube(data=self.data)
        self.cube = self.create()

    def create(self):
        """Return a new Cube with the MeshCoord attached on dim 0."""
        return cube.Cube(
            data=self.data, aux_coords_and_dims=[(self.mesh_coord, 0)]
        )

    def time_create(self, n_faces):
        _ = self.create()

    # repeat=1: add/remove mutate state, so each timing needs a fresh setup.
    @disable_repeat_between_setup
    def time_add(self, n_faces):
        self.cube_blank.add_aux_coord(self.mesh_coord, 0)

    @disable_repeat_between_setup
    def time_remove(self, n_faces):
        self.cube.remove_coord(self.mesh_coord)
# See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Integration tests for the :mod:`iris` package.""" +""" +Benchmark tests for the experimental module. + +""" diff --git a/benchmarks/benchmarks/experimental/ugrid/__init__.py b/benchmarks/benchmarks/experimental/ugrid/__init__.py new file mode 100644 index 0000000000..2e40c525a6 --- /dev/null +++ b/benchmarks/benchmarks/experimental/ugrid/__init__.py @@ -0,0 +1,191 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Benchmark tests for the experimental.ugrid module. + +""" + +from copy import deepcopy + +import numpy as np + +from iris.experimental import ugrid + +from ... import ARTIFICIAL_DIM_SIZE, disable_repeat_between_setup +from ...generate_data.stock import sample_mesh + + +class UGridCommon: + """ + A base class running a generalised suite of benchmarks for any ugrid object. + Object to be specified in a subclass. + + ASV will run the benchmarks within this class for any subclasses. + + ASV will not benchmark this class as setup() triggers a NotImplementedError. + (ASV has not yet released ABC/abstractmethod support - asv#838). + + """ + + params = [ + 6, # minimal cube-sphere + int(1e6), # realistic cube-sphere size + ] + param_names = ["number of faces"] + + def setup(self, *params): + self.object = self.create() + + def create(self): + raise NotImplementedError + + def time_create(self, *params): + """Create an instance of the benchmarked object. 
class Connectivity(UGridCommon):
    """Benchmark :class:`iris.experimental.ugrid.Connectivity` operations."""

    def setup(self, n_faces):
        # Trivial (all-zero) indices array - shape is what matters here.
        self.array = np.zeros([n_faces, 3], dtype=int)
        super().setup(n_faces)

    def create(self):
        """Return a new face-node Connectivity wrapping ``self.array``."""
        return ugrid.Connectivity(
            indices=self.array, cf_role="face_node_connectivity"
        )

    def time_indices(self, n_faces):
        _ = self.object.indices

    def time_location_lengths(self, n_faces):
        # Proofed against the Connectivity name change (633ed17).
        if getattr(self.object, "src_lengths", False):
            meth = self.object.src_lengths
        else:
            meth = self.object.location_lengths
        _ = meth()

    def time_validate_indices(self, n_faces):
        self.object.validate_indices()
class MeshCoord(UGridCommon):
    """Benchmark :class:`iris.experimental.ugrid.MeshCoord` creation/access."""

    # Add extra parameter value to match AuxCoord benchmarking.
    params = UGridCommon.params + [ARTIFICIAL_DIM_SIZE]

    def setup(self, n_faces, lazy=False):
        # Build the source mesh first - create() needs it.
        self.mesh = sample_mesh(
            n_nodes=n_faces + 2,
            n_edges=n_faces * 2,
            n_faces=n_faces,
            lazy_values=lazy,
        )

        super().setup(n_faces)

    def create(self):
        """Return a new face/x MeshCoord derived from ``self.mesh``."""
        return ugrid.MeshCoord(mesh=self.mesh, location="face", axis="x")

    def time_points(self, n_faces):
        _ = self.object.points

    def time_bounds(self, n_faces):
        _ = self.object.bounds
import TrackAddedMemoryAllocation +from ...generate_data.ugrid import make_cube_like_2d_cubesphere + + +class MixinCombineRegions: + # Characterise time taken + memory-allocated, for various stages of combine + # operations on cubesphere-like test data. + params = [4, 500] + param_names = ["cubesphere-N"] + + def _parametrised_cache_filename(self, n_cubesphere, content_name): + return f"cube_C{n_cubesphere}_{content_name}.nc" + + def _make_region_cubes(self, full_mesh_cube): + """Make a fixed number of region cubes from a full meshcube.""" + # Divide the cube into regions. + n_faces = full_mesh_cube.shape[-1] + # Start with a simple list of face indices + # first extend to multiple of 5 + n_faces_5s = 5 * ((n_faces + 1) // 5) + i_faces = np.arange(n_faces_5s, dtype=int) + # reshape (5N,) to (N, 5) + i_faces = i_faces.reshape((n_faces_5s // 5, 5)) + # reorder [2, 3, 4, 0, 1] within each block of 5 + i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1) + # flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...] + i_faces = i_faces.flatten() + # reduce back to orignal length, wrap any overflows into valid range + i_faces = i_faces[:n_faces] % n_faces + + # Divide into regions -- always slightly uneven, since 7 doesn't divide + n_regions = 7 + n_facesperregion = n_faces // n_regions + i_face_regions = (i_faces // n_facesperregion) % n_regions + region_inds = [ + np.where(i_face_regions == i_region)[0] + for i_region in range(n_regions) + ] + # NOTE: this produces 7 regions, with near-adjacent value ranges but + # with some points "moved" to an adjacent region. + # Also, region-0 is bigger (because of not dividing by 7). + + # Finally, make region cubes with these indices. + region_cubes = [full_mesh_cube[..., inds] for inds in region_inds] + return region_cubes + + def setup_cache(self): + """Cache all the necessary source data on disk.""" + + # Control dask, to minimise memory usage + allow largest data. 
    def setup(
        self, n_cubesphere, imaginary_data=True, create_result_cube=True
    ):
        """
        The combine-tests "standard" setup operation.

        Load the source cubes (full-mesh + region) from disk.
        These are specific to the cubesize parameter.
        The data is cached on disk rather than calculated, to avoid any
        pre-loading of the process memory allocation.

        If 'imaginary_data' is set (default), the region cubes data is replaced
        with lazy data in the form of a da.zeros(). Otherwise, the region data
        is lazy data from the files.

        If 'create_result_cube' is set, create "self.recombined_cube"
        containing the (still lazy) result.

        NOTE: various test classes override + extend this.

        """

        # Load source cubes (full-mesh and regions)
        with PARSE_UGRID_ON_LOAD.context():
            self.full_mesh_cube = load_cube(
                self._parametrised_cache_filename(n_cubesphere, "meshcube")
            )
            self.region_cubes = load(
                self._parametrised_cache_filename(n_cubesphere, "regioncubes")
            )

        # Remove all var-names from loaded cubes, which can otherwise cause
        # problems. Also implement 'imaginary' data.
        for cube in self.region_cubes + [self.full_mesh_cube]:
            cube.var_name = None
            for coord in cube.coords():
                coord.var_name = None
            if imaginary_data:
                # Replace cube data (lazy file data) with 'imaginary' data.
                # This has the same lazy-array attributes, but is allocated by
                # creating chunks on demand instead of loading from file.
                data = cube.lazy_data()
                data = da.zeros(
                    data.shape, dtype=data.dtype, chunks=data.chunksize
                )
                cube.data = data

        if create_result_cube:
            # Result is still lazy - computing it is what the benchmarks time.
            self.recombined_cube = self.recombine()

        # Fix dask usage mode for all the subsequent performance tests.
        self.fix_dask_settings()
+ """ + + def time_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + +class CombineRegionsSaveData(MixinCombineRegions): + """ + Test saving *only*, having replaced the input cube data with 'imaginary' + array data, so that input data is not loaded from disk during the save + operation. + + """ + + def time_save(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file. + save(self.recombined_cube, "tmp.nc") + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_save(self, n_cubesphere): + save(self.recombined_cube, "tmp.nc") + + def track_filesize_saved(self, n_cubesphere): + save(self.recombined_cube, "tmp.nc") + return os.path.getsize("tmp.nc") * 1.0e-6 + + +CombineRegionsSaveData.track_filesize_saved.unit = "Mb" + + +class CombineRegionsFileStreamedCalc(MixinCombineRegions): + """ + Test the whole cost of file-to-file streaming. + Uses the combined cube which is based on lazy data loading from the region + cubes on disk. + """ + + def setup(self, n_cubesphere): + # In this case only, do *not* replace the loaded regions data with + # 'imaginary' data, as we want to test file-to-file calculation+save. + super().setup(n_cubesphere, imaginary_data=False) + + def time_stream_file2file(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file. + save(self.recombined_cube, "tmp.nc") + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_stream_file2file(self, n_cubesphere): + save(self.recombined_cube, "tmp.nc") diff --git a/benchmarks/benchmarks/generate_data/__init__.py b/benchmarks/benchmarks/generate_data/__init__.py new file mode 100644 index 0000000000..52a5aceca8 --- /dev/null +++ b/benchmarks/benchmarks/generate_data/__init__.py @@ -0,0 +1,123 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Scripts for generating supporting data for benchmarking. + +Data generated using Iris should use :func:`run_function_elsewhere`, which +means that data is generated using a fixed version of Iris and a fixed +environment, rather than those that get changed when the benchmarking run +checks out a new commit. + +Downstream use of data generated 'elsewhere' requires saving; usually in a +NetCDF file. Could also use pickling but there is a potential risk if the +benchmark sequence runs over two different Python versions. + +""" +from contextlib import contextmanager +from inspect import getsource +from os import environ +from pathlib import Path +from subprocess import CalledProcessError, check_output, run +from textwrap import dedent +from warnings import warn + +from iris._lazy_data import as_concrete_data +from iris.fileformats import netcdf + +#: Python executable used by :func:`run_function_elsewhere`, set via env +#: variable of same name. Must be path of Python within an environment that +#: includes Iris (including dependencies and test modules) and Mule. +try: + DATA_GEN_PYTHON = environ["DATA_GEN_PYTHON"] + _ = check_output([DATA_GEN_PYTHON, "-c", "a = True"]) +except KeyError: + error = "Env variable DATA_GEN_PYTHON not defined." + raise KeyError(error) +except (CalledProcessError, FileNotFoundError, PermissionError): + error = ( + "Env variable DATA_GEN_PYTHON not a runnable python executable path." + ) + raise ValueError(error) + +# The default location of data files used in benchmarks. Used by CI. +default_data_dir = (Path(__file__).parents[2] / ".data").resolve() +# Optionally override the default data location with environment variable. 
def run_function_elsewhere(func_to_run, *args, **kwargs):
    """
    Run a given function using the :const:`DATA_GEN_PYTHON` executable.

    This structure allows the function to be written natively.

    Parameters
    ----------
    func_to_run : FunctionType
        The function object to be run.
        NOTE: the function must be completely self-contained, i.e. perform all
        its own imports (within the target :const:`DATA_GEN_PYTHON`
        environment).
    *args : tuple, optional
        Function call arguments. Must all be expressible as simple literals,
        i.e. the ``repr`` must be a valid literal expression.
    **kwargs: dict, optional
        Function call keyword arguments. All values must be expressible as
        simple literals (see ``*args``).

    Returns
    -------
    str
        The ``stdout`` from the run.

    """
    # Recover the function's own source text; strip any @staticmethod
    # decoration, which would be meaningless at module level.
    source_text = dedent(getsource(func_to_run))
    source_text = source_text.replace("@staticmethod\n", "")

    # Render every argument as a literal expression, keywords included.
    call_terms = [repr(arg) for arg in args]
    call_terms += [f"{name}={value!r}" for name, value in kwargs.items()]
    invocation = f"{func_to_run.__name__}({','.join(call_terms)})"

    # Function definition followed by a single call = a runnable script.
    script = "\n".join([source_text, invocation])
    completed = run(
        [DATA_GEN_PYTHON, "-c", script], capture_output=True, check=True
    )
    return completed.stdout
@contextmanager
def load_realised():
    """
    Force NetCDF loading with realised arrays.

    Since passing between data generation and benchmarking environments is via
    file loading, but some benchmarks are only meaningful if starting with real
    arrays.

    Yields the patched ``iris.fileformats.netcdf`` module. The patch is
    guaranteed to be reverted on exit, even if the body raises.
    """
    from iris.fileformats.netcdf.loader import _get_cf_var_data as pre_patched

    def patched(cf_var, filename):
        # Realise the (normally lazy) array as it is read.
        return as_concrete_data(pre_patched(cf_var, filename))

    netcdf._get_cf_var_data = patched
    # try/finally so an exception in the managed block cannot leave the
    # module permanently monkey-patched (the original code skipped this).
    try:
        yield netcdf
    finally:
        netcdf._get_cf_var_data = pre_patched
import BENCHMARK_DATA, REUSE_DATA, load_realised, run_function_elsewhere + + +def hash_args(*args, **kwargs): + """Convert arguments into a short hash - for preserving args in filenames.""" + arg_string = str(args) + kwarg_string = json.dumps(kwargs) + full_string = arg_string + kwarg_string + return sha256(full_string.encode()).hexdigest()[:10] + + +def _create_file__xios_common(func_name, **kwargs): + def _external(func_name_, temp_file_dir, **kwargs_): + from iris.tests.stock import netcdf + + func = getattr(netcdf, func_name_) + print(func(temp_file_dir, **kwargs_), end="") + + args_hash = hash_args(**kwargs) + save_path = (BENCHMARK_DATA / f"{func_name}_{args_hash}").with_suffix( + ".nc" + ) + if not REUSE_DATA or not save_path.is_file(): + # The xios functions take control of save location so need to move to + # a more specific name that allows re-use. + actual_path = run_function_elsewhere( + _external, + func_name_=func_name, + temp_file_dir=str(BENCHMARK_DATA), + **kwargs, + ) + Path(actual_path.decode()).replace(save_path) + return save_path + + +def create_file__xios_2d_face_half_levels( + temp_file_dir, dataset_name, n_faces=866, n_times=1 +): + """ + Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_2d_face_half_levels`. + + Have taken control of temp_file_dir + + todo: is create_file__xios_2d_face_half_levels still appropriate now we can + properly save Mesh Cubes? + """ + + return _create_file__xios_common( + func_name="create_file__xios_2d_face_half_levels", + dataset_name=dataset_name, + n_faces=n_faces, + n_times=n_times, + ) + + +def create_file__xios_3d_face_half_levels( + temp_file_dir, dataset_name, n_faces=866, n_times=1, n_levels=38 +): + """ + Wrapper for :meth:`iris.tests.stock.netcdf.create_file__xios_3d_face_half_levels`. + + Have taken control of temp_file_dir + + todo: is create_file__xios_3d_face_half_levels still appropriate now we can + properly save Mesh Cubes? 
+ """ + + return _create_file__xios_common( + func_name="create_file__xios_3d_face_half_levels", + dataset_name=dataset_name, + n_faces=n_faces, + n_times=n_times, + n_levels=n_levels, + ) + + +def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): + """Wrapper for :meth:iris.tests.stock.mesh.sample_mesh`.""" + + def _external(*args, **kwargs): + from iris.experimental.ugrid import save_mesh + from iris.tests.stock.mesh import sample_mesh + + save_path_ = kwargs.pop("save_path") + # Always saving, so laziness is irrelevant. Use lazy to save time. + kwargs["lazy_values"] = True + new_mesh = sample_mesh(*args, **kwargs) + save_mesh(new_mesh, save_path_) + + arg_list = [n_nodes, n_faces, n_edges] + args_hash = hash_args(*arg_list) + save_path = (BENCHMARK_DATA / f"sample_mesh_{args_hash}").with_suffix( + ".nc" + ) + if not REUSE_DATA or not save_path.is_file(): + _ = run_function_elsewhere( + _external, *arg_list, save_path=str(save_path) + ) + with PARSE_UGRID_ON_LOAD.context(): + if not lazy_values: + # Realise everything. + with load_realised(): + mesh = load_mesh(str(save_path)) + else: + mesh = load_mesh(str(save_path)) + return mesh + + +def sample_meshcoord(sample_mesh_kwargs=None, location="face", axis="x"): + """ + Wrapper for :meth:`iris.tests.stock.mesh.sample_meshcoord`. + + Parameters deviate from the original as cannot pass a + :class:`iris.experimental.ugrid.Mesh to the separate Python instance - must + instead generate the Mesh as well. + + MeshCoords cannot be saved to file, so the _external method saves the + MeshCoord's Mesh, then the original Python instance loads in that Mesh and + regenerates the MeshCoord from there. 
+ """ + + def _external(sample_mesh_kwargs_, save_path_): + from iris.experimental.ugrid import save_mesh + from iris.tests.stock.mesh import sample_mesh, sample_meshcoord + + if sample_mesh_kwargs_: + input_mesh = sample_mesh(**sample_mesh_kwargs_) + else: + input_mesh = None + # Don't parse the location or axis arguments - only saving the Mesh at + # this stage. + new_meshcoord = sample_meshcoord(mesh=input_mesh) + save_mesh(new_meshcoord.mesh, save_path_) + + args_hash = hash_args(**sample_mesh_kwargs) + save_path = ( + BENCHMARK_DATA / f"sample_mesh_coord_{args_hash}" + ).with_suffix(".nc") + if not REUSE_DATA or not save_path.is_file(): + _ = run_function_elsewhere( + _external, + sample_mesh_kwargs_=sample_mesh_kwargs, + save_path_=str(save_path), + ) + with PARSE_UGRID_ON_LOAD.context(): + with load_realised(): + source_mesh = load_mesh(str(save_path)) + # Regenerate MeshCoord from its Mesh, which we saved. + return source_mesh.to_MeshCoord(location=location, axis=axis) diff --git a/benchmarks/benchmarks/generate_data/ugrid.py b/benchmarks/benchmarks/generate_data/ugrid.py new file mode 100644 index 0000000000..527b49a6bb --- /dev/null +++ b/benchmarks/benchmarks/generate_data/ugrid.py @@ -0,0 +1,195 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Scripts for generating supporting data for UGRID-related benchmarking. +""" +from iris import load_cube as iris_loadcube +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + +from . 
import BENCHMARK_DATA, REUSE_DATA, load_realised, run_function_elsewhere +from .stock import ( + create_file__xios_2d_face_half_levels, + create_file__xios_3d_face_half_levels, +) + + +def generate_cube_like_2d_cubesphere( + n_cube: int, with_mesh: bool, output_path: str +): + """ + Construct and save to file an LFRIc cubesphere-like cube for a given + cubesphere size, *or* a simpler structured (UM-like) cube of equivalent + size. + + NOTE: this function is *NEVER* called from within this actual package. + Instead, it is to be called via benchmarks.remote_data_generation, + so that it can use up-to-date facilities, independent of the ASV controlled + environment which contains the "Iris commit under test". + This means: + * it must be completely self-contained : i.e. it includes all its + own imports, and saves results to an output file. + + """ + from iris import save + from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube + + n_face_nodes = n_cube * n_cube + n_faces = 6 * n_face_nodes + + # Set n_nodes=n_faces and n_edges=2*n_faces + # : Not exact, but similar to a 'real' cubesphere. + n_nodes = n_faces + n_edges = 2 * n_faces + if with_mesh: + mesh = sample_mesh( + n_nodes=n_nodes, n_faces=n_faces, n_edges=n_edges, lazy_values=True + ) + cube = sample_mesh_cube(mesh=mesh, n_z=1) + else: + cube = sample_mesh_cube(nomesh_faces=n_faces, n_z=1) + + # Strip off the 'extra' aux-coord mapping the mesh, which sample-cube adds + # but which we don't want. + cube.remove_coord("mesh_face_aux") + + # Save the result to a named file. + save(cube, output_path) + + +def make_cube_like_2d_cubesphere(n_cube: int, with_mesh: bool): + """ + Generate an LFRIc cubesphere-like cube for a given cubesphere size, + *or* a simpler structured (UM-like) cube of equivalent size. + + All the cube data, coords and mesh content are LAZY, and produced without + allocating large real arrays (to allow peak-memory testing). 
+ + NOTE: the actual cube generation is done in a stable Iris environment via + benchmarks.remote_data_generation, so it is all channeled via cached netcdf + files in our common testdata directory. + + """ + identifying_filename = ( + f"cube_like_2d_cubesphere_C{n_cube}_Mesh={with_mesh}.nc" + ) + filepath = BENCHMARK_DATA / identifying_filename + if not filepath.exists(): + # Create the required testfile, by running the generation code remotely + # in a 'fixed' python environment. + run_function_elsewhere( + generate_cube_like_2d_cubesphere, + n_cube, + with_mesh=with_mesh, + output_path=str(filepath), + ) + + # File now *should* definitely exist: content is simply the desired cube. + with PARSE_UGRID_ON_LOAD.context(): + cube = iris_loadcube(str(filepath)) + + # Ensure correct laziness. + _ = cube.data + for coord in cube.coords(mesh_coords=False): + assert not coord.has_lazy_points() + assert not coord.has_lazy_bounds() + if cube.mesh: + for coord in cube.mesh.coords(): + assert coord.has_lazy_points() + for conn in cube.mesh.connectivities(): + assert conn.has_lazy_indices() + + return cube + + +def make_cube_like_umfield(xy_dims): + """ + Create a "UM-like" cube with lazy content, for save performance testing. + + Roughly equivalent to a single current UM cube, to be compared with + a "make_cube_like_2d_cubesphere(n_cube=_N_CUBESPHERE_UM_EQUIVALENT)" + (see below). + + Note: probably a bit over-simplified, as there is no time coord, but that + is probably equally true of our LFRic-style synthetic data. + + Args: + * xy_dims (2-tuple): + Set the horizontal dimensions = n-lats, n-lons. + + """ + + def _external(xy_dims_, save_path_): + from dask import array as da + import numpy as np + + from iris import save + from iris.coords import DimCoord + from iris.cube import Cube + + nz, ny, nx = (1,) + xy_dims_ + + # Base data : Note this is float32 not float64 like LFRic/XIOS outputs. 
+ lazy_data = da.zeros((nz, ny, nx), dtype=np.float32) + cube = Cube(lazy_data, long_name="structured_phenom") + + # Add simple dim coords also. + z_dimco = DimCoord(np.arange(nz), long_name="level", units=1) + y_dimco = DimCoord( + np.linspace(-90.0, 90.0, ny), + standard_name="latitude", + units="degrees", + ) + x_dimco = DimCoord( + np.linspace(-180.0, 180.0, nx), + standard_name="longitude", + units="degrees", + ) + for idim, co in enumerate([z_dimco, y_dimco, x_dimco]): + cube.add_dim_coord(co, idim) + + save(cube, save_path_) + + save_path = ( + BENCHMARK_DATA / f"make_cube_like_umfield_{xy_dims}" + ).with_suffix(".nc") + if not REUSE_DATA or not save_path.is_file(): + _ = run_function_elsewhere(_external, xy_dims, str(save_path)) + with PARSE_UGRID_ON_LOAD.context(): + with load_realised(): + cube = iris_loadcube(str(save_path)) + + return cube + + +def make_cubesphere_testfile(c_size, n_levels=0, n_times=1): + """ + Build a C cubesphere testfile in a given directory, with a standard naming. + If n_levels > 0 specified: 3d file with the specified number of levels. + Return the file path. + + todo: is create_file__xios... still appropriate now we can properly save + Mesh Cubes? + + """ + n_faces = 6 * c_size * c_size + stem_name = f"mesh_cubesphere_C{c_size}_t{n_times}" + kwargs = dict( + temp_file_dir=None, + dataset_name=stem_name, # N.B. 
function adds the ".nc" extension + n_times=n_times, + n_faces=n_faces, + ) + + three_d = n_levels > 0 + if three_d: + kwargs["n_levels"] = n_levels + kwargs["dataset_name"] += f"_{n_levels}levels" + func = create_file__xios_3d_face_half_levels + else: + func = create_file__xios_2d_face_half_levels + + file_path = func(**kwargs) + return file_path diff --git a/benchmarks/benchmarks/generate_data/um_files.py b/benchmarks/benchmarks/generate_data/um_files.py new file mode 100644 index 0000000000..39773bbb4b --- /dev/null +++ b/benchmarks/benchmarks/generate_data/um_files.py @@ -0,0 +1,197 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Generate FF, PP and NetCDF files based on a minimal synthetic FF file. + +NOTE: uses the Mule package, so depends on an environment with Mule installed. +""" + + +def _create_um_files( + len_x: int, len_y: int, len_z: int, len_t: int, compress, save_paths: dict +) -> None: + """ + Generate an FF object of given shape and compression, save to FF/PP/NetCDF. + + This is run externally + (:func:`benchmarks.generate_data.run_function_elsewhere`), so all imports + are self-contained and input parameters are simple types. 
+ """ + from copy import deepcopy + from datetime import datetime + from tempfile import NamedTemporaryFile + + from mule import ArrayDataProvider, Field3, FieldsFile + from mule.pp import fields_to_pp_file + import numpy as np + + from iris import load_cube + from iris import save as save_cube + + template = { + "fixed_length_header": {"dataset_type": 3, "grid_staggering": 3}, + "integer_constants": { + "num_p_levels": len_z, + "num_cols": len_x, + "num_rows": len_y, + }, + "real_constants": {}, + "level_dependent_constants": {"dims": (len_z + 1, None)}, + } + new_ff = FieldsFile.from_template(deepcopy(template)) + + data_array = np.arange(len_x * len_y).reshape(len_x, len_y) + array_provider = ArrayDataProvider(data_array) + + def add_field(level_: int, time_step_: int) -> None: + """ + Add a minimal field to the new :class:`~mule.FieldsFile`. + + Includes the minimum information to allow Mule saving and Iris + loading, as well as incrementation for vertical levels and time + steps to allow generation of z and t dimensions. + """ + new_field = Field3.empty() + # To correspond to the header-release 3 class used. + new_field.lbrel = 3 + # Mule uses the first element of the lookup to test for + # unpopulated fields (and skips them), so the first element should + # be set to something. The year will do. + new_field.raw[1] = datetime.now().year + + # Horizontal. + new_field.lbcode = 1 + new_field.lbnpt = len_x + new_field.lbrow = len_y + new_field.bdx = new_ff.real_constants.col_spacing + new_field.bdy = new_ff.real_constants.row_spacing + new_field.bzx = new_ff.real_constants.start_lon - 0.5 * new_field.bdx + new_field.bzy = new_ff.real_constants.start_lat - 0.5 * new_field.bdy + + # Hemisphere. + new_field.lbhem = 32 + # Processing. + new_field.lbproc = 0 + + # Vertical. + # Hybrid height values by simulating sequences similar to those in a + # theta file. 
+ new_field.lbvc = 65 + if level_ == 0: + new_field.lblev = 9999 + else: + new_field.lblev = level_ + + level_1 = level_ + 1 + six_rec = 20 / 3 + three_rec = six_rec / 2 + + new_field.blev = level_1**2 * six_rec - six_rec + new_field.brsvd1 = ( + level_1**2 * six_rec + (six_rec * level_1) - three_rec + ) + + brsvd2_simulated = np.linspace(0.995, 0, len_z) + shift = min(len_z, 2) + bhrlev_simulated = np.concatenate( + [np.ones(shift), brsvd2_simulated[:-shift]] + ) + new_field.brsvd2 = brsvd2_simulated[level_] + new_field.bhrlev = bhrlev_simulated[level_] + + # Time. + new_field.lbtim = 11 + + new_field.lbyr = time_step_ + for attr_name in ["lbmon", "lbdat", "lbhr", "lbmin", "lbsec"]: + setattr(new_field, attr_name, 0) + + new_field.lbyrd = time_step_ + 1 + for attr_name in ["lbmond", "lbdatd", "lbhrd", "lbmind", "lbsecd"]: + setattr(new_field, attr_name, 0) + + # Data and packing. + new_field.lbuser1 = 1 + new_field.lbpack = int(compress) + new_field.bacc = 0 + new_field.bmdi = -1 + new_field.lbext = 0 + new_field.set_data_provider(array_provider) + + new_ff.fields.append(new_field) + + for time_step in range(len_t): + for level in range(len_z): + add_field(level, time_step + 1) + + ff_path = save_paths.get("FF", None) + pp_path = save_paths.get("PP", None) + nc_path = save_paths.get("NetCDF", None) + + if ff_path: + new_ff.to_file(ff_path) + if pp_path: + fields_to_pp_file(str(pp_path), new_ff.fields) + if nc_path: + temp_ff_path = None + # Need an Iris Cube from the FF content. + if ff_path: + # Use the existing file. + ff_cube = load_cube(ff_path) + else: + # Make a temporary file. 
+ temp_ff_path = NamedTemporaryFile() + new_ff.to_file(temp_ff_path.name) + ff_cube = load_cube(temp_ff_path.name) + + save_cube(ff_cube, nc_path, zlib=compress) + if temp_ff_path: + temp_ff_path.close() + + +FILE_EXTENSIONS = {"FF": "", "PP": ".pp", "NetCDF": ".nc"} + + +def create_um_files( + len_x: int, + len_y: int, + len_z: int, + len_t: int, + compress: bool, + file_types: list, +) -> dict: + """ + Generate FF-based FF / PP / NetCDF files with specified shape and compression. + + All files representing a given shape are saved in a dedicated directory. A + dictionary of the saved paths is returned. + + If the required files exist, they are re-used, unless + :const:`benchmarks.REUSE_DATA` is ``False``. + """ + # Self contained imports to avoid linting confusion with _create_um_files(). + from . import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere + + save_name_sections = ["UM", len_x, len_y, len_z, len_t] + save_name = "_".join(str(section) for section in save_name_sections) + save_dir = BENCHMARK_DATA / save_name + if not save_dir.is_dir(): + save_dir.mkdir(parents=True) + + save_paths = {} + files_exist = True + for file_type in file_types: + file_ext = FILE_EXTENSIONS[file_type] + save_path = (save_dir / f"{compress}").with_suffix(file_ext) + files_exist = files_exist and save_path.is_file() + save_paths[file_type] = str(save_path) + + if not REUSE_DATA or not files_exist: + _ = run_function_elsewhere( + _create_um_files, len_x, len_y, len_z, len_t, compress, save_paths + ) + + return save_paths diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index 3e83ea8cfe..fc32ac289b 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -3,240 +3,274 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
-import sys +from importlib import import_module, reload +################ +# Prepare info for reset_colormaps: -class Iris: - warmup_time = 0 - number = 1 - repeat = 10 +# Import and capture colormaps. +from matplotlib import colormaps # isort:skip + +_COLORMAPS_ORIG = set(colormaps) + +# Import iris.palette, which modifies colormaps. +import iris.palette + +# Derive which colormaps have been added by iris.palette. +_COLORMAPS_MOD = set(colormaps) +COLORMAPS_EXTRA = _COLORMAPS_MOD - _COLORMAPS_ORIG - def setup(self): - self.before = set(sys.modules.keys()) +# Touch iris.palette to prevent linters complaining. +_ = iris.palette - def teardown(self): - after = set(sys.modules.keys()) - diff = after - self.before - for module in diff: - sys.modules.pop(module) +################ + + +class Iris: + @staticmethod + def _import(module_name, reset_colormaps=False): + """ + Have experimented with adding sleep() commands into the imported + modules. The results reveal: + + ASV avoids invoking `import x` if nothing gets called in the + benchmark (some imports were timed, but only those where calls + happened during import). + + Using reload() is not identical to importing, but does produce + results that are very close to expected import times, so this is fine + for monitoring for regressions. + It is also ideal for accurate repetitions, without the need to mess + with the ASV `number` attribute etc, since cached imports are not used + and the repetitions are therefore no faster than the first run. + """ + mod = import_module(module_name) + + if reset_colormaps: + # Needed because reload() will attempt to register new colormaps a + # second time, which errors by default. 
+ for cm_name in COLORMAPS_EXTRA: + colormaps.unregister(cm_name) + + reload(mod) def time_iris(self): - import iris + self._import("iris") def time__concatenate(self): - import iris._concatenate + self._import("iris._concatenate") def time__constraints(self): - import iris._constraints + self._import("iris._constraints") def time__data_manager(self): - import iris._data_manager + self._import("iris._data_manager") def time__deprecation(self): - import iris._deprecation + self._import("iris._deprecation") def time__lazy_data(self): - import iris._lazy_data + self._import("iris._lazy_data") def time__merge(self): - import iris._merge + self._import("iris._merge") def time__representation(self): - import iris._representation + self._import("iris._representation") def time_analysis(self): - import iris.analysis + self._import("iris.analysis") def time_analysis__area_weighted(self): - import iris.analysis._area_weighted + self._import("iris.analysis._area_weighted") def time_analysis__grid_angles(self): - import iris.analysis._grid_angles + self._import("iris.analysis._grid_angles") def time_analysis__interpolation(self): - import iris.analysis._interpolation + self._import("iris.analysis._interpolation") def time_analysis__regrid(self): - import iris.analysis._regrid + self._import("iris.analysis._regrid") def time_analysis__scipy_interpolate(self): - import iris.analysis._scipy_interpolate + self._import("iris.analysis._scipy_interpolate") def time_analysis_calculus(self): - import iris.analysis.calculus + self._import("iris.analysis.calculus") def time_analysis_cartography(self): - import iris.analysis.cartography + self._import("iris.analysis.cartography") def time_analysis_geomerty(self): - import iris.analysis.geometry + self._import("iris.analysis.geometry") def time_analysis_maths(self): - import iris.analysis.maths + self._import("iris.analysis.maths") def time_analysis_stats(self): - import iris.analysis.stats + self._import("iris.analysis.stats") def 
time_analysis_trajectory(self): - import iris.analysis.trajectory + self._import("iris.analysis.trajectory") def time_aux_factory(self): - import iris.aux_factory + self._import("iris.aux_factory") def time_common(self): - import iris.common + self._import("iris.common") def time_common_lenient(self): - import iris.common.lenient + self._import("iris.common.lenient") def time_common_metadata(self): - import iris.common.metadata + self._import("iris.common.metadata") def time_common_mixin(self): - import iris.common.mixin + self._import("iris.common.mixin") def time_common_resolve(self): - import iris.common.resolve + self._import("iris.common.resolve") def time_config(self): - import iris.config + self._import("iris.config") def time_coord_categorisation(self): - import iris.coord_categorisation + self._import("iris.coord_categorisation") def time_coord_systems(self): - import iris.coord_systems + self._import("iris.coord_systems") def time_coords(self): - import iris.coords + self._import("iris.coords") def time_cube(self): - import iris.cube + self._import("iris.cube") def time_exceptions(self): - import iris.exceptions + self._import("iris.exceptions") def time_experimental(self): - import iris.experimental + self._import("iris.experimental") def time_fileformats(self): - import iris.fileformats + self._import("iris.fileformats") def time_fileformats__ff(self): - import iris.fileformats._ff + self._import("iris.fileformats._ff") def time_fileformats__ff_cross_references(self): - import iris.fileformats._ff_cross_references + self._import("iris.fileformats._ff_cross_references") def time_fileformats__pp_lbproc_pairs(self): - import iris.fileformats._pp_lbproc_pairs + self._import("iris.fileformats._pp_lbproc_pairs") def time_fileformats_structured_array_identification(self): - import iris.fileformats._structured_array_identification + self._import("iris.fileformats._structured_array_identification") def time_fileformats_abf(self): - import iris.fileformats.abf + 
self._import("iris.fileformats.abf") def time_fileformats_cf(self): - import iris.fileformats.cf + self._import("iris.fileformats.cf") def time_fileformats_dot(self): - import iris.fileformats.dot + self._import("iris.fileformats.dot") def time_fileformats_name(self): - import iris.fileformats.name + self._import("iris.fileformats.name") def time_fileformats_name_loaders(self): - import iris.fileformats.name_loaders + self._import("iris.fileformats.name_loaders") def time_fileformats_netcdf(self): - import iris.fileformats.netcdf + self._import("iris.fileformats.netcdf") def time_fileformats_nimrod(self): - import iris.fileformats.nimrod + self._import("iris.fileformats.nimrod") def time_fileformats_nimrod_load_rules(self): - import iris.fileformats.nimrod_load_rules + self._import("iris.fileformats.nimrod_load_rules") def time_fileformats_pp(self): - import iris.fileformats.pp + self._import("iris.fileformats.pp") def time_fileformats_pp_load_rules(self): - import iris.fileformats.pp_load_rules + self._import("iris.fileformats.pp_load_rules") def time_fileformats_pp_save_rules(self): - import iris.fileformats.pp_save_rules + self._import("iris.fileformats.pp_save_rules") def time_fileformats_rules(self): - import iris.fileformats.rules + self._import("iris.fileformats.rules") def time_fileformats_um(self): - import iris.fileformats.um + self._import("iris.fileformats.um") def time_fileformats_um__fast_load(self): - import iris.fileformats.um._fast_load + self._import("iris.fileformats.um._fast_load") def time_fileformats_um__fast_load_structured_fields(self): - import iris.fileformats.um._fast_load_structured_fields + self._import("iris.fileformats.um._fast_load_structured_fields") def time_fileformats_um__ff_replacement(self): - import iris.fileformats.um._ff_replacement + self._import("iris.fileformats.um._ff_replacement") def time_fileformats_um__optimal_array_structuring(self): - import iris.fileformats.um._optimal_array_structuring + 
self._import("iris.fileformats.um._optimal_array_structuring") def time_fileformats_um_cf_map(self): - import iris.fileformats.um_cf_map + self._import("iris.fileformats.um_cf_map") def time_io(self): - import iris.io + self._import("iris.io") def time_io_format_picker(self): - import iris.io.format_picker + self._import("iris.io.format_picker") def time_iterate(self): - import iris.iterate + self._import("iris.iterate") def time_palette(self): - import iris.palette + self._import("iris.palette", reset_colormaps=True) def time_plot(self): - import iris.plot + self._import("iris.plot") def time_quickplot(self): - import iris.quickplot + self._import("iris.quickplot") def time_std_names(self): - import iris.std_names + self._import("iris.std_names") def time_symbols(self): - import iris.symbols + self._import("iris.symbols") def time_tests(self): - import iris.tests + self._import("iris.tests") def time_time(self): - import iris.time + self._import("iris.time") def time_util(self): - import iris.util + self._import("iris.util") # third-party imports def time_third_party_cartopy(self): - import cartopy + self._import("cartopy") def time_third_party_cf_units(self): - import cf_units + self._import("cf_units") def time_third_party_cftime(self): - import cftime + self._import("cftime") def time_third_party_matplotlib(self): - import matplotlib + self._import("matplotlib") def time_third_party_numpy(self): - import numpy + self._import("numpy") def time_third_party_scipy(self): - import scipy + self._import("scipy") diff --git a/benchmarks/benchmarks/iterate.py b/benchmarks/benchmarks/iterate.py index 20422750ef..0a5415ac2b 100644 --- a/benchmarks/benchmarks/iterate.py +++ b/benchmarks/benchmarks/iterate.py @@ -9,9 +9,10 @@ """ import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import coords, cube, iterate +from . 
import ARTIFICIAL_DIM_SIZE + def setup(): """General variables needed by multiple benchmark classes.""" diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py new file mode 100644 index 0000000000..1b0ea696f6 --- /dev/null +++ b/benchmarks/benchmarks/load/__init__.py @@ -0,0 +1,187 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmark tests. + +Where applicable benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. Size should be _just_ large + enough - don't want to bloat benchmark runtime. + +""" + +from iris import AttributeConstraint, Constraint, load, load_cube +from iris.cube import Cube +from iris.fileformats.um import structured_um_loading + +from ..generate_data import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere +from ..generate_data.um_files import create_um_files + + +class LoadAndRealise: + # For data generation + timeout = 600.0 + params = [ + [(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], + [False, True], + ["FF", "PP", "NetCDF"], + ] + param_names = ["xyz", "compressed", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[2] + file_path_dict = {} + for xyz in self.params[0]: + file_path_dict[xyz] = {} + x, y, z = xyz + for compress in self.params[1]: + file_path_dict[xyz][compress] = create_um_files( + x, y, z, 1, compress, file_type_args + ) + return file_path_dict + + def setup( + self, + file_path_dict: dict, + xyz: tuple, + compress: bool, + file_format: str, + ) -> None: + self.file_path = file_path_dict[xyz][compress][file_format] + self.cube = self.load() + + def load(self) -> 
Cube: + return load_cube(self.file_path) + + def time_load(self, _, __, ___, ____) -> None: + _ = self.load() + + def time_realise(self, _, __, ___, ____) -> None: + # Don't touch cube.data - permanent realisation plays badly with ASV's + # re-run strategy. + assert self.cube.has_lazy_data() + self.cube.core_data().compute() + + +class STASHConstraint: + # xyz sizes mimic LoadAndRealise to maximise file re-use. + params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], ["FF", "PP"]] + param_names = ["xyz", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files( + x, y, z, 1, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, xyz: tuple, file_format: str + ) -> None: + self.file_path = file_path_dict[xyz][file_format] + + def time_stash_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, AttributeConstraint(STASH="m??s??i901")) + + +class TimeConstraint: + params = [[3, 20], ["FF", "PP", "NetCDF"]] + param_names = ["time_dim_len", "file_format"] + + def setup_cache(self) -> dict: + file_type_args = self.params[1] + file_path_dict = {} + for time_dim_len in self.params[0]: + file_path_dict[time_dim_len] = create_um_files( + 20, 20, 5, time_dim_len, False, file_type_args + ) + return file_path_dict + + def setup( + self, file_path_dict: dict, time_dim_len: int, file_format: str + ) -> None: + self.file_path = file_path_dict[time_dim_len][file_format] + self.time_constr = Constraint(time=lambda cell: cell.point.year < 3) + + def time_time_constraint(self, _, __, ___) -> None: + _ = load_cube(self.file_path, self.time_constr) + + +class ManyVars: + FILE_PATH = BENCHMARK_DATA / "many_var_file.nc" + + @staticmethod + def _create_file(save_path: str) -> None: + """Is run externally - everything must be self-contained.""" + import numpy as np + + from iris import save + from 
iris.coords import AuxCoord + from iris.cube import Cube + + data_len = 8 + data = np.arange(data_len) + cube = Cube(data, units="unknown") + extra_vars = 80 + names = ["coord_" + str(i) for i in range(extra_vars)] + for name in names: + coord = AuxCoord(data, long_name=name, units="unknown") + cube.add_aux_coord(coord, 0) + save(cube, save_path) + + def setup_cache(self) -> None: + if not REUSE_DATA or not self.FILE_PATH.is_file(): + # See :mod:`benchmarks.generate_data` docstring for full explanation. + _ = run_function_elsewhere( + self._create_file, + str(self.FILE_PATH), + ) + + def time_many_var_load(self) -> None: + _ = load(str(self.FILE_PATH)) + + +class StructuredFF: + """ + Test structured loading of a large-ish fieldsfile. + + Structured load of the larger size should show benefit over standard load, + avoiding the cost of merging. + """ + + params = [[(2, 2, 2), (1280, 960, 5), (2, 2, 1000)], [False, True]] + param_names = ["xyz", "structured_loading"] + + def setup_cache(self) -> dict: + file_path_dict = {} + for xyz in self.params[0]: + x, y, z = xyz + file_path_dict[xyz] = create_um_files(x, y, z, 1, False, ["FF"]) + return file_path_dict + + def setup(self, file_path_dict, xyz, structured_load): + self.file_path = file_path_dict[xyz]["FF"] + self.structured_load = structured_load + + def load(self): + """Load the whole file (in fact there is only 1 cube).""" + + def _load(): + _ = load(self.file_path) + + if self.structured_load: + with structured_um_loading(): + _load() + else: + _load() + + def time_structured_load(self, _, __, ___): + self.load() diff --git a/benchmarks/benchmarks/load/ugrid.py b/benchmarks/benchmarks/load/ugrid.py new file mode 100644 index 0000000000..350a78e128 --- /dev/null +++ b/benchmarks/benchmarks/load/ugrid.py @@ -0,0 +1,130 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Mesh data loading benchmark tests. + +Where possible benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. + +""" + +from iris import load_cube as iris_load_cube +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD +from iris.experimental.ugrid import load_mesh as iris_load_mesh + +from ..generate_data.stock import create_file__xios_2d_face_half_levels + + +def synthetic_data(**kwargs): + # Ensure all uses of the synthetic data function use the common directory. + # File location is controlled by :mod:`generate_data`, hence temp_file_dir=None. + return create_file__xios_2d_face_half_levels(temp_file_dir=None, **kwargs) + + +def load_cube(*args, **kwargs): + with PARSE_UGRID_ON_LOAD.context(): + return iris_load_cube(*args, **kwargs) + + +def load_mesh(*args, **kwargs): + with PARSE_UGRID_ON_LOAD.context(): + return iris_load_mesh(*args, **kwargs) + + +class BasicLoading: + params = [1, int(2e5)] + param_names = ["number of faces"] + + def setup_common(self, **kwargs): + self.data_path = synthetic_data(**kwargs) + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=args[0]) + + def time_load_file(self, *args): + _ = load_cube(str(self.data_path)) + + def time_load_mesh(self, *args): + _ = load_mesh(str(self.data_path)) + + +class BasicLoadingTime(BasicLoading): + """Same as BasicLoading, but scaling over a time series - an unlimited dimension.""" + + # NOTE iris#4834 - careful how big the time dimension is (time dimension + # is UNLIMITED). 
+ + param_names = ["number of time steps"] + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=1, n_times=args[0]) + + +class DataRealisation: + # Prevent repeat runs between setup() runs - data won't be lazy after 1st. + number = 1 + # Compensate for reduced certainty by increasing number of repeats. + repeat = (10, 10, 10.0) + # Prevent ASV running its warmup, which ignores `number` and would + # therefore get a false idea of typical run time since the data would stop + # being lazy. + warmup_time = 0.0 + timeout = 300.0 + + params = [1, int(2e5)] + param_names = ["number of faces"] + + def setup_common(self, **kwargs): + data_path = synthetic_data(**kwargs) + self.cube = load_cube(str(data_path)) + + def setup(self, *args): + self.setup_common(dataset_name="Realisation", n_faces=args[0]) + + def time_realise_data(self, *args): + assert self.cube.has_lazy_data() + _ = self.cube.data[0] + + +class DataRealisationTime(DataRealisation): + """Same as DataRealisation, but scaling over a time series - an unlimited dimension.""" + + param_names = ["number of time steps"] + + def setup(self, *args): + self.setup_common( + dataset_name="Realisation", n_faces=1, n_times=args[0] + ) + + +class Callback: + params = [1, int(2e5)] + param_names = ["number of faces"] + + def setup_common(self, **kwargs): + def callback(cube, field, filename): + return cube[::2] + + self.data_path = synthetic_data(**kwargs) + self.callback = callback + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=args[0]) + + def time_load_file_callback(self, *args): + _ = load_cube(str(self.data_path), callback=self.callback) + + +class CallbackTime(Callback): + """Same as Callback, but scaling over a time series - an unlimited dimension.""" + + param_names = ["number of time steps"] + + def setup(self, *args): + self.setup_common(dataset_name="Loading", n_faces=1, n_times=args[0]) diff --git a/benchmarks/benchmarks/mixin.py 
b/benchmarks/benchmarks/mixin.py index e78b150438..bec5518eee 100644 --- a/benchmarks/benchmarks/mixin.py +++ b/benchmarks/benchmarks/mixin.py @@ -10,10 +10,11 @@ import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import coords from iris.common.metadata import AncillaryVariableMetadata +from . import ARTIFICIAL_DIM_SIZE + LONG_NAME = "air temperature" STANDARD_NAME = "air_temperature" VAR_NAME = "air_temp" diff --git a/benchmarks/benchmarks/plot.py b/benchmarks/benchmarks/plot.py index 45905abd2f..75195c86e9 100644 --- a/benchmarks/benchmarks/plot.py +++ b/benchmarks/benchmarks/plot.py @@ -10,9 +10,10 @@ import matplotlib import numpy as np -from benchmarks import ARTIFICIAL_DIM_SIZE from iris import coords, cube, plot +from . import ARTIFICIAL_DIM_SIZE + matplotlib.use("agg") @@ -22,7 +23,7 @@ def setup(self): # Should generate 10 distinct contours, regardless of dim size. dim_size = int(ARTIFICIAL_DIM_SIZE / 5) repeat_number = int(dim_size / 10) - repeat_range = range(int((dim_size ** 2) / repeat_number)) + repeat_range = range(int((dim_size**2) / repeat_number)) data = np.repeat(repeat_range, repeat_number) data = data.reshape((dim_size,) * 2) diff --git a/benchmarks/benchmarks/regridding.py b/benchmarks/benchmarks/regridding.py index 6db33aa192..44bd1b6c95 100644 --- a/benchmarks/benchmarks/regridding.py +++ b/benchmarks/benchmarks/regridding.py @@ -12,8 +12,11 @@ # importing anything else from iris import tests # isort:skip +import numpy as np + import iris -from iris.analysis import AreaWeighted +from iris.analysis import AreaWeighted, PointInCell +from iris.coords import AuxCoord class HorizontalChunkedRegridding: @@ -25,16 +28,76 @@ def setup(self) -> None: ) self.cube = iris.load_cube(cube_file_path) + # Prepare a tougher cube and chunk it + chunked_cube_file_path = tests.get_data_path( + ["NetCDF", "regrid", "regrid_xyt.nc"] + ) + self.chunked_cube = iris.load_cube(chunked_cube_file_path) + + # Chunked data makes the regridder run 
repeatedly + self.cube.data = self.cube.lazy_data().rechunk((1, -1, -1)) + template_file_path = tests.get_data_path( ["NetCDF", "regrid", "regrid_template_global_latlon.nc"] ) self.template_cube = iris.load_cube(template_file_path) - # Chunked data makes the regridder run repeatedly - self.cube.data = self.cube.lazy_data().rechunk((1, -1, -1)) + # Prepare a regridding scheme + self.scheme_area_w = AreaWeighted() def time_regrid_area_w(self) -> None: # Regrid the cube onto the template. - out = self.cube.regrid(self.template_cube, AreaWeighted()) + out = self.cube.regrid(self.template_cube, self.scheme_area_w) + # Realise the data + out.data + + def time_regrid_area_w_new_grid(self) -> None: + # Regrid the chunked cube + out = self.chunked_cube.regrid(self.template_cube, self.scheme_area_w) + # Realise data + out.data + + +class CurvilinearRegridding: + def setup(self) -> None: + # Prepare a cube and a template + + cube_file_path = tests.get_data_path( + ["NetCDF", "regrid", "regrid_xyt.nc"] + ) + self.cube = iris.load_cube(cube_file_path) + + # Make the source cube curvilinear + x_coord = self.cube.coord("longitude") + y_coord = self.cube.coord("latitude") + xx, yy = np.meshgrid(x_coord.points, y_coord.points) + self.cube.remove_coord(x_coord) + self.cube.remove_coord(y_coord) + x_coord_2d = AuxCoord( + xx, + standard_name=x_coord.standard_name, + units=x_coord.units, + coord_system=x_coord.coord_system, + ) + y_coord_2d = AuxCoord( + yy, + standard_name=y_coord.standard_name, + units=y_coord.units, + coord_system=y_coord.coord_system, + ) + self.cube.add_aux_coord(x_coord_2d, (1, 2)) + self.cube.add_aux_coord(y_coord_2d, (1, 2)) + + template_file_path = tests.get_data_path( + ["NetCDF", "regrid", "regrid_template_global_latlon.nc"] + ) + self.template_cube = iris.load_cube(template_file_path) + + # Prepare a regridding scheme + self.scheme_pic = PointInCell() + + def time_regrid_pic(self) -> None: + # Regrid the cube onto the template. 
+ out = self.cube.regrid(self.template_cube, self.scheme_pic) # Realise the data out.data diff --git a/benchmarks/benchmarks/save.py b/benchmarks/benchmarks/save.py new file mode 100644 index 0000000000..3551c72528 --- /dev/null +++ b/benchmarks/benchmarks/save.py @@ -0,0 +1,54 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File saving benchmarks. + +Where possible benchmarks should be parameterised for two sizes of input data: + * minimal: enables detection of regressions in parts of the run-time that do + NOT scale with data size. + * large: large enough to exclusively detect regressions in parts of the + run-time that scale with data size. + +""" +from iris import save +from iris.experimental.ugrid import save_mesh + +from . import TrackAddedMemoryAllocation +from .generate_data.ugrid import make_cube_like_2d_cubesphere + + +class NetcdfSave: + params = [[1, 600], [False, True]] + param_names = ["cubesphere-N", "is_unstructured"] + + def setup(self, n_cubesphere, is_unstructured): + self.cube = make_cube_like_2d_cubesphere( + n_cube=n_cubesphere, with_mesh=is_unstructured + ) + + def _save_data(self, cube, do_copy=True): + if do_copy: + # Copy the cube, to avoid distorting the results by changing it + # Because we know that older Iris code realises lazy coords + cube = cube.copy() + save(cube, "tmp.nc") + + def _save_mesh(self, cube): + # In this case, we are happy that the mesh is *not* modified + save_mesh(cube.mesh, "mesh.nc") + + def time_netcdf_save_cube(self, n_cubesphere, is_unstructured): + self._save_data(self.cube) + + def time_netcdf_save_mesh(self, n_cubesphere, is_unstructured): + if is_unstructured: + self._save_mesh(self.cube) + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_netcdf_save(self, n_cubesphere, is_unstructured): + # Don't need to copy the cube here since track_
benchmarks don't + # do repeats between self.setup() calls. + self._save_data(self.cube, do_copy=False) diff --git a/benchmarks/benchmarks/sperf/__init__.py b/benchmarks/benchmarks/sperf/__init__.py new file mode 100644 index 0000000000..eccad56f6f --- /dev/null +++ b/benchmarks/benchmarks/sperf/__init__.py @@ -0,0 +1,43 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. + +SPerf = assessing performance against a series of increasingly large LFRic +datasets. +""" +from iris import load_cube + +# TODO: remove uses of PARSE_UGRID_ON_LOAD once UGRID parsing is core behaviour. +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD + +from ..generate_data.ugrid import make_cubesphere_testfile + + +class FileMixin: + """For use in any benchmark classes that work on a file.""" + + # Allows time for large file generation. + timeout = 3600.0 + # Largest file with these params: ~90GB. + # Total disk space: ~410GB. + params = [ + [12, 384, 640, 960, 1280, 1668], + [1, 36, 72], + [1, 3, 10], + ] + param_names = ["cubesphere_C", "N levels", "N time steps"] + # cubesphere_C: notation refers to faces per panel. + # e.g. C1 is 6 faces, 8 nodes + + def setup(self, c_size, n_levels, n_times): + self.file_path = make_cubesphere_testfile( + c_size=c_size, n_levels=n_levels, n_times=n_times + ) + + def load_cube(self): + with PARSE_UGRID_ON_LOAD.context(): + return load_cube(str(self.file_path)) diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py new file mode 100644 index 0000000000..d3d128c7d8 --- /dev/null +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -0,0 +1,257 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. 
+# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Region combine benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +""" +import os.path + +from dask import array as da +import numpy as np + +from iris import load, load_cube, save +from iris.experimental.ugrid import PARSE_UGRID_ON_LOAD +from iris.experimental.ugrid.utils import recombine_submeshes + +from .. import TrackAddedMemoryAllocation, on_demand_benchmark +from ..generate_data.ugrid import BENCHMARK_DATA, make_cube_like_2d_cubesphere + + +class Mixin: + # Characterise time taken + memory-allocated, for various stages of combine + # operations on cubesphere-like test data. + timeout = 300.0 + params = [100, 200, 300, 500, 1000, 1668] + param_names = ["cubesphere_C"] + # Fix result units for the tracking benchmarks. + unit = "Mb" + temp_save_path = BENCHMARK_DATA / "tmp.nc" + + def _parametrised_cache_filename(self, n_cubesphere, content_name): + return BENCHMARK_DATA / f"cube_C{n_cubesphere}_{content_name}.nc" + + def _make_region_cubes(self, full_mesh_cube): + """Make a fixed number of region cubes from a full meshcube.""" + # Divide the cube into regions. + n_faces = full_mesh_cube.shape[-1] + # Start with a simple list of face indices + # first extend to multiple of 5 + n_faces_5s = 5 * ((n_faces + 1) // 5) + i_faces = np.arange(n_faces_5s, dtype=int) + # reshape (5N,) to (N, 5) + i_faces = i_faces.reshape((n_faces_5s // 5, 5)) + # reorder [2, 3, 4, 0, 1] within each block of 5 + i_faces = np.concatenate([i_faces[:, 2:], i_faces[:, :2]], axis=1) + # flatten to get [2 3 4 0 1 (-) 8 9 10 6 7 (-) 13 14 15 11 12 ...] 
+ i_faces = i_faces.flatten() + # reduce back to original length, wrap any overflows into valid range + i_faces = i_faces[:n_faces] % n_faces + + # Divide into regions -- always slightly uneven, since 7 doesn't divide + n_regions = 7 + n_facesperregion = n_faces // n_regions + i_face_regions = (i_faces // n_facesperregion) % n_regions + region_inds = [ + np.where(i_face_regions == i_region)[0] + for i_region in range(n_regions) + ] + # NOTE: this produces 7 regions, with near-adjacent value ranges but + # with some points "moved" to an adjacent region. + # Also, region-0 is bigger (because of not dividing by 7). + + # Finally, make region cubes with these indices. + region_cubes = [full_mesh_cube[..., inds] for inds in region_inds] + return region_cubes + + def setup_cache(self): + """Cache all the necessary source data on disk.""" + + # Control dask, to minimise memory usage + allow largest data. + self.fix_dask_settings() + + for n_cubesphere in self.params: + # Do for each parameter, since "setup_cache" is NOT parametrised + mesh_cube = make_cube_like_2d_cubesphere( + n_cube=n_cubesphere, with_mesh=True + ) + # Save to files which include the parameter in the names. + save( + mesh_cube, + self._parametrised_cache_filename(n_cubesphere, "meshcube"), + ) + region_cubes = self._make_region_cubes(mesh_cube) + save( + region_cubes, + self._parametrised_cache_filename(n_cubesphere, "regioncubes"), + ) + + def setup( + self, n_cubesphere, imaginary_data=True, create_result_cube=True + ): + """ + The combine-tests "standard" setup operation. + + Load the source cubes (full-mesh + region) from disk. + These are specific to the cubesize parameter. + The data is cached on disk rather than calculated, to avoid any + pre-loading of the process memory allocation. + + If 'imaginary_data' is set (default), the region cubes data is replaced + with lazy data in the form of a da.zeros(). Otherwise, the region data + is lazy data from the files.
+ + If 'create_result_cube' is set, create "self.combined_cube" containing + the (still lazy) result. + + NOTE: various test classes override + extend this. + + """ + + # Load source cubes (full-mesh and regions) + with PARSE_UGRID_ON_LOAD.context(): + self.full_mesh_cube = load_cube( + self._parametrised_cache_filename(n_cubesphere, "meshcube") + ) + self.region_cubes = load( + self._parametrised_cache_filename(n_cubesphere, "regioncubes") + ) + + # Remove all var-names from loaded cubes, which can otherwise cause + # problems. Also implement 'imaginary' data. + for cube in self.region_cubes + [self.full_mesh_cube]: + cube.var_name = None + for coord in cube.coords(): + coord.var_name = None + if imaginary_data: + # Replace cube data (lazy file data) with 'imaginary' data. + # This has the same lazy-array attributes, but is allocated by + # creating chunks on demand instead of loading from file. + data = cube.lazy_data() + data = da.zeros( + data.shape, dtype=data.dtype, chunks=data.chunksize + ) + cube.data = data + + if create_result_cube: + self.recombined_cube = self.recombine() + + # Fix dask usage mode for all the subsequent performance tests. + self.fix_dask_settings() + + def teardown(self, _): + self.temp_save_path.unlink(missing_ok=True) + + def fix_dask_settings(self): + """ + Fix "standard" dask behaviour for time+space testing. + + Currently this is single-threaded mode, with known chunksize, + which is optimised for space saving so we can test largest data. + + """ + + import dask.config as dcfg + + # Use single-threaded, to avoid process-switching costs and minimise memory usage. + # N.B. generally may be slower, but use less memory ? + dcfg.set(scheduler="single-threaded") + # Configure iris._lazy_data.as_lazy_data to aim for 100Mb chunks + dcfg.set({"array.chunk-size": "128Mib"}) + + def recombine(self): + # A handy general shorthand for the main "combine" operation. 
+ result = recombine_submeshes( + self.full_mesh_cube, + self.region_cubes, + index_coord_name="i_mesh_face", + ) + return result + + def save_recombined_cube(self): + save(self.recombined_cube, self.temp_save_path) + + +@on_demand_benchmark +class CreateCube(Mixin): + """ + Time+memory costs of creating a combined-regions cube. + + The result is lazy, and we don't do the actual calculation. + + """ + + def setup( + self, n_cubesphere, imaginary_data=True, create_result_cube=False + ): + # In this case only, do *not* create the result cube. + # That is the operation we want to test. + super().setup(n_cubesphere, imaginary_data, create_result_cube) + + def time_create_combined_cube(self, n_cubesphere): + self.recombine() + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_create_combined_cube(self, n_cubesphere): + self.recombine() + + +@on_demand_benchmark +class ComputeRealData(Mixin): + """ + Time+memory costs of computing combined-regions data. + """ + + def time_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_compute_data(self, n_cubesphere): + _ = self.recombined_cube.data + + +@on_demand_benchmark +class SaveData(Mixin): + """ + Test saving *only*, having replaced the input cube data with 'imaginary' + array data, so that input data is not loaded from disk during the save + operation. + + """ + + def time_save(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file. + self.save_recombined_cube() + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_save(self, n_cubesphere): + self.save_recombined_cube() + + def track_filesize_saved(self, n_cubesphere): + self.save_recombined_cube() + return self.temp_save_path.stat().st_size * 1.0e-6 + + +@on_demand_benchmark +class FileStreamedCalc(Mixin): + """ + Test the whole cost of file-to-file streaming. 
+ Uses the combined cube which is based on lazy data loading from the region + cubes on disk. + """ + + def setup( + self, n_cubesphere, imaginary_data=False, create_result_cube=True + ): + # In this case only, do *not* replace the loaded regions data with + # 'imaginary' data, as we want to test file-to-file calculation+save. + super().setup(n_cubesphere, imaginary_data, create_result_cube) + + def time_stream_file2file(self, n_cubesphere): + # Save to disk, which must compute data + stream it to file. + self.save_recombined_cube() + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_stream_file2file(self, n_cubesphere): + self.save_recombined_cube() diff --git a/benchmarks/benchmarks/sperf/equality.py b/benchmarks/benchmarks/sperf/equality.py new file mode 100644 index 0000000000..85c73ab92b --- /dev/null +++ b/benchmarks/benchmarks/sperf/equality.py @@ -0,0 +1,36 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Equality benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +""" +from . import FileMixin +from .. import on_demand_benchmark + + +@on_demand_benchmark +class CubeEquality(FileMixin): +    """ + Benchmark time and memory costs of comparing :class:`~iris.cube.Cube`\\ s + with attached :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es. + + Uses :class:`FileMixin` as the realistic case will be comparing + :class:`~iris.cube.Cube`\\ s that have been loaded from file. + + """ + + # Cut down parent parameters.
+ params = [FileMixin.params[0]] + + def setup(self, c_size, n_levels=1, n_times=1): + super().setup(c_size, n_levels, n_times) + self.cube = self.load_cube() + self.other_cube = self.load_cube() + + def peakmem_eq(self, n_cube): + _ = self.cube == self.other_cube + + def time_eq(self, n_cube): + _ = self.cube == self.other_cube diff --git a/benchmarks/benchmarks/sperf/load.py b/benchmarks/benchmarks/sperf/load.py new file mode 100644 index 0000000000..6a60355976 --- /dev/null +++ b/benchmarks/benchmarks/sperf/load.py @@ -0,0 +1,29 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File loading benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. +""" +from . import FileMixin +from .. import on_demand_benchmark + + +@on_demand_benchmark +class Load(FileMixin): + def time_load_cube(self, _, __, ___): + _ = self.load_cube() + + +@on_demand_benchmark +class Realise(FileMixin): + def setup(self, c_size, n_levels, n_times): + super().setup(c_size, n_levels, n_times) + self.loaded_cube = self.load_cube() + + def time_realise_cube(self, _, __, ___): + # Don't touch loaded_cube.data - permanent realisation plays badly with + # ASV's re-run strategy. + assert self.loaded_cube.has_lazy_data() + self.loaded_cube.core_data().compute() diff --git a/benchmarks/benchmarks/sperf/save.py b/benchmarks/benchmarks/sperf/save.py new file mode 100644 index 0000000000..dd33924c6c --- /dev/null +++ b/benchmarks/benchmarks/sperf/save.py @@ -0,0 +1,56 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +File saving benchmarks for the SPerf scheme of the UK Met Office's NG-VAT project. 
+""" +import os.path + +from iris import save +from iris.experimental.ugrid import save_mesh + +from .. import TrackAddedMemoryAllocation, on_demand_benchmark +from ..generate_data.ugrid import make_cube_like_2d_cubesphere + + +@on_demand_benchmark +class NetcdfSave: + """ + Benchmark time and memory costs of saving ~large-ish data cubes to netcdf. + + """ + + params = [[1, 100, 200, 300, 500, 1000, 1668], [False, True]] + param_names = ["cubesphere_C", "is_unstructured"] + # Fix result units for the tracking benchmarks. + unit = "Mb" + + def setup(self, n_cubesphere, is_unstructured): + self.cube = make_cube_like_2d_cubesphere( + n_cube=n_cubesphere, with_mesh=is_unstructured + ) + + def _save_cube(self, cube): + save(cube, "tmp.nc") + + def _save_mesh(self, cube): + save_mesh(cube.mesh, "mesh.nc") + + def time_save_cube(self, n_cubesphere, is_unstructured): + self._save_cube(self.cube) + + @TrackAddedMemoryAllocation.decorator + def track_addedmem_save_cube(self, n_cubesphere, is_unstructured): + self._save_cube(self.cube) + + def time_save_mesh(self, n_cubesphere, is_unstructured): + if is_unstructured: + self._save_mesh(self.cube) + + # The filesizes make a good reference point for the 'addedmem' memory + # usage results. + def track_filesize_save_cube(self, n_cubesphere, is_unstructured): + self._save_cube(self.cube) + return os.path.getsize("tmp.nc") * 1.0e-6 diff --git a/benchmarks/benchmarks/trajectory.py b/benchmarks/benchmarks/trajectory.py new file mode 100644 index 0000000000..5c1d10d218 --- /dev/null +++ b/benchmarks/benchmarks/trajectory.py @@ -0,0 +1,48 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+""" +Trajectory benchmark test + +""" + +# import iris tests first so that some things can be initialised before +# importing anything else +from iris import tests # isort:skip + +import numpy as np + +import iris +from iris.analysis.trajectory import interpolate + + +class TrajectoryInterpolation: + def setup(self) -> None: + # Prepare a cube and a template + + cube_file_path = tests.get_data_path( + ["NetCDF", "regrid", "regrid_xyt.nc"] + ) + self.cube = iris.load_cube(cube_file_path) + + trajectory = np.array( + [np.array((-50 + i, -50 + i)) for i in range(100)] + ) + self.sample_points = [ + ("longitude", trajectory[:, 0]), + ("latitude", trajectory[:, 1]), + ] + + def time_trajectory_linear(self) -> None: + # Regrid the cube onto the template. + out_cube = interpolate(self.cube, self.sample_points, method="linear") + # Realise the data + out_cube.data + + def time_trajectory_nearest(self) -> None: + # Regrid the cube onto the template. + out_cube = interpolate(self.cube, self.sample_points, method="nearest") + # Realise the data + out_cube.data diff --git a/benchmarks/nox_asv_plugin.py b/benchmarks/nox_asv_plugin.py deleted file mode 100644 index 6c9ce14272..0000000000 --- a/benchmarks/nox_asv_plugin.py +++ /dev/null @@ -1,249 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -ASV plug-in providing an alternative ``Environment`` subclass, which uses Nox -for environment management. 
- -""" -from importlib.util import find_spec -from pathlib import Path -from shutil import copy2, copytree -from tempfile import TemporaryDirectory - -from asv import util as asv_util -from asv.config import Config -from asv.console import log -from asv.environment import get_env_name -from asv.plugins.conda import Conda, _find_conda -from asv.repo import Repo, get_repo - - -class NoxConda(Conda): - """ - Manage a Conda environment using Nox, updating environment at each commit. - - Defers environment management to the project's noxfile, which must be able - to create/update the benchmarking environment using ``nox --install-only``, - with the ``--session`` specified in ``asv.conf.json.nox_session_name``. - - Notes - ----- - If not all benchmarked commits support this use of Nox: the plugin will - need to be modified to prep the environment in other ways. - - """ - - tool_name = "nox-conda" - - @classmethod - def matches(cls, python: str) -> bool: - """Used by ASV to work out if this type of environment can be used.""" - result = find_spec("nox") is not None - if result: - result = super().matches(python) - - if result: - message = ( - f"NOTE: ASV env match check incomplete. Not possible to know " - f"if selected Nox session (asv.conf.json.nox_session_name) is " - f"compatible with ``--python={python}`` until project is " - f"checked out." - ) - log.warning(message) - - return result - - def __init__(self, conf: Config, python: str, requirements: dict) -> None: - """ - Parameters - ---------- - conf: Config instance - - python : str - Version of Python. Must be of the form "MAJOR.MINOR". - - requirements : dict - Dictionary mapping a PyPI package name to a version - identifier string. - - """ - from nox.sessions import _normalize_path - - # Need to checkout the project BEFORE the benchmark run - to access a noxfile. 
- self.project_temp_checkout = TemporaryDirectory( - prefix="nox_asv_checkout_" - ) - repo = get_repo(conf) - repo.checkout(self.project_temp_checkout.name, conf.nox_setup_commit) - self.noxfile_rel_path = conf.noxfile_rel_path - self.setup_noxfile = ( - Path(self.project_temp_checkout.name) / self.noxfile_rel_path - ) - self.nox_session_name = conf.nox_session_name - - # Some duplication of parent code - need these attributes BEFORE - # running inherited code. - self._python = python - self._requirements = requirements - self._env_dir = conf.env_dir - - # Prepare the actual environment path, to override self._path. - nox_envdir = str(Path(self._env_dir).absolute() / self.hashname) - nox_friendly_name = self._get_nox_session_name(python) - self._nox_path = Path(_normalize_path(nox_envdir, nox_friendly_name)) - - # For storing any extra conda requirements from asv.conf.json. - self._extra_reqs_path = self._nox_path / "asv-extra-reqs.yaml" - - super().__init__(conf, python, requirements) - - @property - def _path(self) -> str: - """ - Using a property to override getting and setting in parent classes - - unable to modify parent classes as this is a plugin. 
- - """ - return str(self._nox_path) - - @_path.setter - def _path(self, value) -> None: - """Enforce overriding of this variable by disabling modification.""" - pass - - @property - def name(self) -> str: - """Overridden to prevent inclusion of user input requirements.""" - return get_env_name(self.tool_name, self._python, {}) - - def _get_nox_session_name(self, python: str) -> str: - nox_cmd_substring = ( - f"--noxfile={self.setup_noxfile} " - f"--session={self.nox_session_name} " - f"--python={python}" - ) - - list_output = asv_util.check_output( - ["nox", "--list", *nox_cmd_substring.split(" ")], - display_error=False, - dots=False, - ) - list_output = list_output.split("\n") - list_matches = list(filter(lambda s: s.startswith("*"), list_output)) - matches_count = len(list_matches) - - if matches_count == 0: - message = f"No Nox sessions found for: {nox_cmd_substring} ." - log.error(message) - raise RuntimeError(message) - elif matches_count > 1: - message = ( - f"Ambiguous - >1 Nox session found for: {nox_cmd_substring} ." - ) - log.error(message) - raise RuntimeError(message) - else: - line = list_matches[0] - session_name = line.split(" ")[1] - assert isinstance(session_name, str) - return session_name - - def _nox_prep_env(self, setup: bool = False) -> None: - message = f"Running Nox environment update for: {self.name}" - log.info(message) - - build_root_path = Path(self._build_root) - env_path = Path(self._path) - - def copy_asv_files(src_parent: Path, dst_parent: Path) -> None: - """For copying between self._path and a temporary cache.""" - asv_files = list(src_parent.glob("asv*")) - # build_root_path.name usually == "project" . - asv_files += [src_parent / build_root_path.name] - for src_path in asv_files: - dst_path = dst_parent / src_path.name - if not dst_path.exists(): - # Only cache-ing in case Nox has rebuilt the env @ - # self._path. If the dst_path already exists: rebuilding - # hasn't happened. 
Also a non-issue when copying in the - # reverse direction because the cache dir is temporary. - if src_path.is_dir(): - func = copytree - else: - func = copy2 - func(src_path, dst_path) - - with TemporaryDirectory(prefix="nox_asv_cache_") as asv_cache: - asv_cache_path = Path(asv_cache) - if setup: - noxfile = self.setup_noxfile - else: - # Cache all of ASV's files as Nox may remove and re-build the environment. - copy_asv_files(env_path, asv_cache_path) - # Get location of noxfile in cache. - noxfile_original = ( - build_root_path / self._repo_subdir / self.noxfile_rel_path - ) - noxfile_subpath = noxfile_original.relative_to( - build_root_path.parent - ) - noxfile = asv_cache_path / noxfile_subpath - - nox_cmd = [ - "nox", - f"--noxfile={noxfile}", - # Place the env in the ASV env directory, instead of the default. - f"--envdir={env_path.parent}", - f"--session={self.nox_session_name}", - f"--python={self._python}", - "--install-only", - "--no-error-on-external-run", - "--verbose", - ] - - _ = asv_util.check_output(nox_cmd) - if not env_path.is_dir(): - message = f"Expected Nox environment not found: {env_path}" - log.error(message) - raise RuntimeError(message) - - if not setup: - # Restore ASV's files from the cache (if necessary). - copy_asv_files(asv_cache_path, env_path) - - def _setup(self) -> None: - """Used for initial environment creation - mimics parent method where possible.""" - try: - self.conda = _find_conda() - except IOError as e: - raise asv_util.UserError(str(e)) - if find_spec("nox") is None: - raise asv_util.UserError("Module not found: nox") - - message = f"Creating Nox-Conda environment for {self.name} ." - log.info(message) - - try: - self._nox_prep_env(setup=True) - finally: - # No longer need the setup checkout now that the environment has been built. - self.project_temp_checkout.cleanup() - - conda_args, pip_args = self._get_requirements(self.conda) - if conda_args or pip_args: - message = ( - "Ignoring user input package requirements. 
Benchmark " - "environment management is exclusively performed by Nox." - ) - log.warning(message) - - def checkout_project(self, repo: Repo, commit_hash: str) -> None: - """Check out the working tree of the project at given commit hash.""" - super().checkout_project(repo, commit_hash) - self._nox_prep_env() - log.info( - f"Environment {self.name} updated to spec at {commit_hash[:8]}" - ) diff --git a/docs/Makefile b/docs/Makefile index 44c89206d2..fcb0ec0116 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -20,11 +20,6 @@ html-quick: echo "make html-quick in $$i..."; \ (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) html-quick); done -spelling: - @for i in $(SUBDIRS); do \ - echo "make spelling in $$i..."; \ - (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) spelling); done - all: @for i in $(SUBDIRS); do \ echo "make all in $$i..."; \ @@ -55,8 +50,8 @@ linkcheck: echo "Running linkcheck in $$i..."; \ (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) linkcheck); done -gallerytest: - @echo - @echo "Running \"gallery\" tests..." - @echo - python -m unittest discover -v -t . +show: + @for i in $(SUBDIRS); do \ + echo "Running show in $$i..."; \ + (cd $$i; $(MAKE) $(MFLAGS) $(MYMAKEFLAGS) show); done + diff --git a/docs/gallery_code/README.rst b/docs/gallery_code/README.rst index 720fd1e6f6..85bf0552b4 100644 --- a/docs/gallery_code/README.rst +++ b/docs/gallery_code/README.rst @@ -1,3 +1,5 @@ +.. 
_gallery_index: + Gallery ======= diff --git a/docs/gallery_code/general/README.rst b/docs/gallery_code/general/README.rst index c846755f1e..3a48e7cd8e 100644 --- a/docs/gallery_code/general/README.rst +++ b/docs/gallery_code/general/README.rst @@ -1,2 +1,3 @@ General ------- + diff --git a/docs/gallery_code/general/plot_custom_file_loading.py b/docs/gallery_code/general/plot_custom_file_loading.py index 025f395789..4b817aea66 100644 --- a/docs/gallery_code/general/plot_custom_file_loading.py +++ b/docs/gallery_code/general/plot_custom_file_loading.py @@ -57,7 +57,7 @@ import datetime -from cf_units import CALENDAR_GREGORIAN, Unit +from cf_units import CALENDAR_STANDARD, Unit import matplotlib.pyplot as plt import numpy as np @@ -225,7 +225,7 @@ def NAME_to_cube(filenames, callback): # define the time unit and use it to serialise the datetime for the # time coordinate - time_unit = Unit("hours since epoch", calendar=CALENDAR_GREGORIAN) + time_unit = Unit("hours since epoch", calendar=CALENDAR_STANDARD) time_coord = icoords.AuxCoord( time_unit.date2num(field_headings["time"]), standard_name="time", diff --git a/docs/gallery_code/general/plot_lineplot_with_legend.py b/docs/gallery_code/general/plot_lineplot_with_legend.py index 78401817ba..aad7906acd 100644 --- a/docs/gallery_code/general/plot_lineplot_with_legend.py +++ b/docs/gallery_code/general/plot_lineplot_with_legend.py @@ -24,7 +24,6 @@ def main(): ) for cube in temperature.slices("longitude"): - # Create a string label to identify this cube (i.e. latitude: value). 
cube_label = "latitude: %s" % cube.coord("latitude").points[0] diff --git a/docs/gallery_code/general/plot_projections_and_annotations.py b/docs/gallery_code/general/plot_projections_and_annotations.py index 75122591b9..2cf42e66e0 100644 --- a/docs/gallery_code/general/plot_projections_and_annotations.py +++ b/docs/gallery_code/general/plot_projections_and_annotations.py @@ -26,7 +26,6 @@ def make_plot(projection_name, projection_crs): - # Create a matplotlib Figure. plt.figure() diff --git a/docs/gallery_code/general/plot_zonal_means.py b/docs/gallery_code/general/plot_zonal_means.py new file mode 100644 index 0000000000..195f8b4bb0 --- /dev/null +++ b/docs/gallery_code/general/plot_zonal_means.py @@ -0,0 +1,88 @@ +""" +Zonal Mean Diagram of Air Temperature +===================================== +This example demonstrates aligning a linear plot and a cartographic plot using Matplotlib. +""" + +import cartopy.crs as ccrs +import matplotlib.pyplot as plt +from mpl_toolkits.axes_grid1 import make_axes_locatable +import numpy as np + +import iris +from iris.analysis import MEAN +import iris.plot as iplt +import iris.quickplot as qplt + + +def main(): + # Loads air_temp.pp and "collapses" longitude into a single, average value. + fname = iris.sample_data_path("air_temp.pp") + temperature = iris.load_cube(fname) + collapsed_temp = temperature.collapsed("longitude", MEAN) + + # Set y-axes with -90 and 90 limits and steps of 15 per tick. + start, stop, step = -90, 90, 15 + yticks = np.arange(start, stop + step, step) + ylim = [start, stop] + + # Plot "temperature" on a cartographic plot and set the ticks and titles + # on the axes. 
+ fig = plt.figure(figsize=[12, 4]) + + ax1 = fig.add_subplot(111, projection=ccrs.PlateCarree()) + im = iplt.contourf(temperature, cmap="RdYlBu_r") + ax1.coastlines() + ax1.gridlines() + ax1.set_xticks([-180, -90, 0, 90, 180]) + ax1.set_yticks(yticks) + ax1.set_title("Air Temperature") + ax1.set_ylabel(f"Latitude / {temperature.coord('latitude').units}") + ax1.set_xlabel(f"Longitude / {temperature.coord('longitude').units}") + ax1.set_ylim(*ylim) + + # Create a Matplotlib AxesDivider object to allow alignment of other + # Axes objects. + divider = make_axes_locatable(ax1) + + # Gives the air temperature bar size, colour and a title. + ax2 = divider.new_vertical( + size="5%", pad=0.5, axes_class=plt.Axes, pack_start=True + ) # creates 2nd axis + fig.add_axes(ax2) + cbar = plt.colorbar( + im, cax=ax2, orientation="horizontal" + ) # puts colour bar on second axis + cbar.ax.set_xlabel(f"{temperature.units}") # labels colour bar + + # Plot "collapsed_temp" on the mean graph and set the ticks and titles + # on the axes. + ax3 = divider.new_horizontal( + size="30%", pad=0.4, axes_class=plt.Axes + ) # create 3rd axis + fig.add_axes(ax3) + qplt.plot( + collapsed_temp, collapsed_temp.coord("latitude") + ) # plots temperature collapsed over longitude against latitude + ax3.axhline(0, color="k", linewidth=0.5) + + # Creates zonal mean details + ax3.set_title("Zonal Mean") + ax3.yaxis.set_label_position("right") + ax3.yaxis.tick_right() + ax3.set_yticks(yticks) + ax3.grid() + + # Round each tick for the third ax to the nearest 20 (ready for use). 
+ data_max = collapsed_temp.data.max() + x_max = data_max - data_max % -20 + data_min = collapsed_temp.data.min() + x_min = data_min - data_min % 20 + ax3.set_xlim(x_min, x_max) + ax3.set_ylim(*ylim) + + plt.show() + + +if __name__ == "__main__": + main() diff --git a/docs/gallery_code/meteorology/plot_lagged_ensemble.py b/docs/gallery_code/meteorology/plot_lagged_ensemble.py index 5cd2752f39..e15aa0e6ef 100644 --- a/docs/gallery_code/meteorology/plot_lagged_ensemble.py +++ b/docs/gallery_code/meteorology/plot_lagged_ensemble.py @@ -86,7 +86,6 @@ def main(): # Iterate over all possible latitude longitude slices. for cube in last_timestep.slices(["latitude", "longitude"]): - # Get the ensemble member number from the ensemble coordinate. ens_member = cube.coord("realization").points[0] diff --git a/docs/gallery_code/meteorology/plot_wind_barbs.py b/docs/gallery_code/meteorology/plot_wind_barbs.py index c3c056eb4a..b09040c64e 100644 --- a/docs/gallery_code/meteorology/plot_wind_barbs.py +++ b/docs/gallery_code/meteorology/plot_wind_barbs.py @@ -30,7 +30,7 @@ def main(): # To illustrate the full range of barbs, scale the wind speed up to pretend # that a storm is passing over - magnitude = (uwind ** 2 + vwind ** 2) ** 0.5 + magnitude = (uwind**2 + vwind**2) ** 0.5 magnitude.convert_units("knot") max_speed = magnitude.collapsed( ("latitude", "longitude"), iris.analysis.MAX @@ -41,7 +41,7 @@ def main(): vwind = vwind / max_speed * max_desired # Create a cube containing the wind speed - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") windspeed.convert_units("knot") diff --git a/docs/gallery_code/meteorology/plot_wind_speed.py b/docs/gallery_code/meteorology/plot_wind_speed.py index fd03f54205..40d9d0da00 100644 --- a/docs/gallery_code/meteorology/plot_wind_speed.py +++ b/docs/gallery_code/meteorology/plot_wind_speed.py @@ -27,7 +27,7 @@ def main(): vwind = iris.load_cube(infile, "y_wind") # Create a 
cube containing the wind speed. - windspeed = (uwind ** 2 + vwind ** 2) ** 0.5 + windspeed = (uwind**2 + vwind**2) ** 0.5 windspeed.rename("windspeed") # Plot the wind speed as a contour plot. diff --git a/docs/gallery_code/oceanography/plot_atlantic_profiles.py b/docs/gallery_code/oceanography/plot_atlantic_profiles.py index dc038ecffe..6604b61ec3 100644 --- a/docs/gallery_code/oceanography/plot_atlantic_profiles.py +++ b/docs/gallery_code/oceanography/plot_atlantic_profiles.py @@ -34,7 +34,7 @@ def main(): # the southern portion of the domain, and limit the depth of the profile # to 1000m. lon_cons = iris.Constraint(longitude=330.5) - lat_cons = iris.Constraint(latitude=lambda l: -10 < l < -9) + lat_cons = iris.Constraint(latitude=lambda lat: -10 < lat < -9) depth_cons = iris.Constraint(depth=lambda d: d <= 1000) theta_1000m = theta.extract(depth_cons & lon_cons & lat_cons) salinity_1000m = salinity.extract(depth_cons & lon_cons & lat_cons) diff --git a/docs/gallery_code/oceanography/plot_load_nemo.py b/docs/gallery_code/oceanography/plot_load_nemo.py index 4bfee5ac8e..b19f37e1f5 100644 --- a/docs/gallery_code/oceanography/plot_load_nemo.py +++ b/docs/gallery_code/oceanography/plot_load_nemo.py @@ -13,7 +13,7 @@ import iris import iris.plot as iplt import iris.quickplot as qplt -from iris.util import promote_aux_coord_to_dim_coord +from iris.util import equalise_attributes, promote_aux_coord_to_dim_coord def main(): @@ -21,16 +21,15 @@ def main(): fname = iris.sample_data_path("NEMO/nemo_1m_*.nc") cubes = iris.load(fname) - # Some attributes are unique to each file and must be blanked - # to allow concatenation. - differing_attrs = ["file_name", "name", "timeStamp", "TimeStamp"] - for cube in cubes: - for attribute in differing_attrs: - cube.attributes[attribute] = "" - - # The cubes still cannot be concatenated because their time dimension is - # time_counter rather than time. 
time needs to be promoted to allow + # Some attributes are unique to each file and must be removed to allow # concatenation. + equalise_attributes(cubes) + + # The cubes still cannot be concatenated because their dimension coordinate + # is "time_counter", which has the same value for each cube. concatenate + # needs distinct values in order to create a new DimCoord for the output + # cube. Here, each cube has a "time" auxiliary coordinate, and these do + # have distinct values, so we can promote them to allow concatenation. for cube in cubes: promote_aux_coord_to_dim_coord(cube, "time") diff --git a/docs/gallery_tests/conftest.py b/docs/gallery_tests/conftest.py new file mode 100644 index 0000000000..a218b305a2 --- /dev/null +++ b/docs/gallery_tests/conftest.py @@ -0,0 +1,67 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. + +"""Pytest fixtures for the gallery tests.""" + +import pathlib + +import matplotlib.pyplot as plt +import pytest + +import iris + +CURRENT_DIR = pathlib.Path(__file__).resolve() +GALLERY_DIR = CURRENT_DIR.parents[1] / "gallery_code" + + +@pytest.fixture +def image_setup_teardown(): + """ + Setup and teardown fixture. + + Ensures all figures are closed before and after test to prevent one test + polluting another if it fails with a figure unclosed. + + """ + plt.close("all") + yield + plt.close("all") + + +@pytest.fixture +def import_patches(monkeypatch): + """ + Replace plt.show() with a function that does nothing, also add all the + gallery examples to sys.path. 
+ + """ + + def no_show(): + pass + + monkeypatch.setattr(plt, "show", no_show) + + for example_dir in GALLERY_DIR.iterdir(): + if example_dir.is_dir(): + monkeypatch.syspath_prepend(example_dir) + + yield + + +@pytest.fixture +def iris_future_defaults(): + """ + Create a fixture which resets all the iris.FUTURE settings to the defaults, + as otherwise changes made in one test can affect subsequent ones. + + """ + # Run with all default settings in iris.FUTURE. + default_future_kwargs = iris.Future().__dict__.copy() + for dead_option in iris.Future.deprecated_options: + # Avoid a warning when setting these ! + del default_future_kwargs[dead_option] + with iris.FUTURE.context(**default_future_kwargs): + yield diff --git a/docs/gallery_tests/gallerytest_util.py b/docs/gallery_tests/gallerytest_util.py deleted file mode 100644 index eb2736f194..0000000000 --- a/docs/gallery_tests/gallerytest_util.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -""" -Provides context managers which are fundamental to the ability -to run the gallery tests. - -""" - -import contextlib -import os.path -import sys -import warnings - -import matplotlib.pyplot as plt - -import iris -from iris._deprecation import IrisDeprecation -import iris.plot as iplt -import iris.quickplot as qplt - -GALLERY_DIRECTORY = os.path.join( - os.path.dirname(os.path.dirname(__file__)), "gallery_code" -) -GALLERY_DIRECTORIES = [ - os.path.join(GALLERY_DIRECTORY, the_dir) - for the_dir in os.listdir(GALLERY_DIRECTORY) -] - - -@contextlib.contextmanager -def add_gallery_to_path(): - """ - Creates a context manager which can be used to add the iris gallery - to the PYTHONPATH. The gallery entries are only importable throughout the lifetime - of this context manager. 
- - """ - orig_sys_path = sys.path - sys.path = sys.path[:] - sys.path += GALLERY_DIRECTORIES - yield - sys.path = orig_sys_path - - -@contextlib.contextmanager -def show_replaced_by_check_graphic(test_case): - """ - Creates a context manager which can be used to replace the functionality - of matplotlib.pyplot.show with a function which calls the check_graphic - method on the given test_case (iris.tests.IrisTest.check_graphic). - - """ - - def replacement_show(): - # form a closure on test_case and tolerance - test_case.check_graphic() - - orig_show = plt.show - plt.show = iplt.show = qplt.show = replacement_show - yield - plt.show = iplt.show = qplt.show = orig_show - - -@contextlib.contextmanager -def fail_any_deprecation_warnings(): - """ - Create a context in which any deprecation warning will cause an error. - - The context also resets all the iris.FUTURE settings to the defaults, as - otherwise changes made in one test can affect subsequent ones. - - """ - with warnings.catch_warnings(): - # Detect and error all and any Iris deprecation warnings. - warnings.simplefilter("error", IrisDeprecation) - # Run with all default settings in iris.FUTURE. - default_future_kwargs = iris.Future().__dict__.copy() - for dead_option in iris.Future.deprecated_options: - # Avoid a warning when setting these ! - del default_future_kwargs[dead_option] - with iris.FUTURE.context(**default_future_kwargs): - yield diff --git a/docs/gallery_tests/test_gallery_examples.py b/docs/gallery_tests/test_gallery_examples.py new file mode 100644 index 0000000000..0d0793a7da --- /dev/null +++ b/docs/gallery_tests/test_gallery_examples.py @@ -0,0 +1,44 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+ +import importlib + +import matplotlib.pyplot as plt +import pytest + +from iris.tests import _RESULT_PATH +from iris.tests.graphics import check_graphic + +from .conftest import GALLERY_DIR + + +def gallery_examples(): + """Generator to yield all current gallery examples.""" + + for example_file in GALLERY_DIR.glob("*/plot*.py"): + yield example_file.stem + + +@pytest.mark.filterwarnings("error::iris.IrisDeprecation") +@pytest.mark.parametrize("example", gallery_examples()) +def test_plot_example( + example, + image_setup_teardown, + import_patches, + iris_future_defaults, +): + """Test that all figures from example code match KGO.""" + + module = importlib.import_module(example) + + # Run example. + module.main() + # Loop through open figures and set each to be the current figure so check_graphic + # will find it. + for fig_num in plt.get_fignums(): + plt.figure(fig_num) + image_id = f"gallery_tests.test_{example}.{fig_num - 1}" + check_graphic(image_id, _RESULT_PATH) diff --git a/docs/gallery_tests/test_plot_COP_1d.py b/docs/gallery_tests/test_plot_COP_1d.py deleted file mode 100644 index 9771e10fb1..0000000000 --- a/docs/gallery_tests/test_plot_COP_1d.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCOP1DPlot(tests.GraphicsTest): - """Test the COP_1d_plot gallery code.""" - - def test_plot_COP_1d(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_COP_1d - with show_replaced_by_check_graphic(self): - plot_COP_1d.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_COP_maps.py b/docs/gallery_tests/test_plot_COP_maps.py deleted file mode 100644 index a01e12527f..0000000000 --- a/docs/gallery_tests/test_plot_COP_maps.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCOPMaps(tests.GraphicsTest): - """Test the COP_maps gallery code.""" - - def test_plot_cop_maps(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_COP_maps - with show_replaced_by_check_graphic(self): - plot_COP_maps.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_SOI_filtering.py b/docs/gallery_tests/test_plot_SOI_filtering.py deleted file mode 100644 index 1da731122a..0000000000 --- a/docs/gallery_tests/test_plot_SOI_filtering.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
- -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestSOIFiltering(tests.GraphicsTest): - """Test the SOI_filtering gallery code.""" - - def test_plot_soi_filtering(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_SOI_filtering - with show_replaced_by_check_graphic(self): - plot_SOI_filtering.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_TEC.py b/docs/gallery_tests/test_plot_TEC.py deleted file mode 100644 index cfc1fb8eec..0000000000 --- a/docs/gallery_tests/test_plot_TEC.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestTEC(tests.GraphicsTest): - """Test the TEC gallery code.""" - - def test_plot_TEC(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_TEC - with show_replaced_by_check_graphic(self): - plot_TEC.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_anomaly_log_colouring.py b/docs/gallery_tests/test_plot_anomaly_log_colouring.py deleted file mode 100644 index 41f76cc774..0000000000 --- a/docs/gallery_tests/test_plot_anomaly_log_colouring.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestAnomalyLogColouring(tests.GraphicsTest): - """Test the anomaly colouring gallery code.""" - - def test_plot_anomaly_log_colouring(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_anomaly_log_colouring - with show_replaced_by_check_graphic(self): - plot_anomaly_log_colouring.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_atlantic_profiles.py b/docs/gallery_tests/test_plot_atlantic_profiles.py deleted file mode 100644 index fdcb5fb1d1..0000000000 --- a/docs/gallery_tests/test_plot_atlantic_profiles.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestAtlanticProfiles(tests.GraphicsTest): - """Test the atlantic_profiles gallery code.""" - - def test_plot_atlantic_profiles(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_atlantic_profiles - with show_replaced_by_check_graphic(self): - plot_atlantic_profiles.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_coriolis.py b/docs/gallery_tests/test_plot_coriolis.py deleted file mode 100644 index 2e4cea8a74..0000000000 --- a/docs/gallery_tests/test_plot_coriolis.py +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. - -import iris.tests as tests - -from . import gallerytest_util - -with gallerytest_util.add_gallery_to_path(): - import plot_coriolis - - -class TestCoriolisPlot(tests.GraphicsTest): - """Test the Coriolis Plot gallery code.""" - - def test_plot_coriolis(self): - with gallerytest_util.show_replaced_by_check_graphic(self): - plot_coriolis.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_cross_section.py b/docs/gallery_tests/test_plot_cross_section.py deleted file mode 100644 index b0878d10bc..0000000000 --- a/docs/gallery_tests/test_plot_cross_section.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCrossSection(tests.GraphicsTest): - """Test the cross_section gallery code.""" - - def test_plot_cross_section(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_cross_section - with show_replaced_by_check_graphic(self): - plot_cross_section.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_custom_aggregation.py b/docs/gallery_tests/test_plot_custom_aggregation.py deleted file mode 100644 index 9d0a40dd3c..0000000000 --- a/docs/gallery_tests/test_plot_custom_aggregation.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCustomAggregation(tests.GraphicsTest): - """Test the custom aggregation gallery code.""" - - def test_plot_custom_aggregation(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_custom_aggregation - with show_replaced_by_check_graphic(self): - plot_custom_aggregation.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_custom_file_loading.py b/docs/gallery_tests/test_plot_custom_file_loading.py deleted file mode 100644 index 4d0d603a22..0000000000 --- a/docs/gallery_tests/test_plot_custom_file_loading.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestCustomFileLoading(tests.GraphicsTest): - """Test the custom_file_loading gallery code.""" - - def test_plot_custom_file_loading(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_custom_file_loading - with show_replaced_by_check_graphic(self): - plot_custom_file_loading.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_deriving_phenomena.py b/docs/gallery_tests/test_plot_deriving_phenomena.py deleted file mode 100644 index ef2f8cec87..0000000000 --- a/docs/gallery_tests/test_plot_deriving_phenomena.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestDerivingPhenomena(tests.GraphicsTest): - """Test the deriving_phenomena gallery code.""" - - def test_plot_deriving_phenomena(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_deriving_phenomena - with show_replaced_by_check_graphic(self): - plot_deriving_phenomena.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_global_map.py b/docs/gallery_tests/test_plot_global_map.py deleted file mode 100644 index 16f769deae..0000000000 --- a/docs/gallery_tests/test_plot_global_map.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestGlobalMap(tests.GraphicsTest): - """Test the global_map gallery code.""" - - def test_plot_global_map(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_global_map - with show_replaced_by_check_graphic(self): - plot_global_map.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_hovmoller.py b/docs/gallery_tests/test_plot_hovmoller.py deleted file mode 100644 index 29c0e72e05..0000000000 --- a/docs/gallery_tests/test_plot_hovmoller.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
- -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestGlobalMap(tests.GraphicsTest): - """Test the hovmoller gallery code.""" - - def test_plot_hovmoller(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_hovmoller - with show_replaced_by_check_graphic(self): - plot_hovmoller.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_inset.py b/docs/gallery_tests/test_plot_inset.py deleted file mode 100644 index 739e0a3224..0000000000 --- a/docs/gallery_tests/test_plot_inset.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. - -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestInsetPlot(tests.GraphicsTest): - """Test the inset plot gallery code.""" - - def test_plot_inset(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_inset - with show_replaced_by_check_graphic(self): - plot_inset.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_lagged_ensemble.py b/docs/gallery_tests/test_plot_lagged_ensemble.py deleted file mode 100644 index f0a0201613..0000000000 --- a/docs/gallery_tests/test_plot_lagged_ensemble.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestLaggedEnsemble(tests.GraphicsTest): - """Test the lagged ensemble gallery code.""" - - def test_plot_lagged_ensemble(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_lagged_ensemble - with show_replaced_by_check_graphic(self): - plot_lagged_ensemble.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_lineplot_with_legend.py b/docs/gallery_tests/test_plot_lineplot_with_legend.py deleted file mode 100644 index 5677667026..0000000000 --- a/docs/gallery_tests/test_plot_lineplot_with_legend.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestLineplotWithLegend(tests.GraphicsTest): - """Test the lineplot_with_legend gallery code.""" - - def test_plot_lineplot_with_legend(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_lineplot_with_legend - with show_replaced_by_check_graphic(self): - plot_lineplot_with_legend.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_load_nemo.py b/docs/gallery_tests/test_plot_load_nemo.py deleted file mode 100644 index f250dc46b4..0000000000 --- a/docs/gallery_tests/test_plot_load_nemo.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestLoadNemo(tests.GraphicsTest): - """Test the load_nemo gallery code.""" - - def test_plot_load_nemo(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_load_nemo - with show_replaced_by_check_graphic(self): - plot_load_nemo.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_orca_projection.py b/docs/gallery_tests/test_plot_orca_projection.py deleted file mode 100644 index c4058c996e..0000000000 --- a/docs/gallery_tests/test_plot_orca_projection.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestOrcaProjection(tests.GraphicsTest): - """Test the orca projection gallery code.""" - - def test_plot_orca_projection(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_orca_projection - with show_replaced_by_check_graphic(self): - plot_orca_projection.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_polar_stereo.py b/docs/gallery_tests/test_plot_polar_stereo.py deleted file mode 100644 index 4d32ee5830..0000000000 --- a/docs/gallery_tests/test_plot_polar_stereo.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestPolarStereo(tests.GraphicsTest): - """Test the polar_stereo gallery code.""" - - def test_plot_polar_stereo(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_polar_stereo - with show_replaced_by_check_graphic(self): - plot_polar_stereo.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_polynomial_fit.py b/docs/gallery_tests/test_plot_polynomial_fit.py deleted file mode 100644 index b522dcf43c..0000000000 --- a/docs/gallery_tests/test_plot_polynomial_fit.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestPolynomialFit(tests.GraphicsTest): - """Test the polynomial_fit gallery code.""" - - def test_plot_polynomial_fit(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_polynomial_fit - with show_replaced_by_check_graphic(self): - plot_polynomial_fit.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_projections_and_annotations.py b/docs/gallery_tests/test_plot_projections_and_annotations.py deleted file mode 100644 index 1c24202251..0000000000 --- a/docs/gallery_tests/test_plot_projections_and_annotations.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestProjectionsAndAnnotations(tests.GraphicsTest): - """Test the atlantic_profiles gallery code.""" - - def test_plot_projections_and_annotations(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_projections_and_annotations - with show_replaced_by_check_graphic(self): - plot_projections_and_annotations.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_rotated_pole_mapping.py b/docs/gallery_tests/test_plot_rotated_pole_mapping.py deleted file mode 100644 index cd9b04fc66..0000000000 --- a/docs/gallery_tests/test_plot_rotated_pole_mapping.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestRotatedPoleMapping(tests.GraphicsTest): - """Test the rotated_pole_mapping gallery code.""" - - def test_plot_rotated_pole_mapping(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_rotated_pole_mapping - with show_replaced_by_check_graphic(self): - plot_rotated_pole_mapping.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_wind_barbs.py b/docs/gallery_tests/test_plot_wind_barbs.py deleted file mode 100644 index 6003860a5e..0000000000 --- a/docs/gallery_tests/test_plot_wind_barbs.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestWindBarbs(tests.GraphicsTest): - """Test the wind_barbs example code.""" - - def test_wind_barbs(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_wind_barbs - with show_replaced_by_check_graphic(self): - plot_wind_barbs.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/gallery_tests/test_plot_wind_speed.py b/docs/gallery_tests/test_plot_wind_speed.py deleted file mode 100644 index ebaf97adbe..0000000000 --- a/docs/gallery_tests/test_plot_wind_speed.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# Import Iris tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - -from .gallerytest_util import ( - add_gallery_to_path, - fail_any_deprecation_warnings, - show_replaced_by_check_graphic, -) - - -class TestWindSpeed(tests.GraphicsTest): - """Test the wind_speed gallery code.""" - - def test_plot_wind_speed(self): - with fail_any_deprecation_warnings(): - with add_gallery_to_path(): - import plot_wind_speed - with show_replaced_by_check_graphic(self): - plot_wind_speed.main() - - -if __name__ == "__main__": - tests.main() diff --git a/docs/src/Makefile b/docs/src/Makefile index c693a2c900..a75da5371b 100644 --- a/docs/src/Makefile +++ b/docs/src/Makefile @@ -16,7 +16,7 @@ PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -.PHONY: help clean html html-noplot dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest +.PHONY: help clean html html-noplot dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest show help: @echo "Please use \`make ' where is one of" @@ -36,6 +36,7 @@ help: @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " show to open the built documentation in the default browser" clean: -rm -rf $(BUILDDIR) @@ -61,11 +62,6 @@ html-quick: @echo @echo "Build finished. The HTML (no gallery or api docs) pages are in $(BUILDDIR)/html" -spelling: - $(SPHINXBUILD) -b spelling $(SRCDIR) $(BUILDDIR) - @echo - @echo "Build finished. 
The HTML (no gallery) pages are in $(BUILDDIR)/html" - dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @@ -153,3 +149,7 @@ doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." + +show: + @python -c "import webbrowser; webbrowser.open_new_tab('file://$(shell pwd)/$(BUILDDIR)/html/index.html')" + diff --git a/docs/src/_static/Iris7_1_trim_100.png b/docs/src/_static/Iris7_1_trim_100.png deleted file mode 100644 index 2f6f80eff9..0000000000 Binary files a/docs/src/_static/Iris7_1_trim_100.png and /dev/null differ diff --git a/docs/src/_static/Iris7_1_trim_full.png b/docs/src/_static/Iris7_1_trim_full.png deleted file mode 100644 index c381aa3a89..0000000000 Binary files a/docs/src/_static/Iris7_1_trim_full.png and /dev/null differ diff --git a/docs/src/_static/README.md b/docs/src/_static/README.md new file mode 100644 index 0000000000..b9f2877a30 --- /dev/null +++ b/docs/src/_static/README.md @@ -0,0 +1,31 @@ +# Iris logos + +[![iris-logo-title.svg](iris-logo-title.svg)](iris-logo-title.svg) + +Code for generating the logos is at: +[SciTools/marketing/iris/logo/generate_logo.py](https://github.com/SciTools/marketing/blob/master/iris/logo/generate_logo.py) + +See the docstring of the `generate_logo()` function for more information. + +## Why a scripted logo? 
+ +SVG logos are ideal for source-controlled projects: + +* Low file size, with infinitely scaling quality +* Universally recognised vector format, editable by many software packages +* XML-style content = human-readable diff when changes are made + +But Iris' logo is difficult to reproduce/edit using an SVG editor alone: + +* Includes correctly projected, low resolution coastlines +* Needs precise alignment of the 'visual centre' of the iris with the centres + of the Earth and the image + +An SVG image is simply XML format, so can be easily assembled automatically +with a script, which can also be engineered to address the above problems. + +Further advantages of using a script: + +* Parameterised text, making it easy to standardise the logo across all Iris + packages +* Can generate an animated GIF/SVG of a rotating Earth diff --git a/docs/src/_static/favicon.ico b/docs/src/_static/favicon.ico deleted file mode 100644 index 0e5f0492b4..0000000000 Binary files a/docs/src/_static/favicon.ico and /dev/null differ diff --git a/docs/src/_static/icon_api.svg b/docs/src/_static/icon_api.svg new file mode 100644 index 0000000000..841b105973 --- /dev/null +++ b/docs/src/_static/icon_api.svg @@ -0,0 +1,144 @@ + + + +image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/src/_static/icon_development.svg b/docs/src/_static/icon_development.svg new file mode 100644 index 0000000000..dbc342688c --- /dev/null +++ b/docs/src/_static/icon_development.svg @@ -0,0 +1,63 @@ + + + + + + image/svg+xml + + + + + + + + + + diff --git a/docs/src/_static/icon_instructions.svg b/docs/src/_static/icon_instructions.svg new file mode 100644 index 0000000000..62b3fc3620 --- /dev/null +++ b/docs/src/_static/icon_instructions.svg @@ -0,0 +1,162 @@ + + + +image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/docs/src/_static/icon_new_product.svg b/docs/src/_static/icon_new_product.svg new file mode 100644 index 0000000000..f222e1e066 --- /dev/null +++ b/docs/src/_static/icon_new_product.svg @@ -0,0 +1,182 @@ + + + + + + image/svg+xml + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/src/_static/icon_shuttle.svg b/docs/src/_static/icon_shuttle.svg new file mode 100644 index 0000000000..46ba64d2e0 --- /dev/null +++ b/docs/src/_static/icon_shuttle.svg @@ -0,0 +1,71 @@ + + + + + + image/svg+xml + + + + + + + + + + + + diff --git a/docs/src/_static/icon_support.png b/docs/src/_static/icon_support.png new file mode 100644 index 0000000000..567cdb1b2f Binary files /dev/null and b/docs/src/_static/icon_support.png differ diff --git a/docs/src/_static/icon_thumb.png b/docs/src/_static/icon_thumb.png new file mode 100644 index 0000000000..6a14875e22 Binary files /dev/null and b/docs/src/_static/icon_thumb.png differ diff --git a/docs/src/_static/iris-logo-title.png b/docs/src/_static/iris-logo-title.png deleted file mode 100644 index e517aa7784..0000000000 Binary files a/docs/src/_static/iris-logo-title.png and /dev/null differ diff --git a/docs/src/_static/iris-logo-title.svg b/docs/src/_static/iris-logo-title.svg index 60ba0a1118..5bc38bfbda 100644 --- a/docs/src/_static/iris-logo-title.svg +++ b/docs/src/_static/iris-logo-title.svg @@ -1,89 +1,107 @@ - - - - - - - - - - image/svg+xml - - - - - - - - Iris - - + + \ No newline at end of file diff --git a/docs/src/_static/iris-logo.svg b/docs/src/_static/iris-logo.svg new file mode 100644 index 0000000000..6c4bdb0e5a --- /dev/null +++ b/docs/src/_static/iris-logo.svg @@ -0,0 +1,104 @@ + + \ No newline at end of file diff --git a/docs/src/_static/theme_override.css b/docs/src/_static/theme_override.css index c56b720f69..326c1d4d4a 100644 --- a/docs/src/_static/theme_override.css +++ b/docs/src/_static/theme_override.css @@ -1,33 +1,10 @@ /* import the standard theme css */ @import 
url("css/theme.css"); -/* now we can add custom any css */ - -/* set the width of the logo */ -.wy-side-nav-search>a img.logo, -.wy-side-nav-search .wy-dropdown>a img.logo { - width: 12rem -} - -/* color of the logo background in the top left corner */ -.wy-side-nav-search { - background-color: lightgray; -} - -/* color of the font for the version in the top left corner */ -.wy-side-nav-search>div.version { - color: black; - font-weight: bold; -} - -/* Ensures tables do now have width scroll bars */ -table.docutils td { - white-space: unset; - word-wrap: break-word; -} +/* now we can add custom css.... */ /* Used for very strong warning */ -#slim-red-box-message { +#slim-red-box-banner { background: #ff0000; box-sizing: border-box; color: #ffffff; @@ -35,8 +12,17 @@ table.docutils td { padding: 0.5em; } -#slim-red-box-message a { +#slim-red-box-banner a { color: #ffffff; - font-weight: normal; - text-decoration:underline; + font-weight: normal; + text-decoration: underline; +} + +/* bullet point list with green ticks */ +ul.squarelist { + /* https://developer.mozilla.org/en-US/docs/Web/CSS/list-style-type */ + list-style-type: "\2705"; + margin-left: 0; + text-indent: 1em; + padding-left: 5em; } diff --git a/docs/src/_templates/custom_footer.html b/docs/src/_templates/custom_footer.html new file mode 100644 index 0000000000..f81fcc583e --- /dev/null +++ b/docs/src/_templates/custom_footer.html @@ -0,0 +1 @@ +

Built using Python {{ python_version }}.

diff --git a/docs/src/_templates/custom_sidebar_logo_version.html b/docs/src/_templates/custom_sidebar_logo_version.html new file mode 100644 index 0000000000..c9d9ac6e2e --- /dev/null +++ b/docs/src/_templates/custom_sidebar_logo_version.html @@ -0,0 +1,26 @@ +{% if on_rtd %} + {% if rtd_version == 'latest' %} + + + + {% elif rtd_version == 'stable' %} + + + + {% elif rtd_version_type == 'tag' %} + {# Covers builds for specific tags, including RC's. #} + + + + {% else %} + {# Anything else build by RTD will be the HEAD of an activated branch #} + + + + {% endif %} +{%- else %} + {# not on rtd #} + + + +{%- endif %} diff --git a/docs/src/_templates/footer.html b/docs/src/_templates/footer.html deleted file mode 100644 index 1d5fb08b78..0000000000 --- a/docs/src/_templates/footer.html +++ /dev/null @@ -1,5 +0,0 @@ -{% extends "!footer.html" %} -{% block extrafooter %} - Built using Python {{ python_version }}. - {{ super() }} -{% endblock %} diff --git a/docs/src/_templates/layout.html b/docs/src/_templates/layout.html index 96a2e0913e..974bd12753 100644 --- a/docs/src/_templates/layout.html +++ b/docs/src/_templates/layout.html @@ -1,47 +1,20 @@ -{% extends "!layout.html" %} +{% extends "pydata_sphinx_theme/layout.html" %} -{# This uses blocks. See: +{# This uses blocks. See: https://www.sphinx-doc.org/en/master/templating.html #} -/*---------------------------------------------------------------------------*/ -{%- block document %} - {% if READTHEDOCS and rtd_version == 'latest' %} -
+ {%- block docs_body %} + + {% if on_rtd and rtd_version == 'latest' %} +
You are viewing the latest unreleased documentation - v{{ version }}. You may prefer a - stable - version. + v{{ version }}. You can switch to a stable version + via the flyout menu in the bottom corner of the screen.

{%- endif %} {{ super() }} {%- endblock %} - -/*-----------------------------------------------------z----------------------*/ - -{% block menu %} - {{ super() }} - - {# menu_links and menu_links_name are set in conf.py (html_context) #} - - {% if menu_links %} -

- - {% if menu_links_name %} - {{ menu_links_name }} - {% else %} - External links - {% endif %} - -

-
    - {% for text, link in menu_links %} -
  • {{ text }}
  • - {% endfor %} -
- {% endif %} -{% endblock %} - diff --git a/docs/src/common_links.inc b/docs/src/common_links.inc index 67fc493e3e..4d03a92715 100644 --- a/docs/src/common_links.inc +++ b/docs/src/common_links.inc @@ -3,25 +3,25 @@ .. _black: https://black.readthedocs.io/en/stable/ .. _cartopy: https://github.com/SciTools/cartopy -.. _.cirrus.yml: https://github.com/SciTools/iris/blob/main/.cirrus.yml .. _flake8: https://flake8.pycqa.org/en/stable/ .. _.flake8.yml: https://github.com/SciTools/iris/blob/main/.flake8 .. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris .. _conda: https://docs.conda.io/en/latest/ .. _contributor: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json .. _core developers: https://github.com/SciTools/scitools.org.uk/blob/master/contributors.json -.. _discussions: https://github.com/SciTools/iris/discussions -.. _generating sss keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account +.. _generating ssh keys for GitHub: https://docs.github.com/en/github/authenticating-to-github/adding-a-new-ssh-key-to-your-github-account +.. _GitHub Actions: https://docs.github.com/en/actions .. _GitHub Help Documentation: https://docs.github.com/en/github -.. _Iris GitHub Discussions: https://github.com/SciTools/iris/discussions +.. _GitHub Discussions: https://github.com/SciTools/iris/discussions .. _Iris: https://github.com/SciTools/iris .. _Iris GitHub: https://github.com/SciTools/iris +.. _Iris GitHub Actions: https://github.com/SciTools/iris/actions .. _iris-sample-data: https://github.com/SciTools/iris-sample-data .. _iris-test-data: https://github.com/SciTools/iris-test-data .. _isort: https://pycqa.github.io/isort/ .. _issue: https://github.com/SciTools/iris/issues .. _issues: https://github.com/SciTools/iris/issues -.. _legacy documentation: https://scitools.org.uk/iris/docs/v2.4.0/ +.. 
_legacy documentation: https://github.com/SciTools/scitools.org.uk/tree/master/iris/docs/archive .. _matplotlib: https://matplotlib.org/stable/ .. _napolean: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html .. _nox: https://nox.thea.codes/en/stable/ @@ -38,21 +38,28 @@ .. _using git: https://docs.github.com/en/github/using-git .. _requirements/ci/: https://github.com/SciTools/iris/tree/main/requirements/ci .. _CF-UGRID: https://ugrid-conventions.github.io/ugrid-conventions/ +.. _issues on GitHub: https://github.com/SciTools/iris/issues?q=is%3Aopen+is%3Aissue+sort%3Areactions-%2B1-desc +.. _python-stratify: https://github.com/SciTools/python-stratify +.. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid +.. _netCDF4: https://github.com/Unidata/netcdf4-python .. comment - Core developers (@github names) in alphabetical order: + Core developers and prolific contributors (@github names) in alphabetical order: .. _@abooton: https://github.com/abooton .. _@alastair-gemmell: https://github.com/alastair-gemmell .. _@ajdawson: https://github.com/ajdawson .. _@bjlittle: https://github.com/bjlittle .. _@bouweandela: https://github.com/bouweandela +.. _@bsherratt: https://github.com/bsherratt .. _@corinnebosley: https://github.com/corinnebosley .. _@cpelley: https://github.com/cpelley .. _@djkirkham: https://github.com/djkirkham .. _@DPeterK: https://github.com/DPeterK +.. _@ESadek-MO: https://github.com/ESadek-MO .. _@esc24: https://github.com/esc24 +.. _@HGWright: https://github.com/HGWright .. _@jamesp: https://github.com/jamesp .. _@jonseddon: https://github.com/jonseddon .. _@jvegasbsc: https://github.com/jvegasbsc @@ -63,6 +70,7 @@ .. _@QuLogic: https://github.com/QuLogic .. _@rcomer: https://github.com/rcomer .. _@rhattersley: https://github.com/rhattersley +.. _@schlunma: https://github.com/schlunma .. _@stephenworsley: https://github.com/stephenworsley .. _@tkknight: https://github.com/tkknight .. 
_@trexfeathers: https://github.com/trexfeathers diff --git a/docs/src/community/index.rst b/docs/src/community/index.rst new file mode 100644 index 0000000000..114cb96fe9 --- /dev/null +++ b/docs/src/community/index.rst @@ -0,0 +1,58 @@ +.. include:: ../common_links.inc + +.. todo: + consider scientific-python.org + consider scientific-python.org/specs/ + +Iris in the Community +===================== + +Iris aims to be a valuable member of the open source scientific Python +community. + +We listen out for developments in our dependencies and neighbouring projects, +and we reach out to them when we can solve problems together; please feel free +to reach out to us! + +We are aware of our place in the user's wider 'toolbox' - offering unique +functionality and interoperating smoothly with other packages. + +We welcome contributions from all; whether that's an opinion, a 1-line +clarification, or a whole new feature 🙂 + +Quick Links +----------- + +* `GitHub Discussions`_ +* :ref:`Getting involved` +* `Twitter `_ + +Interoperability +---------------- + +There's a big choice of Python tools out there! Each one has strengths and +weaknesses in different areas, so we don't want to force a single choice for your +whole workflow - we'd much rather make it easy for you to choose the right tool +for the moment, switching whenever you need. Below are our ongoing efforts at +smoother interoperability: + +.. not using toctree due to combination of child pages and cross-references. + +* The :mod:`iris.pandas` module +* :doc:`iris_xarray` + +.. toctree:: + :maxdepth: 1 + :hidden: + + iris_xarray + +Plugins +------- + +Iris can be extended with **plugins**! See below for further information: + +.. toctree:: + :maxdepth: 2 + + plugins diff --git a/docs/src/community/iris_xarray.rst b/docs/src/community/iris_xarray.rst new file mode 100644 index 0000000000..859597da78 --- /dev/null +++ b/docs/src/community/iris_xarray.rst @@ -0,0 +1,154 @@ +.. 
include:: ../common_links.inc + +====================== +Iris ❤️ :term:`Xarray` +====================== + +There is a lot of overlap between Iris and :term:`Xarray`, but some important +differences too. Below is a summary of the most important differences, so that +you can be prepared, and to help you choose the best package for your use case. + +Overall Experience +------------------ + +Iris is the more specialised package, focussed on making it as easy +as possible to work with meteorological and climatological data. Iris +is built to natively handle many key concepts, such as the CF conventions, +coordinate systems and bounded coordinates. Iris offers a smaller toolkit of +operations compared to Xarray, particularly around API for sophisticated +computation such as array manipulation and multi-processing. + +Xarray's more generic data model and community-driven development give it a +richer range of operations and broader possible uses. Using Xarray +specifically for meteorology/climatology may require deeper knowledge +compared to using Iris, and you may prefer to add Xarray plugins +such as :ref:`cfxarray` to get the best experience. Advanced users can likely +achieve better performance with Xarray than with Iris. + +Conversion +---------- +There are multiple ways to convert between Iris and Xarray objects. + +* Xarray includes the :meth:`~xarray.DataArray.to_iris` and + :meth:`~xarray.DataArray.from_iris` methods - detailed in the + `Xarray IO notes on Iris`_. Since Iris evolves independently of Xarray, be + vigilant for concepts that may be lost during the conversion. +* Because both packages are closely linked to the :term:`NetCDF Format`, it is + feasible to save a NetCDF file using one package then load that file using + the other package. This will be lossy in places, as both Iris and Xarray + are opinionated on how certain NetCDF concepts relate to their data models. 
+* The Iris development team are exploring an improved 'bridge' between the two + packages. Follow the conversation on GitHub: `iris#4994`_. This project is + expressly intended to be as lossless as possible. + +Regridding +---------- +Iris and Xarray offer a range of regridding methods - both natively and via +additional packages such as `iris-esmf-regrid`_ and `xESMF`_ - which overlap +in places +but tend to cover a different set of use cases (e.g. Iris handles unstructured +meshes but offers access to fewer ESMF methods). The behaviour of these +regridders also differs slightly (even between different regridders attached to +the same package) so the appropriate package to use depends highly on the +particulars of the use case. + +Plotting +-------- +Xarray and Iris have a large overlap of functionality when creating +:term:`Matplotlib` plots and both support the plotting of multidimensional +coordinates. This means the experience is largely similar using either package. + +Xarray supports further plotting backends through external packages (e.g. Bokeh through `hvPlot`_) +and, if a user is already familiar with `pandas`_, the interface should be +familiar. It also supports some different plot types to Iris, and therefore can +be used for a wider variety of plots. It also has benefits regarding "out of +the box", quick customisations to plots. However, if further customisation is +required, knowledge of matplotlib is still required. + +In both cases, :term:`Cartopy` is/can be used. Iris does more work +automatically for the user here, creating Cartopy +:class:`~cartopy.mpl.geoaxes.GeoAxes` for latitude and longitude coordinates, +whereas the user has to do this manually in Xarray. + +Statistics +---------- +Both libraries are quite comparable with generally similar capabilities, +performance and laziness. Iris offers more specificity in some cases, such as +some more specific unique functions and masked tolerance in most statistics. 
+Xarray seems more approachable however, with some less unique but more +convenient solutions (these tend to be wrappers to :term:`Dask` functions). + +Laziness and Multi-Processing with :term:`Dask` +----------------------------------------------- +Iris and Xarray both support lazy data and out-of-core processing through +utilisation of Dask. + +While both Iris and Xarray expose :term:`NumPy` conveniences at the API level +(e.g. the `ndim()` method), only Xarray exposes Dask conveniences. For example +:attr:`xarray.DataArray.chunks`, which gives the user direct control +over the underlying Dask array chunks. The Iris API instead takes control of +such concepts and user control is only possible by manipulating the underlying +Dask array directly (accessed via :meth:`iris.cube.Cube.core_data`). + +:class:`xarray.DataArray`\ s comply with `NEP-18`_, allowing NumPy arrays to be +based on them, and they also include the necessary extra members for Dask +arrays to be based on them too. Neither of these is currently possible with +Iris :class:`~iris.cube.Cube`\ s, although an ambition for the future. + +NetCDF File Control +------------------- +(More info: :term:`NetCDF Format`) + +Unlike Iris, Xarray generally provides full control of major file structures, +i.e. dimensions + variables, including their order in the file. It mostly +respects these in a file input, and can reproduce them on output. +However, attribute handling is not so complete: like Iris, it interprets and +modifies some recognised aspects, and can add some extra attributes not in the +input. + +.. todo: + More detail on dates and fill values (@pp-mo suggestion). + +Handling of dates and fill values have some special problems here. + +Ultimately, nearly everything wanted in a particular desired result file can +be achieved in Xarray, via provided override mechanisms (`loading keywords`_ +and the '`encoding`_' dictionaries). 
+ +Missing Data +------------ +Xarray uses :data:`numpy.nan` to represent missing values and this will support +many simple use cases assuming the data are floats. Iris enables more +sophisticated missing data handling by representing missing values as masks +(:class:`numpy.ma.MaskedArray` for real data and :class:`dask.array.Array` +for lazy data) which allows data to be any data type and to include either/both +a mask and :data:`~numpy.nan`\ s. + +.. _cfxarray: + +`cf-xarray`_ +------------- +Iris has a data model entirely based on :term:`CF Conventions`. Xarray has a +data model based on :term:`NetCDF Format` with cf-xarray acting as translation +into CF. Xarray/cf-xarray methods can be +called and data accessed with CF like arguments (e.g. axis, standard name) and +there are some CF specific utilities (similar +to Iris utilities). Iris tends to cover more of and be stricter about CF. + + +.. seealso:: + + * `Xarray IO notes on Iris`_ + * `Xarray notes on other NetCDF libraries`_ + +.. _Xarray IO notes on Iris: https://docs.xarray.dev/en/stable/user-guide/io.html#iris +.. _Xarray notes on other NetCDF libraries: https://docs.xarray.dev/en/stable/getting-started-guide/faq.html#what-other-netcdf-related-python-libraries-should-i-know-about +.. _loading keywords: https://docs.xarray.dev/en/stable/generated/xarray.open_dataset.html#xarray.open_dataset +.. _encoding: https://docs.xarray.dev/en/stable/user-guide/io.html#writing-encoded-data +.. _xESMF: https://github.com/pangeo-data/xESMF/ +.. _seaborn: https://seaborn.pydata.org/ +.. _hvPlot: https://hvplot.holoviz.org/ +.. _pandas: https://pandas.pydata.org/ +.. _NEP-18: https://numpy.org/neps/nep-0018-array-function-protocol.html +.. _cf-xarray: https://github.com/xarray-contrib/cf-xarray +.. 
_iris#4994: https://github.com/SciTools/iris/issues/4994 diff --git a/docs/src/community/plugins.rst b/docs/src/community/plugins.rst new file mode 100644 index 0000000000..0d79d64623 --- /dev/null +++ b/docs/src/community/plugins.rst @@ -0,0 +1,68 @@ +.. _namespace package: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/ + +.. _community_plugins: + +Plugins +======= + +Iris supports **plugins** under the ``iris.plugins`` `namespace package`_. +This allows packages that extend Iris' functionality to be developed and +maintained independently, while still being installed into ``iris.plugins`` +instead of a separate package. For example, a plugin may provide loaders or +savers for additional file formats, or alternative visualisation methods. + + +Using plugins +------------- + +Once a plugin is installed, it can be used either via the +:func:`iris.use_plugin` function, or by importing it directly: + +.. code-block:: python + + import iris + + iris.use_plugin("my_plugin") + # OR + import iris.plugins.my_plugin + + +Creating plugins +---------------- + +The choice of a `namespace package`_ makes writing a plugin relatively +straightforward: it simply needs to appear as a folder within ``iris/plugins``, +then can be distributed in the same way as any other package. An example +repository layout: + +.. code-block:: text + + + lib + + iris + + plugins + + my_plugin + - __init__.py + - (more code...) + - README.md + - pyproject.toml + - setup.cfg + - (other project files...) + +In particular, note that there must **not** be any ``__init__.py`` files at +higher levels than the plugin itself. + +The package name - how it is referred to by PyPI/conda, specified by +``metadata.name`` in ``setup.cfg`` - is recommended to include both "iris" and +the plugin name. Continuing this example, its ``setup.cfg`` should include, at +minimum: + +.. 
code-block:: ini + + [metadata] + name = iris-my-plugin + + [options] + packages = find_namespace: + + [options.packages.find] + where = lib diff --git a/docs/src/conf.py b/docs/src/conf.py index 19f22e808f..576a099b90 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -20,15 +20,16 @@ # ---------------------------------------------------------------------------- import datetime +from importlib.metadata import version as get_version import ntpath import os from pathlib import Path import re +from subprocess import run import sys +from urllib.parse import quote import warnings -import iris - # function to write useful output to stdout, prefixing the source. def autolog(message): @@ -41,20 +42,33 @@ def autolog(message): # -- Are we running on the readthedocs server, if so do some setup ----------- on_rtd = os.environ.get("READTHEDOCS") == "True" +# This is the rtd reference to the version, such as: latest, stable, v3.0.1 etc +rtd_version = os.environ.get("READTHEDOCS_VERSION") +if rtd_version is not None: + # Make rtd_version safe for use in shields.io badges. + rtd_version = rtd_version.replace("_", "__") + rtd_version = rtd_version.replace("-", "--") + rtd_version = quote(rtd_version) + +# branch, tag, external (for pull request builds), or unknown. 
+rtd_version_type = os.environ.get("READTHEDOCS_VERSION_TYPE") + +# For local testing purposes we can force being on RTD and the version +# on_rtd = True # useful for testing +# rtd_version = "latest" # useful for testing +# rtd_version = "stable" # useful for testing +# rtd_version_type = "tag" # useful for testing +# rtd_version = "my_branch" # useful for testing + if on_rtd: autolog("Build running on READTHEDOCS server") # list all the READTHEDOCS environment variables that may be of use - # at some point autolog("Listing all environment variables on the READTHEDOCS server...") for item, value in os.environ.items(): autolog("[READTHEDOCS] {} = {}".format(item, value)) -# This is the rtd reference to the version, such as: latest, stable, v3.0.1 etc -# For local testing purposes this could be explicitly set latest or stable. -rtd_version = os.environ.get("READTHEDOCS_VERSION") - # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, @@ -82,20 +96,11 @@ def autolog(message): author = "Iris Developers" # The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. - -# The short X.Y version. -if iris.__version__ == "dev": - version = "dev" -else: - # major.minor.patch-dev -> major.minor.patch - version = ".".join(iris.__version__.split("-")[0].split(".")[:3]) -# The full version, including alpha/beta/rc tags. -release = iris.__version__ - -autolog("Iris Version = {}".format(version)) -autolog("Iris Release = {}".format(release)) +# |version|, also used in various other places throughout the built documents. 
+version = get_version("scitools-iris") +release = version +autolog(f"Iris Version = {version}") +autolog(f"Iris Release = {release}") # -- General configuration --------------------------------------------------- @@ -153,12 +158,9 @@ def _dotv(version): "sphinx_copybutton", "sphinx.ext.napoleon", "sphinx_panels", - # TODO: Spelling extension disabled until the dependencies can be included - # "sphinxcontrib.spelling", "sphinx_gallery.gen_gallery", "matplotlib.sphinxext.mathmpl", "matplotlib.sphinxext.plot_directive", - "image_test_output", ] if skip_api == "1": @@ -171,6 +173,7 @@ def _dotv(version): # -- panels extension --------------------------------------------------------- # See https://sphinx-panels.readthedocs.io/en/latest/ +panels_add_bootstrap_css = False # -- Napoleon extension ------------------------------------------------------- # See https://sphinxcontrib-napoleon.readthedocs.io/en/latest/sphinxcontrib.napoleon.html @@ -188,16 +191,6 @@ def _dotv(version): napoleon_use_keyword = True napoleon_custom_sections = None -# -- spellingextension -------------------------------------------------------- -# See https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html -spelling_lang = "en_GB" -# The lines in this file must only use line feeds (no carriage returns). -spelling_word_list_filename = ["spelling_allow.txt"] -spelling_show_suggestions = False -spelling_show_whole_line = False -spelling_ignore_importable_modules = True -spelling_ignore_python_builtins = True - # -- copybutton extension ----------------------------------------------------- # See https://sphinx-copybutton.readthedocs.io/en/latest/ copybutton_prompt_text = r">>> |\.\.\. 
" @@ -229,6 +222,8 @@ def _dotv(version): "numpy": ("https://numpy.org/doc/stable/", None), "python": ("https://docs.python.org/3/", None), "scipy": ("https://docs.scipy.org/doc/scipy/", None), + "pandas": ("https://pandas.pydata.org/docs/", None), + "dask": ("https://docs.dask.org/en/stable/", None), } # The name of the Pygments (syntax highlighting) style to use. @@ -246,6 +241,10 @@ def _dotv(version): extlinks = { "issue": ("https://github.com/SciTools/iris/issues/%s", "Issue #"), "pull": ("https://github.com/SciTools/iris/pull/%s", "PR #"), + "discussion": ( + "https://github.com/SciTools/iris/discussions/%s", + "Discussion #", + ), } # -- Doctest ("make doctest")-------------------------------------------------- @@ -257,43 +256,74 @@ def _dotv(version): # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_logo = "_static/iris-logo-title.png" -html_favicon = "_static/favicon.ico" -html_theme = "sphinx_rtd_theme" +html_logo = "_static/iris-logo-title.svg" +html_favicon = "_static/iris-logo.svg" +html_theme = "pydata_sphinx_theme" + +# See https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/configuring.html#configure-the-search-bar-position +html_sidebars = { + "**": [ + "custom_sidebar_logo_version", + "search-field", + "sidebar-nav-bs", + "sidebar-ethical-ads", + ] +} +# See https://pydata-sphinx-theme.readthedocs.io/en/latest/user_guide/configuring.html html_theme_options = { - "display_version": True, - "style_external_links": True, - "logo_only": "True", + "footer_items": ["copyright", "sphinx-version", "custom_footer"], + "collapse_navigation": True, + "navigation_depth": 3, + "show_prev_next": True, + "navbar_align": "content", + "github_url": "https://github.com/SciTools/iris", + "twitter_url": "https://twitter.com/scitools_iris", + # icons available: https://fontawesome.com/v5.15/icons?d=gallery&m=free + "icon_links": [ + { + "name": "GitHub Discussions", + "url": 
"https://github.com/SciTools/iris/discussions", + "icon": "far fa-comments", + }, + { + "name": "PyPI", + "url": "https://pypi.org/project/scitools-iris/", + "icon": "fas fa-box", + }, + { + "name": "Conda", + "url": "https://anaconda.org/conda-forge/iris", + "icon": "fas fa-boxes", + }, + ], + "use_edit_page_button": True, + "show_toc_level": 1, + # Omitted `theme-switcher` below to disable it + # Info: https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/light-dark.html#configure-default-theme-mode + "navbar_end": ["navbar-icon-links"], } +rev_parse = run(["git", "rev-parse", "--short", "HEAD"], capture_output=True) +commit_sha = rev_parse.stdout.decode().strip() + html_context = { + # pydata_theme + "github_repo": "iris", + "github_user": "scitools", + "github_version": "main", + "doc_path": "docs/src", + # default theme. Also disabled the button in the html_theme_options. + # Info: https://pydata-sphinx-theme.readthedocs.io/en/stable/user_guide/light-dark.html#configure-default-theme-mode + "default_mode": "light", + # custom + "on_rtd": on_rtd, "rtd_version": rtd_version, + "rtd_version_type": rtd_version_type, "version": version, "copyright_years": copyright_years, "python_version": build_python_version, - # menu_links and menu_links_name are used in _templates/layout.html - # to include some nice icons. 
See http://fontawesome.io for a list of - # icons (used in the sphinx_rtd_theme) - "menu_links_name": "Support", - "menu_links": [ - ( - ' Source Code', - "https://github.com/SciTools/iris", - ), - ( - ' GitHub Discussions', - "https://github.com/SciTools/iris/discussions", - ), - ( - ' StackOverflow for "How Do I?"', - "https://stackoverflow.com/questions/tagged/python-iris", - ), - ( - ' Legacy Documentation', - "https://scitools.org.uk/iris/docs/v2.4.0/index.html", - ), - ], + "commit_sha": commit_sha, } # Add any paths that contain custom static files (such as style sheets) here, @@ -302,12 +332,24 @@ def _dotv(version): html_static_path = ["_static"] html_style = "theme_override.css" +# this allows for using datatables: https://datatables.net/ +html_css_files = [ + "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", +] + +html_js_files = [ + "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", +] + # url link checker. Some links work but report as broken, lets ignore them. # See https://www.sphinx-doc.org/en/1.2/config.html#options-for-the-linkcheck-builder linkcheck_ignore = [ + "http://catalogue.ceda.ac.uk/uuid/82adec1f896af6169112d09cc1174499", "http://cfconventions.org", "http://code.google.com/p/msysgit/downloads/list", "http://effbot.org", + "https://help.github.com", + "https://docs.github.com", "https://github.com", "http://www.personal.psu.edu/cab38/ColorBrewer/ColorBrewer_updates.html", "http://schacon.github.com/git", @@ -316,6 +358,7 @@ def _dotv(version): "https://software.ac.uk/how-cite-software", "http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml", "http://www.nationalarchives.gov.uk/doc/open-government-licence", + "https://www.metoffice.gov.uk/", ] # list of sources to exclude from the build. 
@@ -335,6 +378,11 @@ def _dotv(version): "ignore_pattern": r"__init__\.py", # force gallery building, unless overridden (see src/Makefile) "plot_gallery": "'True'", + # force re-registering of nc-time-axis with matplotlib for each example, + # required for sphinx-gallery>=0.11.0 + "reset_modules": ( + lambda gallery_conf, fname: sys.modules.pop("nc_time_axis", None), + ), } # ----------------------------------------------------------------------------- diff --git a/docs/src/developers_guide/assets/developer-settings-github-apps.png b/docs/src/developers_guide/assets/developer-settings-github-apps.png new file mode 100644 index 0000000000..a63994d087 Binary files /dev/null and b/docs/src/developers_guide/assets/developer-settings-github-apps.png differ diff --git a/docs/src/developers_guide/assets/download-pem.png b/docs/src/developers_guide/assets/download-pem.png new file mode 100644 index 0000000000..cbceb1304d Binary files /dev/null and b/docs/src/developers_guide/assets/download-pem.png differ diff --git a/docs/src/developers_guide/assets/generate-key.png b/docs/src/developers_guide/assets/generate-key.png new file mode 100644 index 0000000000..ac894dc71b Binary files /dev/null and b/docs/src/developers_guide/assets/generate-key.png differ diff --git a/docs/src/developers_guide/assets/gha-token-example.png b/docs/src/developers_guide/assets/gha-token-example.png new file mode 100644 index 0000000000..cba1cf6935 Binary files /dev/null and b/docs/src/developers_guide/assets/gha-token-example.png differ diff --git a/docs/src/developers_guide/assets/install-app.png b/docs/src/developers_guide/assets/install-app.png new file mode 100644 index 0000000000..31259de588 Binary files /dev/null and b/docs/src/developers_guide/assets/install-app.png differ diff --git a/docs/src/developers_guide/assets/install-iris-actions.png b/docs/src/developers_guide/assets/install-iris-actions.png new file mode 100644 index 0000000000..db16dee55b Binary files /dev/null and 
b/docs/src/developers_guide/assets/install-iris-actions.png differ diff --git a/docs/src/developers_guide/assets/installed-app.png b/docs/src/developers_guide/assets/installed-app.png new file mode 100644 index 0000000000..ab87032393 Binary files /dev/null and b/docs/src/developers_guide/assets/installed-app.png differ diff --git a/docs/src/developers_guide/assets/iris-actions-secret.png b/docs/src/developers_guide/assets/iris-actions-secret.png new file mode 100644 index 0000000000..f32456d0f2 Binary files /dev/null and b/docs/src/developers_guide/assets/iris-actions-secret.png differ diff --git a/docs/src/developers_guide/assets/iris-github-apps.png b/docs/src/developers_guide/assets/iris-github-apps.png new file mode 100644 index 0000000000..50753532b7 Binary files /dev/null and b/docs/src/developers_guide/assets/iris-github-apps.png differ diff --git a/docs/src/developers_guide/assets/iris-secrets-created.png b/docs/src/developers_guide/assets/iris-secrets-created.png new file mode 100644 index 0000000000..19b0ba11dc Binary files /dev/null and b/docs/src/developers_guide/assets/iris-secrets-created.png differ diff --git a/docs/src/developers_guide/assets/iris-security-actions.png b/docs/src/developers_guide/assets/iris-security-actions.png new file mode 100644 index 0000000000..7cbe3a7dc2 Binary files /dev/null and b/docs/src/developers_guide/assets/iris-security-actions.png differ diff --git a/docs/src/developers_guide/assets/iris-settings.png b/docs/src/developers_guide/assets/iris-settings.png new file mode 100644 index 0000000000..70714235c2 Binary files /dev/null and b/docs/src/developers_guide/assets/iris-settings.png differ diff --git a/docs/src/developers_guide/assets/org-perms-members.png b/docs/src/developers_guide/assets/org-perms-members.png new file mode 100644 index 0000000000..99fd8985e2 Binary files /dev/null and b/docs/src/developers_guide/assets/org-perms-members.png differ diff --git a/docs/src/developers_guide/assets/repo-perms-contents.png 
b/docs/src/developers_guide/assets/repo-perms-contents.png new file mode 100644 index 0000000000..4c325c334d Binary files /dev/null and b/docs/src/developers_guide/assets/repo-perms-contents.png differ diff --git a/docs/src/developers_guide/assets/repo-perms-pull-requests.png b/docs/src/developers_guide/assets/repo-perms-pull-requests.png new file mode 100644 index 0000000000..812f5ef951 Binary files /dev/null and b/docs/src/developers_guide/assets/repo-perms-pull-requests.png differ diff --git a/docs/src/developers_guide/assets/scitools-settings.png b/docs/src/developers_guide/assets/scitools-settings.png new file mode 100644 index 0000000000..8d7e728ab5 Binary files /dev/null and b/docs/src/developers_guide/assets/scitools-settings.png differ diff --git a/docs/src/developers_guide/assets/user-perms.png b/docs/src/developers_guide/assets/user-perms.png new file mode 100644 index 0000000000..607c7dcdb6 Binary files /dev/null and b/docs/src/developers_guide/assets/user-perms.png differ diff --git a/docs/src/developers_guide/assets/webhook-active.png b/docs/src/developers_guide/assets/webhook-active.png new file mode 100644 index 0000000000..538362f335 Binary files /dev/null and b/docs/src/developers_guide/assets/webhook-active.png differ diff --git a/docs/src/developers_guide/asv_example_images/commits.png b/docs/src/developers_guide/asv_example_images/commits.png new file mode 100644 index 0000000000..4e0d695322 Binary files /dev/null and b/docs/src/developers_guide/asv_example_images/commits.png differ diff --git a/docs/src/developers_guide/asv_example_images/comparison.png b/docs/src/developers_guide/asv_example_images/comparison.png new file mode 100644 index 0000000000..e146d30696 Binary files /dev/null and b/docs/src/developers_guide/asv_example_images/comparison.png differ diff --git a/docs/src/developers_guide/asv_example_images/scalability.png b/docs/src/developers_guide/asv_example_images/scalability.png new file mode 100644 index 0000000000..260c3ef536 
Binary files /dev/null and b/docs/src/developers_guide/asv_example_images/scalability.png differ diff --git a/docs/src/developers_guide/ci_checks.png b/docs/src/developers_guide/ci_checks.png old mode 100755 new mode 100644 index e088e03a66..54ab672b3c Binary files a/docs/src/developers_guide/ci_checks.png and b/docs/src/developers_guide/ci_checks.png differ diff --git a/docs/src/developers_guide/contributing_benchmarks.rst b/docs/src/developers_guide/contributing_benchmarks.rst new file mode 100644 index 0000000000..65bc9635b6 --- /dev/null +++ b/docs/src/developers_guide/contributing_benchmarks.rst @@ -0,0 +1,62 @@ +.. include:: ../common_links.inc + +.. _contributing.benchmarks: + +Benchmarking +============ +Iris includes architecture for benchmarking performance and other metrics of +interest. This is done using the `Airspeed Velocity`_ (ASV) package. + +Full detail on the setup and how to run or write benchmarks is in +`benchmarks/README.md`_ in the Iris repository. + +Continuous Integration +---------------------- +The primary purpose of `Airspeed Velocity`_, and Iris' specific benchmarking +setup, is to monitor for performance changes using statistical comparison +between commits, and this forms part of Iris' continuous integration. + +Accurately assessing performance takes longer than functionality pass/fail +tests, so the benchmark suite is not automatically run against open pull +requests; instead it is **run overnight against each of the commits of the +previous day** to check if any commit has introduced performance shifts. +Detected shifts are reported in a new Iris GitHub issue. + +If a pull request author/reviewer suspects their changes may cause performance +shifts, a convenience is available (currently via Nox) to replicate the +overnight benchmark run but comparing the current ``HEAD`` with a requested +branch (e.g. ``upstream/main``). Read more in `benchmarks/README.md`_.
+ +Other Uses +---------- +Even when not statistically comparing commits, ASV's accurate execution time +results - recorded using a sophisticated system of repeats - have other +applications. + +* Absolute numbers can be interpreted providing they are recorded on a + dedicated resource. +* Results for a series of commits can be visualised for an intuitive + understanding of when and why changes occurred. + + .. image:: asv_example_images/commits.png + :width: 300 + +* Parameterised benchmarks make it easy to visualise: + + * Comparisons + + .. image:: asv_example_images/comparison.png + :width: 300 + + * Scalability + + .. image:: asv_example_images/scalability.png + :width: 300 + +This also isn't limited to execution times. ASV can also measure memory demand, +and even arbitrary numbers (e.g. file size, regridding accuracy), although +without the repetition logic that execution timing has. + + +.. _Airspeed Velocity: https://github.com/airspeed-velocity/asv +.. _benchmarks/README.md: https://github.com/SciTools/iris/blob/main/benchmarks/README.md diff --git a/docs/src/developers_guide/contributing_ci_tests.rst b/docs/src/developers_guide/contributing_ci_tests.rst index 0257ff7cff..1d06434843 100644 --- a/docs/src/developers_guide/contributing_ci_tests.rst +++ b/docs/src/developers_guide/contributing_ci_tests.rst @@ -13,51 +13,50 @@ The `Iris`_ GitHub repository is configured to run checks against all its branches automatically whenever a pull-request is created, updated or merged. The checks performed are: -* :ref:`testing_cirrus` +* :ref:`testing_gha` * :ref:`testing_cla` * :ref:`pre_commit_ci` -.. _testing_cirrus: +.. _testing_gha: -Cirrus-CI -********* +GitHub Actions +************** Iris unit and integration tests are an essential mechanism to ensure that the Iris code base is working as expected. :ref:`developer_running_tests` may be performed manually by a developer locally. 
However Iris is configured to -use the `cirrus-ci`_ service for automated Continuous Integration (CI) testing. +use `GitHub Actions`_ (GHA) for automated Continuous Integration (CI) testing. -The `cirrus-ci`_ configuration file `.cirrus.yml`_ in the root of the Iris repository -defines the tasks to be performed by `cirrus-ci`_. For further details -refer to the `Cirrus-CI Documentation`_. The tasks performed during CI include: +The Iris GHA YAML configuration files in the ``.github/workflows`` directory +define the CI tasks to be performed. For further details +refer to the `GitHub Actions`_ documentation. The tasks performed during CI include: -* linting the code base and ensuring it adheres to the `black`_ format * running the system, integration and unit tests for Iris * ensuring the documentation gallery builds successfully * performing all doc-tests within the code base * checking all URL references within the code base and documentation are valid -The above `cirrus-ci`_ tasks are run automatically against all `Iris`_ branches +The above GHA tasks are run automatically against all `Iris`_ branches on GitHub whenever a pull-request is submitted, updated or merged. See the -`Cirrus-CI Dashboard`_ for details of recent past and active Iris jobs. +`Iris GitHub Actions`_ dashboard for details of recent past and active CI jobs. -.. _cirrus_test_env: +.. _gha_test_env: -Cirrus CI Test environment --------------------------- +GitHub Actions Test Environment +------------------------------- -The test environment on the Cirrus-CI service is determined from the requirement files -in ``requirements/ci/py**.yml``. These are conda environment files that list the entire -set of build, test and run requirements for Iris. +The CI test environments for our GHA are determined from the requirement files +in ``requirements/ci/pyXX.yml``. These are conda environment files that list the top-level +package dependencies for running and testing Iris.
For reproducible test results, these environments are resolved for all their dependencies -and stored as lock files in ``requirements/ci/nox.lock``. The test environments will not -resolve the dependencies each time, instead they will use the lock file to reproduce the -same exact environment each time. +and stored as conda lock files in the ``requirements/ci/nox.lock`` directory. The test environments +will not resolve the dependencies each time, instead they will use the lock files to reproduce the +exact same environment each time. -**If you have updated the requirement yaml files with new dependencies, you will need to +**If you have updated the requirement YAML files with new dependencies, you will need to generate new lock files.** To do this, run the command:: python tools/update_lockfiles.py -o requirements/ci/nox.lock requirements/ci/py*.yml @@ -68,49 +67,22 @@ or simply:: and add the changed lockfiles to your pull request. +.. note:: + + If your installation of conda runs through Artifactory or another similar + proxy then you will need to amend that lockfile to use URLs that Github + Actions can access. A utility to strip out Artifactory exists in the + ``ssstack`` tool. + New lockfiles are generated automatically each week to ensure that Iris continues to be tested against the latest available version of its dependencies. Each week the yaml files in ``requirements/ci`` are resolved by a GitHub Action. If the resolved environment has changed, a pull request is created with the new lock files. -The CI test suite will run on this pull request and fixes for failed tests can be pushed to -the ``auto-update-lockfiles`` branch to be included in the PR. -Once a developer has pushed to this branch, the auto-update process will not run again until -the PR is merged, to prevent overwriting developer commits. 
-The auto-updater can still be invoked manually in this situation by going to the `GitHub Actions`_ -page for the workflow, and manually running using the "Run Workflow" button. -By default, this will also not override developer commits. To force an update, you must -confirm "yes" in the "Run Worflow" prompt. - - -.. _skipping Cirrus-CI tasks: - -Skipping Cirrus-CI Tasks ------------------------- - -As a developer you may wish to not run all the CI tasks when you are actively -developing e.g., you are writing documentation and there is no need for linting, -or long running compute intensive testing tasks to be executed. - -As a convenience, it is possible to easily skip one or more tasks by setting -the appropriate environment variable within the `.cirrus.yml`_ file to a -**non-empty** string: - -* ``SKIP_LINT_TASK`` to skip `flake8`_ linting and `black`_ formatting -* ``SKIP_TEST_MINIMAL_TASK`` to skip restricted unit and integration testing -* ``SKIP_TEST_FULL_TASK`` to skip full unit and integration testing -* ``SKIP_GALLERY_TASK`` to skip building the documentation gallery -* ``SKIP_DOCTEST_TASK`` to skip running the documentation doc-tests -* ``SKIP_LINKCHECK_TASK`` to skip checking for broken documentation URL references -* ``SKIP_ALL_TEST_TASKS`` which is equivalent to setting ``SKIP_TEST_MINIMAL_TASK`` and ``SKIP_TEST_FULL_TASK`` -* ``SKIP_ALL_DOC_TASKS`` which is equivalent to setting ``SKIP_GALLERY_TASK``, ``SKIP_DOCTEST_TASK``, and ``SKIP_LINKCHECK_TASK`` - -e.g., to skip the linting task, the following are all equivalent:: - - SKIP_LINT_TASK: "1" - SKIP_LINT_TASK: "true" - SKIP_LINT_TASK: "false" - SKIP_LINT_TASK: "skip" - SKIP_LINT_TASK: "unicorn" +The CI test suite will run on this pull request. If the tests fail, a developer +will need to create a new branch based off the ``auto-update-lockfiles`` branch +and add the required fixes to this new branch. 
If the fixes are made to the +``auto-update-lockfiles`` branch, these will be overwritten the next time the +GitHub Action is run. 


GitHub Checklist @@ -146,9 +118,5 @@ pull-requests given the `Iris`_ GitHub repository `.pre-commit-config.yaml`_. See the `pre-commit.ci dashboard`_ for details of recent past and active Iris jobs. - -.. _Cirrus-CI Dashboard: https://cirrus-ci.com/github/SciTools/iris -.. _Cirrus-CI Documentation: https://cirrus-ci.org/guide/writing-tasks/ .. _.pre-commit-config.yaml: https://github.com/SciTools/iris/blob/main/.pre-commit-config.yaml .. _pre-commit.ci dashboard: https://results.pre-commit.ci/repo/github/5312648 -.. _GitHub Actions: https://github.com/SciTools/iris/actions/workflows/refresh-lockfiles.yml diff --git a/docs/src/developers_guide/contributing_codebase_index.rst b/docs/src/developers_guide/contributing_codebase_index.rst index 88986c0c7a..b59a196ff0 100644 --- a/docs/src/developers_guide/contributing_codebase_index.rst +++ b/docs/src/developers_guide/contributing_codebase_index.rst @@ -1,7 +1,7 @@ .. _contributing.documentation.codebase: -Contributing to the Code Base -============================= +Working with the Code Base +========================== .. toctree:: :maxdepth: 3 diff --git a/docs/src/developers_guide/contributing_deprecations.rst b/docs/src/developers_guide/contributing_deprecations.rst index 1ecafdca9f..0b22e2cbd2 100644 --- a/docs/src/developers_guide/contributing_deprecations.rst +++ b/docs/src/developers_guide/contributing_deprecations.rst @@ -25,29 +25,29 @@ deprecation is accompanied by the introduction of a new public API. Under these circumstances the following points apply: - - Using the deprecated API must result in a concise deprecation warning which - is an instance of :class:`iris.IrisDeprecation`. - It is easiest to call - :func:`iris._deprecation.warn_deprecated`, which is a - simple wrapper to :func:`warnings.warn` with the signature - `warn_deprecation(message, **kwargs)`. 
- - Where possible, your deprecation warning should include advice on - how to avoid using the deprecated API. For example, you might - reference a preferred API, or more detailed documentation elsewhere. - - You must update the docstring for the deprecated API to include a - Sphinx deprecation directive: - - :literal:`.. deprecated:: ` - - where you should replace `` with the major and minor version - of Iris in which this API is first deprecated. For example: `1.8`. - - As with the deprecation warning, you should include advice on how to - avoid using the deprecated API within the content of this directive. - Feel free to include more detail in the updated docstring than in the - deprecation warning. - - You should check the documentation for references to the deprecated - API and update them as appropriate. +- Using the deprecated API must result in a concise deprecation warning which + is an instance of :class:`iris.IrisDeprecation`. + It is easiest to call + :func:`iris._deprecation.warn_deprecated`, which is a + simple wrapper to :func:`warnings.warn` with the signature + `warn_deprecation(message, **kwargs)`. +- Where possible, your deprecation warning should include advice on + how to avoid using the deprecated API. For example, you might + reference a preferred API, or more detailed documentation elsewhere. +- You must update the docstring for the deprecated API to include a + Sphinx deprecation directive: + + :literal:`.. deprecated:: ` + + where you should replace `` with the major and minor version + of Iris in which this API is first deprecated. For example: `1.8`. + + As with the deprecation warning, you should include advice on how to + avoid using the deprecated API within the content of this directive. + Feel free to include more detail in the updated docstring than in the + deprecation warning. +- You should check the documentation for references to the deprecated + API and update them as appropriate. 
Changing a Default ------------------ @@ -64,14 +64,14 @@ it causes the corresponding public API to use its new default behaviour. The following points apply in addition to those for removing a public API: - - You should add a new boolean attribute to :data:`iris.FUTURE` (by - modifying :class:`iris.Future`) that controls the default behaviour - of the public API that needs updating. The initial state of the new - boolean attribute should be `False`. You should name the new boolean - attribute to indicate that setting it to `True` will select the new - default behaviour. - - You should include a reference to this :data:`iris.FUTURE` flag in your - deprecation warning and corresponding Sphinx deprecation directive. +- You should add a new boolean attribute to :data:`iris.FUTURE` (by + modifying :class:`iris.Future`) that controls the default behaviour + of the public API that needs updating. The initial state of the new + boolean attribute should be `False`. You should name the new boolean + attribute to indicate that setting it to `True` will select the new + default behaviour. +- You should include a reference to this :data:`iris.FUTURE` flag in your + deprecation warning and corresponding Sphinx deprecation directive. Removing a Deprecation @@ -94,11 +94,11 @@ and/or example code should be removed/updated as appropriate. Changing a Default ------------------ - - You should update the initial state of the relevant boolean attribute - of :data:`iris.FUTURE` to `True`. - - You should deprecate setting the relevant boolean attribute of - :class:`iris.Future` in the same way as described in - :ref:`removing-a-public-api`. +- You should update the initial state of the relevant boolean attribute + of :data:`iris.FUTURE` to `True`. +- You should deprecate setting the relevant boolean attribute of + :class:`iris.Future` in the same way as described in + :ref:`removing-a-public-api`. .. 
rubric:: Footnotes diff --git a/docs/src/developers_guide/contributing_documentation_full.rst b/docs/src/developers_guide/contributing_documentation_full.rst index 77b898c0f3..a470def683 100755 --- a/docs/src/developers_guide/contributing_documentation_full.rst +++ b/docs/src/developers_guide/contributing_documentation_full.rst @@ -1,3 +1,4 @@ +.. include:: ../common_links.inc .. _contributing.documentation_full: @@ -31,7 +32,7 @@ The build can be run from the documentation directory ``docs/src``. The build output for the html is found in the ``_build/html`` sub directory. When updating the documentation ensure the html build has *no errors* or -*warnings* otherwise it may fail the automated `cirrus-ci`_ build. +*warnings* otherwise it may fail the automated `Iris GitHub Actions`_ build. Once the build is complete, if it is rerun it will only rebuild the impacted build artefacts so should take less time. @@ -60,27 +61,36 @@ If you wish to run a full clean build you can run:: make clean make html -This is useful for a final test before committing your changes. +This is useful for a final test before committing your changes. Having built +the documentation, you can view them in your default browser via:: + + make show .. note:: In order to preserve a clean build for the html, all **warnings** have been promoted to be **errors** to ensure they are addressed. This **only** applies when ``make html`` is run. -.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris - .. _contributing.documentation.testing: Testing ~~~~~~~ -There are a ways to test various aspects of the documentation. The -``make`` commands shown below can be run in the ``docs`` or -``docs/src`` directory. +There are various ways to test aspects of the documentation. Each :ref:`contributing.documentation.gallery` entry has a corresponding test. 
-To run the tests:: +To run all the gallery tests:: + + pytest -v docs/gallery_tests/test_gallery_examples.py + +To run a test for a single gallery example, use the ``pytest -k`` option for +pattern matching, e.g.:: + + pytest -v -k plot_coriolis docs/gallery_tests/test_gallery_examples.py + +If a gallery test fails, follow the instructions in :ref:`testing.graphics`. - make gallerytest +The ``make`` commands shown below can be run in the ``docs`` or ``docs/src`` +directory. Many documentation pages includes python code itself that can be run to ensure it is still valid or to demonstrate examples. To ensure these tests pass @@ -103,19 +113,7 @@ adding it to the ``linkcheck_ignore`` array that is defined in the If this fails check the output for the text **broken** and then correct or ignore the url. -.. comment - Finally, the spelling in the documentation can be checked automatically via the - command:: - - make spelling - - The spelling check may pull up many technical abbreviations and acronyms. This - can be managed by using an **allow** list in the form of a file. This file, - or list of files is set in the `conf.py`_ using the string list - ``spelling_word_list_filename``. - - -.. note:: In addition to the automated `cirrus-ci`_ build of all the +.. note:: In addition to the automated `Iris GitHub Actions`_ build of all the documentation build options above, the https://readthedocs.org/ service is also used. The configuration of this held in a file in the root of the @@ -148,7 +146,7 @@ can exclude the module from the API documentation. Add the entry to the Gallery ~~~~~~~ -The Iris :ref:`sphx_glr_generated_gallery` uses a sphinx extension named +The Iris :ref:`gallery_index` uses a sphinx extension named `sphinx-gallery `_ that auto generates reStructuredText (rst) files based upon a gallery source directory that abides directory and filename convention. @@ -157,13 +155,13 @@ The code for the gallery entries are in ``docs/gallery_code``. 
Each sub directory in this directory is a sub section of the gallery. The respective ``README.rst`` in each folder is included in the gallery output. -For each gallery entry there must be a corresponding test script located in -``docs/gallery_tests``. - To add an entry to the gallery simple place your python code into the appropriate sub directory and name it with a prefix of ``plot_``. If your gallery entry does not fit into any existing sub directories then create a new -directory and place it in there. +directory and place it in there. A test for the gallery entry will be +automatically generated (see Testing_ for how to run it). To add a new +reference image for this test, follow the instructions in +:ref:`testing.graphics`. The reStructuredText (rst) output of the gallery is located in ``docs/src/generated/gallery``. diff --git a/docs/src/developers_guide/contributing_getting_involved.rst b/docs/src/developers_guide/contributing_getting_involved.rst index f7bd4733a3..9ec6559114 100644 --- a/docs/src/developers_guide/contributing_getting_involved.rst +++ b/docs/src/developers_guide/contributing_getting_involved.rst @@ -1,8 +1,9 @@ .. include:: ../common_links.inc .. _development_where_to_start: +.. _developers_guide: -Getting Involved +Developers Guide ---------------- Iris_ is an Open Source project hosted on Github and as such anyone with a @@ -17,7 +18,7 @@ The `Iris GitHub`_ project has been configured to use templates for each of the above issue types when creating a `new issue`_ to ensure the appropriate information is provided. -Alternatively, **join the conversation** in `Iris GitHub Discussions`_, when +Alternatively, **join the conversation** in Iris `GitHub Discussions`_, when you would like the opinions of the Iris community. 
A `pull request`_ may also be created by anyone who has become a @@ -25,7 +26,7 @@ A `pull request`_ may also be created by anyone who has become a ``main`` branch are only given to **core developers** of Iris_, this is to ensure a measure of control. -To get started we suggest reading recent `issues`_, `discussions`_ and +To get started we suggest reading recent `issues`_, `GitHub Discussions`_ and `pull requests`_ for Iris. If you are new to using GitHub we recommend reading the @@ -36,5 +37,30 @@ If you are new to using GitHub we recommend reading the `Governance `_ section of the `SciTools`_ ogranization web site. - .. _GitHub getting started: https://docs.github.com/en/github/getting-started-with-github + + +.. toctree:: + :maxdepth: 1 + :caption: Developers Guide + :name: development_index + :hidden: + + gitwash/index + contributing_documentation + contributing_codebase_index + contributing_changes + github_app + release + + +.. toctree:: + :maxdepth: 1 + :caption: Reference + :hidden: + + ../generated/api/iris + ../whatsnew/index + ../techpapers/index + ../copyright + ../voted_issues diff --git a/docs/src/developers_guide/contributing_graphics_tests.rst b/docs/src/developers_guide/contributing_graphics_tests.rst index 1268aa2686..7964c008c5 100644 --- a/docs/src/developers_guide/contributing_graphics_tests.rst +++ b/docs/src/developers_guide/contributing_graphics_tests.rst @@ -2,72 +2,17 @@ .. _testing.graphics: -Graphics Tests -************** +Adding or Updating Graphics Tests +================================= -Iris may be used to create various forms of graphical output; to ensure -the output is consistent, there are automated tests to check against -known acceptable graphical output. See :ref:`developer_running_tests` for -more information. 
- -At present graphical tests are used in the following areas of Iris: - -* Module ``iris.tests.test_plot`` -* Module ``iris.tests.test_quickplot`` -* :ref:`sphx_glr_generated_gallery` plots contained in - ``docs/gallery_tests``. - - -Challenges -========== - -Iris uses many dependencies that provide functionality, an example that -applies here is matplotlib_. For more information on the dependences, see -:ref:`installing_iris`. When there are updates to the matplotlib_ or a -dependency of matplotlib, this may result in a change in the rendered graphical -output. This means that there may be no changes to Iris_, but due to an -updated dependency any automated tests that compare a graphical output to a -known acceptable output may fail. The failure may also not be visually -perceived as it may be a simple pixel shift. - - -Testing Strategy -================ - -The `Iris Cirrus-CI matrix`_ defines multiple test runs that use -different versions of Python to ensure Iris is working as expected. - -To make this manageable, the ``iris.tests.IrisTest_nometa.check_graphic`` test -routine tests against multiple alternative **acceptable** results. It does -this using an image **hash** comparison technique which avoids storing -reference images in the Iris repository itself. - -This consists of: - - * The ``iris.tests.IrisTest_nometa.check_graphic`` function uses a perceptual - **image hash** of the outputs (see https://github.com/JohannesBuchner/imagehash) - as the basis for checking test results. - - * The hashes of known **acceptable** results for each test are stored in a - lookup dictionary, saved to the repo file - ``lib/iris/tests/results/imagerepo.json`` - (`link `_) . - - * An actual reference image for each hash value is stored in a *separate* - public repository https://github.com/SciTools/test-iris-imagehash. - - * The reference images allow human-eye assessment of whether a new output is - judged to be close enough to the older ones, or not. 
- - * The utility script ``iris/tests/idiff.py`` automates checking, enabling the - developer to easily compare proposed new **acceptable** result images - against the existing accepted reference images, for each failing test. +.. note:: -The acceptable images for each test can be viewed online. The :ref:`testing.imagehash_index` lists all the graphical tests in the test suite and -shows the known acceptable result images for comparison. + If a large number of images tests are failing due to an update to the + libraries used for image hashing, follow the instructions on + :ref:`refresh-imagerepo`. -Reviewing Failing Tests -======================= +Generating New Results +---------------------- When you find that a graphics test in the Iris testing suite has failed, following changes in Iris or the run dependencies, this is the process @@ -76,14 +21,24 @@ you should follow: #. Create a new, empty directory to store temporary image results, at the path ``lib/iris/tests/result_image_comparison`` in your Iris repository checkout. -#. **In your Iris repo root directory**, run the relevant (failing) tests - directly as python scripts, or by using a command such as:: +#. Run the relevant (failing) tests directly as python scripts, or using + ``pytest``. + +The results of the failing image tests will now be available in +``lib/iris/tests/result_image_comparison``. + +.. note:: + + The ``result_image_comparison`` folder is covered by a project + ``.gitignore`` setting, so those files *will not show up* in a + ``git status`` check. - python -m unittest discover paths/to/test/files +Reviewing Failing Tests +----------------------- -#. In the ``iris/lib/iris/tests`` folder, run the command:: +#. 
Run ``iris/lib/iris/tests/graphics/idiff.py`` with python, e.g.: - python idiff.py + python idiff.py This will open a window for you to visually inspect side-by-side **old**, **new** and **difference** images for each failed @@ -92,29 +47,28 @@ you should follow: If the change is **accepted**: - * the imagehash value of the new result image is added into the relevant - set of 'valid result hashes' in the image result database file, - ``tests/results/imagerepo.json`` + * the imagehash value of the new result image is added into the relevant + set of 'valid result hashes' in the image result database file, + ``tests/results/imagerepo.json`` - * the relevant output file in ``tests/result_image_comparison`` is - renamed according to the image hash value, as ``.png``. - A copy of this new PNG file must then be added into the reference image - repository at https://github.com/SciTools/test-iris-imagehash - (See below). + * the relevant output file in ``tests/result_image_comparison`` is renamed + according to the test name. A copy of this new PNG file must then be added + into the ``iris-test-data`` repository, at + https://github.com/SciTools/iris-test-data (See below). If a change is **skipped**: - * no further changes are made in the repo. + * no further changes are made in the repo. - * when you run ``iris/tests/idiff.py`` again, the skipped choice will be - presented again. + * when you run ``iris/tests/idiff.py`` again, the skipped choice will be + presented again. If a change is **rejected**: - * the output image is deleted from ``result_image_comparison``. + * the output image is deleted from ``result_image_comparison``. - * when you run ``iris/tests/idiff.py`` again, the skipped choice will not - appear, unless the relevant failing test is re-run. + * when you run ``iris/tests/idiff.py`` again, the skipped choice will not + appear, unless the relevant failing test is re-run. #. **Now re-run the tests**. 
The **new** result should now be recognised and the relevant test should pass. However, some tests can perform *multiple* @@ -123,46 +77,66 @@ you should follow: re-run may encounter further (new) graphical test failures. If that happens, simply repeat the check-and-accept process until all tests pass. +#. You're now ready to :ref:`add-graphics-test-changes` -Add Your Changes to Iris -======================== -To add your changes to Iris, you need to make two pull requests (PR). +Adding a New Image Test +----------------------- -#. The first PR is made in the ``test-iris-imagehash`` repository, at - https://github.com/SciTools/test-iris-imagehash. +If you attempt to run ``idiff.py`` when there are new graphical tests for which +no baseline yet exists, you will get a warning that ``idiff.py`` is ``Ignoring +unregistered test result...``. In this case, - * First, add all the newly-generated referenced PNG files into the - ``images/v4`` directory. In your Iris repo, these files are to be found - in the temporary results folder ``iris/tests/result_image_comparison``. +#. rename the relevant images from ``iris/tests/result_image_comparison`` by - * Then, to update the file which lists available images, - ``v4_files_listing.txt``, run from the project root directory:: + * removing the ``result-`` prefix - python recreate_v4_files_listing.py + * fully qualifying the test name if it isn't already (i.e. it should start + ``iris.tests...`` or ``gallery_tests...``) - * Create a PR proposing these changes, in the usual way. +#. run the tests in the mode that lets them create missing data (see + :ref:`create-missing`). This will update ``imagerepo.json`` with the new + test name and image hash. -#. The second PR is created in the Iris_ repository, and - should only include the change to the image results database, - ``tests/results/imagerepo.json``. - The description box of this pull request should contain a reference to - the matching one in ``test-iris-imagehash``. 
and then add them to the Iris test data as covered in + :ref:`add-graphics-test-changes`. -.. note:: - The ``result_image_comparison`` folder is covered by a project - ``.gitignore`` setting, so those files *will not show up* in a - ``git status`` check. +.. _refresh-imagerepo: -.. important:: +Refreshing the Stored Hashes +---------------------------- - The Iris pull-request will not test successfully in Cirrus-CI until the - ``test-iris-imagehash`` pull request has been merged. This is because there - is an Iris_ test which ensures the existence of the reference images (uris) - for all the targets in the image results database. It will also fail - if you forgot to run ``recreate_v4_files_listing.py`` to update the - image-listing file in ``test-iris-imagehash``. +From time to time, a new version of the image hashing library will cause all +image hashes to change. The image hashes stored in +``tests/results/imagerepo.json`` can be refreshed using the baseline images +stored in the ``iris-test-data`` repository (at +https://github.com/SciTools/iris-test-data) using the script +``tests/graphics/recreate_imagerepo.py``. Use the ``--help`` argument for the +command line arguments. -.. _Iris Cirrus-CI matrix: https://github.com/scitools/iris/blob/main/.cirrus.yml +.. _add-graphics-test-changes: + +Add Your Changes to Iris +------------------------ + +To add your changes to Iris, you need to make two pull requests (PR). + +#. The first PR is made in the ``iris-test-data`` repository, at + https://github.com/SciTools/iris-test-data. + + * Add all the newly-generated referenced PNG files into the + ``test_data/images`` directory. In your Iris repo, these files are to be found + in the temporary results folder ``iris/tests/result_image_comparison``. + + * Create a PR proposing these changes, in the usual way. + +#. The second PR is the one that makes the changes you intend to the Iris_ repository. 
+ The description box of this pull request should contain a reference to + the matching one in ``iris-test-data``. + + * This PR should include updating the version of the test data in + ``.github/workflows/ci-tests.yml`` and + ``.github/workflows/ci-docs-tests.yml`` to the new version created by the + merging of your ``iris-test-data`` PR. diff --git a/docs/src/developers_guide/contributing_pull_request_checklist.rst b/docs/src/developers_guide/contributing_pull_request_checklist.rst index 5afb461d68..57bc9fd728 100644 --- a/docs/src/developers_guide/contributing_pull_request_checklist.rst +++ b/docs/src/developers_guide/contributing_pull_request_checklist.rst @@ -16,8 +16,8 @@ is merged. Before submitting a pull request please consider this list. #. **Provide a helpful description** of the Pull Request. This should include: - * The aim of the change / the problem addressed / a link to the issue. - * How the change has been delivered. + * The aim of the change / the problem addressed / a link to the issue. + * How the change has been delivered. #. **Include a "What's New" entry**, if appropriate. See :ref:`whats_new_contributions`. @@ -31,10 +31,11 @@ is merged. Before submitting a pull request please consider this list. #. **Check all new dependencies added to the** `requirements/ci/`_ **yaml files.** If dependencies have been added then new nox testing lockfiles - should be generated too, see :ref:`cirrus_test_env`. + should be generated too, see :ref:`gha_test_env`. #. **Check the source documentation been updated to explain all new or changed - features**. See :ref:`docstrings`. + features**. Note, we now use numpydoc strings. Any touched code should + be updated to use the docstrings formatting. See :ref:`docstrings`. #. **Include code examples inside the docstrings where appropriate**. See :ref:`contributing.documentation.testing`. @@ -42,8 +43,6 @@ is merged. Before submitting a pull request please consider this list. #. 
**Check the documentation builds without warnings or errors**. See :ref:`contributing.documentation.building` -#. **Check for any new dependencies in the** `.cirrus.yml`_ **config file.** - #. **Check for any new dependencies in the** `readthedocs.yml`_ **file**. This file is used to build the documentation that is served from https://scitools-iris.readthedocs.io/en/latest/ @@ -51,12 +50,10 @@ is merged. Before submitting a pull request please consider this list. #. **Check for updates needed for supporting projects for test or example data**. For example: - * `iris-test-data`_ is a github project containing all the data to support - the tests. - * `iris-sample-data`_ is a github project containing all the data to support - the gallery and examples. - * `test-iris-imagehash`_ is a github project containing reference plot - images to support Iris :ref:`testing.graphics`. + * `iris-test-data`_ is a github project containing all the data to support + the tests. + * `iris-sample-data`_ is a github project containing all the data to support + the gallery and examples. If new files are required by tests or code examples, they must be added to the appropriate supporting project via a suitable pull-request. This pull diff --git a/docs/src/developers_guide/contributing_running_tests.rst b/docs/src/developers_guide/contributing_running_tests.rst index ab36172283..f60cedba05 100644 --- a/docs/src/developers_guide/contributing_running_tests.rst +++ b/docs/src/developers_guide/contributing_running_tests.rst @@ -5,13 +5,22 @@ Running the Tests ***************** -Using setuptools for Testing Iris -================================= +There are two options for running the tests: -.. warning:: The `setuptools`_ ``test`` command was deprecated in `v41.5.0`_. See :ref:`using nox`. +* Use an environment you created yourself. This requires more manual steps to + set up, but gives you more flexibility. 
For example, you can run a subset of + the tests or use ``python`` interactively to investigate any issues. See + :ref:`test manual env`. -A prerequisite of running the tests is to have the Python environment -setup. For more information on this see :ref:`installing_from_source`. +* Use ``nox``. This will automatically generate an environment and run test + sessions consistent with our GitHub continuous integration. See :ref:`using nox`. + +.. _test manual env: + +Testing Iris in a Manually Created Environment +============================================== + +To create a suitable environment for running the tests, see :ref:`installing_from_source`. Many Iris tests will use data that may be defined in the test itself, however this is not always the case as sometimes example files may be used. Due to @@ -32,81 +41,76 @@ The example command below uses ``~/projects`` as the parent directory:: git clone git@github.com:SciTools/iris-test-data.git export OVERRIDE_TEST_DATA_REPOSITORY=~/projects/iris-test-data/test_data -All the Iris tests may be run from the root ``iris`` project directory via:: +All the Iris tests may be run from the root ``iris`` project directory using +``pytest``. For example:: - python setup.py test - -You can also run a specific test, the example below runs the tests for -mapping:: + pytest -n 2 - cd lib/iris/tests - python test_mapping.py +will run the tests across two processes. For more options, use the command +``pytest -h``. Below is a trimmed example of the output:: -When running the test directly as above you can view the command line options -using the commands ``python test_mapping.py -h`` or -``python test_mapping.py --help``. + ============================= test session starts ============================== + platform linux -- Python 3.10.5, pytest-7.1.2, pluggy-1.0.0 + rootdir: /path/to/git/clone/iris, configfile: pyproject.toml, testpaths: lib/iris + plugins: xdist-2.5.0, forked-1.4.0 + gw0 I / gw1 I + gw0 [6361] / gw1 [6361] -.. 
tip:: A useful command line option to use is ``-d``. This will display - matplotlib_ figures as the tests are run. For example:: - - python test_mapping.py -d - - You can also use the ``-d`` command line option when running all - the tests but this will take a while to run and will require the - manual closing of each of the figures for the tests to continue. - -The output from running the tests is verbose as it will run ~5000 separate -tests. Below is a trimmed example of the output:: - - running test - Running test suite(s): default - - Running test discovery on iris.tests with 2 processors. - test_circular_subset (iris.tests.experimental.regrid.test_regrid_area_weighted_rectilinear_src_and_grid.TestAreaWeightedRegrid) ... ok - test_cross_section (iris.tests.experimental.regrid.test_regrid_area_weighted_rectilinear_src_and_grid.TestAreaWeightedRegrid) ... ok - test_different_cs (iris.tests.experimental.regrid.test_regrid_area_weighted_rectilinear_src_and_grid.TestAreaWeightedRegrid) ... ok - ... + ........................................................................ [ 1%] + ........................................................................ [ 2%] + ........................................................................ [ 3%] ... - test_ellipsoid (iris.tests.unit.experimental.raster.test_export_geotiff.TestProjection) ... SKIP: Test requires 'gdal'. - test_no_ellipsoid (iris.tests.unit.experimental.raster.test_export_geotiff.TestProjection) ... SKIP: Test requires 'gdal'. + .......................ssssssssssssssssss............................... [ 99%] + ........................ [100%] + =============================== warnings summary =============================== ... + -- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html + =========================== short test summary info ============================ + SKIPPED [1] lib/iris/tests/experimental/test_raster.py:152: Test requires 'gdal'. 
+ SKIPPED [1] lib/iris/tests/experimental/test_raster.py:155: Test requires 'gdal'. ... - test_slice (iris.tests.test_util.TestAsCompatibleShape) ... ok - test_slice_and_transpose (iris.tests.test_util.TestAsCompatibleShape) ... ok - test_transpose (iris.tests.test_util.TestAsCompatibleShape) ... ok - - ---------------------------------------------------------------------- - Ran 4762 tests in 238.649s - - OK (SKIP=22) + ========= 6340 passed, 21 skipped, 1659 warnings in 193.57s (0:03:13) ========== There may be some tests that have been **skipped**. This is due to a Python decorator being present in the test script that will intentionally skip a test if a certain condition is not met. In the example output above there are -**22** skipped tests, at the point in time when this was run this was primarily -due to an experimental dependency not being present. - +**21** skipped tests. At the point in time when this was run this was due to an +experimental dependency not being present. .. tip:: The most common reason for tests to be skipped is when the directory for the ``iris-test-data`` has not been set which would shows output such as:: - test_coord_coord_map (iris.tests.test_plot.Test1dScatter) ... SKIP: Test(s) require external data. - test_coord_coord (iris.tests.test_plot.Test1dScatter) ... SKIP: Test(s) require external data. - test_coord_cube (iris.tests.test_plot.Test1dScatter) ... SKIP: Test(s) require external data. - + SKIPPED [1] lib/iris/tests/unit/fileformats/test_rules.py:157: Test(s) require external data. + SKIPPED [1] lib/iris/tests/unit/fileformats/pp/test__interpret_field.py:97: Test(s) require external data. + SKIPPED [1] lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py:29: Test(s) require external data. + All Python decorators that skip tests will be defined in ``lib/iris/tests/__init__.py`` with a function name with a prefix of ``skip_``. +You can also run a specific test module. 
The example below runs the tests for +mapping:: + + cd lib/iris/tests + python test_mapping.py + +When running the test directly as above you can view the command line options +using the commands ``python test_mapping.py -h`` or +``python test_mapping.py --help``. + +.. tip:: A useful command line option to use is ``-d``. This will display + matplotlib_ figures as the tests are run. For example:: + + python test_mapping.py -d .. _using nox: Using Nox for Testing Iris ========================== -Iris has adopted the use of the `nox`_ tool for automated testing on `cirrus-ci`_ +The `nox`_ tool has been adopted for automated testing on `Iris GitHub Actions`_ and also locally on the command-line for developers. `nox`_ is similar to `tox`_, but instead leverages the expressiveness and power of a Python @@ -124,15 +128,12 @@ automates the process of: * building the documentation and executing the doc-tests * building the documentation gallery * running the documentation URL link check -* linting the code-base -* ensuring the code-base style conforms to the `black`_ standard - You can perform all of these tasks manually yourself, however the onus is on you to first ensure that all of the required package dependencies are installed and available in the testing environment. `Nox`_ has been configured to automatically do this for you, and provides a means to easily replicate -the remote testing behaviour of `cirrus-ci`_ locally for the developer. +the remote testing behaviour of `Iris GitHub Actions`_ locally for the developer. 
Installing Nox diff --git a/docs/src/developers_guide/contributing_testing.rst b/docs/src/developers_guide/contributing_testing.rst index d0c96834a9..a65bcebd55 100644 --- a/docs/src/developers_guide/contributing_testing.rst +++ b/docs/src/developers_guide/contributing_testing.rst @@ -8,8 +8,8 @@ Test Categories There are two main categories of tests within Iris: - - :ref:`testing.unit_test` - - :ref:`testing.integration` +- :ref:`testing.unit_test` +- :ref:`testing.integration` Ideally, all code changes should be accompanied by one or more unit tests, and by zero or more integration tests. diff --git a/docs/src/developers_guide/contributing_testing_index.rst b/docs/src/developers_guide/contributing_testing_index.rst index c5cf1b997b..2f5ae411e8 100644 --- a/docs/src/developers_guide/contributing_testing_index.rst +++ b/docs/src/developers_guide/contributing_testing_index.rst @@ -7,7 +7,8 @@ Testing :maxdepth: 3 contributing_testing + testing_tools contributing_graphics_tests - imagehash_index contributing_running_tests contributing_ci_tests + contributing_benchmarks diff --git a/docs/src/developers_guide/documenting/docstrings.rst b/docs/src/developers_guide/documenting/docstrings.rst index 8a06024ee2..eeefc71e40 100644 --- a/docs/src/developers_guide/documenting/docstrings.rst +++ b/docs/src/developers_guide/documenting/docstrings.rst @@ -8,10 +8,10 @@ Every public object in the Iris package should have an appropriate docstring. This is important as the docstrings are used by developers to understand the code and may be read directly in the source or via the :ref:`Iris`. -This document has been influenced by the following PEP's, - - * Attribute Docstrings :pep:`224` - * Docstring Conventions :pep:`257` +.. note:: + As of April 2022 we are looking to adopt `numpydoc`_ strings as standard. + We aim to complete the adoption over time as we do changes to the codebase. 
+ For examples of use see `numpydoc`_ and `sphinxcontrib-napoleon`_ For consistency always use: @@ -20,91 +20,14 @@ For consistency always use: docstrings. * ``u"""Unicode triple-quoted string"""`` for Unicode docstrings -All docstrings should be written in reST (reStructuredText) markup. See the -:ref:`reST_quick_start` for more detail. - -There are two forms of docstrings: **single-line** and **multi-line** -docstrings. - - -Single-Line Docstrings -====================== - -The single line docstring of an object must state the **purpose** of that -object, known as the **purpose section**. This terse overview must be on one -line and ideally no longer than 80 characters. - - -Multi-Line Docstrings -===================== - -Multi-line docstrings must consist of at least a purpose section akin to the -single-line docstring, followed by a blank line and then any other content, as -described below. The entire docstring should be indented to the same level as -the quotes at the docstring's first line. - - -Description ------------ - -The multi-line docstring *description section* should expand on what was -stated in the one line *purpose section*. The description section should try -not to document *argument* and *keyword argument* details. Such information -should be documented in the following *arguments and keywords section*. - - -Sample Multi-Line Docstring ---------------------------- - -Here is a simple example of a standard docstring: - -.. literalinclude:: docstrings_sample_routine.py - -This would be rendered as: - - .. currentmodule:: documenting.docstrings_sample_routine - - .. automodule:: documenting.docstrings_sample_routine - :members: - :undoc-members: - -Additionally, a summary can be extracted automatically, which would result in: - - .. 
autosummary:: - - documenting.docstrings_sample_routine.sample_routine - - -Documenting Classes -=================== - -The class constructor should be documented in the docstring for its -``__init__`` or ``__new__`` method. Methods should be documented by their own -docstring, not in the class header itself. - -If a class subclasses another class and its behaviour is mostly inherited from -that class, its docstring should mention this and summarise the differences. -Use the verb "override" to indicate that a subclass method replaces a -superclass method and does not call the superclass method; use the verb -"extend" to indicate that a subclass method calls the superclass method -(in addition to its own behaviour). - - -Attribute and Property Docstrings ---------------------------------- - -Here is a simple example of a class containing an attribute docstring and a -property docstring: - -.. literalinclude:: docstrings_attribute.py +All docstrings can use reST (reStructuredText) markup to augment the +rendered formatting. See the :ref:`reST_quick_start` for more detail. -This would be rendered as: +For more information including examples please see: - .. currentmodule:: documenting.docstrings_attribute +* `numpydoc`_ +* `sphinxcontrib-napoleon`_ - .. automodule:: documenting.docstrings_attribute - :members: - :undoc-members: -.. note:: The purpose section of the property docstring **must** state whether - the property is read-only. +.. _numpydoc: https://numpydoc.readthedocs.io/en/latest/format.html#style-guide +.. 
_sphinxcontrib-napoleon: https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_numpy.html \ No newline at end of file diff --git a/docs/src/developers_guide/documenting/rest_guide.rst b/docs/src/developers_guide/documenting/rest_guide.rst index 4845132b15..c4330b1e63 100644 --- a/docs/src/developers_guide/documenting/rest_guide.rst +++ b/docs/src/developers_guide/documenting/rest_guide.rst @@ -14,8 +14,8 @@ reST is a lightweight markup language intended to be highly readable in source format. This guide will cover some of the more frequently used advanced reST markup syntaxes, for the basics of reST the following links may be useful: - * https://www.sphinx-doc.org/en/master/usage/restructuredtext/ - * http://packages.python.org/an_example_pypi_project/sphinx.html +* https://www.sphinx-doc.org/en/master/usage/restructuredtext/ +* http://packages.python.org/an_example_pypi_project/sphinx.html Reference documentation for reST can be found at http://docutils.sourceforge.net/rst.html. diff --git a/docs/src/developers_guide/documenting/whats_new_contributions.rst b/docs/src/developers_guide/documenting/whats_new_contributions.rst index ebb553024b..aa19722a69 100644 --- a/docs/src/developers_guide/documenting/whats_new_contributions.rst +++ b/docs/src/developers_guide/documenting/whats_new_contributions.rst @@ -1,17 +1,19 @@ +.. include:: ../../common_links.inc + .. _whats_new_contributions: ================================= Contributing a "What's New" Entry ================================= -Iris uses a file named ``latest.rst`` to keep a draft of upcoming changes -that will form the next release. Contributions to the :ref:`iris_whatsnew` -document are written by the developer most familiar with the change made. -The contribution should be included as part of the Iris Pull Request that -introduces the change. +Iris uses a file named ``latest.rst`` to keep a draft of upcoming development +changes that will form the next stable release. 
Contributions to the +:ref:`iris_whatsnew` document are written by the developer most familiar +with the change made. The contribution should be included as part of +the Iris Pull Request that introduces the change. -The ``latest.rst`` and the past release notes are kept in -``docs/src/whatsnew/``. If you are writing the first contribution after +The ``latest.rst`` and the past release notes are kept in the +``docs/src/whatsnew/`` directory. If you are writing the first contribution after an Iris release: **create the new** ``latest.rst`` by copying the content from ``latest.rst.template`` in the same directory. @@ -33,12 +35,12 @@ situation is thought likely (large PR, high repo activity etc.): a **new pull request** be created specifically for the "What's New" entry, which references the main pull request and titled (e.g. for PR#9999): - What's New for #9999 + What's New for #9999 * PR author: create the "What's New" pull request * PR reviewer: once the "What's New" PR is created, **merge the main PR**. - (this will fix any `cirrus-ci`_ linkcheck errors where the links in the + (this will fix any `Iris GitHub Actions`_ linkcheck errors where the links in the "What's New" PR reference new features introduced in the main PR) * PR reviewer: review the "What's New" PR, merge once acceptable @@ -69,6 +71,9 @@ The required content, in order, is as follows: user name. Link the name to their GitHub profile. E.g. ```@tkknight `_ changed...`` + * Bigger changes take a lot of effort to review, too! Make sure you credit + the reviewer(s) where appropriate. + * The new/changed behaviour * Context to the change. Possible examples include: what this fixes, why @@ -82,8 +87,9 @@ The required content, in order, is as follows: For example:: - #. `@tkknight `_ changed changed argument ``x`` - to be optional in :class:`~iris.module.class` and + #. 
`@tkknight `_ and + `@trexfeathers `_ (reviewer) changed + argument ``x`` to be optional in :class:`~iris.module.class` and :meth:`iris.module.method`. This allows greater flexibility as requested in :issue:`9999`. (:pull:`1111`, :pull:`9999`) @@ -93,13 +99,11 @@ links to code. For more inspiration on possible content and references, please examine past what's :ref:`iris_whatsnew` entries. .. note:: The reStructuredText syntax will be checked as part of building - the documentation. Any warnings should be corrected. - `cirrus-ci`_ will automatically build the documentation when + the documentation. Any warnings should be corrected. The + `Iris GitHub Actions`_ will automatically build the documentation when creating a pull request, however you can also manually :ref:`build ` the documentation. -.. _cirrus-ci: https://cirrus-ci.com/github/SciTools/iris - Contribution Categories ======================= diff --git a/docs/src/developers_guide/github_app.rst b/docs/src/developers_guide/github_app.rst new file mode 100644 index 0000000000..402cfe0c75 --- /dev/null +++ b/docs/src/developers_guide/github_app.rst @@ -0,0 +1,281 @@ +.. include:: ../common_links.inc + +Token GitHub App +---------------- + +.. note:: + + This section of the documentation is applicable only to GitHub `SciTools`_ + Organisation **owners** and **administrators**. + +.. note:: + + The ``iris-actions`` GitHub App has been rebranded with the more generic + name ``scitools-ci``, as the app can be used for any `SciTools`_ repository, + not just ``iris`` specifically. + + All of the following instructions are still applicable. + + +This section describes how to create, configure, install and use our `SciTools`_ +GitHub App for generating tokens for use with *GitHub Actions* (GHA). + + +Background +^^^^^^^^^^ + +Our GitHub *Continuous Integration* (CI) workflows require fully reproducible +`conda`_ environments to test ``iris`` and build our documentation. 
+ +The ``iris`` `refresh-lockfiles`_ GHA workflow uses the `conda-lock`_ package to routinely +generate a platform specific ``lockfile`` containing all the package dependencies +required by ``iris`` for a specific version of ``python``. + +The environment lockfiles created by the `refresh-lockfiles`_ GHA are contributed +back to ``iris`` through a pull-request that is automatically generated using the +third-party `create-pull-request`_ GHA. By default, pull-requests created by such an +action using the standard ``GITHUB_TOKEN`` **cannot** trigger other workflows, such +as our CI. + +As a result, we use a dedicated authentication **GitHub App** to securely generate tokens +for the `create-pull-request`_ GHA, which then permits our full suite of CI testing workflows +to be triggered against the lockfiles pull-request. Ensuring that the CI is triggered gives us +confidence that the proposed new lockfiles have not introduced a package level incompatibility +or issue within ``iris``. See :ref:`use gha`. + + +Create GitHub App +^^^^^^^^^^^^^^^^^ + +The **GitHub App** is created for the sole purpose of generating tokens for use with actions, +and **must** be owned by the `SciTools`_ organisation. + +To create a minimal `GitHub App`_ for this purpose, perform the following steps: + +1. Click the `SciTools`_ organisation ``⚙️ Settings`` option. + +.. figure:: assets/scitools-settings.png + :alt: SciTools organisation Settings option + :align: center + :width: 75% + +2. Click the ``GitHub Apps`` option from the ``<> Developer settings`` + section in the left hand sidebar. + +.. figure:: assets/developer-settings-github-apps.png + :alt: Developer settings, GitHub Apps option + :align: center + :width: 25% + +3. Now click the ``New GitHub App`` button to display the ``Register new GitHub App`` + form. + +Within the ``Register new GitHub App`` form, complete the following fields: + +4. Set the **mandatory** ``GitHub App name`` field to be ``iris-actions``. +5. 
Set the **mandatory** ``Homepage URL`` field to be ``https://github.com/SciTools/iris`` +6. Under the ``Webhook`` section, **uncheck** the ``Active`` checkbox. + Note that, **no** ``Webhook URL`` is required. + +.. figure:: assets/webhook-active.png + :alt: Webhook active checkbox + :align: center + :width: 75% + +7. Under the ``Repository permissions`` section, set the ``Contents`` field to + be ``Access: Read and write``. + +.. figure:: assets/repo-perms-contents.png + :alt: Repository permissions Contents option + :align: center + :width: 75% + +8. Under the ``Repository permissions`` section, set the ``Pull requests`` field + to be ``Access: Read and write``. + +.. figure:: assets/repo-perms-pull-requests.png + :alt: Repository permissions Pull requests option + :align: center + :width: 75% + +9. Under the ``Organization permissions`` section, set the ``Members`` field to + be ``Access: Read-only``. + +.. figure:: assets/org-perms-members.png + :alt: Organization permissions Members + :align: center + :width: 75% + +10. Under the ``User permissions`` section, for the ``Where can this GitHub App be installed?`` + field, **check** the ``Only on this account`` radio-button i.e., only allow + this GitHub App to be installed on the **SciTools** account. + +.. figure:: assets/user-perms.png + :alt: User permissions + :align: center + :width: 75% + +11. Finally, click the ``Create GitHub App`` button. + + +Configure GitHub App +^^^^^^^^^^^^^^^^^^^^ + +Creating the GitHub App will automatically redirect you to the ``SciTools settings / iris-actions`` +form for the newly created app. + +Perform the following GitHub App configuration steps: + +.. _app id: + +1. Under the ``About`` section, note of the GitHub ``App ID`` as this value is + required later. See :ref:`gha secrets`. +2. Under the ``Display information`` section, optionally upload the ``iris`` logo + as a ``png`` image. +3. Under the ``Private keys`` section, click the ``Generate a private key`` button. + +.. 
figure:: assets/generate-key.png + :alt: Private keys Generate a private key + :align: center + :width: 75% + +.. _private key: + +GitHub will automatically generate a private key to sign access token requests +for the app. Also a separate browser pop-up window will appear with the GitHub +App private key in ``OpenSSL PEM`` format. + +.. figure:: assets/download-pem.png + :alt: Download OpenSSL PEM file + :align: center + :width: 50% + +.. important:: + + Please ensure that you save the ``OpenSSL PEM`` file and **securely** archive + its contents. The private key within this file is required later. + See :ref:`gha secrets`. + + +Install GitHub App +^^^^^^^^^^^^^^^^^^ + +To install the GitHub App: + +1. Select the ``Install App`` option from the top left menu of the + ``Scitools settings / iris-actions`` form, then click the ``Install`` button. + +.. figure:: assets/install-app.png + :alt: Private keys Generate a private key + :align: center + :width: 75% + +2. Select the ``Only select repositories`` radio-button from the ``Install iris-actions`` + form, and choose the ``SciTools/iris`` repository. + +.. figure:: assets/install-iris-actions.png + :alt: Install iris-actions GitHub App + :align: center + :width: 75% + +3. Click the ``Install`` button. + + The successfully installed ``iris-actions`` GitHub App is now available under + the ``GitHub Apps`` option in the ``Integrations`` section of the `SciTools`_ + organisation ``Settings``. Note that, to reconfigure the installed app click + the ``⚙️ App settings`` option. + +.. figure:: assets/installed-app.png + :alt: Installed GitHub App + :align: center + :width: 80% + +4. Finally, confirm that the ``iris-actions`` GitHub App is now available within + the `SciTools/iris`_ repository by clicking the ``GitHub apps`` option in the + ``⚙️ Settings`` section. + +.. figure:: assets/iris-github-apps.png + :alt: Iris installed GitHub App + :align: center + :width: 80% + + +.. 
_gha secrets: + +Create Repository Secrets +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The GitHub Action that requests an access token from the ``iris-actions`` +GitHub App must be configured with the following information: + +* the ``App ID``, and +* the ``OpenSSL PEM`` private key + +associated with the ``iris-actions`` GitHub App. This **sensitive** information is +made **securely** available by creating `SciTools/iris`_ repository secrets: + +1. Click the `SciTools/iris`_ repository ``⚙️ Settings`` option. + +.. figure:: assets/iris-settings.png + :alt: Iris Settings + :align: center + :width: 75% + +2. Click the ``Actions`` option from the ``Security`` section in the left hand + sidebar. + +.. figure:: assets/iris-security-actions.png + :alt: Iris Settings Security Actions + :align: center + :width: 25% + +3. Click the ``New repository secret`` button. + +.. figure:: assets/iris-actions-secret.png + :alt: Iris Actions Secret + :align: center + :width: 75% + +4. Complete the ``Actions secrets / New secret`` form for the ``App ID``: + + * Set the ``Name`` field to be ``AUTH_APP_ID``. + * Set the ``Value`` field to be the numerical ``iris-actions`` GitHub ``App ID``. + See :ref:`here `. + * Click the ``Add secret`` button. + +5. Click the ``New repository secret`` button again, and complete the form + for the ``OpenSSL PEM``: + + * Set the ``Name`` field to be ``AUTH_APP_PRIVATE_KEY``. + * Set the ``Value`` field to be the entire contents of the ``OpenSSL PEM`` file. + See :ref:`here `. + * Click the ``Add secret`` button. + +A summary of the newly created `SciTools/iris`_ repository secrets is now available: + +.. figure:: assets/iris-secrets-created.png + :alt: Iris Secrets created + :align: center + :width: 75% + + +.. _use gha: + +Use GitHub App +^^^^^^^^^^^^^^ + +The following example workflow shows how to use the `github-app-token`_ GHA +to generate a token for use with the `create-pull-request`_ GHA: + +.. 
figure:: assets/gha-token-example.png + :alt: GitHub Action token example + :align: center + :width: 50% + + +.. _GitHub App: https://docs.github.com/en/developers/apps/building-github-apps/creating-a-github-app +.. _SciTools/iris: https://github.com/SciTools/iris +.. _conda-lock: https://github.com/conda-incubator/conda-lock +.. _create-pull-request: https://github.com/peter-evans/create-pull-request +.. _github-app-token: https://github.com/tibdex/github-app-token +.. _refresh-lockfiles: https://github.com/SciTools/iris/blob/main/.github/workflows/refresh-lockfiles.yml diff --git a/docs/src/developers_guide/gitwash/development_workflow.rst b/docs/src/developers_guide/gitwash/development_workflow.rst index 0536ebfb62..b086922d5b 100644 --- a/docs/src/developers_guide/gitwash/development_workflow.rst +++ b/docs/src/developers_guide/gitwash/development_workflow.rst @@ -25,7 +25,7 @@ In what follows we'll refer to the upstream iris ``main`` branch, as * If you can possibly avoid it, avoid merging trunk or any other branches into your feature branch while you are working. * If you do find yourself merging from trunk, consider :ref:`rebase-on-trunk` -* Ask on the `Iris GitHub Discussions`_ if you get stuck. +* Ask on the Iris `GitHub Discussions`_ if you get stuck. * Ask for code review! This way of working helps to keep work well organized, with readable history. @@ -157,7 +157,7 @@ Ask for Your Changes to be Reviewed or Merged When you are ready to ask for someone to review your code and consider a merge: #. Go to the URL of your forked repo, say - ``http://github.com/your-user-name/iris``. + ``https://github.com/your-user-name/iris``. #. Use the 'Switch Branches' dropdown menu near the top left of the page to select the branch with your changes: @@ -190,7 +190,7 @@ Delete a Branch on Github git push origin :my-unwanted-branch Note the colon ``:`` before ``test-branch``. 
See also: -http://github.com/guides/remove-a-remote-branch +https://github.com/guides/remove-a-remote-branch Several People Sharing a Single Repository @@ -203,7 +203,7 @@ share it via github. First fork iris into your account, as from :ref:`forking`. Then, go to your forked repository github page, say -``http://github.com/your-user-name/iris``, select :guilabel:`Settings`, +``https://github.com/your-user-name/iris``, select :guilabel:`Settings`, :guilabel:`Manage Access` and then :guilabel:`Invite collaborator`. .. note:: For more information on sharing your repository see the diff --git a/docs/src/developers_guide/gitwash/forking.rst b/docs/src/developers_guide/gitwash/forking.rst index 161847ed79..baeb243c86 100644 --- a/docs/src/developers_guide/gitwash/forking.rst +++ b/docs/src/developers_guide/gitwash/forking.rst @@ -7,7 +7,7 @@ Making Your own Copy (fork) of Iris =================================== You need to do this only once. The instructions here are very similar -to the instructions at http://help.github.com/forking/, please see +to the instructions at https://help.github.com/forking/, please see that page for more detail. We're repeating some of it here just to give the specifics for the `Iris`_ project, and to suggest some default names. @@ -18,7 +18,7 @@ Set up and Configure a Github Account If you don't have a github account, go to the github page, and make one. You then need to configure your account to allow write access, see -the `generating sss keys for GitHub`_ help on `github help`_. +the `generating ssh keys for GitHub`_ help on `github help`_. Create Your own Forked Copy of Iris diff --git a/docs/src/developers_guide/gitwash/git_links.inc b/docs/src/developers_guide/gitwash/git_links.inc index 9a87b55d4d..11d037ccf4 100644 --- a/docs/src/developers_guide/gitwash/git_links.inc +++ b/docs/src/developers_guide/gitwash/git_links.inc @@ -9,8 +9,8 @@ nipy, NIPY, Nipy, etc... .. _git: http://git-scm.com/ -.. _github: http://github.com -.. 
_github help: http://help.github.com +.. _github: https://github.com +.. _github help: https://help.github.com .. _git documentation: https://git-scm.com/docs .. _git clone: http://schacon.github.com/git/git-clone.html diff --git a/docs/src/developers_guide/gitwash/set_up_fork.rst b/docs/src/developers_guide/gitwash/set_up_fork.rst index d5c5bc5c44..5318825488 100644 --- a/docs/src/developers_guide/gitwash/set_up_fork.rst +++ b/docs/src/developers_guide/gitwash/set_up_fork.rst @@ -15,7 +15,7 @@ Overview git clone git@github.com:your-user-name/iris.git cd iris - git remote add upstream git://github.com/SciTools/iris.git + git remote add upstream git@github.com:SciTools/iris.git In Detail ========= diff --git a/docs/src/developers_guide/imagehash_index.rst b/docs/src/developers_guide/imagehash_index.rst deleted file mode 100644 index a11ae8a531..0000000000 --- a/docs/src/developers_guide/imagehash_index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. include:: ../common_links.inc - -.. _testing.imagehash_index: - -Graphical Test Hash Index -************************* - -The iris test suite produces plots of data using matplotlib and cartopy. -The images produced are compared to known "good" output, the images for -which are kept in `scitools/test-iris-imagehash `_. - -For an overview of iris' graphics tests, see :ref:`testing.graphics` - -Typically running the iris test suite will output the rendered -images to ``$PROJECT_DIR/iris_image_test_output``. -The known good output for each test can be seen at the links below -for comparison. - - -.. imagetest-list:: \ No newline at end of file diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index 09b884302b..bae77a7d21 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -19,7 +19,8 @@ A Release Manager will be nominated for each release of Iris. 
This role involves * deciding which features and bug fixes should be included in the release * managing the project board for the release -* using a `GitHub Releases Discussion Forum`_ for documenting intent and capturing any +* using :discussion:`GitHub Discussion releases category ` + for documenting intent and capturing any discussion about the release The Release Manager will make the release, ensuring that all the steps outlined @@ -99,12 +100,14 @@ Steps to achieve this can be found in the :ref:`iris_development_releases_steps` The Release ----------- -The final steps of the release are to change the version string ``__version__`` -in the source of :literal:`iris.__init__.py` and ensure the release date and details +The final steps of the release are to ensure that the release date and details are correct in the relevant ``whatsnew`` page within the documentation. -Once all checks are complete, the release is cut by the creation of a new tag -in the ``SciTools/iris`` repository. +There is no need to update the ``iris.__version__``, as this is managed +automatically by `setuptools-scm`_. + +Once all checks are complete, the release is published on GitHub by +creating a new tag in the ``SciTools/iris`` repository. Update conda-forge @@ -120,6 +123,14 @@ conda package on the `conda-forge Anaconda channel`_. Update PyPI ----------- +.. note:: + + As part of our Continuous-Integration (CI), the building and publishing of + PyPI artifacts is now automated by a dedicated GitHub Action. + + The following instructions **no longer** require to be performed manually, + but remain part of the documentation for reference purposes only. + Update the `scitools-iris`_ project on PyPI with the latest Iris release. To do this perform the following steps. @@ -178,13 +189,13 @@ For further details on how to test Iris, see :ref:`developer_running_tests`. 
Merge Back ---------- -After the release is cut, the changes from the release branch should be merged +After the release is published, the changes from the release branch should be merged back onto the ``SciTools/iris`` ``main`` branch. To achieve this, first cut a local branch from the latest ``main`` branch, and `git merge` the :literal:`.x` release branch into it. Ensure that the -``iris.__version__``, ``docs/src/whatsnew/index.rst`` and ``docs/src/whatsnew/latest.rst`` -are correct, before committing these changes and then proposing a pull-request +``docs/src/whatsnew/index.rst`` and ``docs/src/whatsnew/latest.rst`` are +correct, before committing these changes and then proposing a pull-request on the ``main`` branch of ``SciTools/iris``. @@ -198,6 +209,11 @@ branch, and then released by tagging ``v1.9.1``. New features shall not be included in a point release, these are for bug fixes. +``whatsnew`` entries should be added to the existing +``docs/src/whatsnew/v1.9.rst`` file in a new ``v1.9.1`` section. A template for +this bugfix patches section can be found in the +``docs/src/whatsnew/latest.rst.template`` file. + A point release does not require a release candidate, but the rest of the release process is to be followed, including the merge back of changes into ``main``. @@ -213,23 +229,22 @@ These steps assume a release for ``1.9.0`` is to be created. Release Steps ~~~~~~~~~~~~~ -#. Create the release feature branch ``v1.9.x`` on `SciTools/iris`_. - The only exception is for a point/bugfix release, as it should already exist -#. Update the ``iris.__init__.py`` version string e.g., to ``1.9.0`` #. Update the ``whatsnew`` for the release: - * Use ``git`` to rename ``docs/src/whatsnew/latest.rst`` to the release - version file ``v1.9.rst`` - * Use ``git`` to delete the ``docs/src/whatsnew/latest.rst.template`` file - * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title. 
- Note that, the Iris version and release date are updated automatically - when the documentation is built - * Review the file for correctness - * Work with the development team to populate the ``Release Highlights`` - dropdown at the top of the file, which provides extra detail on notable - changes - * Use ``git`` to add and commit all changes, including removal of - ``latest.rst.template`` + * Use ``git`` to rename ``docs/src/whatsnew/latest.rst`` to the release + version file ``v1.9.rst`` + * Use ``git`` to delete the ``docs/src/whatsnew/latest.rst.template`` file + * In ``v1.9.rst`` remove the ``[unreleased]`` caption from the page title. + Replace this with ``[release candidate]`` for the release candidate and + remove this for the actual release. + Note that, the Iris version and release date are updated automatically + when the documentation is built + * Review the file for correctness + * Work with the development team to populate the ``Release Highlights`` + dropdown at the top of the file, which provides extra detail on notable + changes + * Use ``git`` to add and commit all changes, including removal of + ``latest.rst.template``. #. Update the ``whatsnew`` index ``docs/src/whatsnew/index.rst`` @@ -240,6 +255,9 @@ Release Steps #. Once all the above steps are complete, the release is cut, using the :guilabel:`Draft a new release` button on the `Iris release page `_ + and targeting the release branch if it exists +#. Create the release feature branch ``v1.9.x`` on `SciTools/iris`_ if it doesn't + already exist. For point/bugfix releases use the branch which already exists Post Release Steps @@ -247,26 +265,27 @@ Post Release Steps #. Check the documentation has built on `Read The Docs`_. The build is triggered by any commit to ``main``. Additionally check that the versions - available in the pop out menu in the bottom left corner include the new + available in the pop out menu in the bottom right corner include the new release version. 
If it is not present you will need to configure the versions available in the **admin** dashboard in `Read The Docs`_. #. Review the `Active Versions`_ for the ``scitools-iris`` project on `Read The Docs`_ to ensure that the appropriate versions are ``Active`` and/or ``Hidden``. To do this ``Edit`` the appropriate version e.g., see `Editing v3.0.0rc0`_ (must be logged into Read the Docs). -#. Copy ``docs/src/whatsnew/latest.rst.template`` to - ``docs/src/whatsnew/latest.rst``. This will reset - the file with the ``unreleased`` heading and placeholders for the - ``whatsnew`` headings -#. Add back in the reference to ``latest.rst`` to the ``whatsnew`` index - ``docs/src/whatsnew/index.rst`` -#. Update ``iris.__init__.py`` version string to show as ``1.10.dev0`` -#. Merge back to ``main`` +#. Merge back to ``main``. This should be done after all releases, including + the release candidate, and also after major changes to the release branch. +#. On main, make a new ``latest.rst`` from ``latest.rst.template`` and update + the include statement and the toctree in ``index.rst`` to point at the new + ``latest.rst``. +#. Consider updating ``docs/src/userguide/citation.rst`` on ``main`` to include + the version number, date and `Zenodo DOI `_ + of the new release. Ideally this would be updated before the release, but + the DOI for the new version is only available once the release has been + created in GitHub. .. _SciTools/iris: https://github.com/SciTools/iris .. _tag on the SciTools/Iris: https://github.com/SciTools/iris/releases -.. _GitHub Releases Discussion Forum: https://github.com/SciTools/iris/discussions/categories/releases .. _conda-forge Anaconda channel: https://anaconda.org/conda-forge/iris .. _conda-forge iris-feedstock: https://github.com/conda-forge/iris-feedstock .. _CFEP-05: https://github.com/conda-forge/cfep/blob/master/cfep-05.md @@ -276,4 +295,5 @@ Post Release Steps .. _rc_iris: https://anaconda.org/conda-forge/iris/labels .. 
_Generating Distribution Archives: https://packaging.python.org/tutorials/packaging-projects/#generating-distribution-archives .. _Packaging Your Project: https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-your-project -.. _latest CF standard names: http://cfconventions.org/standard-names.html \ No newline at end of file +.. _latest CF standard names: http://cfconventions.org/standard-names.html +.. _setuptools-scm: https://github.com/pypa/setuptools_scm diff --git a/docs/src/developers_guide/testing_tools.rst b/docs/src/developers_guide/testing_tools.rst new file mode 100755 index 0000000000..dd628d37fc --- /dev/null +++ b/docs/src/developers_guide/testing_tools.rst @@ -0,0 +1,80 @@ +.. include:: ../common_links.inc + +.. _testing_tools: + +Testing tools +************* + +Iris has various internal convenience functions and utilities available to +support writing tests. Using these makes tests quicker and easier to write, and +also consistent with the rest of Iris (which makes it easier to work with the +code). Most of these conveniences are accessed through the +:class:`iris.tests.IrisTest` class, from +which Iris' test classes then inherit. + +.. tip:: + + All functions listed on this page are defined within + :mod:`iris.tests.__init__.py` as methods of + :class:`iris.tests.IrisTest_nometa` (which :class:`iris.tests.IrisTest` + inherits from). They can be accessed within a test using + ``self.exampleFunction``. + +Custom assertions +================= + +:class:`iris.tests.IrisTest` supports a variety of custom unittest-style +assertions, such as :meth:`~iris.tests.IrisTest_nometa.assertArrayEqual`, +:meth:`~iris.tests.IrisTest_nometa.assertArrayAlmostEqual`. + +.. _create-missing: + +Saving results +-------------- + +Some tests compare the generated output to the expected result contained in a +file. 
Custom assertions for this include +:meth:`~iris.tests.IrisTest_nometa.assertCMLApproxData` +:meth:`~iris.tests.IrisTest_nometa.assertCDL` +:meth:`~iris.tests.IrisTest_nometa.assertCML` and +:meth:`~iris.tests.IrisTest_nometa.assertTextFile`. See docstrings for more +information. + +.. note:: + + Sometimes code changes alter the results expected from a test containing the + above methods. These can be updated by removing the existing result files + and then running the file containing the test with a ``--create-missing`` + command line argument, or setting the ``IRIS_TEST_CREATE_MISSING`` + environment variable to anything non-zero. This will create the files rather + than erroring, allowing you to commit the updated results. + +Context managers +================ + +Capturing exceptions and logging +-------------------------------- + +:class:`iris.tests.IrisTest` includes several context managers that can be used +to make test code tidier and easier to read. These include +:meth:`~iris.tests.IrisTest_nometa.assertWarnsRegexp` and +:meth:`~iris.tests.IrisTest_nometa.assertLogs`. + +Temporary files +--------------- + +It's also possible to generate temporary files in a concise fashion with +:meth:`~iris.tests.IrisTest_nometa.temp_filename`. + +Patching +======== + +:meth:`~iris.tests.IrisTest_nometa.patch` is a wrapper around ``unittest.patch`` +that will be automatically cleaned up at the end of the test. + +Graphic tests +============= + +As a package capable of generating graphical outputs, Iris has utilities for +creating and updating graphical tests - see :ref:`testing.graphics` for more +information. \ No newline at end of file diff --git a/docs/src/further_topics/index.rst b/docs/src/further_topics/index.rst deleted file mode 100644 index 81bff2f764..0000000000 --- a/docs/src/further_topics/index.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. 
_further topics: - -Introduction -============ - -Some specific areas of Iris may require further explanation or a deep dive -into additional detail above and beyond that offered by the -:ref:`User Guide `. - -This section provides a collection of additional material on focused topics -that may be of interest to the more advanced or curious user. - -.. hint:: - - If you wish further documentation on any specific topics or areas of Iris - that are missing, then please let us know by raising a :issue:`GitHub Documentation Issue` - on `SciTools/Iris`_. - - -* :doc:`metadata` -* :doc:`lenient_metadata` -* :doc:`lenient_maths` -* :ref:`ugrid` - - -.. _SciTools/iris: https://github.com/SciTools/iris diff --git a/docs/src/further_topics/metadata.rst b/docs/src/further_topics/metadata.rst index 1b81f7055c..4c55047d4c 100644 --- a/docs/src/further_topics/metadata.rst +++ b/docs/src/further_topics/metadata.rst @@ -1,3 +1,4 @@ +.. _further topics: .. _metadata: Metadata @@ -63,25 +64,26 @@ For example, the collective metadata used to define an ``var_name``, ``units``, and ``attributes`` members. Note that, these are the actual `data attribute`_ names of the metadata members on the Iris class. + .. _metadata members table: -.. table:: - Iris classes that model `CF Conventions`_ metadata +.. 
table:: Iris classes that model `CF Conventions`_ metadata :widths: auto :align: center - =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== - Metadata Members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` Metadata Members - =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== - ``standard_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``standard_name`` - ``long_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``long_name`` - ``var_name`` ✔ ✔ ✔ ✔ ✔ ✔ ``var_name`` - ``units`` ✔ ✔ ✔ ✔ ✔ ✔ ``units`` - ``attributes`` ✔ ✔ ✔ ✔ ✔ ✔ ``attributes`` - ``coord_system`` ✔ ✔ ✔ ``coord_system`` - ``climatological`` ✔ ✔ ✔ ``climatological`` - ``measure`` ✔ ``measure`` - ``cell_methods`` ✔ ``cell_methods`` - ``circular`` ✔ ``circular`` - =================== ======================================= ============================== ========================================== ================================= ======================== ============================== =================== + =================== ======================================= ============================== ========================================== ================================= ======================== ============================== + Metadata Members :class:`~iris.coords.AncillaryVariable` :class:`~iris.coords.AuxCoord` :class:`~iris.aux_factory.AuxCoordFactory` :class:`~iris.coords.CellMeasure` :class:`~iris.cube.Cube` :class:`~iris.coords.DimCoord` + =================== ======================================= 
============================== ========================================== ================================= ======================== ============================== + ``standard_name`` ✔ ✔ ✔ ✔ ✔ ✔ + ``long_name`` ✔ ✔ ✔ ✔ ✔ ✔ + ``var_name`` ✔ ✔ ✔ ✔ ✔ ✔ + ``units`` ✔ ✔ ✔ ✔ ✔ ✔ + ``attributes`` ✔ ✔ ✔ ✔ ✔ ✔ + ``coord_system`` ✔ ✔ ✔ + ``climatological`` ✔ ✔ ✔ + ``measure`` ✔ + ``cell_methods`` ✔ + ``circular`` ✔ + =================== ======================================= ============================== ========================================== ================================= ======================== ============================== .. note:: @@ -387,10 +389,10 @@ instances. Normally, this would cause issues. For example, .. doctest:: richer-metadata - >>> simply = {"one": np.int(1), "two": np.array([1.0, 2.0])} + >>> simply = {"one": np.int32(1), "two": np.array([1.0, 2.0])} >>> simply {'one': 1, 'two': array([1., 2.])} - >>> fruity = {"one": np.int(1), "two": np.array([1.0, 2.0])} + >>> fruity = {"one": np.int32(1), "two": np.array([1.0, 2.0])} >>> fruity {'one': 1, 'two': array([1., 2.])} >>> simply == fruity @@ -417,7 +419,7 @@ However, metadata class equality is rich enough to handle this eventuality, >>> metadata1 CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'one': 1, 'two': array([1., 2.])}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) - >>> metadata2 = cube.metadata._replace(attributes={"one": np.int(1), "two": np.array([1000.0, 2000.0])}) + >>> metadata2 = cube.metadata._replace(attributes={"one": np.int32(1), "two": np.array([1000.0, 2000.0])}) >>> metadata2 CubeMetadata(standard_name='air_temperature', long_name=None, var_name='air_temperature', units=Unit('K'), attributes={'one': 1, 'two': array([1000., 2000.])}, cell_methods=(CellMethod(method='mean', coord_names=('time',), intervals=('6 hour',), comments=()),)) >>> metadata1 == 
metadata2 diff --git a/docs/src/further_topics/ugrid/data_model.rst b/docs/src/further_topics/ugrid/data_model.rst index 4a2f64f627..cc3cc7b793 100644 --- a/docs/src/further_topics/ugrid/data_model.rst +++ b/docs/src/further_topics/ugrid/data_model.rst @@ -52,7 +52,7 @@ example. .. _data_structured_grid: .. figure:: images/data_structured_grid.svg :alt: Diagram of how data is represented on a structured grid - :align: right + :align: left :width: 1280 Data on a structured grid. @@ -131,7 +131,7 @@ example of what is described above. .. _data_ugrid_mesh: .. figure:: images/data_ugrid_mesh.svg :alt: Diagram of how data is represented on an unstructured mesh - :align: right + :align: left :width: 1280 Data on an unstructured mesh @@ -157,7 +157,7 @@ elements. See :numref:`ugrid_element_centres` for a visualised example. .. _ugrid_element_centres: .. figure:: images/ugrid_element_centres.svg :alt: Diagram demonstrating mesh face-centred data. - :align: right + :align: left :width: 1280 Data can be assigned to mesh edge/face/volume 'centres' @@ -180,7 +180,7 @@ Every node is completely independent - every one can have unique X andY (and Z) .. _ugrid_node_independence: .. figure:: images/ugrid_node_independence.svg :alt: Diagram demonstrating the independence of each mesh node - :align: right + :align: left :width: 300 Every mesh node is completely independent @@ -199,7 +199,7 @@ array. See :numref:`ugrid_variable_faces`. .. _ugrid_variable_faces: .. figure:: images/ugrid_variable_faces.svg :alt: Diagram demonstrating mesh faces with variable node counts - :align: right + :align: left :width: 300 Mesh faces can have different node counts (using masking) @@ -216,7 +216,7 @@ areas (faces). See :numref:`ugrid_edge_data`. .. _ugrid_edge_data: .. 
figure:: images/ugrid_edge_data.svg :alt: Diagram demonstrating data assigned to mesh edges - :align: right + :align: left :width: 300 Data can be assigned to mesh edges @@ -405,6 +405,9 @@ the :class:`~iris.cube.Cube`\'s unstructured dimension. Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location edge >>> print(edge_cube.location) edge diff --git a/docs/src/further_topics/ugrid/images/fesom_mesh.png b/docs/src/further_topics/ugrid/images/fesom_mesh.png new file mode 100644 index 0000000000..283899a94b Binary files /dev/null and b/docs/src/further_topics/ugrid/images/fesom_mesh.png differ diff --git a/docs/src/further_topics/ugrid/images/orca_grid.png b/docs/src/further_topics/ugrid/images/orca_grid.png new file mode 100644 index 0000000000..6676e84fbb Binary files /dev/null and b/docs/src/further_topics/ugrid/images/orca_grid.png differ diff --git a/docs/src/further_topics/ugrid/images/smc_mesh.png b/docs/src/further_topics/ugrid/images/smc_mesh.png new file mode 100644 index 0000000000..8c5a9d86eb Binary files /dev/null and b/docs/src/further_topics/ugrid/images/smc_mesh.png differ diff --git a/docs/src/further_topics/ugrid/index.rst b/docs/src/further_topics/ugrid/index.rst index 81ba24428a..c45fd271a2 100644 --- a/docs/src/further_topics/ugrid/index.rst +++ b/docs/src/further_topics/ugrid/index.rst @@ -38,6 +38,7 @@ Read on to find out more... * :doc:`data_model` - learn why the mesh experience is so different. * :doc:`partner_packages` - meet some optional dependencies that provide powerful mesh operations. * :doc:`operations` - experience how your workflows will look when written for mesh data. +* :doc:`other_meshes` - check out some examples of converting various mesh formats into Iris' mesh format. .. Need an actual TOC to get Sphinx working properly, but have hidden it in @@ -50,5 +51,6 @@ Read on to find out more... 
data_model partner_packages operations + other_meshes __ CF-UGRID_ diff --git a/docs/src/further_topics/ugrid/operations.rst b/docs/src/further_topics/ugrid/operations.rst index f96e3e406c..a4e0e593d7 100644 --- a/docs/src/further_topics/ugrid/operations.rst +++ b/docs/src/further_topics/ugrid/operations.rst @@ -189,6 +189,9 @@ Creating a :class:`~iris.cube.Cube` is unchanged; the Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location edge Save @@ -392,6 +395,9 @@ etcetera: Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location face Attributes: Conventions 'CF-1.7' @@ -620,6 +626,9 @@ the link between :class:`~iris.cube.Cube` and Mesh coordinates: latitude x - longitude x - + Mesh: + name my_mesh + location edge # Sub-setted MeshCoords have become AuxCoords. >>> print(edge_cube[:-1]) @@ -976,13 +985,26 @@ on dimensions other than the :meth:`~iris.cube.Cube.mesh_dim`, since such Arithmetic ---------- -.. |tagline: arithmetic| replace:: |pending| +.. |tagline: arithmetic| replace:: |unchanged| .. rubric:: |tagline: arithmetic| -:class:`~iris.cube.Cube` Arithmetic (described in :doc:`/userguide/cube_maths`) -has not yet been adapted to handle :class:`~iris.cube.Cube`\s that include -:class:`~iris.experimental.ugrid.MeshCoord`\s. +Cube Arithmetic (described in :doc:`/userguide/cube_maths`) +has been extended to handle :class:`~iris.cube.Cube`\s that include +:class:`~iris.experimental.ugrid.MeshCoord`\s, and hence have a ``cube.mesh``. + +Cubes with meshes can be combined in arithmetic operations like +"ordinary" cubes. They can combine with other cubes without that mesh +(and its dimension); or with a matching mesh, which may be on a different +dimension. +Arithmetic can also be performed between a cube with a mesh and a mesh +coordinate with a matching mesh. + +In all cases, the result will have the same mesh as the input cubes. + +Meshes only match if they are fully equal -- i.e. 
they contain all the same +coordinates and connectivities, with identical names, units, attributes and +data content. .. todo: diff --git a/docs/src/further_topics/ugrid/other_meshes.rst b/docs/src/further_topics/ugrid/other_meshes.rst new file mode 100644 index 0000000000..38abeeca03 --- /dev/null +++ b/docs/src/further_topics/ugrid/other_meshes.rst @@ -0,0 +1,360 @@ +.. _other_meshes: + +Converting Other Mesh Formats +***************************** + +Iris' Mesh Data Model is based primarily on the CF-UGRID conventions (see +:doc:`data_model`), but other mesh formats can be converted to fit into this +model, **enabling use of Iris' specialised mesh support**. Below are some +examples demonstrating how this works for various mesh formats. + +.. contents:: + :local: + +`FESOM 1.4`_ Voronoi Polygons +----------------------------- +.. figure:: images/fesom_mesh.png + :width: 300 + :alt: Sample of FESOM mesh voronoi polygons, with variable numbers of sides. + +A FESOM mesh encoded in a NetCDF file includes: + +* X+Y point coordinates +* X+Y corners coordinates of the Voronoi Polygons around these points - + represented as the bounds of the coordinates + +To represent the Voronoi Polygons as faces, the corner coordinates will be used +as the **nodes** when creating the Iris +:class:`~iris.experimental.ugrid.mesh.Mesh`. + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> import iris + >>> from iris.experimental.ugrid import Mesh + + + >>> temperature_cube = iris.load_cube("my_file.nc", "sea_surface_temperature") + >>> print(temperature_cube) + sea_surface_temperature / (degC) (time: 12; -- : 126859) + Dimension coordinates: + time x - + Auxiliary coordinates: + latitude - x + longitude - x + Cell methods: + mean where sea area + mean time + Attributes: + grid 'FESOM 1.4 (unstructured grid in the horizontal with 126859 wet nodes;... + ... 
+ + >>> print(temperature_cube.coord("longitude")) + AuxCoord : longitude / (degrees) + points: + bounds: + shape: (126859,) bounds(126859, 18) + dtype: float64 + standard_name: 'longitude' + var_name: 'lon' + + # Use a Mesh to represent the Cube's horizontal geography, by replacing + # the existing face AuxCoords with new MeshCoords. + >>> fesom_mesh = Mesh.from_coords(temperature_cube.coord('longitude'), + ... temperature_cube.coord('latitude')) + >>> for new_coord in fesom_mesh.to_MeshCoords("face"): + ... old_coord = temperature_cube.coord(new_coord.name()) + ... unstructured_dim, = old_coord.cube_dims(temperature_cube) + ... temperature_cube.remove_coord(old_coord) + ... temperature_cube.add_aux_coord(new_coord, unstructured_dim) + + >>> print(temperature_cube) + sea_surface_temperature / (degC) (time: 12; -- : 126859) + Dimension coordinates: + time x - + Mesh coordinates: + latitude - x + longitude - x + Cell methods: + mean where sea area + mean time + Attributes: + grid 'FESOM 1.4 (unstructured grid in the horizontal with 126859 wet nodes;... + ... + + >>> print(temperature_cube.mesh) + Mesh : 'unknown' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + shape(2283462,)> + shape(2283462,)> + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: shape(126859, 18)> + face coordinates + shape(126859,)> + shape(126859,)> + +`WAVEWATCH III`_ Spherical Multi-Cell (SMC) WAVE Quad Grid +---------------------------------------------------------- +.. figure:: images/smc_mesh.png + :width: 300 + :alt: Sample of an SMC mesh, with decreasing quad sizes at the coastlines. + +An SMC grid encoded in a NetCDF file includes: + +* X+Y face centre coordinates +* X+Y base face sizes +* X+Y face size factors + +From this information we can derive face corner coordinates, which will be used +as the **nodes** when creating the Iris +:class:`~iris.experimental.ugrid.mesh.Mesh`. + + +.. dropdown:: :opticon:`code` + + .. 
code-block:: python + + >>> import iris + >>> from iris.experimental.ugrid import Mesh + >>> import numpy as np + + + >>> wave_cube = iris.load_cube("my_file.nc", "sea_surface_wave_significant_height") + >>> print(wave_cube) + sea_surface_wave_significant_height / (m) (time: 7; -- : 666328) + Dimension coordinates: + time x - + Auxiliary coordinates: + forecast_period x - + latitude - x + latitude cell size factor - x + longitude - x + longitude cell size factor - x + Scalar coordinates: + forecast_reference_time 2021-12-05 00:00:00 + Attributes: + SIN4 namelist parameter BETAMAX 1.39 + SMC_grid_type 'seapoint' + WAVEWATCH_III_switches 'NOGRB SHRD PR2 UNO SMC FLX0 LN1 ST4 NL1 BT1 DB1 TR0 BS0 IC0 IS0 REF0 WNT1... + WAVEWATCH_III_version_number '7.13' + altitude_resolution 'n/a' + area 'Global wave model GS512L4EUK' + base_lat_size 0.029296871 + base_lon_size 0.043945305 + ... + + >>> faces_x = wave_cube.coord("longitude") + >>> faces_y = wave_cube.coord("latitude") + >>> face_size_factor_x = wave_cube.coord("longitude cell size factor") + >>> face_size_factor_y = wave_cube.coord("latitude cell size factor") + >>> base_x_size = wave_cube.attributes["base_lon_size"] + >>> base_y_size = wave_cube.attributes["base_lat_size"] + + # Calculate face corners from face centres and face size factors. + >>> face_centres_x = faces_x.points + >>> face_centres_y = faces_y.points + >>> face_size_x = face_size_factor_x.points * base_x_size + >>> face_size_y = face_size_factor_y.points * base_y_size + + >>> x_mins = (face_centres_x - 0.5 * face_size_x).reshape(-1, 1) + >>> x_maxs = (face_centres_x + 0.5 * face_size_x).reshape(-1, 1) + >>> y_mins = (face_centres_y - 0.5 * face_size_y).reshape(-1, 1) + >>> y_maxs = (face_centres_y + 0.5 * face_size_y).reshape(-1, 1) + + >>> face_corners_x = np.hstack([x_mins, x_maxs, x_maxs, x_mins]) + >>> face_corners_y = np.hstack([y_mins, y_mins, y_maxs, y_maxs]) + + # Add face corners as coordinate bounds. 
+ >>> faces_x.bounds = face_corners_x + >>> faces_y.bounds = face_corners_y + + # Use a Mesh to represent the Cube's horizontal geography, by replacing + # the existing face AuxCoords with new MeshCoords. + >>> smc_mesh = Mesh.from_coords(faces_x, faces_y) + >>> for new_coord in smc_mesh.to_MeshCoords("face"): + ... old_coord = wave_cube.coord(new_coord.name()) + ... unstructured_dim, = old_coord.cube_dims(wave_cube) + ... wave_cube.remove_coord(old_coord) + ... wave_cube.add_aux_coord(new_coord, unstructured_dim) + + >>> print(wave_cube) + sea_surface_wave_significant_height / (m) (time: 7; -- : 666328) + Dimension coordinates: + time x - + Mesh coordinates: + latitude - x + longitude - x + Auxiliary coordinates: + forecast_period x - + latitude cell size factor - x + longitude cell size factor - x + Scalar coordinates: + forecast_reference_time 2021-12-05 00:00:00 + Attributes: + SIN4 namelist parameter BETAMAX 1.39 + SMC_grid_type 'seapoint' + WAVEWATCH_III_switches 'NOGRB SHRD PR2 UNO SMC FLX0 LN1 ST4 NL1 BT1 DB1 TR0 BS0 IC0 IS0 REF0 WNT1... + WAVEWATCH_III_version_number '7.13' + altitude_resolution 'n/a' + area 'Global wave model GS512L4EUK' + base_lat_size 0.029296871 + base_lon_size 0.043945305 + ... + + >>> print(wave_cube.mesh) + Mesh : 'unknown' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + + + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: + face coordinates + + + + +.. _ORCA_example: + +`NEMO`_ data on ORCA tripolar grid +---------------------------------- +.. figure:: images/orca_grid.png + :width: 300 + :alt: Plot of ORCA-gridded data from NEMO. + +NEMO can use various grids, but is frequently used with ORCA type grids. +ORCA grids store global data in 2-dimensional ny * nx arrays. All cells are +four-sided. The grids are based on tri-polar layouts, but X and Y spacings are +irregular and not given by any defined functional forms. 
+ +* arrays (ny, nx) of face-located data variables +* arrays (ny, nx) of X+Y face centre coordinates +* arrays (ny, nx, 4) of X+Y face corner coordinates + (all faces are quadrilaterals) + +For simplicity, we treat each face corner as an independent node, and use a face-node +connectivity which simply lists the nodes in sequence, +i.e. [[0, 1, 2, 3], [4, 5, 6, 7], ...]. + +.. Note:: + This is the simplest solution, but produces approx 4x more nodes than + necessary, since the coordinate bounds contain many duplicate locations. + Removing the duplicates is quite easy, but often not necessary. + +To make an unstructured cube, the data must be 'flattened' to convert the given X and Y +dimensions into a single mesh dimension. Since Iris cubes don't support a "reshape" or +"flatten" operations, we create a new cube from the flattened data. + +.. dropdown:: :opticon:`code` + + .. code-block:: python + + >>> import numpy as np + >>> import iris + >>> from iris.coords import AuxCoord, CellMeasure + >>> from iris.cube import Cube + >>> from iris.experimental.ugrid.mesh import Mesh, Connectivity + + + >>> filepath = iris.sample_data_path('orca2_votemper.nc') + >>> cube = iris.load_cube(filepath) + >>> print(cube) + sea_water_potential_temperature / (degC) (-- : 148; -- : 180) + Auxiliary coordinates: + latitude x x + longitude x x + Scalar coordinates: + depth 4.999938 m, bound=(0.0, 10.0) m + time 0001-01-01 12:00:00 + Cell methods: + mean time + Attributes: + Conventions 'CF-1.5' + + + >>> co_x = cube.coord("longitude") + >>> co_y = cube.coord("latitude") + >>> ny, nx = co_x.shape + >>> n_faces = ny * nx + + >>> # Create face coords from flattened face-points + >>> face_x_co = AuxCoord(co_x.points.flatten()) + >>> face_y_co = AuxCoord(co_y.points.flatten()) + >>> assert face_x_co.shape == (n_faces,) + >>> face_x_co.metadata = co_x.metadata + >>> face_y_co.metadata = co_y.metadata + + >>> # Create node coordinates from bound points. 
+ >>> n_nodes = n_faces * 4 + >>> node_x_co = AuxCoord(co_x.bounds.flatten()) + >>> node_y_co = AuxCoord(co_y.bounds.flatten()) + >>> assert node_x_co.shape == (n_nodes,) + >>> node_x_co.metadata = co_x.metadata + >>> node_y_co.metadata = co_y.metadata + + >>> # Create a face-node Connectivity matching the order of nodes in the bounds array + >>> face_node_inds = np.arange(n_nodes).reshape((n_faces, 4)) + >>> face_nodes_conn = Connectivity( + ... indices=face_node_inds, + ... cf_role='face_node_connectivity', + ... long_name='face_inds', units='1', + ... ) + + >>> # Create a mesh object. + >>> mesh = Mesh( + ... topology_dimension=2, + ... node_coords_and_axes=[(node_x_co, 'x'), (node_y_co, 'y')], + ... connectivities=face_nodes_conn, + ... face_coords_and_axes=[(face_x_co, 'x'), (face_y_co, 'y')] + ... ) + >>> print(mesh) + Mesh : 'unknown' + topology_dimension: 2 + node + node_dimension: 'Mesh2d_node' + node coordinates + + + face + face_dimension: 'Mesh2d_face' + face_node_connectivity: + face coordinates + + + + + >>> # Create an unstructured version of the input with flattened data + >>> meshcube = Cube(cube.core_data().flatten()) + >>> meshcube.metadata = cube.metadata + + >>> # Attach the mesh by adding the mesh 'face' MeshCoords into the cube + >>> mesh_dim = meshcube.ndim - 1 + >>> for co in mesh.to_MeshCoords('face'): + ... meshcube.add_aux_coord(co, mesh_dim) + ... + + >>> print(meshcube) + sea_water_potential_temperature / (degC) (-- : 26640) + Mesh coordinates: + latitude x + longitude x + Mesh: + name unknown + location face + Cell methods: + mean time + Attributes: + Conventions 'CF-1.5' + + +.. _WAVEWATCH III: https://github.com/NOAA-EMC/WW3 +.. _FESOM 1.4: https://fesom.de/models/fesom14/ +.. 
_NEMO: https://www.nemo-ocean.eu/ \ No newline at end of file diff --git a/docs/src/further_topics/ugrid/partner_packages.rst b/docs/src/further_topics/ugrid/partner_packages.rst index 8e36f4ffc2..75b54b037f 100644 --- a/docs/src/further_topics/ugrid/partner_packages.rst +++ b/docs/src/further_topics/ugrid/partner_packages.rst @@ -1,3 +1,5 @@ +.. include:: ../../common_links.inc + .. _ugrid partners: Iris' Mesh Partner Packages @@ -97,4 +99,3 @@ Applications .. _GeoVista: https://github.com/bjlittle/geovista .. _PyVista: https://docs.pyvista.org/index.html -.. _iris-esmf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid diff --git a/docs/src/getting_started.rst b/docs/src/getting_started.rst new file mode 100644 index 0000000000..24299a4060 --- /dev/null +++ b/docs/src/getting_started.rst @@ -0,0 +1,15 @@ +.. _getting_started_index: + +Getting Started +=============== + +To get started with Iris we recommend reading :ref:`why_iris` was created and to +explore the examples in the :ref:`gallery_index` after :ref:`installing_iris` +Iris. + +.. toctree:: + :maxdepth: 1 + + why_iris + installing + generated/gallery/index \ No newline at end of file diff --git a/docs/src/index.rst b/docs/src/index.rst index d6fc5f2f7e..531c0e0b26 100644 --- a/docs/src/index.rst +++ b/docs/src/index.rst @@ -1,7 +1,9 @@ +.. include:: common_links.inc .. _iris_docs: -Iris |version| -======================== + +Iris +==== **A powerful, format-agnostic, community-driven Python package for analysing and visualising Earth science data.** @@ -11,149 +13,152 @@ giving you a powerful, format-agnostic interface for working with your data. It excels when working with multi-dimensional Earth Science data, where tabular representations become unwieldy and inefficient. -`CF Standard names `_, -`units `_, and coordinate metadata -are built into Iris, giving you a rich and expressive interface for maintaining -an accurate representation of your data. 
Its treatment of data and -associated metadata as first-class objects includes: - -* visualisation interface based on `matplotlib `_ and - `cartopy `_, -* unit conversion, -* subsetting and extraction, -* merge and concatenate, -* aggregations and reductions (including min, max, mean and weighted averages), -* interpolation and regridding (including nearest-neighbor, linear and - area-weighted), and -* operator overloads (``+``, ``-``, ``*``, ``/``, etc.). - -A number of file formats are recognised by Iris, including CF-compliant NetCDF, -GRIB, and PP, and it has a plugin architecture to allow other formats to be -added seamlessly. - -Building upon `NumPy `_ and -`dask `_, Iris scales from efficient -single-machine workflows right through to multi-core clusters and HPC. -Interoperability with packages from the wider scientific Python ecosystem comes -from Iris' use of standard NumPy/dask arrays as its underlying data storage. - -Iris is part of SciTools, for more information see https://scitools.org.uk/. -For **Iris 2.4** and earlier documentation please see the -:link-badge:`https://scitools.org.uk/iris/docs/v2.4.0/,"legacy documentation",cls=badge-info text-white`. - +For more information see :ref:`why_iris`. .. panels:: :container: container-lg pb-3 - :column: col-lg-4 col-md-4 col-sm-6 col-xs-12 p-2 + :column: col-lg-4 col-md-4 col-sm-6 col-xs-12 p-2 text-center + :img-top-cls: w-50 m-auto px-1 py-2 - Install Iris as a user or developer. - +++ - .. link-button:: installing_iris - :type: ref - :text: Installing Iris - :classes: btn-outline-primary btn-block --- - Example code to create a variety of plots. + :img-top: _static/icon_shuttle.svg + + Information on Iris, how to install and a gallery of examples that + create plots. +++ - .. link-button:: sphx_glr_generated_gallery + .. 
link-button:: getting_started :type: ref - :text: Gallery - :classes: btn-outline-primary btn-block + :text: Getting Started + :classes: btn-outline-info btn-block + + --- - Find out what has recently changed in Iris. + :img-top: _static/icon_instructions.svg + + Learn how to use Iris, including loading, navigating, saving, + plotting and more. +++ - .. link-button:: iris_whatsnew + .. link-button:: user_guide_index :type: ref - :text: What's New - :classes: btn-outline-primary btn-block + :text: User Guide + :classes: btn-outline-info btn-block + --- - Learn how to use Iris. + :img-top: _static/icon_development.svg + + As a developer you can contribute to Iris. +++ - .. link-button:: user_guide_index + .. link-button:: development_where_to_start :type: ref - :text: User Guide - :classes: btn-outline-primary btn-block + :text: Developers Guide + :classes: btn-outline-info btn-block + --- + :img-top: _static/icon_api.svg + Browse full Iris functionality by module. +++ .. link-button:: Iris :type: ref :text: Iris API - :classes: btn-outline-primary btn-block + :classes: btn-outline-info btn-block + --- - As a developer you can contribute to Iris. + :img-top: _static/icon_new_product.svg + + Find out what has recently changed in Iris. +++ - .. link-button:: development_where_to_start + .. link-button:: iris_whatsnew :type: ref - :text: Getting Involved - :classes: btn-outline-primary btn-block + :text: What's New + :classes: btn-outline-info btn-block + + --- + :img-top: _static/icon_thumb.png + + Raise the profile of issues by voting on them. + +++ + .. link-button:: voted_issues_top + :type: ref + :text: Voted Issues + :classes: btn-outline-info btn-block + + +Icons made by `FreePik `_ from +`Flaticon `_ + + +.. _iris_support: + +Support +~~~~~~~ + +We, the Iris developers have adopted `GitHub Discussions`_ to capture any +discussions or support questions related to Iris. + +See also `StackOverflow for "How Do I? 
`_ +that may be useful but we do not actively monitor this. + +The legacy support resources: + +* `Users Google Group `_ +* `Developers Google Group `_ +* `Legacy Documentation`_ (Iris 2.4 or earlier). This is an archive of zip + files of past documentation. You can download, unzip and view the + documentation locally (index.html). There may be some incorrect rendering + and older javascvript (.js) files may show a warning when uncompressing, in + which case we suggest you use a different unzip tool. .. toctree:: - :maxdepth: 1 :caption: Getting Started + :maxdepth: 1 :hidden: - installing - generated/gallery/index + getting_started .. toctree:: - :maxdepth: 1 :caption: User Guide + :maxdepth: 1 :name: userguide_index :hidden: userguide/index - userguide/iris_cubes - userguide/loading_iris_cubes - userguide/saving_iris_cubes - userguide/navigating_a_cube - userguide/subsetting_a_cube - userguide/real_and_lazy_data - userguide/plotting_a_cube - userguide/interpolation_and_regridding - userguide/merge_and_concat - userguide/cube_statistics - userguide/cube_maths - userguide/citation - userguide/code_maintenance - - -.. _developers_guide: + .. toctree:: + :caption: Developers Guide :maxdepth: 1 - :caption: Further Topics + :name: developers_index :hidden: - further_topics/index - further_topics/metadata - further_topics/lenient_metadata - further_topics/lenient_maths - further_topics/ugrid/index + developers_guide/contributing_getting_involved .. toctree:: - :maxdepth: 2 - :caption: Developers Guide - :name: development_index + :caption: Community + :maxdepth: 1 + :name: community_index :hidden: - developers_guide/contributing_getting_involved - developers_guide/gitwash/index - developers_guide/contributing_documentation - developers_guide/contributing_codebase_index - developers_guide/contributing_changes - developers_guide/release + Community .. toctree:: + :caption: Iris API :maxdepth: 1 - :caption: Reference :hidden: generated/api/iris + + +.. 
toctree:: + :caption: What's New in Iris + :maxdepth: 1 + :name: whats_new_index + :hidden: + whatsnew/index - techpapers/index - copyright + +.. todolist:: \ No newline at end of file diff --git a/docs/src/installing.rst b/docs/src/installing.rst index 37a8942ab3..b2481973c0 100644 --- a/docs/src/installing.rst +++ b/docs/src/installing.rst @@ -1,7 +1,7 @@ .. _installing_iris: -Installing Iris -=============== +Installing +========== Iris is available using conda for the following platforms: @@ -14,7 +14,7 @@ Subsystem for Linux). This is a great option to get started with Iris for users and developers. Be aware that we do not currently test against any WSL_ distributions. -.. _WSL: https://docs.microsoft.com/en-us/windows/wsl/install-win10 +.. _WSL: https://learn.microsoft.com/en-us/windows/wsl/install .. note:: Iris is currently supported and tested against |python_support| running on Linux. We do not currently actively test on other @@ -119,9 +119,9 @@ Running the Tests To ensure your setup is configured correctly you can run the test suite using the command:: - python setup.py test + pytest -For more information see :ref:`developer_running_tests`. +For more information see :ref:`test manual env`. 
Custom Site Configuration diff --git a/docs/src/spelling_allow.txt b/docs/src/spelling_allow.txt deleted file mode 100644 index ed883ac3bf..0000000000 --- a/docs/src/spelling_allow.txt +++ /dev/null @@ -1,361 +0,0 @@ -Admin -Albers -Arakawa -Arg -Args -Autoscale -Biggus -CF -CI -Cartopy -Checklist -Color -Conda -Constraining -DAP -Dask -Debian -Duchon -EO -Eos -Exner -Fieldsfile -Fieldsfiles -FillValue -Gb -GeogCS -Hovmoller -Jul -Jun -Jupyter -Lanczos -Mappables -Matplotlib -Mb -Modeling -Mollweide -NetCDF -Nino -PPfield -PPfields -Perez -Proj -Quickplot -Regrids -Royer -Scitools -Scitools -Sep -Stehfest -Steroegraphic -Subsetting -TestCodeFormat -TestLicenseHeaders -Torvalds -Trans -Trenberth -Tri -URIs -URLs -Ubuntu -Ugrid -Unidata -Vol -Vuuren -Workflow -Yury -Zaytsev -Zorder -abf -abl -advection -aggregator -aggregators -alphap -ancils -antimeridian -ap -arg -args -arithmetic -arraylike -atol -auditable -aux -basemap -behaviour -betap -bhulev -biggus -blev -boolean -boundpoints -branchname -broadcastable -bugfix -bugfixes -builtin -bulev -carrée -cartesian -celsius -center -centrepoints -cf -cftime -chunksizes -ci -clabel -cmap -cmpt -codebase -color -colorbar -colorbars -complevel -conda -config -constraining -convertor -coord -coords -cs -datafiles -datatype -datetime -datetimes -ddof -deepcopy -deprecations -der -dewpoint -dict -dicts -diff -discontiguities -discontiguous -djf -docstring -docstrings -doi -dom -dropdown -dtype -dtypes -dx -dy -edgecolor -endian -endianness -equirectangular -eta -etc -fh -fieldsfile -fieldsfiles -fileformat -fileformats -filename -filenames -filepath -filespec -fullname -func -geolocations -github -gregorian -grib -gribapi -gridcell -griddata -gridlines -hPa -hashable -hindcast -hyperlink -hyperlinks -idiff -ieee -ifunc -imagehash -inc -init -inline -inplace -int -interable -interpolator -ints -io -isosurfaces -iterable -jja -jupyter -kwarg -kwargs -landsea -lat -latlon -latlons -lats -lbcode -lbegin -lbext -lbfc -lbft 
-lblrec -lbmon -lbmond -lbnrec -lbrsvd -lbtim -lbuser -lbvc -lbyr -lbyrd -lh -lhs -linewidth -linted -linting -lon -lons -lt -mam -markup -matplotlib -matplotlibrc -max -mdtol -meaned -mercator -metadata -min -mpl -nanmask -nc -ndarray -neighbor -ness -netCDF -netcdf -netcdftime -nimrod -np -nsigma -numpy -nx -ny -online -orog -paramId -params -parsable -pcolormesh -pdf -placeholders -plugin -png -proj -ps -pseudocolor -pseudocolour -pseudocoloured -py -pyplot -quickplot -rST -rc -rd -reST -reStructuredText -rebase -rebases -rebasing -regrid -regridded -regridder -regridders -regridding -regrids -rel -repo -repos -reprojecting -rh -rhs -rst -rtol -scipy -scitools -seekable -setup -sines -sinh -spec -specs -src -ssh -st -stashcode -stashcodes -stats -std -stdout -str -subcube -subcubes -submodule -submodules -subsetting -sys -tanh -tb -testcases -tgt -th -timepoint -timestamp -timesteps -todo -tol -tos -traceback -travis -tripolar -tuple -tuples -txt -udunits -ufunc -ugrid -ukmo -un -unhandled -unicode -unittest -unrotate -unrotated -uris -url -urls -util -var -versioning -vmax -vmin -waypoint -waypoints -whitespace -wildcard -wildcards -windspeeds -withnans -workflow -workflows -xN -xx -xxx -zeroth -zlev -zonal diff --git a/docs/src/sphinxext/image_test_output.py b/docs/src/sphinxext/image_test_output.py deleted file mode 100644 index 9e492a5be9..0000000000 --- a/docs/src/sphinxext/image_test_output.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
- -import json -import re -from typing import Dict, List - -from docutils import nodes -from sphinx.application import Sphinx -from sphinx.util.docutils import SphinxDirective - -ImageRepo = Dict[str, List[str]] - -HASH_MATCH = re.compile(r"([^\/]+)\.png$") - - -def hash_from_url(url: str) -> str: - match = HASH_MATCH.search(url) - if not match: - raise ValueError(f"url {url} does not match form `http...hash.png`") - else: - return match.groups()[0] - - -class ImageTestDirective(SphinxDirective): - def run(self): - with open(self.config["image_test_json"], "r") as fh: - imagerepo = json.load(fh) - enum_list = nodes.enumerated_list() - nodelist = [] - nodelist.append(enum_list) - for test in sorted(imagerepo): - link_node = nodes.raw( - "", - f'{test}', - format="html", - ) - li_node = nodes.list_item("") - li_node += link_node - enum_list += li_node - return nodelist - - -def collect_imagehash_pages(app: Sphinx): - """Generate pages for each entry in the imagerepo.json""" - with open(app.config["image_test_json"], "r") as fh: - imagerepo: ImageRepo = json.load(fh) - pages = [] - for test, hashfiles in imagerepo.items(): - hashstrs = [hash_from_url(h) for h in hashfiles] - pages.append( - ( - f"generated/image_test/{test}", - {"test": test, "hashfiles": zip(hashstrs, hashfiles)}, - "imagehash.html", - ) - ) - return pages - - -def setup(app: Sphinx): - app.add_config_value( - "image_test_json", - "../../lib/iris/tests/results/imagerepo.json", - "html", - ) - - app.add_directive("imagetest-list", ImageTestDirective) - app.connect("html-collect-pages", collect_imagehash_pages) - - return { - "version": "0.1", - "parallel_read_safe": True, - "parallel_write_safe": True, - } diff --git a/docs/src/techpapers/um_files_loading.rst b/docs/src/techpapers/um_files_loading.rst index 72d34962ce..f8c94cab08 100644 --- a/docs/src/techpapers/um_files_loading.rst +++ b/docs/src/techpapers/um_files_loading.rst @@ -350,7 +350,7 @@ information is contained in the 
:attr:`~iris.coords.Coord.units` property. always 1st Jan 1970 (times before this are represented as negative values). The units.calendar property of time coordinates is set from the lowest decimal -digit of LBTIM, known as LBTIM.IC. Note that the non-gregorian calendars (e.g. +digit of LBTIM, known as LBTIM.IC. Note that the non-standard calendars (e.g. 360-day 'model' calendar) are defined in CF, not udunits. There are a number of different time encoding methods used in UM data, but the diff --git a/docs/src/userguide/citation.rst b/docs/src/userguide/citation.rst index 0a3a85fb89..1498b9dfe1 100644 --- a/docs/src/userguide/citation.rst +++ b/docs/src/userguide/citation.rst @@ -15,11 +15,12 @@ For example:: @manual{Iris, author = {{Met Office}}, - title = {Iris: A Python package for analysing and visualising meteorological and oceanographic data sets}, - edition = {v1.2}, - year = {2010 - 2013}, + title = {Iris: A powerful, format-agnostic, and community-driven Python package for analysing and visualising Earth science data }, + edition = {v3.4}, + year = {2010 - 2022}, address = {Exeter, Devon }, - url = {http://scitools.org.uk/} + url = {http://scitools.org.uk/}, + doi = {10.5281/zenodo.7386117} } @@ -33,7 +34,7 @@ Suggested format:: For example:: - Iris. v1.2. 28-Feb-2013. Met Office. UK. https://github.com/SciTools/iris/archive/v1.2.0.tar.gz 01-03-2013 + Iris. v3.4. 1-Dec-2022. Met Office. UK. https://doi.org/10.5281/zenodo.7386117 22-12-2022 ******************** @@ -46,7 +47,7 @@ Suggested format:: For example:: - Iris. Met Office. git@github.com:SciTools/iris.git 06-03-2013 + Iris. Met Office. git@github.com:SciTools/iris.git 22-12-2022 .. 
_How to cite and describe software: https://software.ac.uk/how-cite-software diff --git a/docs/src/userguide/code_maintenance.rst b/docs/src/userguide/code_maintenance.rst index b2b498bc80..c01c1975a7 100644 --- a/docs/src/userguide/code_maintenance.rst +++ b/docs/src/userguide/code_maintenance.rst @@ -12,17 +12,17 @@ In practice, as Iris develops, most users will want to periodically upgrade their installed version to access new features or at least bug fixes. This is obvious if you are still developing other code that uses Iris, or using -code from other sources. +code from other sources. However, even if you have only legacy code that remains untouched, some code maintenance effort is probably still necessary: - * On the one hand, *in principle*, working code will go on working, as long - as you don't change anything else. +* On the one hand, *in principle*, working code will go on working, as long + as you don't change anything else. - * However, such "version stasis" can easily become a growing burden, if you - are simply waiting until an update becomes unavoidable, often that will - eventually occur when you need to update some other software component, - for some completely unconnected reason. +* However, such "version stasis" can easily become a growing burden, if you + are simply waiting until an update becomes unavoidable, often that will + eventually occur when you need to update some other software component, + for some completely unconnected reason. Principles of Change Management @@ -35,13 +35,13 @@ In Iris, however, we aim to reduce code maintenance problems to an absolute minimum by following defined change management rules. 
These ensure that, *within a major release number* : - * you can be confident that your code will still work with subsequent minor - releases +* you can be confident that your code will still work with subsequent minor + releases - * you will be aware of future incompatibility problems in advance +* you will be aware of future incompatibility problems in advance - * you can defer making code compatibility changes for some time, until it - suits you +* you can defer making code compatibility changes for some time, until it + suits you The above applies to minor version upgrades : e.g. code that works with version "1.4.2" should still work with a subsequent minor release such as "1.5.0" or diff --git a/docs/src/userguide/cube_maths.rst b/docs/src/userguide/cube_maths.rst index e8a1744a44..56a2041bd3 100644 --- a/docs/src/userguide/cube_maths.rst +++ b/docs/src/userguide/cube_maths.rst @@ -5,8 +5,8 @@ Cube Maths ========== -The section :doc:`navigating_a_cube` highlighted that -every cube has a data attribute; +The section :doc:`navigating_a_cube` highlighted that +every cube has a data attribute; this attribute can then be manipulated directly:: cube.data -= 273.15 @@ -37,8 +37,8 @@ Let's load some air temperature which runs from 1860 to 2100:: filename = iris.sample_data_path('E1_north_america.nc') air_temp = iris.load_cube(filename, 'air_temperature') -We can now get the first and last time slices using indexing -(see :ref:`subsetting_a_cube` for a reminder):: +We can now get the first and last time slices using indexing +(see :ref:`cube_indexing` for a reminder):: t_first = air_temp[0, :, :] t_last = air_temp[-1, :, :] @@ -50,8 +50,8 @@ We can now get the first and last time slices using indexing t_first = air_temp[0, :, :] t_last = air_temp[-1, :, :] -And finally we can subtract the two. -The result is a cube of the same size as the original two time slices, +And finally we can subtract the two. 
+The result is a cube of the same size as the original two time slices, but with the data representing their difference: >>> print(t_last - t_first) @@ -70,8 +70,8 @@ but with the data representing their difference: .. note:: - Notice that the coordinates "time" and "forecast_period" have been removed - from the resultant cube; + Notice that the coordinates "time" and "forecast_period" have been removed + from the resultant cube; this is because these coordinates differed between the two input cubes. @@ -165,18 +165,24 @@ broadcasting behaviour:: >>> print(result.summary(True)) unknown / (K) (time: 240; latitude: 37; longitude: 49) + +.. seealso:: + + Relevant gallery example: + :ref:`sphx_glr_generated_gallery_general_plot_anomaly_log_colouring.py` (Anomaly) + Combining Multiple Phenomena to Form a New One ---------------------------------------------- -Combining cubes of potential-temperature and pressure we can calculate +Combining cubes of potential-temperature and pressure we can calculate the associated temperature using the equation: .. math:: - + T = \theta (\frac{p}{p_0}) ^ {(287.05 / 1005)} -Where :math:`p` is pressure, :math:`\theta` is potential temperature, -:math:`p_0` is the potential temperature reference pressure +Where :math:`p` is pressure, :math:`\theta` is potential temperature, +:math:`p_0` is the potential temperature reference pressure and :math:`T` is temperature. 
First, let's load pressure and potential temperature cubes:: @@ -185,7 +191,7 @@ First, let's load pressure and potential temperature cubes:: phenomenon_names = ['air_potential_temperature', 'air_pressure'] pot_temperature, pressure = iris.load_cubes(filename, phenomenon_names) -In order to calculate :math:`\frac{p}{p_0}` we can define a coordinate which +In order to calculate :math:`\frac{p}{p_0}` we can define a coordinate which represents the standard reference pressure of 1000 hPa:: import iris.coords @@ -199,7 +205,7 @@ the :meth:`iris.coords.Coord.convert_units` method:: p0.convert_units(pressure.units) -Now we can combine all of this information to calculate the air temperature +Now we can combine all of this information to calculate the air temperature using the equation above:: temperature = pot_temperature * ( (pressure / p0) ** (287.05 / 1005) ) @@ -213,12 +219,12 @@ The result could now be plotted using the guidance provided in the .. only:: html - A very similar example to this can be found in + A very similar example to this can be found in :ref:`sphx_glr_generated_gallery_meteorology_plot_deriving_phenomena.py`. .. only:: latex - A very similar example to this can be found in the examples section, + A very similar example to this can be found in the examples section, with the title "Deriving Exner Pressure and Air Temperature". .. _cube_maths_combining_units: @@ -243,7 +249,7 @@ unit (if ``a`` had units ``'m2'`` then ``a ** 0.5`` would result in a cube with units ``'m'``). Iris inherits units from `cf_units `_ -which in turn inherits from `UDUNITS `_. +which in turn inherits from `UDUNITS `_. As well as the units UDUNITS provides, cf units also provides the units ``'no-unit'`` and ``'unknown'``. 
A unit of ``'no-unit'`` means that the associated data is not suitable for describing with a unit, cf units diff --git a/docs/src/userguide/cube_statistics.rst b/docs/src/userguide/cube_statistics.rst index 980f1e132f..08297c2a51 100644 --- a/docs/src/userguide/cube_statistics.rst +++ b/docs/src/userguide/cube_statistics.rst @@ -4,6 +4,11 @@ Cube Statistics =============== +.. seealso:: + + Relevant gallery example: + :ref:`sphx_glr_generated_gallery_general_plot_zonal_means.py` (Collapsing) + .. _cube-statistics-collapsing: Collapsing Entire Data Dimensions diff --git a/docs/src/userguide/glossary.rst b/docs/src/userguide/glossary.rst new file mode 100644 index 0000000000..5c24f03372 --- /dev/null +++ b/docs/src/userguide/glossary.rst @@ -0,0 +1,214 @@ +.. include:: ../common_links.inc + +.. _glossary: + +Glossary +============= + +.. glossary:: + + Cartopy + A python package for producing maps, and other geospatial data. + Allows plotting on these maps, over a range of projections. + + | **Related:** :term:`Matplotlib` + | **More information:** `CartoPy Site `_ + | + + CF Conventions + Rules for storing meteorological Climate and Forecast data in + :term:`NetCDF Format` files, defining a standard metadata format to + describe what the data is. + This also forms the data model which iris is based on. + + | **Related:** :term:`NetCDF Format` + | **More information:** `CF Conventions `_ + | + + Coordinate + A container for data points, comes in three main flavours. + + - Dimensional Coordinate - + A coordinate that describes a single data dimension of a cube. + They can only contain numerical values, in a sorted order (ascending + or descending). + - Auxiliary Coordinate - + A coordinate that can map to multiple data dimensions. Can + contain any type of data. + - Scalar Coordinate - + A coordinate that is not mapped to any data dimension, instead + representing the cube as a whole. 
+ + | **Related:** :term:`Cube` + | **More information:** :doc:`iris_cubes` + | + + Cube + Cubes are the main method of storing data in Iris. A cube can consist of: + + - Array of :term:`Phenomenon` Data (Required) + - :term:`Coordinates ` + - :term:`Standard Name` + - :term:`Long Name` + - :term:`Unit` + - :term:`Cell Methods ` + - :term:`Coordinate Factories ` + + | **Related:** :term:`NumPy` + | **More information:** :doc:`iris_cubes` + | + + Cell Method + A cell method represents that a cube's data has been derived from + a past statistical operation, such as a + MEAN or SUM operation. + + | **Related:** :term:`Cube` + | **More information:** :doc:`iris_cubes` + | + + Coordinate Factory + A coordinate factory derives coordinates (sometimes referred to as + derived coordinates) from the values of existing coordinates. + E.g. A hybrid height factory might use "height above sea level" + and "height at ground level" coordinate data to calculate a + "height above ground level" coordinate. + + | **Related:** :term:`Cube` + | **More information:** :doc:`iris_cubes` + | + + + Dask + A data analytics python library. Iris predominantly uses Dask Arrays; + a collection of NumPy-esque arrays. The data is operated in batches, + so that not all data is in RAM at once. + + | **Related:** :term:`Lazy Data` **|** :term:`NumPy` + | **More information:** :doc:`real_and_lazy_data` + | + + Fields File (FF) Format + A meteorological file format, the output of the Unified Model. + + | **Related:** :term:`GRIB Format` + **|** :term:`Post Processing (PP) Format` **|** :term:`NetCDF Format` + | **More information:** `Unified Model `_ + | + + GRIB Format + A WMO-standard meteorological file format. 
+ + | **Related:** :term:`Fields File (FF) Format` + **|** :term:`Post Processing (PP) Format` **|** :term:`NetCDF Format` + | **More information:** `GRIB 1 User Guide `_ + **|** `GRIB 2 User Guide.pdf `_ + | + + Lazy Data + Data stored in hard drive, and then temporarily loaded into RAM in + batches when needed. Allows of less memory usage and faster performance, + thanks to parallel processing. + + | **Related:** :term:`Dask` **|** :term:`Real Data` + | **More information:** :doc:`real_and_lazy_data` + | + + Long Name + A name describing a :term:`phenomenon`, not limited to the + the same restraints as :term:`standard name`. + + | **Related:** :term:`Standard Name` **|** :term:`Cube` + | **More information:** :doc:`iris_cubes` + | + + Matplotlib + A python package for plotting and projecting data in a wide variety + of formats. + + | **Related:** :term:`CartoPy` **|** :term:`NumPy` + | **More information:** `matplotlib`_ + | + + Metadata + The information which describes a phenomenon. + Within Iris specifically, all information which + distinguishes one phenomenon from another, + e.g. :term:`units ` or :term:`Cell Methods ` + + | **Related:** :term:`Phenomenon` **|** :term:`Cube` + | **More information:** :doc:`../further_topics/metadata` + | + + NetCDF Format + A flexible file format for storing multi-dimensional array-like data. + When Iris loads this format, it also especially recognises and interprets data + encoded according to the :term:`CF Conventions`. + + __ `NetCDF4`_ + + | **Related:** :term:`Fields File (FF) Format` + **|** :term:`GRIB Format` **|** :term:`Post Processing (PP) Format` + | **More information:** `NetCDF-4 Python Git`__ + | + + NumPy + A mathematical Python library, predominantly based around + multi-dimensional arrays. + + | **Related:** :term:`Dask` **|** :term:`Cube` + **|** :term:`Xarray` + | **More information:** `NumPy.org `_ + | + + Phenomenon + The primary data which is measured, usually within a cube, e.g. + air temperature. 
+ + | **Related:** :term:`Metadata` + **|** :term:`Standard Name` **|** :term:`Cube` + | **More information:** :doc:`iris_cubes` + | + + Post Processing (PP) Format + A meteorological file format, created from a post processed + :term:`Fields File (FF) Format`. + + | **Related:** :term:`GRIB Format` **|** :term:`NetCDF Format` + | **More information:** `PP Wikipedia Page `_ + | + + Real Data + Data that has been loaded into RAM, as opposed to sitting + on the hard drive. + + | **Related:** :term:`Lazy Data` **|** :term:`NumPy` + | **More information:** :doc:`real_and_lazy_data` + | + + Standard Name + A name describing a :term:`phenomenon`, one from a fixed list + defined at `CF Standard Names `_. + + | **Related:** :term:`Long Name` **|** :term:`Cube` + | **More information:** :doc:`iris_cubes` + | + + Unit + The unit with which the :term:`phenomenon` is measured e.g. m / sec. + + | **Related:** :term:`Cube` + | **More information:** :doc:`iris_cubes` + | + + Xarray + A python library for sophisticated labelled multi-dimensional operations. + Has a broader scope than Iris - it is not focused on meteorological data. + + | **Related:** :term:`NumPy` + | **More information:** `Xarray Documentation `_ + | + +---- + +`To top `_ diff --git a/docs/src/userguide/index.rst b/docs/src/userguide/index.rst index 2a3b32fe11..fdd0c4d03e 100644 --- a/docs/src/userguide/index.rst +++ b/docs/src/userguide/index.rst @@ -1,31 +1,48 @@ .. _user_guide_index: .. _user_guide_introduction: -Introduction -============ +User Guide +========== -If you are reading this user guide for the first time it is strongly recommended that you read the user guide -fully before experimenting with your own data files. +If you are reading this user guide for the first time it is strongly +recommended that you read the user guide fully before experimenting with your +own data files. 
- -Much of the content has supplementary links to the reference documentation; you will not need to follow these -links in order to understand the guide but they may serve as a useful reference for future exploration. +Much of the content has supplementary links to the reference documentation; +you will not need to follow these links in order to understand the guide but +they may serve as a useful reference for future exploration. .. only:: html - Since later pages depend on earlier ones, try reading this user guide sequentially using the ``next`` and ``previous`` links. - - -* :doc:`iris_cubes` -* :doc:`loading_iris_cubes` -* :doc:`saving_iris_cubes` -* :doc:`navigating_a_cube` -* :doc:`subsetting_a_cube` -* :doc:`real_and_lazy_data` -* :doc:`plotting_a_cube` -* :doc:`interpolation_and_regridding` -* :doc:`merge_and_concat` -* :doc:`cube_statistics` -* :doc:`cube_maths` -* :doc:`citation` -* :doc:`code_maintenance` + Since later pages depend on earlier ones, try reading this user guide + sequentially using the ``next`` and ``previous`` links at the bottom + of each page. + + +.. toctree:: + :maxdepth: 2 + + iris_cubes + loading_iris_cubes + saving_iris_cubes + navigating_a_cube + subsetting_a_cube + real_and_lazy_data + plotting_a_cube + interpolation_and_regridding + merge_and_concat + cube_statistics + cube_maths + citation + code_maintenance + glossary + + +.. toctree:: + :maxdepth: 2 + :caption: Further Topics + + ../further_topics/metadata + ../further_topics/lenient_metadata + ../further_topics/lenient_maths + ../further_topics/ugrid/index diff --git a/docs/src/userguide/interpolation_and_regridding.rst b/docs/src/userguide/interpolation_and_regridding.rst index f590485606..deae4427ed 100644 --- a/docs/src/userguide/interpolation_and_regridding.rst +++ b/docs/src/userguide/interpolation_and_regridding.rst @@ -19,14 +19,14 @@ In Iris we refer to the available types of interpolation and regridding as `schemes`. 
The following are the interpolation schemes that are currently available in Iris: - * linear interpolation (:class:`iris.analysis.Linear`), and - * nearest-neighbour interpolation (:class:`iris.analysis.Nearest`). +* linear interpolation (:class:`iris.analysis.Linear`), and +* nearest-neighbour interpolation (:class:`iris.analysis.Nearest`). The following are the regridding schemes that are currently available in Iris: - * linear regridding (:class:`iris.analysis.Linear`), - * nearest-neighbour regridding (:class:`iris.analysis.Nearest`), and - * area-weighted regridding (:class:`iris.analysis.AreaWeighted`, first-order conservative). +* linear regridding (:class:`iris.analysis.Linear`), +* nearest-neighbour regridding (:class:`iris.analysis.Nearest`), and +* area-weighted regridding (:class:`iris.analysis.AreaWeighted`, first-order conservative). The linear, nearest-neighbor, and area-weighted regridding schemes support lazy regridding, i.e. if the source cube has lazy data, the resulting cube @@ -42,8 +42,8 @@ Interpolation Interpolating a cube is achieved with the :meth:`~iris.cube.Cube.interpolate` method. This method expects two arguments: - #. the sample points to interpolate, and - #. the interpolation scheme to use. +#. the sample points to interpolate, and +#. the interpolation scheme to use. The result is a new cube, interpolated at the sample points. @@ -51,9 +51,9 @@ Sample points must be defined as an iterable of ``(coord, value(s))`` pairs. The `coord` argument can be either a coordinate name or coordinate instance. The specified coordinate must exist on the cube being interpolated! 
For example: - * coordinate names and scalar sample points: ``[('latitude', 51.48), ('longitude', 0)]``, - * a coordinate instance and a scalar sample point: ``[(cube.coord('latitude'), 51.48)]``, and - * a coordinate name and a NumPy array of sample points: ``[('longitude', np.linspace(-11, 2, 14))]`` +* coordinate names and scalar sample points: ``[('latitude', 51.48), ('longitude', 0)]``, +* a coordinate instance and a scalar sample point: ``[(cube.coord('latitude'), 51.48)]``, and +* a coordinate name and a NumPy array of sample points: ``[('longitude', np.linspace(-11, 2, 14))]`` are all examples of valid sample points. @@ -175,11 +175,11 @@ The extrapolation mode is controlled by the ``extrapolation_mode`` keyword. For the available interpolation schemes available in Iris, the ``extrapolation_mode`` keyword must be one of: - * ``extrapolate`` -- the extrapolation points will be calculated by extending the gradient of the closest two points, - * ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate, - * ``nan`` -- the extrapolation points will be be set to NaN, - * ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray, or - * ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. +* ``extrapolate`` -- the extrapolation points will be calculated by extending the gradient of the closest two points, +* ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate, +* ``nan`` -- the extrapolation points will be be set to NaN, +* ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray, or +* ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. 
Using an extrapolation mode is achieved by constructing an interpolation scheme with the extrapolation mode keyword set as required. The constructed scheme @@ -206,8 +206,8 @@ intensive part of an interpolation is setting up the interpolator. To cache an interpolator you must set up an interpolator scheme and call the scheme's interpolator method. The interpolator method takes as arguments: - #. a cube to be interpolated, and - #. an iterable of coordinate names or coordinate instances of the coordinates that are to be interpolated over. +#. a cube to be interpolated, and +#. an iterable of coordinate names or coordinate instances of the coordinates that are to be interpolated over. For example: @@ -244,8 +244,8 @@ regridding is based on the **horizontal** grid of *another cube*. Regridding a cube is achieved with the :meth:`cube.regrid() ` method. This method expects two arguments: - #. *another cube* that defines the target grid onto which the cube should be regridded, and - #. the regridding scheme to use. +#. *another cube* that defines the target grid onto which the cube should be regridded, and +#. the regridding scheme to use. .. note:: @@ -278,15 +278,15 @@ mode when defining the regridding scheme. For the available regridding schemes in Iris, the ``extrapolation_mode`` keyword must be one of: - * ``extrapolate`` -- +* ``extrapolate`` -- - * for :class:`~iris.analysis.Linear` the extrapolation points will be calculated by extending the gradient of the closest two points. - * for :class:`~iris.analysis.Nearest` the extrapolation points will take their value from the nearest source point. + * for :class:`~iris.analysis.Linear` the extrapolation points will be calculated by extending the gradient of the closest two points. + * for :class:`~iris.analysis.Nearest` the extrapolation points will take their value from the nearest source point. - * ``nan`` -- the extrapolation points will be be set to NaN. 
- * ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate. - * ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray. - * ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. +* ``nan`` -- the extrapolation points will be set to NaN. +* ``error`` -- a ValueError exception will be raised, notifying an attempt to extrapolate. +* ``mask`` -- the extrapolation points will always be masked, even if the source data is not a MaskedArray. +* ``nanmask`` -- if the source data is a MaskedArray the extrapolation points will be masked. Otherwise they will be set to NaN. The ``rotated_psl`` cube is defined on a limited area rotated pole grid. If we regridded the ``rotated_psl`` cube onto the global grid as defined by the ``global_air_temp`` cube @@ -395,8 +395,8 @@ intensive part of a regrid is setting up the regridder. To cache a regridder you must set up a regridder scheme and call the scheme's regridder method. The regridder method takes as arguments: - #. a cube (that is to be regridded) defining the source grid, and - #. a cube defining the target grid to regrid the source cube to. +#. a cube (that is to be regridded) defining the source grid, and +#. a cube defining the target grid to regrid the source cube to. For example: diff --git a/docs/src/userguide/iris_cubes.rst b/docs/src/userguide/iris_cubes.rst index d13dee369c..29d8f3cefc 100644 --- a/docs/src/userguide/iris_cubes.rst +++ b/docs/src/userguide/iris_cubes.rst @@ -4,82 +4,105 @@ Iris Data Structures ==================== -The top level object in Iris is called a cube. A cube contains data and metadata about a phenomenon. +The top level object in Iris is called a cube. A cube contains data and +metadata about a phenomenon. 
-In Iris, a cube is an interpretation of the *Climate and Forecast (CF) Metadata Conventions* whose purpose is to: +In Iris, a cube is an interpretation of the *Climate and Forecast (CF) +Metadata Conventions* whose purpose is to: - *require conforming datasets to contain sufficient metadata that they are self-describing... including physical - units if appropriate, and that each value can be located in space (relative to earth-based coordinates) and time.* +.. panels:: + :container: container-lg pb-3 + :column: col-lg-12 p-2 -Whilst the CF conventions are often mentioned alongside NetCDF, Iris implements several major format importers which can take -files of specific formats and turn them into Iris cubes. Additionally, a framework is provided which allows users -to extend Iris' import capability to cater for specialist or unimplemented formats. + *require conforming datasets to contain sufficient metadata that they are + self-describing... including physical units if appropriate, and that each + value can be located in space (relative to earth-based coordinates) and + time.* -A single cube describes one and only one phenomenon, always has a name, a unit and -an n-dimensional data array to represents the cube's phenomenon. In order to locate the -data spatially, temporally, or in any other higher-dimensional space, a collection of *coordinates* -exist on the cube. + +Whilst the CF conventions are often mentioned alongside NetCDF, Iris implements +several major format importers which can take files of specific formats and +turn them into Iris cubes. Additionally, a framework is provided which allows +users to extend Iris' import capability to cater for specialist or +unimplemented formats. + +A single cube describes one and only one phenomenon, always has a name, a unit +and an n-dimensional data array to represent the cube's phenomenon. 
In order +to locate the data spatially, temporally, or in any other higher-dimensional +space, a collection of *coordinates* exist on the cube. Coordinates =========== -A coordinate is a container to store metadata about some dimension(s) of a cube's data array and therefore, -by definition, its phenomenon. - - * Each coordinate has a name and a unit. - * When a coordinate is added to a cube, the data dimensions that it represents are also provided. - - * The shape of a coordinate is always the same as the shape of the associated data dimension(s) on the cube. - * A dimension not explicitly listed signifies that the coordinate is independent of that dimension. - * Each dimension of a coordinate must be mapped to a data dimension. The only coordinates with no mapping are - scalar coordinates. - - * Depending on the underlying data that the coordinate is representing, its values may be discrete points or be - bounded to represent interval extents (e.g. temperature at *point x* **vs** rainfall accumulation *between 0000-1200 hours*). - * Coordinates have an attributes dictionary which can hold arbitrary extra metadata, excluding certain restricted CF names - * More complex coordinates may contain a coordinate system which is necessary to fully interpret the values - contained within the coordinate. - +A coordinate is a container to store metadata about some dimension(s) of a +cube's data array and therefore, by definition, its phenomenon. + +* Each coordinate has a name and a unit. +* When a coordinate is added to a cube, the data dimensions that it + represents are also provided. + + * The shape of a coordinate is always the same as the shape of the + associated data dimension(s) on the cube. + * A dimension not explicitly listed signifies that the coordinate is + independent of that dimension. + * Each dimension of a coordinate must be mapped to a data dimension. The + only coordinates with no mapping are scalar coordinates. 
+ +* Depending on the underlying data that the coordinate is representing, its + values may be discrete points or be bounded to represent interval extents + (e.g. temperature at *point x* **vs** rainfall accumulation *between + 0000-1200 hours*). +* Coordinates have an attributes dictionary which can hold arbitrary extra + metadata, excluding certain restricted CF names +* More complex coordinates may contain a coordinate system which is + necessary to fully interpret the values contained within the coordinate. + There are two classes of coordinates: - **DimCoord** - - * Numeric - * Monotonic - * Representative of, at most, a single data dimension (1d) +**DimCoord** + +* Numeric +* Monotonic +* Representative of, at most, a single data dimension (1d) + +**AuxCoord** + +* May be of any type, including strings +* May represent multiple data dimensions (n-dimensional) - **AuxCoord** - - * May be of any type, including strings - * May represent multiple data dimensions (n-dimensional) - Cube ==== A cube consists of: - * a standard name and/or a long name and an appropriate unit - * a data array who's values are representative of the phenomenon - * a collection of coordinates and associated data dimensions on the cube's data array, which are split into two separate lists: +* a standard name and/or a long name and an appropriate unit +* a data array who's values are representative of the phenomenon +* a collection of coordinates and associated data dimensions on the cube's + data array, which are split into two separate lists: + + * *dimension coordinates* - DimCoords which uniquely map to exactly one + data dimension, ordered by dimension. + * *auxiliary coordinates* - DimCoords or AuxCoords which map to as many + data dimensions as the coordinate has dimensions. - * *dimension coordinates* - DimCoords which uniquely map to exactly one data dimension, ordered by dimension. 
- * *auxiliary coordinates* - DimCoords or AuxCoords which map to as many data dimensions as the coordinate has dimensions. - - * an attributes dictionary which, other than some protected CF names, can hold arbitrary extra metadata. - * a list of cell methods to represent operations which have already been applied to the data (e.g. "mean over time") - * a list of coordinate "factories" used for deriving coordinates from the values of other coordinates in the cube +* an attributes dictionary which, other than some protected CF names, can + hold arbitrary extra metadata. +* a list of cell methods to represent operations which have already been + applied to the data (e.g. "mean over time") +* a list of coordinate "factories" used for deriving coordinates from the + values of other coordinates in the cube Cubes in Practice ----------------- - A Simple Cube Example ===================== -Suppose we have some gridded data which has 24 air temperature readings (in Kelvin) which is located at -4 different longitudes, 2 different latitudes and 3 different heights. Our data array can be represented pictorially: +Suppose we have some gridded data which has 24 air temperature readings +(in Kelvin) which is located at 4 different longitudes, 2 different latitudes +and 3 different heights. Our data array can be represented pictorially: .. image:: multi_array.png @@ -87,61 +110,66 @@ Where dimensions 0, 1, and 2 have lengths 3, 2 and 4 respectively. 
The Iris cube to represent this data would consist of: - * a standard name of ``air_temperature`` and a unit of ``kelvin`` - * a data array of shape ``(3, 2, 4)`` - * a coordinate, mapping to dimension 0, consisting of: - - * a standard name of ``height`` and unit of ``meters`` - * an array of length 3 representing the 3 ``height`` points - - * a coordinate, mapping to dimension 1, consisting of: - - * a standard name of ``latitude`` and unit of ``degrees`` - * an array of length 2 representing the 2 latitude points - * a coordinate system such that the ``latitude`` points could be fully located on the globe - - * a coordinate, mapping to dimension 2, consisting of: - - * a standard name of ``longitude`` and unit of ``degrees`` - * an array of length 4 representing the 4 longitude points - * a coordinate system such that the ``longitude`` points could be fully located on the globe - +* a standard name of ``air_temperature`` and a unit of ``kelvin`` +* a data array of shape ``(3, 2, 4)`` +* a coordinate, mapping to dimension 0, consisting of: + + * a standard name of ``height`` and unit of ``meters`` + * an array of length 3 representing the 3 ``height`` points +* a coordinate, mapping to dimension 1, consisting of: + * a standard name of ``latitude`` and unit of ``degrees`` + * an array of length 2 representing the 2 latitude points + * a coordinate system such that the ``latitude`` points could be fully + located on the globe -Pictorially the cube has taken on more information than a simple array: +* a coordinate, mapping to dimension 2, consisting of: + + * a standard name of ``longitude`` and unit of ``degrees`` + * an array of length 4 representing the 4 longitude points + * a coordinate system such that the ``longitude`` points could be fully + located on the globe + +Pictorially the cube has taken on more information than a simple array: .. image:: multi_array_to_cube.png -Additionally further information may be optionally attached to the cube. 
-For example, it is possible to attach any of the following: - - * a coordinate, not mapping to any data dimensions, consisting of: - - * a standard name of ``time`` and unit of ``days since 2000-01-01 00:00`` - * a data array of length 1 representing the time that the data array is valid for - - * an auxiliary coordinate, mapping to dimensions 1 and 2, consisting of: - - * a long name of ``place name`` and no unit - * a 2d string array of shape ``(2, 4)`` with the names of the 8 places that the lat/lons correspond to - - * an auxiliary coordinate "factory", which can derive its own mapping, consisting of: - - * a standard name of ``height`` and a unit of ``feet`` - * knowledge of how data values for this coordinate can be calculated given the ``height in meters`` coordinate - - * a cell method of "mean" over "ensemble" to indicate that the data has been meaned over - a collection of "ensembles" (i.e. multiple model runs). +Additionally further information may be optionally attached to the cube. +For example, it is possible to attach any of the following: + +* a coordinate, not mapping to any data dimensions, consisting of: + + * a standard name of ``time`` and unit of ``days since 2000-01-01 00:00`` + * a data array of length 1 representing the time that the data array is + valid for + +* an auxiliary coordinate, mapping to dimensions 1 and 2, consisting of: + + * a long name of ``place name`` and no unit + * a 2d string array of shape ``(2, 4)`` with the names of the 8 places + that the lat/lons correspond to + +* an auxiliary coordinate "factory", which can derive its own mapping, + consisting of: + + * a standard name of ``height`` and a unit of ``feet`` + * knowledge of how data values for this coordinate can be calculated + given the ``height in meters`` coordinate + +* a cell method of "mean" over "ensemble" to indicate that the data has been + meaned over a collection of "ensembles" (i.e. multiple model runs). 
Printing a Cube =============== -Every Iris cube can be printed to screen as you will see later in the user guide. It is worth familiarising yourself with the -output as this is the quickest way of inspecting the contents of a cube. Here is the result of printing a real life cube: +Every Iris cube can be printed to screen as you will see later in the user +guide. It is worth familiarising yourself with the output as this is the +quickest way of inspecting the contents of a cube. Here is the result of +printing a real life cube: .. _hybrid_cube_printout: @@ -150,7 +178,7 @@ output as this is the quickest way of inspecting the contents of a cube. Here is import iris filename = iris.sample_data_path('uk_hires.pp') - # NOTE: Every time the output of this cube changes, the full list of deductions below should be re-assessed. + # NOTE: Every time the output of this cube changes, the full list of deductions below should be re-assessed. print(iris.load_cube(filename, 'air_potential_temperature')) .. testoutput:: @@ -178,16 +206,22 @@ output as this is the quickest way of inspecting the contents of a cube. Here is Using this output we can deduce that: - * The cube represents air potential temperature. - * There are 4 data dimensions, and the data has a shape of ``(3, 7, 204, 187)`` - * The 4 data dimensions are mapped to the ``time``, ``model_level_number``, - ``grid_latitude``, ``grid_longitude`` coordinates respectively - * There are three 1d auxiliary coordinates and one 2d auxiliary (``surface_altitude``) - * There is a single ``altitude`` derived coordinate, which spans 3 data dimensions - * There are 7 distinct values in the "model_level_number" coordinate. Similar inferences can - be made for the other dimension coordinates. - * There are 7, not necessarily distinct, values in the ``level_height`` coordinate. - * There is a single ``forecast_reference_time`` scalar coordinate representing the entire cube. 
- * The cube has one further attribute relating to the phenomenon. - In this case the originating file format, PP, encodes information in a STASH code which in some cases can - be useful for identifying advanced experiment information relating to the phenomenon. +* The cube represents air potential temperature. +* There are 4 data dimensions, and the data has a shape of ``(3, 7, 204, 187)`` +* The 4 data dimensions are mapped to the ``time``, ``model_level_number``, + ``grid_latitude``, ``grid_longitude`` coordinates respectively +* There are three 1d auxiliary coordinates and one 2d auxiliary + (``surface_altitude``) +* There is a single ``altitude`` derived coordinate, which spans 3 data + dimensions +* There are 7 distinct values in the "model_level_number" coordinate. Similar + inferences can + be made for the other dimension coordinates. +* There are 7, not necessarily distinct, values in the ``level_height`` + coordinate. +* There is a single ``forecast_reference_time`` scalar coordinate representing + the entire cube. +* The cube has one further attribute relating to the phenomenon. + In this case the originating file format, PP, encodes information in a STASH + code which in some cases can be useful for identifying advanced experiment + information relating to the phenomenon. diff --git a/docs/src/userguide/loading_iris_cubes.rst b/docs/src/userguide/loading_iris_cubes.rst index fb938975e8..33ad932d70 100644 --- a/docs/src/userguide/loading_iris_cubes.rst +++ b/docs/src/userguide/loading_iris_cubes.rst @@ -39,15 +39,15 @@ This shows that there were 2 cubes as a result of loading the file, they were: The ``surface_altitude`` cube was 2 dimensional with: - * the two dimensions have extents of 204 and 187 respectively and are - represented by the ``grid_latitude`` and ``grid_longitude`` coordinates. +* the two dimensions have extents of 204 and 187 respectively and are + represented by the ``grid_latitude`` and ``grid_longitude`` coordinates. 
The ``air_potential_temperature`` cubes were 4 dimensional with: - * the same length ``grid_latitude`` and ``grid_longitude`` dimensions as - ``surface_altitide`` - * a ``time`` dimension of length 3 - * a ``model_level_number`` dimension of length 7 +* the same length ``grid_latitude`` and ``grid_longitude`` dimensions as + ``surface_altitude`` +* a ``time`` dimension of length 3 +* a ``model_level_number`` dimension of length 7 .. note:: @@ -55,7 +55,7 @@ The ``air_potential_temperature`` cubes were 4 dimensional with: (even if it only contains one :class:`iris.cube.Cube` - see :ref:`strict-loading`). Anything that can be done with a Python :class:`list` can be done with an :class:`iris.cube.CubeList`. - + The order of this list should not be relied upon. Ways of loading a specific cube or cubes are covered in :ref:`constrained-loading` and :ref:`strict-loading`. @@ -206,241 +206,8 @@ a specific ``model_level_number``:: level_10 = iris.Constraint(model_level_number=10) cubes = iris.load(filename, level_10) -Constraints can be combined using ``&`` to represent a more restrictive -constraint to ``load``:: - - filename = iris.sample_data_path('uk_hires.pp') - forecast_6 = iris.Constraint(forecast_period=6) - level_10 = iris.Constraint(model_level_number=10) - cubes = iris.load(filename, forecast_6 & level_10) - -.. note:: - - Whilst ``&`` is supported, the ``|`` that might reasonably be expected is - not. Explanation as to why is in the :class:`iris.Constraint` reference - documentation. - - For an example of constraining to multiple ranges of the same coordinate to - generate one cube, see the :class:`iris.Constraint` reference documentation. - - To generate multiple cubes, each constrained to a different range of the - same coordinate, use :py:func:`iris.load_cubes`. 
- -As well as being able to combine constraints using ``&``, -the :class:`iris.Constraint` class can accept multiple arguments, -and a list of values can be given to constrain a coordinate to one of -a collection of values:: - - filename = iris.sample_data_path('uk_hires.pp') - level_10_or_16_fp_6 = iris.Constraint(model_level_number=[10, 16], forecast_period=6) - cubes = iris.load(filename, level_10_or_16_fp_6) - -A common requirement is to limit the value of a coordinate to a specific range, -this can be achieved by passing the constraint a function:: - - def bottom_16_levels(cell): - # return True or False as to whether the cell in question should be kept - return cell <= 16 - - filename = iris.sample_data_path('uk_hires.pp') - level_lt_16 = iris.Constraint(model_level_number=bottom_16_levels) - cubes = iris.load(filename, level_lt_16) - -.. note:: - - As with many of the examples later in this documentation, the - simple function above can be conveniently written as a lambda function - on a single line:: - - bottom_16_levels = lambda cell: cell <= 16 - - -Note also the :ref:`warning on equality constraints with floating point coordinates `. - - -Cube attributes can also be part of the constraint criteria. Supposing a -cube attribute of ``STASH`` existed, as is the case when loading ``PP`` files, -then specific STASH codes can be filtered:: - - filename = iris.sample_data_path('uk_hires.pp') - level_10_with_stash = iris.AttributeConstraint(STASH='m01s00i004') & iris.Constraint(model_level_number=10) - cubes = iris.load(filename, level_10_with_stash) - -.. seealso:: - - For advanced usage there are further examples in the - :class:`iris.Constraint` reference documentation. - - -Constraining a Circular Coordinate Across its Boundary -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -Occasionally you may need to constrain your cube with a region that crosses the -boundary of a circular coordinate (this is often the meridian or the dateline / -antimeridian). 
An example use-case of this is to extract the entire Pacific Ocean -from a cube whose longitudes are bounded by the dateline. - -This functionality cannot be provided reliably using constraints. Instead you should use the -functionality provided by :meth:`cube.intersection ` -to extract this region. - - -.. _using-time-constraints: - -Constraining on Time -^^^^^^^^^^^^^^^^^^^^ -Iris follows NetCDF-CF rules in representing time coordinate values as normalised, -purely numeric, values which are normalised by the calendar specified in the coordinate's -units (e.g. "days since 1970-01-01"). -However, when constraining by time we usually want to test calendar-related -aspects such as hours of the day or months of the year, so Iris -provides special features to facilitate this: - -Firstly, when Iris evaluates Constraint expressions, it will convert time-coordinate -values (points and bounds) from numbers into :class:`~datetime.datetime`-like objects -for ease of calendar-based testing. - - >>> filename = iris.sample_data_path('uk_hires.pp') - >>> cube_all = iris.load_cube(filename, 'air_potential_temperature') - >>> print('All times :\n' + str(cube_all.coord('time'))) - All times : - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) - points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] - shape: (3,) - dtype: float64 - standard_name: 'time' - >>> # Define a function which accepts a datetime as its argument (this is simplified in later examples). - >>> hour_11 = iris.Constraint(time=lambda cell: cell.point.hour == 11) - >>> cube_11 = cube_all.extract(hour_11) - >>> print('Selected times :\n' + str(cube_11.coord('time'))) - Selected times : - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) - points: [2009-11-19 11:00:00] - shape: (1,) - dtype: float64 - standard_name: 'time' - -Secondly, the :class:`iris.time` module provides flexible time comparison -facilities. 
An :class:`iris.time.PartialDateTime` object can be compared to -objects such as :class:`datetime.datetime` instances, and this comparison will -then test only those 'aspects' which the PartialDateTime instance defines: - - >>> import datetime - >>> from iris.time import PartialDateTime - >>> dt = datetime.datetime(2011, 3, 7) - >>> print(dt > PartialDateTime(year=2010, month=6)) - True - >>> print(dt > PartialDateTime(month=6)) - False - >>> - -These two facilities can be combined to provide straightforward calendar-based -time selections when loading or extracting data. - -The previous constraint example can now be written as: - - >>> the_11th_hour = iris.Constraint(time=iris.time.PartialDateTime(hour=11)) - >>> print(iris.load_cube( - ... iris.sample_data_path('uk_hires.pp'), - ... 'air_potential_temperature' & the_11th_hour).coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) - points: [2009-11-19 11:00:00] - shape: (1,) - dtype: float64 - standard_name: 'time' - -It is common that a cube will need to be constrained between two given dates. -In the following example we construct a time sequence representing the first -day of every week for many years: - -.. testsetup:: timeseries_range - - import datetime - import numpy as np - from iris.time import PartialDateTime - long_ts = iris.cube.Cube(np.arange(150), long_name='data', units='1') - _mondays = iris.coords.DimCoord(7 * np.arange(150), standard_name='time', units='days since 2007-04-09') - long_ts.add_dim_coord(_mondays, 0) - - -.. doctest:: timeseries_range - :options: +NORMALIZE_WHITESPACE, +ELLIPSIS - - >>> print(long_ts.coord('time')) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-04-09 00:00:00, 2007-04-16 00:00:00, ..., - 2010-02-08 00:00:00, 2010-02-15 00:00:00] - shape: (150,) - dtype: int64 - standard_name: 'time' - -Given two dates in datetime format, we can select all points between them. - -.. 
doctest:: timeseries_range - :options: +NORMALIZE_WHITESPACE, +ELLIPSIS - - >>> d1 = datetime.datetime.strptime('20070715T0000Z', '%Y%m%dT%H%MZ') - >>> d2 = datetime.datetime.strptime('20070825T0000Z', '%Y%m%dT%H%MZ') - >>> st_swithuns_daterange_07 = iris.Constraint( - ... time=lambda cell: d1 <= cell.point < d2) - >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) - >>> print(within_st_swithuns_07.coord('time')) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] - shape: (6,) - dtype: int64 - standard_name: 'time' - -Alternatively, we may rewrite this using :class:`iris.time.PartialDateTime` -objects. - -.. doctest:: timeseries_range - :options: +NORMALIZE_WHITESPACE, +ELLIPSIS - - >>> pdt1 = PartialDateTime(year=2007, month=7, day=15) - >>> pdt2 = PartialDateTime(year=2007, month=8, day=25) - >>> st_swithuns_daterange_07 = iris.Constraint( - ... time=lambda cell: pdt1 <= cell.point < pdt2) - >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) - >>> print(within_st_swithuns_07.coord('time')) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] - shape: (6,) - dtype: int64 - standard_name: 'time' - -A more complex example might require selecting points over an annually repeating -date range. We can select points within a certain part of the year, in this case -between the 15th of July through to the 25th of August. By making use of -PartialDateTime this becomes simple: - -.. doctest:: timeseries_range - - >>> st_swithuns_daterange = iris.Constraint( - ... time=lambda cell: PartialDateTime(month=7, day=15) <= cell < PartialDateTime(month=8, day=25)) - >>> within_st_swithuns = long_ts.extract(st_swithuns_daterange) - ... 
- >>> # Note: using summary(max_values) to show more of the points - >>> print(within_st_swithuns.coord('time').summary(max_values=100)) - DimCoord : time / (days since 2007-04-09, gregorian calendar) - points: [ - 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, - 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00, - 2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00, - 2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00, - 2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 00:00:00, - 2009-08-17 00:00:00, 2009-08-24 00:00:00] - shape: (17,) - dtype: int64 - standard_name: 'time' - -Notice how the dates printed are between the range specified in the ``st_swithuns_daterange`` -and that they span multiple years. +Further details on using :class:`iris.Constraint` are +discussed later in :ref:`cube_extraction`. .. _strict-loading: diff --git a/docs/src/userguide/merge_and_concat.rst b/docs/src/userguide/merge_and_concat.rst index e8425df5ec..b521d49a59 100644 --- a/docs/src/userguide/merge_and_concat.rst +++ b/docs/src/userguide/merge_and_concat.rst @@ -22,14 +22,14 @@ result in fewer cubes as output. The following diagram illustrates the two proce There is one major difference between the ``merge`` and ``concatenate`` processes. - * The ``merge`` process combines multiple input cubes into a - single resultant cube with new dimensions created from the - *scalar coordinate values* of the input cubes. +* The ``merge`` process combines multiple input cubes into a + single resultant cube with new dimensions created from the + *scalar coordinate values* of the input cubes. - * The ``concatenate`` process combines multiple input cubes into a - single resultant cube with the same *number of dimensions* as the input cubes, - but with the length of one or more dimensions extended by *joining together - sequential dimension coordinates*. 
+* The ``concatenate`` process combines multiple input cubes into a + single resultant cube with the same *number of dimensions* as the input cubes, + but with the length of one or more dimensions extended by *joining together + sequential dimension coordinates*. Let's imagine 28 individual cubes representing the temperature at a location ``(y, x)``; one cube for each day of February. We can use @@ -253,6 +253,11 @@ which are described below. Using CubeList.concatenate ========================== +.. seealso:: + + Relevant gallery example: + :ref:`sphx_glr_generated_gallery_general_plot_projections_and_annotations.py` (Brief concatenating examples) + The :meth:`CubeList.concatenate ` method operates on a list of cubes and returns a new :class:`~iris.cube.CubeList` containing the cubes that have been concatenated. diff --git a/docs/src/userguide/navigating_a_cube.rst b/docs/src/userguide/navigating_a_cube.rst index c5924a61c6..b4c16b094b 100644 --- a/docs/src/userguide/navigating_a_cube.rst +++ b/docs/src/userguide/navigating_a_cube.rst @@ -110,6 +110,10 @@ cube with the :attr:`Cube.cell_methods ` attribute: print(cube.cell_methods) +.. seealso:: + + Relevant gallery example: + :ref:`sphx_glr_generated_gallery_meteorology_plot_wind_barbs.py` Accessing Coordinates on the Cube --------------------------------- @@ -176,6 +180,10 @@ We can add and remove coordinates via :func:`Cube.add_dim_coord>> import iris @@ -34,15 +42,14 @@ A subset of a cube can be "extracted" from a multi-dimensional cube in order to In this example we start with a 3 dimensional cube, with dimensions of ``height``, ``grid_latitude`` and ``grid_longitude``, -and extract every point where the latitude is 0, resulting in a 2d cube with axes of ``height`` and ``grid_longitude``. - +and use :class:`iris.Constraint` to extract every point where the latitude is 0, resulting in a 2d cube with axes of ``height`` and ``grid_longitude``. .. _floating-point-warning: .. 
warning:: Caution is required when using equality constraints with floating point coordinates such as ``grid_latitude``. Printing the points of a coordinate does not necessarily show the full precision of the underlying number and it - is very easy return no matches to a constraint when one was expected. + is very easy to return no matches to a constraint when one was expected. This can be avoided by using a function as the argument to the constraint:: def near_zero(cell): @@ -68,6 +75,33 @@ The two steps required to get ``height`` of 9000 m at the equator can be simplif equator_height_9km_slice = cube.extract(iris.Constraint(grid_latitude=0, height=9000)) print(equator_height_9km_slice) +Alternatively, constraints can be combined using ``&``:: + + cube = iris.load_cube(filename, 'electron density') + equator_constraint = iris.Constraint(grid_latitude=0) + height_constraint = iris.Constraint(height=9000) + equator_height_9km_slice = cube.extract(equator_constraint & height_constraint) + +.. note:: + + Whilst ``&`` is supported, the ``|`` that might reasonably be expected is + not. Explanation as to why is in the :class:`iris.Constraint` reference + documentation. + + For an example of constraining to multiple ranges of the same coordinate to + generate one cube, see the :class:`iris.Constraint` reference documentation. + +A common requirement is to limit the value of a coordinate to a specific range, +this can be achieved by passing the constraint a function:: + + def below_9km(cell): + # return True or False as to whether the cell in question should be kept + return cell <= 9000 + + cube = iris.load_cube(filename, 'electron density') + height_below_9km = iris.Constraint(height=below_9km) + below_9km_slice = cube.extract(height_below_9km) + As we saw in :doc:`loading_iris_cubes` the result of :func:`iris.load` is a :class:`CubeList `. 
The ``extract`` method also exists on a :class:`CubeList ` and behaves in exactly the same way as loading with constraints: @@ -100,9 +134,203 @@ same way as loading with constraints: source 'Data from Met Office Unified Model' um_version '7.3' +Cube attributes can also be part of the constraint criteria. Supposing a +cube attribute of ``STASH`` existed, as is the case when loading ``PP`` files, +then specific STASH codes can be filtered:: + + filename = iris.sample_data_path('uk_hires.pp') + level_10_with_stash = iris.AttributeConstraint(STASH='m01s00i004') & iris.Constraint(model_level_number=10) + cubes = iris.load(filename).extract(level_10_with_stash) + +.. seealso:: + + For advanced usage there are further examples in the + :class:`iris.Constraint` reference documentation. + +Constraining a Circular Coordinate Across its Boundary +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Occasionally you may need to constrain your cube with a region that crosses the +boundary of a circular coordinate (this is often the meridian or the dateline / +antimeridian). An example use-case of this is to extract the entire Pacific Ocean +from a cube whose longitudes are bounded by the dateline. + +This functionality cannot be provided reliably using constraints. Instead you should use the +functionality provided by :meth:`cube.intersection ` +to extract this region. + + +.. _using-time-constraints: + +Constraining on Time +^^^^^^^^^^^^^^^^^^^^ +Iris follows NetCDF-CF rules in representing time coordinate values as normalised, +purely numeric, values which are normalised by the calendar specified in the coordinate's +units (e.g. "days since 1970-01-01"). +However, when constraining by time we usually want to test calendar-related +aspects such as hours of the day or months of the year, so Iris +provides special features to facilitate this. 
+ +Firstly, when Iris evaluates :class:`iris.Constraint` expressions, it will convert +time-coordinate values (points and bounds) from numbers into :class:`~datetime.datetime`-like +objects for ease of calendar-based testing. + + >>> filename = iris.sample_data_path('uk_hires.pp') + >>> cube_all = iris.load_cube(filename, 'air_potential_temperature') + >>> print('All times :\n' + str(cube_all.coord('time'))) + All times : + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) + points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] + shape: (3,) + dtype: float64 + standard_name: 'time' + >>> # Define a function which accepts a datetime as its argument (this is simplified in later examples). + >>> hour_11 = iris.Constraint(time=lambda cell: cell.point.hour == 11) + >>> cube_11 = cube_all.extract(hour_11) + >>> print('Selected times :\n' + str(cube_11.coord('time'))) + Selected times : + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) + points: [2009-11-19 11:00:00] + shape: (1,) + dtype: float64 + standard_name: 'time' + +Secondly, the :class:`iris.time` module provides flexible time comparison +facilities. An :class:`iris.time.PartialDateTime` object can be compared to +objects such as :class:`datetime.datetime` instances, and this comparison will +then test only those 'aspects' which the PartialDateTime instance defines: + + >>> import datetime + >>> from iris.time import PartialDateTime + >>> dt = datetime.datetime(2011, 3, 7) + >>> print(dt > PartialDateTime(year=2010, month=6)) + True + >>> print(dt > PartialDateTime(month=6)) + False + +These two facilities can be combined to provide straightforward calendar-based +time selections when loading or extracting data. + +The previous constraint example can now be written as: + + >>> the_11th_hour = iris.Constraint(time=iris.time.PartialDateTime(hour=11)) + >>> print(iris.load_cube( + ... iris.sample_data_path('uk_hires.pp'), + ... 
'air_potential_temperature' & the_11th_hour).coord('time')) + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) + points: [2009-11-19 11:00:00] + shape: (1,) + dtype: float64 + standard_name: 'time' + +It is common that a cube will need to be constrained between two given dates. +In the following example we construct a time sequence representing the first +day of every week for many years: + +.. testsetup:: timeseries_range + + import datetime + import numpy as np + from iris.time import PartialDateTime + long_ts = iris.cube.Cube(np.arange(150), long_name='data', units='1') + _mondays = iris.coords.DimCoord(7 * np.arange(150), standard_name='time', units='days since 2007-04-09') + long_ts.add_dim_coord(_mondays, 0) + + +.. doctest:: timeseries_range + :options: +NORMALIZE_WHITESPACE, +ELLIPSIS + + >>> print(long_ts.coord('time')) + DimCoord : time / (days since 2007-04-09, standard calendar) + points: [ + 2007-04-09 00:00:00, 2007-04-16 00:00:00, ..., + 2010-02-08 00:00:00, 2010-02-15 00:00:00] + shape: (150,) + dtype: int64 + standard_name: 'time' + +Given two dates in datetime format, we can select all points between them. +Instead of constraining at loaded time, we already have the time coord so +we constrain that coord using :class:`iris.cube.Cube.extract` + +.. doctest:: timeseries_range + :options: +NORMALIZE_WHITESPACE, +ELLIPSIS + + >>> d1 = datetime.datetime.strptime('20070715T0000Z', '%Y%m%dT%H%MZ') + >>> d2 = datetime.datetime.strptime('20070825T0000Z', '%Y%m%dT%H%MZ') + >>> st_swithuns_daterange_07 = iris.Constraint( + ... 
time=lambda cell: d1 <= cell.point < d2) + >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) + >>> print(within_st_swithuns_07.coord('time')) + DimCoord : time / (days since 2007-04-09, standard calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] + shape: (6,) + dtype: int64 + standard_name: 'time' + +Alternatively, we may rewrite this using :class:`iris.time.PartialDateTime` +objects. + +.. doctest:: timeseries_range + :options: +NORMALIZE_WHITESPACE, +ELLIPSIS + + >>> pdt1 = PartialDateTime(year=2007, month=7, day=15) + >>> pdt2 = PartialDateTime(year=2007, month=8, day=25) + >>> st_swithuns_daterange_07 = iris.Constraint( + ... time=lambda cell: pdt1 <= cell.point < pdt2) + >>> within_st_swithuns_07 = long_ts.extract(st_swithuns_daterange_07) + >>> print(within_st_swithuns_07.coord('time')) + DimCoord : time / (days since 2007-04-09, standard calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00] + shape: (6,) + dtype: int64 + standard_name: 'time' + +A more complex example might require selecting points over an annually repeating +date range. We can select points within a certain part of the year, in this case +between the 15th of July through to the 25th of August. By making use of +PartialDateTime this becomes simple: + +.. doctest:: timeseries_range + + >>> st_swithuns_daterange = iris.Constraint( + ... time=lambda cell: PartialDateTime(month=7, day=15) <= cell.point < PartialDateTime(month=8, day=25)) + >>> within_st_swithuns = long_ts.extract(st_swithuns_daterange) + ... 
+ >>> # Note: using summary(max_values) to show more of the points + >>> print(within_st_swithuns.coord('time').summary(max_values=100)) + DimCoord : time / (days since 2007-04-09, standard calendar) + points: [ + 2007-07-16 00:00:00, 2007-07-23 00:00:00, 2007-07-30 00:00:00, + 2007-08-06 00:00:00, 2007-08-13 00:00:00, 2007-08-20 00:00:00, + 2008-07-21 00:00:00, 2008-07-28 00:00:00, 2008-08-04 00:00:00, + 2008-08-11 00:00:00, 2008-08-18 00:00:00, 2009-07-20 00:00:00, + 2009-07-27 00:00:00, 2009-08-03 00:00:00, 2009-08-10 00:00:00, + 2009-08-17 00:00:00, 2009-08-24 00:00:00] + shape: (17,) + dtype: int64 + standard_name: 'time' + +Notice how the dates printed are between the range specified in the ``st_swithuns_daterange`` +and that they span multiple years. + +The above examples involve constraining on the points of the time coordinate. Constraining +on bounds can be done in the following way:: + + filename = iris.sample_data_path('ostia_monthly.nc') + cube = iris.load_cube(filename, 'surface_temperature') + dtmin = datetime.datetime(2008, 1, 1) + cube.extract(iris.Constraint(time = lambda cell: any(bound > dtmin for bound in cell.bound))) + +The above example constrains to cells where either the upper or lower bound occur +after 1st January 2008. Cube Iteration -^^^^^^^^^^^^^^^ +-------------- It is not possible to directly iterate over an Iris cube. That is, you cannot use code such as ``for x in cube:``. However, you can iterate over cube slices, as this section details. @@ -151,9 +379,10 @@ slicing the 3 dimensional cube (15, 100, 100) by longitude (i starts at 0 and 15 Once the your code can handle a 2d slice, it is then an easy step to loop over **all** 2d slices within the bigger cube using the slices method. +.. _cube_indexing: Cube Indexing -^^^^^^^^^^^^^ +------------- In the same way that you would expect a numeric multidimensional array to be **indexed** to take a subset of your original array, you can **index** a Cube for the same purpose. 
diff --git a/docs/src/voted_issues.rst b/docs/src/voted_issues.rst new file mode 100644 index 0000000000..7d983448b9 --- /dev/null +++ b/docs/src/voted_issues.rst @@ -0,0 +1,56 @@ +.. include:: common_links.inc + +.. _voted_issues_top: + +Voted Issues +============ + +You can help us to prioritise development of new features by leaving a 👍 +reaction on the header (not subsequent comments) of any issue. + +.. tip:: We suggest you subscribe to the issue so you will be updated. + When viewing the issue there is a **Notifications** + section where you can select to subscribe. + +Below is a sorted table of all issues that have 1 or more 👍 from our github +project. Please note that there is more development activity than what is on +the below table. + +.. _voted-issues.json: https://github.com/scitools/voted_issues/blob/main/voted-issues.json + +.. raw:: html + + + + + + + + + + +
👍IssueAuthorTitle
+ + + + +

+ + +.. note:: The data in this table is updated every 30 minutes and is sourced + from `voted-issues.json`_. + For the latest data please see the `issues on GitHub`_. + Note that the list on Github does not show the number of votes 👍 + only the total number of comments for the whole issue. \ No newline at end of file diff --git a/docs/src/whatsnew/1.0.rst b/docs/src/whatsnew/1.0.rst index b226dc609b..c256c33566 100644 --- a/docs/src/whatsnew/1.0.rst +++ b/docs/src/whatsnew/1.0.rst @@ -147,8 +147,7 @@ the surface pressure. In return, it provides a virtual "pressure" coordinate whose values are derived from the given components. This facility is utilised by the GRIB2 loader to automatically provide -the derived "pressure" coordinate for certain data [#f1]_ from the -`ECMWF `_. +the derived "pressure" coordinate for certain data [#f1]_ from the ECMWF. .. [#f1] Where the level type is either 105 or 119, and where the surface pressure has an ECMWF paramId of diff --git a/docs/src/whatsnew/2.1.rst b/docs/src/whatsnew/2.1.rst index 18c562d3da..33f3a013b1 100644 --- a/docs/src/whatsnew/2.1.rst +++ b/docs/src/whatsnew/2.1.rst @@ -1,3 +1,5 @@ +.. include:: ../common_links.inc + v2.1 (06 Jun 2018) ****************** @@ -67,7 +69,7 @@ Incompatible Changes as an alternative. * This release of Iris contains a number of updated metadata translations. - See this + See this `changelist `_ for further information. @@ -84,7 +86,7 @@ Internal calendar. * Iris updated its time-handling functionality from the - `netcdf4-python `_ + `netcdf4-python`__ ``netcdftime`` implementation to the standalone module `cftime `_. cftime is entirely compatible with netcdftime, but some issues may @@ -92,6 +94,8 @@ Internal In this situation, simply replacing ``netcdftime.datetime`` with ``cftime.datetime`` should be sufficient. +__ `netCDF4`_ + * Iris now requires version 2 of Matplotlib, and ``>=1.14`` of NumPy. Full requirements can be seen in the `requirements `_ directory of the Iris' the source. 
diff --git a/docs/src/whatsnew/3.0.rst b/docs/src/whatsnew/3.0.rst index 771a602954..223ef60011 100644 --- a/docs/src/whatsnew/3.0.rst +++ b/docs/src/whatsnew/3.0.rst @@ -97,9 +97,8 @@ v3.0.2 (27 May 2021) from collaborators targeting the Iris ``master`` branch. (:pull:`4007`) [``pre-v3.1.0``] - #. `@bjlittle`_ added conditional task execution to `.cirrus.yml`_ to allow - developers to easily disable `cirrus-ci`_ tasks. See - :ref:`skipping Cirrus-CI tasks`. (:pull:`4019`) [``pre-v3.1.0``] + #. `@bjlittle`_ added conditional task execution to ``.cirrus.yml`` to allow + developers to easily disable `cirrus-ci`_ tasks. (:pull:`4019`) [``pre-v3.1.0``] #. `@pp-mo`_ adjusted the use of :func:`dask.array.from_array` in :func:`iris._lazy_data.as_lazy_data`, to avoid the dask 'test access'. This makes loading of netcdf files with a diff --git a/docs/src/whatsnew/3.1.rst b/docs/src/whatsnew/3.1.rst index bd046a0a24..1f076572bc 100644 --- a/docs/src/whatsnew/3.1.rst +++ b/docs/src/whatsnew/3.1.rst @@ -227,9 +227,8 @@ This document explains the changes made to Iris for this release #. `@akuhnregnier`_ replaced `deprecated numpy 1.20 aliases for builtin types`_. (:pull:`3997`) -#. `@bjlittle`_ added conditional task execution to `.cirrus.yml`_ to allow - developers to easily disable `cirrus-ci`_ tasks. See - :ref:`skipping Cirrus-CI tasks`. (:pull:`4019`) +#. `@bjlittle`_ added conditional task execution to ``.cirrus.yml`` to allow + developers to easily disable `cirrus-ci`_ tasks. (:pull:`4019`) #. `@bjlittle`_ and `@jamesp`_ addressed a regression in behaviour when using `conda`_ 4.10.0 within `cirrus-ci`_. (:pull:`4084`) @@ -291,9 +290,8 @@ This document explains the changes made to Iris for this release #. `@bjlittle`_ enabled `cirrus-ci`_ compute credits for non-draft pull-requests from collaborators targeting the Iris ``master`` branch. (:pull:`4007`) -#. 
`@bjlittle`_ added conditional task execution to `.cirrus.yml`_ to allow - developers to easily disable `cirrus-ci`_ tasks. See - :ref:`skipping Cirrus-CI tasks`. (:pull:`4019`) +#. `@bjlittle`_ added conditional task execution to ``.cirrus.yml`` to allow + developers to easily disable `cirrus-ci`_ tasks. (:pull:`4019`) diff --git a/docs/src/whatsnew/3.2.rst b/docs/src/whatsnew/3.2.rst new file mode 100644 index 0000000000..723f26345e --- /dev/null +++ b/docs/src/whatsnew/3.2.rst @@ -0,0 +1,408 @@ +.. include:: ../common_links.inc + +v3.2 (15 Feb 2022) +****************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`report` v3.2.0 Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this minor release of Iris include: + + * We've added experimental support for + :ref:`Meshes `, which can now be loaded and + attached to a cube. Mesh support is based on the `CF-UGRID`_ model. + * We've also dropped support for ``Python 3.7``. + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +v3.2.1 (11 Mar 2022) +==================== + +.. dropdown:: :opticon:`alert` v3.2.1 Patches + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + + 📢 **Welcome** to `@dennissergeev`_, who made his first contribution to Iris. Nice work! + + The patches in this release of Iris include: + + 🐛 **Bugs Fixed** + + #. `@dennissergeev`_ changed _crs_distance_differentials() so that it uses the `Globe` + attribute from a given CRS instead of creating a new `ccrs.Globe()` object. + Iris can now handle non-Earth semi-major axes, as discussed in :issue:`4582` (:pull:`4605`). + + #. 
`@trexfeathers`_ avoided a dimensionality mismatch when streaming the + :attr:`~iris.coords.Coord.bounds` array for a scalar + :class:`~iris.coords.Coord`. (:pull:`4610`). + + +📢 Announcements +================ + +#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@bsherratt`_ and + `@aaronspring`_ who made their first contributions to Iris. The first of + many we hope! +#. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 + + +✨ Features +=========== + +#. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added + support for :ref:`unstructured meshes `. This involved + adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`, + :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and + supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. + Most of this new API can be found in :mod:`iris.experimental.ugrid`. The key + objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, + :class:`iris.experimental.ugrid.mesh.MeshCoord` and + :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. + A :class:`~iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID + type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that + reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use + on a :class:`~iris.cube.Cube`. :class:`~iris.cube.Cube`\ s are also given the + property :attr:`~iris.cube.Cube.mesh` which returns a + :class:`~iris.experimental.ugrid.mesh.Mesh` if one is attached to the + :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. + +#. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, + for files using the `CF-UGRID`_ conventions. 
+ The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` + provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be + returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. + (:pull:`4058`). + +#. `@pp-mo`_ added support to save cubes with :ref:`meshes ` to netcdf + files, using the `CF-UGRID`_ conventions. + The existing :meth:`iris.save` function now does this, when saving cubes with meshes. + A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving + :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data + (i.e. not attached to cubes). + (:pull:`4318` and :pull:`4339`). + +#. `@trexfeathers`_ added :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords` + for inferring a :class:`~iris.experimental.ugrid.mesh.Mesh` from an + appropriate collection of :class:`iris.coords.Coord`\ s. + +#. `@larsbarring`_ updated :func:`~iris.util.equalise_attributes` to return a list of dictionaries + containing the attributes removed from each :class:`~iris.cube.Cube`. (:pull:`4357`) + +#. `@trexfeathers`_ enabled streaming of **all** lazy arrays when saving to + NetCDF files (was previously just :class:`~iris.cube.Cube` + :attr:`~iris.cube.Cube.data`). This is + important given the much greater size of + :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and + :class:`~iris.experimental.ugrid.mesh.Connectivity` + :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the + :ref:`mesh model `. (:pull:`4375`) + +#. `@bsherratt`_ added a ``threshold`` parameter to + :meth:`~iris.cube.Cube.intersection` (:pull:`4363`) + +#. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to + benchmark scripts. Also added a regridding benchmark that uses this data + (:pull:`4402`) + +#. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021). + (:issue:`4479`, :pull:`4483`) + +#. 
`@SimonPeatman`_ added support for filenames in the form of a :class:`~pathlib.PurePath` + in :func:`~iris.load`, :func:`~iris.load_cube`, :func:`~iris.load_cubes`, + :func:`~iris.load_raw` and :func:`~iris.save` (:issue:`3411`, :pull:`3917`). + Support for :class:`~pathlib.PurePath` is yet to be implemented across the rest + of Iris (:issue:`4523`). + +#. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations + from ``Metarelate``. From now we intend to manage phenomenon translation + in Iris itself. (:pull:`4484`) + +#. `@pp-mo`_ improved printout of various cube data component objects : + :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, + :class:`~iris.coords.AncillaryVariable`, + :class:`~iris.experimental.ugrid.mesh.MeshCoord` and + :class:`~iris.experimental.ugrid.mesh.Mesh`. + These now all provide a more controllable ``summary()`` method, and + more convenient and readable ``str()`` and ``repr()`` output in the style of + the :class:`iris.cube.Cube`. + They also no longer realise lazy data. (:pull:`4499`). + + +🐛 Bugs Fixed +============= + +#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.intersection` for special cases where + one cell's bounds align with the requested maximum and negative minimum, fixing + :issue:`4221`. (:pull:`4278`) + +#. `@bsherratt`_ fixed further edge cases in + :meth:`~iris.cube.Cube.intersection`, including :issue:`3698` (:pull:`4363`) + +#. `@tinyendian`_ fixed the error message produced by :meth:`~iris.cube.CubeList.concatenate_cube` + when a cube list contains cubes with different names, which will no longer report + "Cube names differ: var1 != var1" if var1 appears multiple times in the list + (:issue:`4342`, :pull:`4345`) + +#. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeogCS` to handle spherical ellipsoid + parameter inverse_flattening = 0 (:issue:`4146`, :pull:`4348`) + +#. 
`@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in + :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate + means for DJF (:pull:`4391`) + +#. `@wjbenfold`_ improved the error message for failure of :meth:`~iris.cube.CubeList.concatenate` + to indicate that the value of a scalar coordinate may be mismatched, rather than the metadata + (:issue:`4096`, :pull:`4387`) + +#. `@bsherratt`_ fixed a regression to the NAME file loader introduced in 3.0.4, + as well as some long-standing bugs with vertical coordinates and number + formats. (:pull:`4411`) + +#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to always return ``None`` if + no value match is found. (:pull:`4417`) + +#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning + when applied to a single point (:issue:`4250`, :pull:`4367`) + +#. `@trexfeathers`_ changed :class:`~iris.coords._DimensionalMetadata` and + :class:`~iris.experimental.ugrid.Connectivity` equality methods to preserve + array laziness, allowing efficient comparisons even with larger-than-memory + objects. (:pull:`4439`) + +#. `@rcomer`_ modified :meth:`~iris.cube.Cube.aggregated_by` to calculate new + coordinate bounds using minimum and maximum for unordered coordinates, + fixing :issue:`1528`. (:pull:`4315`) + +#. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube + so that a cube with lazy data awaiting a unit conversion can be pickled. + (:issue:`4354`, :pull:`4377`) + +#. `@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate + was mistakenly interpreted as a latitude, usually resulting in two 'latitude's + instead of one latitude and one longitude. + (:issue:`4460`, :pull:`4470`) + +#. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection` + from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) + +#. 
`@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215`` + to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 + (:issue:`3305`, :pull:`4535`) + +#. `@lbdreyer`_ fixed a bug in :class:`iris.io.load_http` which was missing an import + (:pull:`4580`) + + +💣 Incompatible Changes +======================= + +#. N/A + + +🚀 Performance Enhancements +=========================== + +#. `@wjbenfold`_ resolved an issue that previously caused regridding with lazy + data to take significantly longer than with real data. Benchmark + :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease + from >10s to 625ms. (:issue:`4280`, :pull:`4400`) + +#. `@bjlittle`_ included an optimisation to :class:`~iris.cube.Cube.coord_dims` + to avoid unnecessary processing whenever a coordinate instance that already + exists within the cube is provided. (:pull:`4549`) + + +🔥 Deprecations +=============== + +#. `@wjbenfold`_ removed :mod:`iris.experimental.equalise_cubes`. In ``v3.0`` + the experimental ``equalise_attributes`` functionality was moved to the + :mod:`iris.util.equalise_attributes` function. Since then, calling the + :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised + an exception. (:issue:`3528`, :pull:`4496`) + +#. `@wjbenfold`_ deprecated :func:`iris.util.approx_equal` in preference for + :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be + removed in a future release of Iris. (:pull:`4514`) + +#. `@wjbenfold`_ deprecated :mod:`iris.experimental.raster` as it is not + believed to still be in use. The deprecation warnings invite users to contact + the Iris Developers if this isn't the case. (:pull:`4525`) + +#. `@wjbenfold`_ deprecated :mod:`iris.fileformats.abf` and + :mod:`iris.fileformats.dot` as they are not believed to still be in use. The + deprecation warnings invite users to contact the Iris Developers if this + isn't the case. (:pull:`4515`) + +#. 
`@wjbenfold`_ removed the :func:`iris.util.as_compatible_shape` function, + which was deprecated in ``v3.0``. Instead use + :class:`iris.common.resolve.Resolve`. For example, rather than calling + ``as_compatible_shape(src_cube, target_cube)`` replace with + ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`) + +#. `@wjbenfold`_ deprecated :func:`iris.analysis.maths.intersection_of_cubes` in + preference for :meth:`iris.cube.CubeList.extract_overlapping`. The + :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in + a future release of Iris. (:pull:`4541`) + +#. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is + now replaced by `iris-emsf-regrid`_. (:pull:`4551`) + +#. `@pp-mo`_ deprecated everything in :mod:`iris.experimental.regrid`. + Most features have a preferred exact alternative, as suggested, *except* + :class:`iris.experimental.regrid.ProjectedUnstructuredLinear` : that has no + identical equivalent, but :class:`iris.analysis.UnstructuredNearest` is + suggested as being quite close (though possibly slower). (:pull:`4548`) + + +🔗 Dependencies +=============== + +#. `@bjlittle`_ introduced the ``cartopy >=0.20`` minimum pin. + (:pull:`4331`) + +#. `@trexfeathers`_ introduced the ``cf-units >=3`` and ``nc-time-axis >=1.3`` + minimum pins. (:pull:`4356`) + +#. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in + accordance with `NEP-29`_ deprecation policy. (:pull:`4386`) + +#. `@bjlittle`_ dropped support for ``Python 3.7``, as per the `NEP-29`_ + backwards compatibility and deprecation policy schedule. (:pull:`4481`) + + +📚 Documentation +================ + +#. `@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery + example. (:pull:`4120`) + +#. `@trexfeathers`_ included Iris `GitHub Discussions`_ in + :ref:`get involved `. (:pull:`4307`) + +#. `@wjbenfold`_ improved readability in :ref:`userguide interpolation + section `. (:pull:`4314`) + +#. 
`@wjbenfold`_ added explanation about the absence of | operator for + :class:`iris.Constraint` to :ref:`userguide loading section + ` and to api reference documentation. (:pull:`4321`) + +#. `@trexfeathers`_ added more detail on making `iris-test-data`_ available + during :ref:`developer_running_tests`. (:pull:`4359`) + +#. `@lbdreyer`_ added a section to the release documentation outlining the role + of the :ref:`release_manager`. (:pull:`4413`) + +#. `@trexfeathers`_ encouraged contributors to include type hinting in code + they are working on - :ref:`code_formatting`. (:pull:`4390`) + +#. `@wjbenfold`_ updated Cartopy documentation links to point to the renamed + :class:`cartopy.mpl.geoaxes.GeoAxes`. (:pull:`4464`) + +#. `@wjbenfold`_ clarified behaviour of :func:`iris.load` in :ref:`userguide + loading section `. (:pull:`4462`) + +#. `@bjlittle`_ migrated readthedocs to use mambaforge for `faster documentation building`_. + (:pull:`4476`) + +#. `@wjbenfold`_ contributed `@alastair-gemmell`_'s :ref:`step-by-step guide to + contributing to the docs ` to the docs. + (:pull:`4461`) + +#. `@pp-mo`_ improved and corrected docstrings of + :class:`iris.analysis.PointInCell`, making it clear what is the actual + calculation performed. (:pull:`4548`) + +#. `@pp-mo`_ removed reference in docstring of + :class:`iris.analysis.UnstructuredNearest` to the obsolete (deprecated) + :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. + (:pull:`4548`) + + +💼 Internal +=========== + +#. `@trexfeathers`_ set the linkcheck to ignore + http://www.nationalarchives.gov.uk/doc/open-government-licence since this + always works locally, but never within CI. (:pull:`4307`) + +#. `@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if + test data is missing (:pull:`4319`) + +#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being + marked stale. (:pull:`4317`) + +#. 
`@tkknight`_ added additional make targets for reducing the time of the + documentation build including ``html-noapi`` and ``html-quick``. + Useful for development purposes only. For more information see + :ref:`contributing.documentation.building` the documentation. (:pull:`4333`) + +#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning + that sometimes interferes with unrelated tests. (:pull:`4330`) + +#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`. + (:pull:`4349`) + +#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules. + (:pull:`4347`). + +#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically + sort ``__all__`` entries into alphabetical order. (:pull:`4353`) + +#. `@rcomer`_ modified a NetCDF saver test to prevent it triggering a numpy + deprecation warning. (:issue:`4374`, :pull:`4376`) + +#. `@akuhnregnier`_ removed addition of period from + :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests + using ``assertArrayAllClose`` following :issue:`3993`. + (:pull:`4421`) + +#. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) + +#. `@rcomer`_ applied minor fixes to some regridding tests. (:pull:`4432`) + +#. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) + +#. `@aaronspring`_ exchanged ``dask`` with + ``dask-core`` in testing environments reducing the number of dependencies + installed for testing. (:pull:`4434`) + +#. `@wjbenfold`_ prevented github action runs in forks (:issue:`4441`, + :pull:`4444`) + +#. `@wjbenfold`_ fixed tests for hybrid formulae that weren't being found by + nose (:issue:`4431`, :pull:`4450`) + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@aaronspring: https://github.com/aaronspring +.. _@akuhnregnier: https://github.com/akuhnregnier +.. 
_@bsherratt: https://github.com/bsherratt +.. _@dennissergeev: https://github.com/dennissergeev +.. _@larsbarring: https://github.com/larsbarring +.. _@pdearnshaw: https://github.com/pdearnshaw +.. _@SimonPeatman: https://github.com/SimonPeatman +.. _@tinyendian: https://github.com/tinyendian + +.. comment + Whatsnew resources in alphabetical order: + +.. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html +.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ +.. _iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid +.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba +.. _sort-all: https://github.com/aio-libs/sort-all diff --git a/docs/src/whatsnew/3.3.rst b/docs/src/whatsnew/3.3.rst new file mode 100644 index 0000000000..c2e47f298a --- /dev/null +++ b/docs/src/whatsnew/3.3.rst @@ -0,0 +1,373 @@ +.. include:: ../common_links.inc + +v3.3 (1 Sep 2022) +***************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes `.) + + +.. dropdown:: :opticon:`report` v3.3.0 Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this minor release of Iris include: + + * We've added support for datums, loading them from NetCDF when the + :obj:`iris.FUTURE.datum_support` flag is set. + * We've greatly improved the speed of linear interpolation. + * We've added the function :func:`iris.pandas.as_cubes` for richer + conversion from Pandas. + * We've improved the functionality of :func:`iris.util.mask_cube`. + * We've improved the functionality and performance of the + :obj:`iris.analysis.PERCENTILE` aggregator. + * We've completed implementation of our :ref:`contributing.benchmarks` + infrastructure. 
+ + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +v3.3.1 (29 Sep 2022) +==================== + +.. dropdown:: :opticon:`alert` v3.3.1 Patches + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + + The patches in this release of Iris include: + + #. `@pp-mo`_ fixed the Jupyter notebook display of :class:`~iris.cube.CubeList`. + (:issue:`4973`, :pull:`4976`) + + #. `@pp-mo`_ fixed a bug in NAME loaders where data with no associated statistic would + load as a cube with invalid cell-methods, which cannot be printed or saved to netcdf. + (:issue:`3288`, :pull:`4933`) + + #. `@pp-mo`_ ensured that :data:`iris.cube.Cube.cell_methods` must always be an iterable + of :class:`iris.coords.CellMethod` objects (:pull:`4933`). + + #. `@trexfeathers`_ advanced the Cartopy pin to ``>=0.21``, as Cartopy's + change to default Transverse Mercator projection affects an Iris test. + See `SciTools/cartopy@fcb784d`_ and `SciTools/cartopy@8860a81`_ for more + details. (:pull:`4992`) + + #. `@trexfeathers`_ introduced the ``netcdf4!=1.6.1`` pin to avoid a + problem with segfaults. (:pull:`4992`) + + +📢 Announcements +================ + +#. Welcome to `@krikru`_ who made their first contribution to Iris 🎉 + + +✨ Features +=========== + +#. `@schlunma`_ added weighted aggregation over "group coordinates": + :meth:`~iris.cube.Cube.aggregated_by` now accepts the keyword `weights` if a + :class:`~iris.analysis.WeightedAggregator` is used. (:issue:`4581`, + :pull:`4589`) + +#. `@wjbenfold`_ added support for ``false_easting`` and ``false_northing`` to + :class:`~iris.coord_systems.Mercator`. (:issue:`3107`, :pull:`4524`) + +#. `@rcomer`_ and `@wjbenfold`_ (reviewer) implemented lazy aggregation for the + :obj:`iris.analysis.PERCENTILE` aggregator. (:pull:`3901`) + +#. `@pp-mo`_ fixed cube arithmetic operation for cubes with meshes. 
+ (:issue:`4454`, :pull:`4651`) + +#. `@wjbenfold`_ added support for CF-compliant treatment of + ``standard_parallel`` and ``scale_factor_at_projection_origin`` to + :class:`~iris.coord_systems.Mercator`. (:issue:`3844`, :pull:`4609`) + +#. `@wjbenfold`_ added support for datums associated with coordinate systems (e.g. + :class:`~iris.coord_systems.GeogCS` and other subclasses of + :class:`~iris.coord_systems.CoordSystem`). Loading of datum information from + a netCDF file only happens when the :obj:`iris.FUTURE.datum_support` flag is + set. (:issue:`4619`, :pull:`4704`) + +#. `@wjbenfold`_ and `@stephenworsley`_ (reviewer) added a maximum run length + aggregator (:class:`~iris.analysis.MAX_RUN`). (:pull:`4676`) + +#. `@wjbenfold`_ and `@rcomer`_ (reviewer) added a ``climatological`` keyword to + :meth:`~iris.cube.Cube.aggregated_by` that causes the climatological flag to + be set and the point for each cell to equal its first bound, thereby + preserving the time of year. (:issue:`1422`, :issue:`4098`, :issue:`4665`, + :pull:`4723`) + +#. `@wjbenfold`_ and `@pp-mo`_ (reviewer) implemented the + :class:`~iris.coord_systems.PolarStereographic` CRS. (:issue:`4770`, + :pull:`4773`) + +#. `@rcomer`_ and `@wjbenfold`_ (reviewer) enabled passing of the + :func:`numpy.percentile` keywords through the :obj:`~iris.analysis.PERCENTILE` + aggregator. (:pull:`4791`) + +#. `@wjbenfold`_ and `@bjlittle`_ (reviewer) implemented + :func:`iris.plot.fill_between` and :func:`iris.quickplot.fill_between`. + (:issue:`3493`, :pull:`4647`) + +#. `@rcomer`_ and `@bjlittle`_ (reviewer) re-wrote :func:`iris.util.mask_cube` + to provide lazy evaluation and greater flexibility with respect to input types. + (:issue:`3936`, :pull:`4889`) + +#. 
`@stephenworsley`_ and `@lbdreyer`_ added a new kwarg ``expand_extras`` to + :func:`iris.util.new_axis` which can be used to specify instances of + :class:`~iris.coords.AuxCoord`, :class:`~iris.coords.CellMeasure` and + :class:`~iris.coords.AncillaryVariable` which should also be expanded to map + to the new axis. (:pull:`4896`) + +#. `@stephenworsley`_ updated to the latest CF Standard Names Table ``v79`` + (19 March 2022). (:pull:`4910`) + +#. `@trexfeathers`_ and `@lbdreyer`_ (reviewer) added + :func:`iris.pandas.as_cubes`, which provides richer conversion from + Pandas :class:`~pandas.Series` / :class:`~pandas.DataFrame`\s to one or more + :class:`~iris.cube.Cube`\s. This includes: n-dimensional datasets, + :class:`~iris.coords.AuxCoord`\s, :class:`~iris.coords.CellMeasure`\s, + :class:`~iris.coords.AncillaryVariable`\s, and multi-dimensional + coordinates. (:pull:`4890`) + + +🐛 Bugs Fixed +============= + +#. `@rcomer`_ reverted part of the change from :pull:`3906` so that + :func:`iris.plot.plot` no longer defaults to placing a "Y" coordinate (e.g. + latitude) on the y-axis of the plot. (:issue:`4493`, :pull:`4601`) + +#. `@rcomer`_ enabled passing of scalar objects to :func:`~iris.plot.plot` and + :func:`~iris.plot.scatter`. (:pull:`4616`) + +#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.aggregated_by` with `mdtol` for 1D + cubes where an aggregated section is entirely masked, reported at + :issue:`3190`. (:pull:`4246`) + +#. `@rcomer`_ ensured that a :class:`matplotlib.axes.Axes`'s position is preserved + when Iris replaces it with a :class:`cartopy.mpl.geoaxes.GeoAxes`, fixing + :issue:`1157`. (:pull:`4273`) + +#. `@rcomer`_ fixed :meth:`~iris.coords.Coord.nearest_neighbour_index` for edge + cases where the requested point is float and the coordinate has integer + bounds, reported at :issue:`2969`. (:pull:`4245`) + +#. 
`@rcomer`_ modified bounds setting on :obj:`~iris.coords.DimCoord` instances + so that the order of the cell bounds is automatically reversed + to match the coordinate's direction if necessary. This is consistent with + the `Bounds for 1-D coordinate variables` subsection of the `Cell Boundaries`_ + section of the CF Conventions and ensures that contiguity is preserved if a + coordinate's direction is reversed. (:issue:`3249`, :issue:`423`, + :issue:`4078`, :issue:`3756`, :pull:`4466`) + +#. `@wjbenfold`_ and `@evertrol`_ prevented an ``AttributeError`` being logged + to ``stderr`` when a :class:`~iris.fileformats.cf.CFReader` that fails to + initialise is garbage collected. (:issue:`3312`, :pull:`4646`) + +#. `@wjbenfold`_ fixed plotting of circular coordinates to extend kwarg arrays + as well as the data. (:issue:`466`, :pull:`4649`) + +#. `@wjbenfold`_ and `@rcomer`_ (reviewer) corrected the axis on which masking + is applied when an aggregator adds a trailing dimension. (:pull:`4755`) + +#. `@rcomer`_ and `@pp-mo`_ ensured that all methods to create or modify a + :class:`iris.cube.CubeList` check that it only contains cubes. According to + code comments, this was supposedly already the case, but there were several bugs + and loopholes. (:issue:`1897`, :pull:`4767`) + +#. `@rcomer`_ modified cube arithmetic to handle mismatches in the cube's data + array type. This prevents masks being lost in some cases and therefore + resolves :issue:`2987`. (:pull:`3790`) + +#. `@krikru`_ and `@rcomer`_ updated :mod:`iris.quickplot` such that the + colorbar is added to the correct ``axes`` when specified as a keyword + argument to a plotting routine. Otherwise, by default the colorbar will be + added to the current axes of the current figure. (:pull:`4894`) + +#. `@rcomer`_ and `@bjlittle`_ (reviewer) modified :func:`iris.util.mask_cube` so it + either works in place or returns a new cube (:issue:`3717`, :pull:`4889`) + + +💣 Incompatible Changes +======================= + +#. 
`@rcomer`_ and `@bjlittle`_ (reviewer) updated Iris's calendar handling to be + consistent with ``cf-units`` version 3.1. In line with the `Calendar`_ + section in version 1.9 of the CF Conventions, we now use "standard" rather + than the deprecated "gregorian" label for the default calendar. Units may + still be instantiated with ``calendar="gregorian"`` but their calendar + attribute will be silently changed to "standard". This may cause failures in + code that explicitly checks the calendar attribute. (:pull:`4847`) + + +🚀 Performance +============== + +#. `@wjbenfold`_ added caching to the calculation of the points array in a + :class:`~iris.coords.DimCoord` created using + :meth:`~iris.coords.DimCoord.from_regular`. (:pull:`4698`) + +#. `@wjbenfold`_ introduced caching in :func:`_lazy_data._optimum_chunksize` and + :func:`iris.fileformats.pp_load_rules._epoch_date_hours` to reduce time spent + repeating calculations. (:pull:`4716`) + +#. `@pp-mo`_ made :meth:`~iris.cube.Cube.add_aux_factory` faster. + (:pull:`4718`) + +#. `@wjbenfold`_ and `@rcomer`_ (reviewer) permitted the fast percentile + aggregation method to be used on masked data when the missing data tolerance + is set to 0. (:issue:`4735`, :pull:`4755`) + +#. `@wjbenfold`_ improved the speed of linear interpolation using + :meth:`iris.analysis.trajectory.interpolate` (:pull:`4366`) + +#. NumPy ``v1.23`` behaviour changes mean that + :func:`iris.experimental.ugrid.utils.recombine_submeshes` now uses ~3x as + much memory; testing shows a ~16-million point mesh will now use ~600MB. + Investigated by `@pp-mo`_ and `@trexfeathers`_. (:issue:`4845`) + + +🔥 Deprecations +=============== + +#. `@trexfeathers`_ and `@lbdreyer`_ (reviewer) deprecated + :func:`iris.pandas.as_cube` in favour of the new + :func:`iris.pandas.as_cubes` - see `✨ Features`_ for more details. + (:pull:`4890`) + + +🔗 Dependencies +=============== + +#. 
`@rcomer`_ introduced the ``nc-time-axis >=1.4`` minimum pin, reflecting that + we no longer use the deprecated :class:`nc_time_axis.CalendarDateTime` + when plotting against time coordinates. (:pull:`4584`) + +#. `@wjbenfold`_ and `@bjlittle`_ (reviewer) unpinned ``pillow``. (:pull:`4826`) + +#. `@rcomer`_ introduced the ``cf-units >=3.1`` minimum pin, reflecting the + alignment of calendar behaviour in the two packages (see Incompatible Changes). + (:pull:`4847`) + +#. `@bjlittle`_ introduced the ``sphinx-gallery >=0.11.0`` minimum pin. + (:pull:`4885`) + +#. `@trexfeathers`_ updated the install process to work with setuptools + ``>=v64``, making ``v64`` the minimum compatible version. (:pull:`4903`) + +#. `@stephenworsley`_ and `@trexfeathers`_ introduced the ``shapely !=1.8.3`` + pin, avoiding a bug caused by its interaction with cartopy. + (:pull:`4911`, :pull:`4917`) + + +📚 Documentation +================ + +#. `@tkknight`_ added a page to show the issues that have been voted for. See + :ref:`voted_issues_top`. (:issue:`3307`, :pull:`4617`) + +#. `@wjbenfold`_ added a note about fixing proxy URLs in lockfiles generated + because dependencies have changed. (:pull:`4666`) + +#. `@lbdreyer`_ moved most of the User Guide's :class:`iris.Constraint` examples + from :ref:`loading_iris_cubes` to :ref:`cube_extraction` and added an + example of constraining on bounded time. (:pull:`4656`) + +#. `@tkknight`_ adopted the `PyData Sphinx Theme`_ for the documentation. + (:discussion:`4344`, :pull:`4661`) + +#. `@tkknight`_ updated our developers guidance to show our intent to adopt + numpydoc strings and fixed some API documentation rendering. + See :ref:`docstrings`. (:issue:`4657`, :pull:`4689`) + +#. `@trexfeathers`_ and `@lbdreyer`_ added a page with examples of converting + various mesh formats into the Iris Mesh Data Model. (:pull:`4739`) + +#. `@rcomer`_ updated the "Load a Time Series of Data From the NEMO Model" + gallery example. (:pull:`4741`) + +#. 
`@wjbenfold`_ added developer documentation to highlight some of the + utilities offered by :class:`iris.IrisTest` and how to update CML and other + output files. (:issue:`4544`, :pull:`4600`) + +#. `@trexfeathers`_ and `@abooton`_ modernised the Iris logo to be SVG format. + (:pull:`3935`) + + +💼 Internal +=========== + +#. `@trexfeathers`_ and `@pp-mo`_ finished implementing a mature benchmarking + infrastructure (see :ref:`contributing.benchmarks`), building on 2 hard + years of lessons learned 🎉. (:pull:`4477`, :pull:`4562`, :pull:`4571`, + :pull:`4583`, :pull:`4621`) + +#. `@wjbenfold`_ used the aforementioned benchmarking infrastructure to + introduce deep (large 3rd dimension) loading and realisation benchmarks. + (:pull:`4654`) + +#. `@wjbenfold`_ made :func:`iris.tests.stock.simple_1d` respect the + ``with_bounds`` argument. (:pull:`4658`) + +#. `@lbdreyer`_ replaced `nose`_ with `pytest`_ as Iris' test runner. + (:pull:`4734`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) migrated to GitHub Actions + for Continuous-Integration. (:pull:`4503`) + +#. `@pp-mo`_ made tests run certain linux executables from the Python env, + specifically ncdump and ncgen. These could otherwise fail when run in IDEs + such as PyCharm and Eclipse, which don't automatically include the Python env + bin in the system PATH. + (:pull:`4794`) + +#. `@trexfeathers`_ and `@pp-mo`_ improved generation of stock NetCDF files. + (:pull:`4827`, :pull:`4836`) + +#. `@rcomer`_ removed some now redundant testing functions. (:pull:`4838`, + :pull:`4878`) + +#. `@bjlittle`_ and `@jamesp`_ (reviewer) and `@lbdreyer`_ (reviewer) extended + the GitHub Continuous-Integration to cover testing on ``py38``, ``py39``, + and ``py310``. (:pull:`4840`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) adopted `setuptools-scm`_ for + automated ``iris`` package versioning. (:pull:`4841`) + +#. 
`@bjlittle`_ and `@trexfeathers`_ (reviewer) added building, testing and + publishing of ``iris`` PyPI ``sdist`` and binary ``wheels`` as part of + our GitHub Continuous-Integration. (:pull:`4849`) + +#. `@rcomer`_ and `@wjbenfold`_ (reviewer) used ``pytest`` parametrization to + streamline the gallery test code. (:pull:`4792`) + +#. `@trexfeathers`_ improved settings to work better with + ``setuptools_scm``. (:pull:`4925`) + + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@evertrol: https://github.com/evertrol +.. _@krikru: https://github.com/krikru + + +.. comment + Whatsnew resources in alphabetical order: + +.. _Calendar: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.9/cf-conventions.html#calendar +.. _Cell Boundaries: https://cfconventions.org/Data/cf-conventions/cf-conventions-1.9/cf-conventions.html#cell-boundaries +.. _nose: https://nose.readthedocs.io +.. _PyData Sphinx Theme: https://pydata-sphinx-theme.readthedocs.io/en/stable/index.html +.. _pytest: https://docs.pytest.org +.. _setuptools-scm: https://github.com/pypa/setuptools_scm +.. _SciTools/cartopy@fcb784d: https://github.com/SciTools/cartopy/commit/fcb784daa65d95ed9a74b02ca292801c02bc4108 +.. _SciTools/cartopy@8860a81: https://github.com/SciTools/cartopy/commit/8860a8186d4dc62478e74c83f3b2b3e8f791372e diff --git a/docs/src/whatsnew/3.4.rst b/docs/src/whatsnew/3.4.rst new file mode 100644 index 0000000000..02fc574e51 --- /dev/null +++ b/docs/src/whatsnew/3.4.rst @@ -0,0 +1,302 @@ +.. include:: ../common_links.inc + +v3.4 (01 Dec 2022) +****************** + +This document explains the changes made to Iris for this release +(:doc:`View all changes <index>`.) + + +.. 
dropdown:: :opticon:`report` v3.4.0 Release Highlights + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + :open: + + The highlights for this minor release of Iris include: + + * We have **archived older Iris documentation** - everything before + ``v3.0.0`` - so older versions will soon no longer appear in search + engines. If you need this older documentation: please + see :ref:`iris_support`. + * We have added a :ref:`glossary` to the Iris documentation. + * We have completed work to make **Pandas interoperability** handle + n-dimensional :class:`~iris.cube.Cube`\s. + * We have **begun refactoring Iris' regridding**, which has already improved + performance and functionality, with more potential in future! + * We have made several other significant `🚀 Performance Enhancements`_. + + And finally, get in touch with us on :issue:`GitHub` if you have + any issues or feature requests for improving Iris. Enjoy! + + +v3.4.1 (21 Feb 2023) +==================== + +.. dropdown:: :opticon:`alert` v3.4.1 Patches + :container: + shadow + :title: text-primary text-center font-weight-bold + :body: bg-light + :animate: fade-in + + The patches in this release of Iris include: + + #. `@trexfeathers`_ and `@pp-mo`_ made Iris' use of the `netCDF4`_ library + thread-safe. (:pull:`5095`) + + #. `@trexfeathers`_ and `@pp-mo`_ removed the netCDF4 pin mentioned in + `🔗 Dependencies`_ point 3. (:pull:`5095`) + + +📢 Announcements +================ + +#. Welcome to `@ESadek-MO`_, `@TTV-Intrepid`_ and `@hsteptoe`_, who made their + first contributions to Iris 🎉 + + .. _try_experimental_stratify: + +#. Do you enjoy `python-stratify`_? Did you know that Iris includes a + convenience for using `python-stratify`_ with :class:`~iris.cube.Cube`\s? 
+ It has been 'experimental' for several years now, without receiving much + feedback, so it's **use it or lose it** time: please try out + :mod:`iris.experimental.stratify` and let us know what you think! + + +✨ Features +=========== + +#. `@ESadek-MO`_ edited :func:`~iris.io.expand_filespecs` to allow expansion of + non-existing paths, and added expansion functionality to :func:`~iris.io.save`. + (:issue:`4772`, :pull:`4913`) + +#. `@trexfeathers`_ and `Julian Heming`_ added new mappings between CF + standard names and UK Met Office LBFC codes. (:pull:`4859`) + +#. `@pp-mo`_ changed the metadata of a face/edge-type + :class:`~iris.experimental.ugrid.mesh.MeshCoord`, to be same as the face/edge + coordinate in the mesh from which it takes its ``.points``. Previously, all MeshCoords + took their metadata from the node coord, but only a node-type MeshCoord now does + that. Also, the MeshCoord ``.var_name`` is now that of the underlying coord, whereas + previously this was always None. These changes make MeshCoord more like an ordinary + :class:`~iris.coords.AuxCoord`, which avoids some specific known usage problems. + (:issue:`4860`, :pull:`5020`) + +#. `@Esadek-MO`_ and `@trexfeathers`_ added dim coord + prioritisation to ``_get_lon_lat_coords()`` in :mod:`iris.analysis.cartography`. + This allows :func:`iris.analysis.cartography.area_weights` and + :func:`~iris.analysis.cartography.project` to handle cubes which contain + both dim and aux coords of the same type e.g. ``longitude`` and ``grid_longitude``. + (:issue:`3916`, :pull:`5029`). + +#. `@stephenworsley`_ added the ability to regrid derived coordinates with the + :obj:`~iris.analysis.PointInCell` regridding scheme. (:pull:`4807`) + +#. 
`@trexfeathers`_ made NetCDF loading more tolerant by enabling skipping of + :class:`~iris.coords.DimCoord`\s, :class:`~iris.coords.AuxCoord`\s, + :class:`~iris.coords.CellMeasure`\s and + :class:`~iris.coords.AncillaryVariable`\s if they cannot be added to a + :class:`~iris.cube.Cube` (e.g. due to CF non-compliance). This is done via + a new error class: :class:`~iris.exceptions.CannotAddError` (subclass of + :class:`ValueError`). (:pull:`5054`) + +#. `@pp-mo`_ implemented == and != comparisons for :class:`~iris.Constraint` s. + A simple constraint is now == to another one constructed in the same way. + However, equality is limited for more complex cases : value-matching functions must + be the same identical function, and for &-combinations order is significant, + i.e. ``(c1 & c2) != (c2 & c1)``. + (:issue:`3616`, :pull:`3749`). + +#. `@hsteptoe`_ and `@trexfeathers`_ improved + :func:`iris.pandas.as_data_frame`\'s conversion of :class:`~iris.cube.Cube`\s to + :class:`~pandas.DataFrame`\s. This includes better handling of multiple + :class:`~iris.cube.Cube` dimensions, auxiliary coordinates and attribute + information. **Note:** the improvements are opt-in, by setting the + :obj:`iris.FUTURE.pandas_ndim` flag (see :class:`iris.Future` for more). + (:issue:`4526`, :pull:`4909`, :pull:`4669`, :pull:`5059`, :pull:`5074`) + + +🐛 Bugs Fixed +============= + +#. `@rcomer`_ and `@pp-mo`_ (reviewer) factored masking into the returned + sum-of-weights calculation from :obj:`~iris.analysis.SUM`. (:pull:`4905`) + +#. `@schlunma`_ fixed a bug which prevented using + :meth:`iris.cube.Cube.collapsed` on coordinates whose number of bounds + differs from 0 or 2. This enables the use of this method on mesh + coordinates. (:issue:`4672`, :pull:`4870`) + +#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) fixed the building of the CF + Standard Names module ``iris.std_names`` for the ``setup.py`` commands + ``develop`` and ``std_names``. (:issue:`4951`, :pull:`4952`) + +#. 
`@lbdreyer`_ and `@pp-mo`_ (reviewer) fixed the cube print out such that + scalar ancillary variables are displayed in a dedicated section rather than + being added to the vector ancillary variables section. Further, ancillary + variables and cell measures that map to a cube dimension of length 1 are now + included in the respective vector sections. (:pull:`4945`) + +#. `@rcomer`_ removed some old redundant code that prevented determining the + order of time cells. (:issue:`4697`, :pull:`4729`) + +#. `@stephenworsley`_ improved the accuracy of the error messages for + :meth:`~iris.cube.Cube.coord` when failing to find coordinates in the case where + a coordinate is given as the argument. Similarly, improved the error messages for + :meth:`~iris.cube.Cube.cell_measure` and :meth:`~iris.cube.Cube.ancillary_variable`. + (:issue:`4898`, :pull:`4928`) + +#. `@stephenworsley`_ fixed a bug which caused derived coordinates to be realised + after calling :meth:`iris.cube.Cube.aggregated_by`. (:issue:`3637`, :pull:`4947`) + +#. `@rcomer`_ corrected the ``standard_name`` mapping from UM stash code ``m01s30i311`` + to indicate that this is the upward, rather than northward part of the flow. + (:pull:`5060`) + +#. `@bjlittle`_ and `@trexfeathers`_ (reviewer) fixed an issue which prevented + uncompressed PP fields with additional trailing padded words in the field + data to be loaded and saved. (:pull:`5058`) + +#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) fixed the handling of data when + regridding with :class:`~iris.analysis.UnstructuredNearest` or calling + :func:`~iris.analysis.trajectory.interpolate` such that the data type and mask is + preserved. (:issue:`4463`, :pull:`5062`) + + +💣 Incompatible Changes +======================= + +#. `@trexfeathers`_ altered testing to accept new Dask copying behaviour from + `dask/dask#9555`_ - copies of a Dask array created using ``da.from_array()`` + will all ``compute()`` to a shared identical array. 
So creating a + :class:`~iris.cube.Cube` using ``Cube(data=da.from_array(...``, then + using :class:`~iris.cube.Cube` :meth:`~iris.cube.Cube.copy`, + will produce two :class:`~iris.cube.Cube`\s that both return an identical + array when requesting :class:`~iris.cube.Cube` :attr:`~iris.cube.Cube.data`. + We do not expect this to affect typical user workflows but please get in + touch if you need help. (:pull:`5041`) + +#. `@trexfeathers`_ moved ``iris.experimental.animate.animate()`` to + :func:`iris.plot.animate`, in recognition of its successful use over several + years since introduction. (:pull:`5056`) + + +🚀 Performance Enhancements +=========================== + +#. `@rcomer`_ and `@pp-mo`_ (reviewer) increased aggregation speed for + :obj:`~iris.analysis.SUM`, :obj:`~iris.analysis.COUNT` and + :obj:`~iris.analysis.PROPORTION` on real data. (:pull:`4905`) + +#. `@bouweandela`_ made :meth:`iris.coords.Coord.cells` faster for time + coordinates. This also affects :meth:`iris.cube.Cube.extract`, + :meth:`iris.cube.Cube.subset`, and :meth:`iris.coords.Coord.intersect`. + (:pull:`4969`) + +#. `@bouweandela`_ improved the speed of :meth:`iris.cube.Cube.subset` / + :meth:`iris.coords.Coord.intersect`. + (:pull:`4955`) + +#. `@stephenworsley`_ improved the speed of the :obj:`~iris.analysis.PointInCell` + regridding scheme. (:pull:`4807`) + + +🔥 Deprecations +=============== + +#. `@hsteptoe`_ and `@trexfeathers`_ (reviewer) deprecated + :func:`iris.pandas.as_series` in favour of the new + :func:`iris.pandas.as_data_frame` - see `✨ Features`_ for more details. + (:pull:`4669`) + + +🔗 Dependencies +=============== + +#. `@rcomer`_ introduced the ``dask >=2.26`` minimum pin, so that Iris can benefit + from Dask's support for `NEP13`_ and `NEP18`_. (:pull:`4905`) + +#. `@trexfeathers`_ advanced the Cartopy pin to ``>=0.21``, as Cartopy's + change to default Transverse Mercator projection affects an Iris test. 
+ See `SciTools/cartopy@fcb784d`_ and `SciTools/cartopy@8860a81`_ for more + details. + (:pull:`4968`) + +#. `@trexfeathers`_ introduced the ``netcdf4<1.6.1`` pin to avoid a problem + with segfaults. (:pull:`4968`, :pull:`5075`, :issue:`5016`) + +#. `@trexfeathers`_ updated the Matplotlib colormap registration in + :mod:`iris.palette` in response to a deprecation warning. Using the new + Matplotlib API also means a ``matplotlib>=3.5`` pin. (:pull:`4998`) + +#. See `💣 Incompatible Changes`_ for notes about `dask/dask#9555`_. + + +📚 Documentation +================ + +#. `@ESadek-MO`_, `@TTV-Intrepid`_ and `@trexfeathers`_ added a gallery example for zonal + means plotted parallel to a cartographic plot. (:pull:`4871`) + +#. `@Esadek-MO`_ added a key-terms :ref:`glossary` page into the user guide. (:pull:`4902`) + +#. `@pp-mo`_ added a :ref:`code example ` + for converting ORCA-gridded data to an unstructured cube. (:pull:`5013`) +#. `@Esadek-MO`_ added links to relevant Gallery examples within the User Guide + to improve understanding. (:pull:`5009`) + +#. `@trexfeathers`_ changed the warning header for the **latest** documentation + to reference Read the Docs' built-in version switcher, instead of generating + its own independent links. (:pull:`5055`) + +#. `@tkknight`_ updated the links for the Iris documentation to v2.4 and + earlier to point to the archive of zip files instead. (:pull:`5064`) + +#. `@Esadek-MO`_ began adding notes at the bottom of functions to + clarify if the function preserves laziness or not. See :issue:`3292` for + the ongoing checklist. (:pull:`5066`) + +💼 Internal +=========== + +#. `@rcomer`_ removed the obsolete ``setUpClass`` method from Iris testing. + (:pull:`4927`) + +#. `@bjlittle`_ and `@lbdreyer`_ (reviewer) removed support for + ``python setup.py test``, which is a deprecated approach to executing + package tests, see `pypa/setuptools#1684`_. Also performed assorted + ``setup.py`` script hygiene. 
(:pull:`4948`, :pull:`4949`, :pull:`4950`) + +#. `@pp-mo`_ split the module :mod:`iris.fileformats.netcdf` into separate + :mod:`~iris.fileformats.netcdf.loader` and :mod:`~iris.fileformats.netcdf.saver` + submodules, just to make the code easier to handle. + +#. `@trexfeathers`_ adapted the benchmark for importing :mod:`iris.palette` to + cope with new colormap behaviour in Matplotlib `v3.6`. (:pull:`4998`) + +#. `@rcomer`_ removed a now redundant workaround for an old matplotlib bug, + highlighted by :issue:`4090`. (:pull:`4999`) + +#. `@rcomer`_ added the ``show`` option to the documentation Makefiles, as a + convenient way for contributors to view their built documentation. + (:pull:`5000`) + +.. comment + Whatsnew author names (@github name) in alphabetical order. Note that, + core dev names are automatically included by the common_links.inc: + +.. _@TTV-Intrepid: https://github.com/TTV-Intrepid +.. _Julian Heming: https://www.metoffice.gov.uk/research/people/julian-heming +.. _@hsteptoe: https://github.com/hsteptoe + + +.. comment + Whatsnew resources in alphabetical order: + +.. _NEP13: https://numpy.org/neps/nep-0013-ufunc-overrides.html +.. _NEP18: https://numpy.org/neps/nep-0018-array-function-protocol.html +.. _pypa/setuptools#1684: https://github.com/pypa/setuptools/issues/1684 +.. _SciTools/cartopy@fcb784d: https://github.com/SciTools/cartopy/commit/fcb784daa65d95ed9a74b02ca292801c02bc4108 +.. _SciTools/cartopy@8860a81: https://github.com/SciTools/cartopy/commit/8860a8186d4dc62478e74c83f3b2b3e8f791372e +.. _dask/dask#9555: https://github.com/dask/dask/pull/9555 diff --git a/docs/src/whatsnew/index.rst b/docs/src/whatsnew/index.rst index fabb056484..005fac70c4 100644 --- a/docs/src/whatsnew/index.rst +++ b/docs/src/whatsnew/index.rst @@ -1,16 +1,20 @@ +.. include:: ../common_links.inc + .. _iris_whatsnew: What's New in Iris -****************** - -These "What's new" pages describe the important changes between major -Iris versions. 
+------------------ +.. include:: latest.rst .. toctree:: :maxdepth: 1 + :hidden: latest.rst + 3.4.rst + 3.3.rst + 3.2.rst 3.1.rst 3.0.rst 2.4.rst diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index e2d4c2bc0b..c5c3b2d173 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -14,13 +14,9 @@ This document explains the changes made to Iris for this release :animate: fade-in :open: - The highlights for this minor release of Iris include: + The highlights for this major/minor release of Iris include: - * We've added experimental support for - :ref:`Meshes `, which can now be loaded and - attached to a cube. Mesh support is based on the based on `CF-UGRID`_ - model. - * We've also dropped support for ``Python 3.7``. + * We're so proud to fully support `@ed-hawkins`_ and `#ShowYourStripes`_ ❤️ And finally, get in touch with us on :issue:`GitHub` if you have any issues or feature requests for improving Iris. Enjoy! @@ -29,154 +25,23 @@ This document explains the changes made to Iris for this release 📢 Announcements ================ -#. Welcome to `@wjbenfold`_, `@tinyendian`_, `@larsbarring`_, `@bsherratt`_ and - `@aaronspring`_ who made their first contributions to Iris. The first of - many we hope! -#. Congratulations to `@wjbenfold`_ who has become a core developer for Iris! 🎉 +#. Congratulations to `@ESadek-MO`_ who has become a core developer for Iris! 🎉 +#. Welcome and congratulations to `@HGWright`_ for making his first contribution to Iris! 🎉 ✨ Features =========== -#. `@bjlittle`_, `@pp-mo`_, `@trexfeathers`_ and `@stephenworsley`_ added - support for :ref:`unstructured meshes `. This involved - adding a data model (:pull:`3968`, :pull:`4014`, :pull:`4027`, :pull:`4036`, - :pull:`4053`, :pull:`4439`) and API (:pull:`4063`, :pull:`4064`), and - supporting representation (:pull:`4033`, :pull:`4054`) of data on meshes. - Most of this new API can be found in :mod:`iris.experimental.ugrid`. 
The key - objects introduced are :class:`iris.experimental.ugrid.mesh.Mesh`, - :class:`iris.experimental.ugrid.mesh.MeshCoord` and - :obj:`iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD`. - A :class:`~iris.experimental.ugrid.mesh.Mesh` contains a full description of a UGRID - type mesh. :class:`~iris.experimental.ugrid.mesh.MeshCoord`\ s are coordinates that - reference and represent a :class:`~iris.experimental.ugrid.mesh.Mesh` for use - on a :class:`~iris.cube.Cube`. :class:`~iris.cube.Cube`\ s are also given the - property :attr:`~iris.cube.Cube.mesh` which returns a - :class:`~iris.experimental.ugrid.mesh.Mesh` if one is attached to the - :class:`~iris.cube.Cube` via a :class:`~iris.experimental.ugrid.mesh.MeshCoord`. - -#. `@trexfeathers`_ added support for loading unstructured mesh data from netcdf data, - for files using the `CF-UGRID`_ conventions. - The context manager :obj:`~iris.experimental.ugrid.load.PARSE_UGRID_ON_LOAD` - provides a way to load UGRID files so that :class:`~iris.cube.Cube`\ s can be - returned with a :class:`~iris.experimental.ugrid.mesh.Mesh` attached. - (:pull:`4058`). - -#. `@pp-mo`_ added support to save cubes with :ref:`meshes ` to netcdf - files, using the `CF-UGRID`_ conventions. - The existing :meth:`iris.save` function now does this, when saving cubes with meshes. - A routine :meth:`iris.experimental.ugrid.save.save_mesh` allows saving - :class:`~iris.experimental.ugrid.mesh.Mesh` objects to netcdf *without* any associated data - (i.e. not attached to cubes). - (:pull:`4318` and :pull:`4339`). - -#. `@trexfeathers`_ added :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords` - for inferring a :class:`~iris.experimental.ugrid.mesh.Mesh` from an - appropriate collection of :class:`iris.coords.Coord`\ s. - -#. `@larsbarring`_ updated :func:`~iris.util.equalise_attributes` to return a list of dictionaries - containing the attributes removed from each :class:`~iris.cube.Cube`. (:pull:`4357`) - -#. 
`@trexfeathers`_ enabled streaming of **all** lazy arrays when saving to - NetCDF files (was previously just :class:`~iris.cube.Cube` - :attr:`~iris.cube.Cube.data`). This is - important given the much greater size of - :class:`~iris.coords.AuxCoord` :attr:`~iris.coords.AuxCoord.points` and - :class:`~iris.experimental.ugrid.mesh.Connectivity` - :attr:`~iris.experimental.ugrid.mesh.Connectivity.indices` under the - :ref:`mesh model `. (:pull:`4375`) - -#. `@bsherratt`_ added a ``threshold`` parameter to - :meth:`~iris.cube.Cube.intersection` (:pull:`4363`) - -#. `@wjbenfold`_ added test data to ci benchmarks so that it is accessible to - benchmark scripts. Also added a regridding benchmark that uses this data - (:pull:`4402`) - -#. `@pp-mo`_ updated to the latest CF Standard Names Table ``v78`` (21 Sept 2021). - (:issue:`4479`, :pull:`4483`) - -#. `@SimonPeatman`_ added support for filenames in the form of a :class:`~pathlib.PurePath` - in :func:`~iris.load`, :func:`~iris.load_cube`, :func:`~iris.load_cubes`, - :func:`~iris.load_raw` and :func:`~iris.save` (:issue:`3411`, :pull:`3917`). - Support for :class:`~pathlib.PurePath` is yet to be implemented across the rest - of Iris (:issue:`4523`). - -#. `@pp-mo`_ removed broken tooling for deriving Iris metadata translations - from `Metarelate`_. From now we intend to manage phenonemon translation - in Iris itself. (:pull:`4484`) - -#. `@pp-mo`_ improved printout of various cube data component objects : - :class:`~iris.coords.Coord`, :class:`~iris.coords.CellMeasure`, - :class:`~iris.coords.AncillaryVariable`, - :class:`~iris.experimental.ugrid.mesh.MeshCoord` and - :class:`~iris.experimental.ugrid.mesh.Mesh`. - These now all provide a more controllable ``summary()`` method, and - more convenient and readable ``str()`` and ``repr()`` output in the style of - the :class:`iris.cube.Cube`. - They also no longer realise lazy data. (:pull:`4499`). +#. 
`@bsherratt`_ added support for plugins - see the corresponding + :ref:`documentation page` for further information. + (:pull:`5144`) 🐛 Bugs Fixed ============= -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.intersection` for special cases where - one cell's bounds align with the requested maximum and negative minimum, fixing - :issue:`4221`. (:pull:`4278`) - -#. `@bsherratt`_ fixed further edge cases in - :meth:`~iris.cube.Cube.intersection`, including :issue:`3698` (:pull:`4363`) - -#. `@tinyendian`_ fixed the error message produced by :meth:`~iris.cube.CubeList.concatenate_cube` - when a cube list contains cubes with different names, which will no longer report - "Cube names differ: var1 != var1" if var1 appears multiple times in the list - (:issue:`4342`, :pull:`4345`) - -#. `@larsbarring`_ fixed :class:`~iris.coord_systems.GeoCS` to handle spherical ellipsoid - parameter inverse_flattening = 0 (:issue:`4146`, :pull:`4348`) - -#. `@pdearnshaw`_ fixed an error in the call to :class:`cftime.datetime` in - :mod:`~iris.fileformats.pp_save_rules` that prevented the saving to PP of climate - means for DJF (:pull:`4391`) - -#. `@wjbenfold`_ improved the error message for failure of :meth:`~iris.cube.CubeList.concatenate` - to indicate that the value of a scalar coordinate may be mismatched, rather than the metadata - (:issue:`4096`, :pull:`4387`) - -#. `@bsherratt`_ fixed a regression to the NAME file loader introduced in 3.0.4, - as well as some long-standing bugs with vertical coordinates and number - formats. (:pull:`4411`) - -#. `@rcomer`_ fixed :meth:`~iris.cube.Cube.subset` to alway return ``None`` if - no value match is found. (:pull:`4417`) - -#. `@wjbenfold`_ changed :meth:`iris.util.points_step` to stop it from warning - when applied to a single point (:issue:`4250`, :pull:`4367`) - -#. 
`@trexfeathers`_ changed :class:`~iris.coords._DimensionalMetadata` and - :class:`~iris.experimental.ugrid.Connectivity` equality methods to preserve - array laziness, allowing efficient comparisons even with larger-than-memory - objects. (:pull:`4439`) - -#. `@rcomer`_ modified :meth:`~iris.cube.Cube.aggregated_by` to calculate new - coordinate bounds using minimum and maximum for unordered coordinates, - fixing :issue:`1528`. (:pull:`4315`) - -#. `@wjbenfold`_ changed how a delayed unit conversion is performed on a cube - so that a cube with lazy data awaiting a unit conversion can be pickled. - (:issue:`4354`, :pull:`4377`) - -#. `@pp-mo`_ fixed a bug in netcdf loading, whereby *any* rotated latlon coordinate - was mistakenly interpreted as a latitude, usually resulting in two 'latitude's - instead of one latitude and one longitude. - (:issue:`4460`, :pull:`4470`) - -#. `@wjbenfold`_ stopped :meth:`iris.coord_systems.GeogCS.as_cartopy_projection` - from assuming the globe to be the Earth (:issue:`4408`, :pull:`4497`) - -#. `@rcomer`_ corrected the ``long_name`` mapping from UM stash code ``m01s09i215`` - to indicate cloud fraction greater than 7.9 oktas, rather than 7.5 - (:issue:`3305`, :pull:`4535`) +#. `@trexfeathers`_ and `@pp-mo`_ made Iris' use of the `netCDF4`_ library + thread-safe. (:pull:`5095`) 💣 Incompatible Changes @@ -188,195 +53,78 @@ This document explains the changes made to Iris for this release 🚀 Performance Enhancements =========================== -#. `@wjbenfold`_ resolved an issue that previously caused regridding with lazy - data to take significantly longer than with real data. Benchmark - :class:`benchmarks.HorizontalChunkedRegridding` shows a time decrease - from >10s to 625ms. (:issue:`4280`, :pull:`4400`) - -#. `@bjlittle`_ included an optimisation to :class:`~iris.cube.Cube.coord_dims` - to avoid unnecessary processing whenever a coordinate instance that already - exists within the cube is provided. (:pull:`4549`) +#. 
N/A 🔥 Deprecations =============== -#. `@wjbenfold`_ removed :mod:`iris.experimental.equalise_cubes`. In ``v3.0`` - the experimental ``equalise_attributes`` functionality was moved to the - :mod:`iris.util.equalise_attributes` function. Since then, calling the - :func:`iris.experimental.equalise_cubes.equalise_attributes` function raised - an exception. (:issue:`3528`, :pull:`4496`) - -#. `@wjbenfold`_ deprecated :func:`iris.util.approx_equal` in preference for - :func:`math.isclose`. The :func:`~iris.util.approx_equal` function will be - removed in a future release of Iris. (:pull:`4514`) - -#. `@wjbenfold`_ deprecated :mod:`iris.experimental.raster` as it is not - believed to still be in use. The deprecation warnings invite users to contact - the Iris Developers if this isn't the case. (:pull:`4525`) - -#. `@wjbenfold`_ deprecated :mod:`iris.fileformats.abf` and - :mod:`iris.fileformats.dot` as they are not believed to still be in use. The - deprecation warnings invite users to contact the Iris Developers if this - isn't the case. (:pull:`4515`) - -#. `@wjbenfold`_ removed the :func:`iris.util.as_compatible_shape` function, - which was deprecated in ``v3.0``. Instead use - :class:`iris.common.resolve.Resolve`. For example, rather than calling - ``as_compatible_shape(src_cube, target_cube)`` replace with - ``Resolve(src_cube, target_cube)(target_cube.core_data())``. (:pull:`4513`) - -#. `@wjbenfold`_ deprecated :func:`iris.analysis.maths.intersection_of_cubes` in - preference for :meth:`iris.cube.CubeList.extract_overlapping`. The - :func:`~iris.analysis.maths.intersection_of_cubes` function will be removed in - a future release of Iris. (:pull:`4541`) - -#. `@pp-mo`_ deprecated :mod:`iris.experimental.regrid_conservative`. This is - now replaced by `iris-emsf-regrid`_. (:pull:`4551`) - -#. `@pp-mo`_ deprecated everything in :mod:`iris.experimental.regrid`. 
- Most features have a preferred exact alternative, as suggested, *except* - :class:`iris.experimental.regrid.ProjectedUnstructuredLinear` : that has no - identical equivalent, but :class:`iris.analysis.UnstructuredNearest` is - suggested as being quite close (though possibly slower). (:pull:`4548`) +#. N/A 🔗 Dependencies =============== -#. `@bjlittle`_ introduced the ``cartopy >=0.20`` minimum pin. - (:pull:`4331`) - -#. `@trexfeathers`_ introduced the ``cf-units >=3`` and ``nc-time-axis >=1.3`` - minimum pins. (:pull:`4356`) - -#. `@bjlittle`_ introduced the ``numpy >=1.19`` minimum pin, in - accordance with `NEP-29`_ deprecation policy. (:pull:`4386`) - -#. `@bjlittle`_ dropped support for ``Python 3.7``, as per the `NEP-29`_ - backwards compatibility and deprecation policy schedule. (:pull:`4481`) +#. N/A 📚 Documentation ================ -#. `@rcomer`_ updated the "Plotting Wind Direction Using Quiver" Gallery - example. (:pull:`4120`) - -#. `@trexfeathers`_ included `Iris GitHub Discussions`_ in - :ref:`get involved `. (:pull:`4307`) +#. `@rcomer`_ clarified instructions for updating gallery tests. (:pull:`5100`) +#. `@tkknight`_ unpinned ``pydata-sphinx-theme`` and set the default to use + the light version (not dark) while we make the docs dark mode friendly + (:pull:`5129`) -#. `@wjbenfold`_ improved readability in :ref:`userguide interpolation - section `. (:pull:`4314`) +#. `@jonseddon`_ updated the citation to a more recent version of Iris. (:pull:`5116`) -#. `@wjbenfold`_ added explanation about the absence of | operator for - :class:`iris.Constraint` to :ref:`userguide loading section - ` and to api reference documentation. (:pull:`4321`) +#. `@rcomer`_ linked the :obj:`~iris.analysis.PERCENTILE` aggregator from the + :obj:`~iris.analysis.MEDIAN` docstring, noting that the former handles lazy + data. (:pull:`5128`) -#. `@trexfeathers`_ added more detail on making `iris-test-data`_ available - during :ref:`developer_running_tests`. (:pull:`4359`) +#. 
`@trexfeathers`_ updated the WSL link to Microsoft's latest documentation, + and removed an ECMWF link in the ``v1.0`` What's New that was failing the + linkcheck CI. (:pull:`5109`) -#. `@lbdreyer`_ added a section to the release documentation outlining the role - of the :ref:`release_manager`. (:pull:`4413`) +#. `@trexfeathers`_ added a new top-level :doc:`/community/index` section, + as a one-stop place to find out about getting involved, and how we relate + to other projects. (:pull:`5025`) -#. `@trexfeathers`_ encouraged contributors to include type hinting in code - they are working on - :ref:`code_formatting`. (:pull:`4390`) +#. The **Iris community**, with help from the **Xarray community**, produced + the :doc:`/community/iris_xarray` page, highlighting the similarities and + differences between the two packages. (:pull:`5025`) -#. `@wjbenfold`_ updated Cartopy documentation links to point to the renamed - :class:`cartopy.mpl.geoaxes.GeoAxes`. (:pull:`4464`) - -#. `@wjbenfold`_ clarified behaviour of :func:`iris.load` in :ref:`userguide - loading section `. (:pull:`4462`) - -#. `@bjlittle`_ migrated readthedocs to use mambaforge for `faster documentation building`_. - (:pull:`4476`) - -#. `@wjbenfold`_ contributed `@alastair-gemmell`_'s :ref:`step-by-step guide to - contributing to the docs ` to the docs. - (:pull:`4461`) - -#. `@pp-mo`_ improved and corrected docstrings of - :class:`iris.analysis.PointInCell`, making it clear what is the actual - calculation performed. (:pull:`4548`) - -#. `@pp-mo`_ removed reference in docstring of - :class:`iris.analysis.UnstructuredNearest` to the obsolete (deprecated) - :class:`iris.experimental.regrid.ProjectedUnstructuredNearest`. - (:pull:`4548`) +#. `@bjlittle`_ added a new section to the `README.md`_ to show our support + for the outstanding work of `@ed-hawkins`_ et al. for `#ShowYourStripes`_. + (:pull:`5141`) +#. `@HGWright`_ fixed some typos from Gitwash. (:pull:`5145`) 💼 Internal ===========
`@trexfeathers`_ set the linkcheck to ignore - http://www.nationalarchives.gov.uk/doc/open-government-licence since this - always works locally, but never within CI. (:pull:`4307`) - -#. `@wjbenfold`_ netCDF integration tests now skip ``TestConstrainedLoad`` if - test data is missing (:pull:`4319`) - -#. `@wjbenfold`_ excluded ``Good First Issue`` labelled issues from being - marked stale. (:pull:`4317`) - -#. `@tkknight`_ added additional make targets for reducing the time of the - documentation build including ``html-noapi`` and ``html-quick``. - Useful for development purposes only. For more information see - :ref:`contributing.documentation.building` the documentation. (:pull:`4333`) - -#. `@rcomer`_ modified the ``animation`` test to prevent it throwing a warning - that sometimes interferes with unrelated tests. (:pull:`4330`) - -#. `@rcomer`_ removed a now redundant workaround in :func:`~iris.plot.contourf`. - (:pull:`4349`) - -#. `@trexfeathers`_ refactored :mod:`iris.experimental.ugrid` into sub-modules. - (:pull:`4347`). - -#. `@bjlittle`_ enabled the `sort-all`_ `pre-commit`_ hook to automatically - sort ``__all__`` entries into alphabetical order. (:pull:`4353`) - -#. `@rcomer`_ modified a NetCDF saver test to prevent it triggering a numpy - deprecation warning. (:issue:`4374`, :pull:`4376`) - -#. `@akuhnregnier`_ removed addition of period from - :func:`~iris.analysis.cartography.wrap_lons` and updated affected tests - using ``assertArrayAllClose`` following :issue:`3993`. - (:pull:`4421`) - -#. `@rcomer`_ updated some tests to work with Matplotlib v3.5. (:pull:`4428`) - -#. `@rcomer`_ applied minor fixes to some regridding tests. (:pull:`4432`) - -#. `@lbdreyer`_ corrected the license PyPI classifier. (:pull:`4435`) +#. `@fnattino`_ changed the order of ``ncgen`` arguments in the command to + create NetCDF files for testing (caused errors on OS X). (:pull:`5105`) -#. 
`@aaronspring `_ exchanged ``dask`` with - ``dask-core`` in testing environments reducing the number of dependencies - installed for testing. (:pull:`4434`) +#. `@rcomer`_ removed some old infrastructure that printed test timings. + (:pull:`5101`) -#. `@wjbenfold`_ prevented github action runs in forks (:issue:`4441`, - :pull:`4444`) +#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) added coverage testing. This + can be enabled by using the "--coverage" flag when running the tests with + nox i.e. ``nox --session tests -- --coverage``. (:pull:`4765`) -#. `@wjbenfold`_ fixed tests for hybrid formulae that weren't being found by - nose (:issue:`4431`, :pull:`4450`) +#. `@lbdreyer`_ and `@trexfeathers`_ (reviewer) removed the ``--coding-tests`` + option from Iris' test runner. (:pull:`4765`) .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: -.. _@aaronspring: https://github.com/aaronspring -.. _@akuhnregnier: https://github.com/akuhnregnier -.. _@bsherratt: https://github.com/bsherratt -.. _@larsbarring: https://github.com/larsbarring -.. _@pdearnshaw: https://github.com/pdearnshaw -.. _@SimonPeatman: https://github.com/SimonPeatman -.. _@tinyendian: https://github.com/tinyendian +.. _@fnattino: https://github.com/fnattino +.. _@ed-hawkins: https://github.com/ed-hawkins .. comment Whatsnew resources in alphabetical order: -.. _NEP-29: https://numpy.org/neps/nep-0029-deprecation_policy.html -.. _Metarelate: http://www.metarelate.net/ -.. _UGRID: http://ugrid-conventions.github.io/ugrid-conventions/ -.. _iris-emsf-regrid: https://github.com/SciTools-incubator/iris-esmf-regrid -.. _faster documentation building: https://docs.readthedocs.io/en/stable/guides/conda.html#making-builds-faster-with-mamba -.. _sort-all: https://github.com/aio-libs/sort-all +.. _#ShowYourStripes: https://showyourstripes.info/s/globe/ +.. 
_README.md: https://github.com/SciTools/iris#----- diff --git a/docs/src/whatsnew/latest.rst.template b/docs/src/whatsnew/latest.rst.template index 79c578ca65..a0ce415a65 100644 --- a/docs/src/whatsnew/latest.rst.template +++ b/docs/src/whatsnew/latest.rst.template @@ -42,7 +42,7 @@ v3.X.X (DD MMM YYYY) NOTE: section above is a template for bugfix patches ==================================================== (Please remove this section when creating an initial 'latest.rst') - + 📢 Announcements @@ -109,4 +109,3 @@ NOTE: section above is a template for bugfix patches .. comment Whatsnew resources in alphabetical order: - diff --git a/docs/src/why_iris.rst b/docs/src/why_iris.rst new file mode 100644 index 0000000000..82b791b4bd --- /dev/null +++ b/docs/src/why_iris.rst @@ -0,0 +1,43 @@ +.. _why_iris: + +Why Iris +======== + +**A powerful, format-agnostic, community-driven Python package for analysing +and visualising Earth science data.** + +Iris implements a data model based on the `CF conventions `_ +giving you a powerful, format-agnostic interface for working with your data. +It excels when working with multi-dimensional Earth Science data, where tabular +representations become unwieldy and inefficient. + +`CF Standard names `_, +`units `_, and coordinate metadata +are built into Iris, giving you a rich and expressive interface for maintaining +an accurate representation of your data. Its treatment of data and +associated metadata as first-class objects includes: + +.. rst-class:: squarelist + +* visualisation interface based on `matplotlib `_ and + `cartopy `_, +* unit conversion, +* subsetting and extraction, +* merge and concatenate, +* aggregations and reductions (including min, max, mean and weighted averages), +* interpolation and regridding (including nearest-neighbor, linear and + area-weighted), and +* operator overloads (``+``, ``-``, ``*``, ``/``, etc.). 
+ +A number of file formats are recognised by Iris, including CF-compliant NetCDF, +GRIB, and PP, and it has a plugin architecture to allow other formats to be +added seamlessly. + +Building upon `NumPy `_ and +`dask `_, Iris scales from efficient +single-machine workflows right through to multi-core clusters and HPC. +Interoperability with packages from the wider scientific Python ecosystem comes +from Iris' use of standard NumPy/dask arrays as its underlying data storage. + +Iris is part of SciTools, for more information see https://scitools.org.uk/. +For **Iris 2.4** and earlier documentation please see :ref:`iris_support`. \ No newline at end of file diff --git a/etc/cf-standard-name-table.xml b/etc/cf-standard-name-table.xml index bd76168192..9c5fcd9cf0 100644 --- a/etc/cf-standard-name-table.xml +++ b/etc/cf-standard-name-table.xml @@ -1,7 +1,7 @@ - 78 - 2021-09-21T11:55:06Z + 79 + 2022-03-19T15:25:54Z Centre for Environmental Data Analysis support@ceda.ac.uk @@ -8014,6 +8014,20 @@ The phrase "magnitude_of_X" means magnitude of a vector X. The surface called "surface" means the lower boundary of the atmosphere. "Surface stress" means the shear stress (force per unit area) exerted by the wind at the surface. A downward stress is a downward flux of momentum. Over large bodies of water, wind stress can drive near-surface currents. "Downward" indicates a vector component which is positive when directed downward (negative upward). + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of 19’-butanoyloxyfucoxanthin is C46H64O8. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/BUTAXXXX/1/. 
+ + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of 19'-hexanoyloxyfucoxanthin is C48H68O8. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/HEXAXXXX/2/. + + kg m-3 @@ -8028,6 +8042,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for aceto-nitrile is CH3CN. The IUPAC name for aceto-nitrile is ethanenitrile. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/ATPXZZDZ/2/. + + kg m-3 @@ -8042,6 +8063,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Alkenes are unsaturated hydrocarbons as they contain chemical double bonds between adjacent carbon atoms. 
Alkenes contain only hydrogen and carbon combined in the general proportions C(n)H(2n); "alkenes" is the term used in standard names to describe the group of chemical species having this common structure that are represented within a given model. The list of individual species that are included in a quantity having a group chemical standard name can vary between models. Where possible, the data variable should be accompanied by a complete description of the species represented, for example, by using a comment attribute. Standard names exist for some individual alkene species, e.g., ethene and propene. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of alpha-carotene is C40H56. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/BECAXXP1/2/. + + kg m-3 @@ -8112,6 +8140,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for benzene is C6H6. Benzene is the simplest aromatic hydrocarbon and has a ring structure consisting of six carbon atoms joined by alternating single and double chemical bonds. Each carbon atom is additionally bonded to one hydrogen atom. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of beta-carotene is C40H56. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/BBCAXXP1/2/. + + kg m-3 @@ -8217,6 +8252,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula of carbon tetrachloride is CCl4. The IUPAC name for carbon tetrachloride is tetrachloromethane. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Carotene" refers to the sum of all forms of the carotenoid pigment carotene. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/CAROXXXX/1/. + + kg m-3 @@ -8287,6 +8329,41 @@ 'Mass concentration' means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. 
The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. Chlorophyll-a is the most commonly occurring form of natural chlorophyll. The chemical formula of chlorophyll-a is C55H72O5N4Mg. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/CHLBXXPX/2/. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. Chlorophyll c1c2 (sometimes written c1-c2 or c1+c2) means the sum of chlorophyll c1 and chlorophyll c2. 
The chemical formula of chlorophyll c1 is C35H30MgN4O5, and chlorophyll c2 is C35H28MgN4O5. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/CHLC12PX/3/. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. The chemical formula of chlorophyll c3 is C36H44MgN4O7. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/CHLC03PX/2/. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Chlorophylls are the green pigments found in most plants, algae and cyanobacteria; their presence is essential for photosynthesis to take place. There are several different forms of chlorophyll that occur naturally. All contain a chlorin ring (chemical formula C20H16N4) which gives the green pigment and a side chain whose structure varies. The naturally occurring forms of chlorophyll contain between 35 and 55 carbon atoms. Chlorophyll-c means chlorophyll c1+c2+c3. 
The chemical formula of chlorophyll c1 is C35H30MgN4O5, and chlorophyll c2 is C35H28MgN4O5. The chemical formula of chlorophyll c3 is C36H44MgN4O7. + + + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of chlorophyllide-a is C35H34MgN4O5. + + kg m-3 @@ -8322,6 +8399,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. Condensed water means liquid and ice. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of diadinoxanthin is C40H54O3. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/DIADXXXX/2/. + + kg m-3 @@ -8378,6 +8462,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for dinitrogen pentoxide is N2O5. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
+ + kg m-3 @@ -8455,6 +8546,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for formic acid is HCOOH. The IUPAC name for formic acid is methanoic acid. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of fucoxanthin is C42H58O6. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/FUCXZZZZ/2/. + + kg m-3 @@ -8637,6 +8735,13 @@ Mass concentration means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The mass concentration of liquid water takes into account all cloud droplets and liquid precipitation regardless of drop size or fall speed. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of lutein is C40H56O2. + + kg m-3 @@ -8707,6 +8812,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. 
A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for molecular hydrogen is H2. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". + + kg m-3 @@ -8833,6 +8945,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol takes up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the aerosol. "Dry aerosol particles" means aerosol particles without any water uptake. The term "particulate_organic_matter_dry_aerosol" means all particulate organic matter dry aerosol except elemental carbon. It is the sum of primary_particulate_organic_matter_dry_aerosol and secondary_particulate_organic_matter_dry_aerosol. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/PERDXXXX/2/. 
+ + kg m-3 @@ -8861,6 +8980,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. It means the ratio of the mass of X to the mass of Y (including X). A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Petroleum hydrocarbons are compounds containing just carbon and hydrogen originating from the fossil fuel crude oil. + + kg m-3 + + + Concentration of phaeopigment per unit volume of the water body, where the filtration size or collection method is unspecified (equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/. "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Phaeopigments are a group of non-photosynthetic pigments that are the degradation product of algal chlorophyll pigments. Phaeopigments contain phaeophytin, which fluoresces in response to excitation light, and phaeophorbide, which is colorless and does not fluoresce (source: https://academic.oup.com/plankt/article/24/11/1221/1505482). Phaeopigment concentration commonly increases during the development phase of marine phytoplankton blooms, and declines in the post bloom stage (source: https://www.sciencedirect.com/science/article/pii/0967063793901018). + + kg m-3 @@ -8931,6 +9057,13 @@ Mass concentration means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". 
"Aerosol" means the system of suspended liquid or solid particles in air (except cloud droplets) and their carrier gas, the air itself. Aerosol particles take up ambient water (a process known as hygroscopic growth) depending on the relative humidity and the composition of the particles. "Dry aerosol particles" means aerosol particles without any water uptake. "Pm2p5 aerosol" means atmospheric particulate compounds with an aerodynamic diameter of less than or equal to 2.5 micrometers. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of prasinoxanthin is C40H56O4. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/PXAPXXXX/2/. + + kg m-3 @@ -9036,6 +9169,13 @@ "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula for toluene is C6H5CH3. Toluene has the same structure as benzene, except that one of the hydrogen atoms is replaced by a methyl group. The IUPAC name for toluene is methylbenzene. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of violaxanthin is C40H56O4. 
+ + kg m-3 @@ -9064,6 +9204,13 @@ Mass concentration means mass per unit volume and is used in the construction mass_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for xylene is C6H4C2H6. In chemistry, xylene is a generic term for a group of three isomers of dimethylbenzene. The IUPAC names for the isomers are 1,2-dimethylbenzene, 1,3-dimethylbenzene and 1,4-dimethylbenzene. Xylene is an aromatic hydrocarbon. There are standard names that refer to aromatic_compounds as a group, as well as those for individual species. + + kg m-3 + + + "Mass concentration" means mass per unit volume and is used in the construction "mass_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The chemical formula of zeaxanthin is C40H56O2. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/ZEAXXXXX/2/. + + kg m-3 @@ -10737,6 +10884,13 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for aceto-nitrile is CH3CN. The IUPAC name for aceto-nitrile is ethanenitrile. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/ATPXZZDZ/2/. + + mol m-3 @@ -11185,6 +11339,13 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The concentration of any chemical species, whether particulate or dissolved, may vary with depth in the ocean. A depth profile may go through one or more local minima in concentration. The mole_concentration_of_molecular_oxygen_in_sea_water_at_shallowest_local_minimum_in_vertical_profile is the mole concentration of oxygen at the local minimum in the concentration profile that occurs closest to the sea surface. The chemical formula for molecular oxygen is O2. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved nitrogen" means the sum of all nitrogen in solution: inorganic nitrogen (nitrite, nitrate and ammonium) plus nitrogen in carbon compounds. + + mol m-3 @@ -11199,6 +11360,20 @@ "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. 
A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Dissolved organic nitrogen" describes the nitrogen held in carbon compounds in solution. These are mostly generated by plankton excretion and decay. + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical or biological species denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". "Organic phosphorus" means phosphorus in carbon compounds. The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/ORGPDSZZ/4/. + + + + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". Phosphorus means phosphorus in all chemical forms, commonly referred to as "total phosphorus". The equivalent term in the NERC P01 Parameter Usage Vocabulary may be found at http://vocab.nerc.ac.uk/collection/P01/current/TPHSDSZZ/6/. + + mol m-3 @@ -11626,6 +11801,13 @@ Mole concentration means number of moles per unit volume, also called "molarity", and is used in the construction mole_concentration_of_X_in_Y, where X is a material constituent of Y. A chemical species denoted by X may be described by a single term such as 'nitrogen' or a phrase such as 'nox_expressed_as_nitrogen'. The chemical formula for ozone is O3. 
+ + mol m-3 + + + "Mole concentration" means number of moles per unit volume, also called "molarity", and is used in the construction "mole_concentration_of_X_in_Y", where X is a material constituent of Y. A chemical species or biological group denoted by X may be described by a single term such as "nitrogen" or a phrase such as "nox_expressed_as_nitrogen". The phrase "expressed_as" is used in the construction "A_expressed_as_B", where B is a chemical constituent of A. It means that the quantity indicated by the standard name is calculated solely with respect to the B contained in A, neglecting all other chemical constituents of A. + + mol m-3 @@ -18595,21 +18777,21 @@ Pa - "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xx" indicates the component of the tensor along the grid x_ axis. + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xx" indicates the component of the tensor along the grid x_ axis. Pa - "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xy" indicates the lateral contributions to x_ and y_ components of the tensor. + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "xy" indicates the lateral contributions to x_ and y_ components of the tensor. Pa - "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. "yy" indicates the component of the tensor along the grid y_ axis. + "Sea surface wave radiation stress" describes the excess momentum flux caused by sea surface waves. Radiation stresses behave as a second-order tensor. 
"yy" indicates the component of the tensor along the grid y_ axis. @@ -31472,16 +31654,12 @@ - - biological_taxon_lsid - - temperature_in_ground - - surface_snow_density + + biological_taxon_lsid @@ -31516,14 +31694,18 @@ tendency_of_atmosphere_mass_content_of_water_vapor_due_to_sublimation_of_surface_snow_and_ice - - atmosphere_upward_absolute_vorticity + + surface_snow_density atmosphere_upward_relative_vorticity + + atmosphere_upward_absolute_vorticity + + area_type @@ -31532,34 +31714,46 @@ area_type - - iron_growth_limitation_of_diazotrophic_phytoplankton + + mass_fraction_of_liquid_precipitation_in_air - - growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance + + mass_fraction_of_liquid_precipitation_in_air tendency_of_mole_concentration_of_particulate_organic_matter_expressed_as_carbon_in_sea_water_due_to_net_primary_production_by_diazotrophic_phytoplankton - - mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water + + nitrogen_growth_limitation_of_diazotrophic_phytoplankton - - mass_fraction_of_liquid_precipitation_in_air + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton - - mass_fraction_of_liquid_precipitation_in_air + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton + + + + mole_concentration_of_diazotrophic_phytoplankton_expressed_as_carbon_in_sea_water mass_concentration_of_diazotrophic_phytoplankton_expressed_as_chlorophyll_in_sea_water + + iron_growth_limitation_of_diazotrophic_phytoplankton + + + + growth_limitation_of_diazotrophic_phytoplankton_due_to_solar_irradiance + + air_pseudo_equivalent_potential_temperature @@ -31576,64 +31770,300 @@ tendency_of_mass_fraction_of_stratiform_cloud_ice_in_air_due_to_riming_from_cloud_liquid_water - - nitrogen_growth_limitation_of_diazotrophic_phytoplankton + + sea_water_velocity_from_direction - - 
net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton + + sea_water_velocity_to_direction - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_diazotrophic_phytoplankton + + sea_water_velocity_to_direction - - air_pseudo_equivalent_temperature + + integral_wrt_depth_of_product_of_salinity_and_sea_water_density - - air_equivalent_temperature + + integral_wrt_depth_of_product_of_conservative_temperature_and_sea_water_density - - atmosphere_mass_content_of_convective_cloud_liquid_water + + integral_wrt_depth_of_product_of_potential_temperature_and_sea_water_density - - effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top + + volume_fraction_of_condensed_water_in_soil_at_wilting_point - - northward_heat_flux_in_air_due_to_eddy_advection + + volume_fraction_of_condensed_water_in_soil_at_field_capacity - - northward_eliassen_palm_flux_in_air + + volume_fraction_of_condensed_water_in_soil_at_critical_point - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood + + volume_fraction_of_condensed_water_in_soil - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves + + product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity - - net_primary_productivity_of_biomass_expressed_as_carbon + + product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity - - mole_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + product_of_lagrangian_tendency_of_air_pressure_and_geopotential_height - - mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water + + product_of_lagrangian_tendency_of_air_pressure_and_air_temperature - - mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water + + product_of_lagrangian_tendency_of_air_pressure_and_air_temperature + + + + tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_dianeutral_mixing + + + + 
tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + + + tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + + + effective_radius_of_stratiform_cloud_snow_particles + + + + tendency_of_atmosphere_moles_of_cfc11 + + + + moles_of_cfc11_per_unit_mass_in_sea_water + + + + atmosphere_moles_of_cfc11 + + + + tendency_of_atmosphere_moles_of_cfc113 + + + + atmosphere_moles_of_cfc113 + + + + tendency_of_atmosphere_moles_of_cfc114 + + + + atmosphere_moles_of_cfc114 + + + + tendency_of_atmosphere_moles_of_cfc115 + + + + atmosphere_moles_of_cfc115 + + + + tendency_of_atmosphere_moles_of_cfc12 + + + + atmosphere_moles_of_cfc12 + + + + tendency_of_atmosphere_moles_of_halon1202 + + + + atmosphere_moles_of_halon1202 + + + + tendency_of_atmosphere_moles_of_halon1211 + + + + atmosphere_moles_of_halon1211 + + + + tendency_of_atmosphere_moles_of_halon1301 + + + + atmosphere_moles_of_halon1301 + + + + tendency_of_atmosphere_moles_of_halon2402 + + + + atmosphere_moles_of_halon2402 + + + + tendency_of_atmosphere_moles_of_hcc140a + + + + atmosphere_moles_of_hcc140a + + + + tendency_of_troposphere_moles_of_hcc140a + + + + tendency_of_middle_atmosphere_moles_of_hcc140a + + + + tendency_of_troposphere_moles_of_hcfc22 + + + + tendency_of_atmosphere_moles_of_hcfc22 + + + + atmosphere_moles_of_hcfc22 + + + + tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition + + + + lagrangian_tendency_of_atmosphere_sigma_coordinate + + + + lagrangian_tendency_of_atmosphere_sigma_coordinate + + + + electrical_mobility_diameter_of_ambient_aerosol_particles + + + + diameter_of_ambient_aerosol_particles + + + + mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air + + + + effective_radius_of_stratiform_cloud_rain_particles + + + + effective_radius_of_stratiform_cloud_ice_particles + + + + effective_radius_of_stratiform_cloud_graupel_particles + 
+ + + effective_radius_of_convective_cloud_snow_particles + + + + effective_radius_of_convective_cloud_rain_particles + + + + effective_radius_of_convective_cloud_ice_particles + + + + histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid + + + + backscattering_ratio_in_air + + + + product_of_northward_wind_and_lagrangian_tendency_of_air_pressure + + + + product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure + + + + carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change + + + + floating_ice_shelf_area_fraction + + + + atmosphere_moles_of_carbon_tetrachloride + + + + mole_fraction_of_methylglyoxal_in_air + + + + mole_fraction_of_dichlorine_peroxide_in_air + + + + atmosphere_mass_content_of_convective_cloud_liquid_water + + + + effective_radius_of_cloud_liquid_water_particles_at_liquid_water_cloud_top + + + + air_equivalent_temperature + + + + air_pseudo_equivalent_temperature + + + + mass_content_of_cloud_liquid_water_in_atmosphere_layer + + + + air_equivalent_potential_temperature + + + + number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top + + + + number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top @@ -31660,360 +32090,104 @@ atmosphere_mass_content_of_cloud_liquid_water - - mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - - - mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - - - mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - - - mass_content_of_cloud_ice_in_atmosphere_layer - - - - mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - - - mass_concentration_of_mercury_dry_aerosol_particles_in_air - - - - 
mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - - - sea_water_velocity_to_direction - - - - sea_water_velocity_to_direction - - - - gross_primary_productivity_of_biomass_expressed_as_carbon - - - - eastward_water_vapor_flux_in_air - - - - atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - - - tendency_of_middle_atmosphere_moles_of_carbon_monoxide - - - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - - - tendency_of_atmosphere_mass_content_of_water_vapor - - - - lwe_thickness_of_atmosphere_mass_content_of_water_vapor - - - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - - - change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - - - atmosphere_mass_content_of_water_vapor - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - - - tendency_of_middle_atmosphere_moles_of_methyl_bromide - - - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - - - atmosphere_mass_content_of_sulfate - - - - atmosphere_mass_content_of_sulfate - - - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - - - 
tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - - - tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - - - atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles - - - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_deep_convection - - - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_convection - - - - atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles - - - - mass_content_of_cloud_liquid_water_in_atmosphere_layer - - - - air_equivalent_potential_temperature - - - - number_concentration_of_stratiform_cloud_liquid_water_particles_at_stratiform_liquid_water_cloud_top - - - - number_concentration_of_convective_cloud_liquid_water_particles_at_convective_liquid_water_cloud_top - - - - wave_frequency - - - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - - - tendency_of_troposphere_moles_of_carbon_monoxide - - - - tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - - - tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - - - 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_turbulent_deposition - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_gravitational_settling + + mole_fraction_of_noy_expressed_as_nitrogen_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_moles_of_methane - - atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles + + rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc - - integral_wrt_depth_of_product_of_conservative_temperature_and_sea_water_density + + net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton - - integral_wrt_depth_of_product_of_salinity_and_sea_water_density + + mole_fraction_of_inorganic_bromine_in_air - - tendency_of_atmosphere_moles_of_methyl_bromide + + water_vapor_saturation_deficit_in_air - - integral_wrt_depth_of_product_of_potential_temperature_and_sea_water_density + + 
tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning - - atmosphere_moles_of_methyl_bromide + + tendency_of_atmosphere_moles_of_carbon_tetrachloride - - product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity + + tendency_of_atmosphere_moles_of_carbon_monoxide - - product_of_lagrangian_tendency_of_air_pressure_and_specific_humidity + + platform_yaw - - tendency_of_sea_water_potential_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + platform_pitch - - tendency_of_sea_water_conservative_temperature_expressed_as_heat_content_due_to_parameterized_dianeutral_mixing + + platform_roll - - volume_fraction_of_condensed_water_in_soil_at_wilting_point + + tendency_of_specific_humidity_due_to_stratiform_precipitation - - volume_fraction_of_condensed_water_in_soil_at_field_capacity + + tendency_of_air_temperature_due_to_stratiform_precipitation - - volume_fraction_of_condensed_water_in_soil_at_critical_point + + stratiform_precipitation_flux - - volume_fraction_of_condensed_water_in_soil + + stratiform_precipitation_amount - - product_of_lagrangian_tendency_of_air_pressure_and_geopotential_height + + lwe_thickness_of_stratiform_precipitation_amount - - product_of_lagrangian_tendency_of_air_pressure_and_air_temperature + + lwe_stratiform_precipitation_rate - - product_of_lagrangian_tendency_of_air_pressure_and_air_temperature + + water_evaporation_amount_from_canopy - - tendency_of_sea_water_salinity_expressed_as_salt_content_due_to_parameterized_dianeutral_mixing + + water_evaporation_flux_from_canopy - - atmosphere_moles_of_methane + + precipitation_flux_onto_canopy - - electrical_mobility_diameter_of_ambient_aerosol_particles + + outgoing_water_volume_transport_along_river_channel - - histogram_of_backscattering_ratio_in_air_over_height_above_reference_ellipsoid + + tendency_of_sea_ice_amount_due_to_conversion_of_snow_to_sea_ice 
tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_emission - - effective_radius_of_stratiform_cloud_snow_particles - - - - mass_concentration_of_biomass_burning_dry_aerosol_particles_in_air - - - - atmosphere_mass_content_of_nitric_acid_trihydrate_ambient_aerosol_particles - - - - atmosphere_mass_content_of_nitrate_dry_aerosol_particles - - - - atmosphere_mass_content_of_mercury_dry_aerosol_particles - - - - backscattering_ratio_in_air - - - - product_of_northward_wind_and_lagrangian_tendency_of_air_pressure + + mass_fraction_of_mercury_dry_aerosol_particles_in_air @@ -32024,256 +32198,224 @@ tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_wet_deposition - - tendency_of_atmosphere_moles_of_cfc11 - - - - moles_of_cfc11_per_unit_mass_in_sea_water - - - - atmosphere_moles_of_cfc11 - - - - tendency_of_atmosphere_moles_of_hcc140a - - - - effective_radius_of_convective_cloud_rain_particles - - - - tendency_of_troposphere_moles_of_hcc140a - - - - tendency_of_middle_atmosphere_moles_of_hcc140a - - - - tendency_of_troposphere_moles_of_hcfc22 - - - - tendency_of_atmosphere_moles_of_hcfc22 + + stratiform_cloud_area_fraction - - atmosphere_moles_of_hcfc22 + + magnitude_of_sea_ice_displacement - - tendency_of_atmosphere_number_content_of_aerosol_particles_due_to_turbulent_deposition + + surface_downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - lagrangian_tendency_of_atmosphere_sigma_coordinate + + surface_downwelling_shortwave_flux_in_air_assuming_clear_sky - - diameter_of_ambient_aerosol_particles + + surface_downwelling_shortwave_flux_in_air - - effective_radius_of_stratiform_cloud_ice_particles + + surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - effective_radius_of_convective_cloud_ice_particles + + 
surface_downwelling_radiative_flux_per_unit_wavelength_in_air - - effective_radius_of_stratiform_cloud_graupel_particles + + surface_downwelling_radiance_per_unit_wavelength_in_sea_water - - effective_radius_of_stratiform_cloud_rain_particles + + surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - effective_radius_of_convective_cloud_snow_particles + + surface_downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - product_of_eastward_wind_and_lagrangian_tendency_of_air_pressure + + surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water - - carbon_mass_flux_into_litter_and_soil_due_to_anthropogenic_land_use_or_land_cover_change + + surface_downwelling_longwave_flux_in_air - - stratiform_cloud_area_fraction + + integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air - - sea_water_velocity_from_direction + + integral_wrt_time_of_surface_downwelling_longwave_flux_in_air - - thickness_of_stratiform_snowfall_amount + + downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles + + downwelling_radiative_flux_per_unit_wavelength_in_sea_water - - lwe_thickness_of_stratiform_snowfall_amount + + downwelling_radiative_flux_per_unit_wavelength_in_air - - equivalent_thickness_at_stp_of_atmosphere_ozone_content + + downwelling_radiance_per_unit_wavelength_in_sea_water - - atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles + + downwelling_radiance_per_unit_wavelength_in_air - - atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles + + downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water - - atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles + + downwelling_photon_radiance_per_unit_wavelength_in_sea_water - - 
atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + downwelling_photon_flux_per_unit_wavelength_in_sea_water - - atmosphere_optical_thickness_due_to_ambient_aerosol_particles + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky - - atmosphere_net_upward_convective_mass_flux + + surface_upwelling_longwave_flux_in_air_assuming_clear_sky - - mass_fraction_of_mercury_dry_aerosol_particles_in_air + + upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - atmosphere_moles_of_hcc140a + + upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - floating_ice_shelf_area_fraction + + upwelling_radiative_flux_per_unit_wavelength_in_air - - atmosphere_moles_of_carbon_tetrachloride + + upwelling_radiance_per_unit_wavelength_in_air - - mole_fraction_of_methylglyoxal_in_air + + surface_upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol - - mole_fraction_of_dichlorine_peroxide_in_air + + surface_upwelling_shortwave_flux_in_air - - mole_fraction_of_noy_expressed_as_nitrogen_in_air + + surface_upwelling_radiative_flux_per_unit_wavelength_in_sea_water - - net_primary_mole_productivity_of_biomass_expressed_as_carbon_by_miscellaneous_phytoplankton + + surface_upwelling_radiative_flux_per_unit_wavelength_in_air - - mole_fraction_of_inorganic_bromine_in_air + + surface_upwelling_radiance_per_unit_wavelength_in_sea_water - - water_vapor_saturation_deficit_in_air + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_agricultural_waste_burning + + volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles - - tendency_of_atmosphere_moles_of_carbon_tetrachloride + + soil_mass_content_of_carbon - - tendency_of_atmosphere_moles_of_carbon_monoxide + + slow_soil_pool_mass_content_of_carbon - - tendency_of_atmosphere_moles_of_cfc113 + + root_mass_content_of_carbon - - 
atmosphere_moles_of_cfc113 + + miscellaneous_living_matter_mass_content_of_carbon - - tendency_of_atmosphere_moles_of_cfc114 + + fast_soil_pool_mass_content_of_carbon - - atmosphere_moles_of_cfc114 + + medium_soil_pool_mass_content_of_carbon - - tendency_of_atmosphere_moles_of_cfc115 + + leaf_mass_content_of_carbon - - atmosphere_moles_of_cfc115 + + carbon_mass_content_of_forestry_and_agricultural_products - - tendency_of_atmosphere_moles_of_cfc12 + + carbon_mass_content_of_forestry_and_agricultural_products - - atmosphere_moles_of_cfc12 + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance - - tendency_of_atmosphere_moles_of_halon1202 + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth - - atmosphere_moles_of_halon1202 + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration - - tendency_of_atmosphere_moles_of_halon1211 + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil - - atmosphere_moles_of_halon1211 + + surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration - - tendency_of_atmosphere_moles_of_halon1301 + + northward_transformed_eulerian_mean_air_velocity - - atmosphere_moles_of_halon1301 + + eastward_transformed_eulerian_mean_air_velocity - - tendency_of_atmosphere_moles_of_halon2402 + + surface_litter_mass_content_of_carbon - - atmosphere_moles_of_halon2402 + + litter_mass_content_of_carbon @@ -32308,14 +32450,14 @@ mole_concentration_of_diatoms_expressed_as_nitrogen_in_sea_water - - tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes - - tendency_of_mole_concentration_of_dissolved_inorganic_silicon_in_sea_water_due_to_biological_processes + + tendency_of_mole_concentration_of_dissolved_inorganic_phosphorus_in_sea_water_due_to_biological_processes + + 
tendency_of_atmosphere_mole_concentration_of_carbon_monoxide_due_to_chemical_destruction @@ -32324,56 +32466,64 @@ volume_extinction_coefficient_in_air_due_to_ambient_aerosol_particles - - atmosphere_mass_content_of_convective_cloud_condensed_water + + water_vapor_partial_pressure_in_air - - water_evaporation_flux_from_canopy + + platform_name - - precipitation_flux_onto_canopy + + platform_id - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky + + mass_flux_of_carbon_into_litter_from_vegetation - - surface_downwelling_radiance_per_unit_wavelength_in_sea_water + + subsurface_litter_mass_content_of_carbon - - upwelling_radiative_flux_per_unit_wavelength_in_sea_water + + stem_mass_content_of_carbon - - downwelling_photon_flux_per_unit_wavelength_in_sea_water + + mole_concentration_of_dissolved_inorganic_14C_in_sea_water - - downwelling_radiance_per_unit_wavelength_in_sea_water + + surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon - - surface_downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C - - surface_downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + mole_concentration_of_dissolved_inorganic_13C_in_sea_water - - surface_upwelling_radiative_flux_per_unit_wavelength_in_sea_water + + surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water - - surface_downwelling_shortwave_flux_in_air + + surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water - - tendency_of_sea_ice_amount_due_to_conversion_of_snow_to_sea_ice + + surface_upwelling_radiance_per_unit_wavelength_in_air + + + + surface_upwelling_longwave_flux_in_air + + + + incoming_water_volume_transport_along_river_channel @@ -32392,792 +32542,820 @@ sea_ice_temperature_expressed_as_heat_content - - outgoing_water_volume_transport_along_river_channel + + water_evapotranspiration_flux - - 
lwe_thickness_of_stratiform_precipitation_amount + + surface_water_evaporation_flux - - tendency_of_atmosphere_moles_of_methane + + water_volume_transport_into_sea_water_from_rivers - - rate_of_hydroxyl_radical_destruction_due_to_reaction_with_nmvoc + + stratiform_graupel_flux - - magnitude_of_sea_ice_displacement + + wood_debris_mass_content_of_carbon - - surface_downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol - - surface_downwelling_radiative_flux_per_unit_wavelength_in_air + + water_flux_into_sea_water_from_rivers - - surface_downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + integral_wrt_height_of_product_of_northward_wind_and_specific_humidity - - surface_downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity - - surface_downwelling_photon_flux_per_unit_wavelength_in_sea_water + + integral_wrt_depth_of_sea_water_temperature - - surface_downwelling_longwave_flux_in_air + + integral_wrt_depth_of_sea_water_temperature - - integral_wrt_time_of_surface_downwelling_shortwave_flux_in_air + + integral_wrt_depth_of_sea_water_temperature - - integral_wrt_time_of_surface_downwelling_longwave_flux_in_air + + integral_wrt_depth_of_sea_water_temperature - - downwelling_spherical_irradiance_per_unit_wavelength_in_sea_water + + integral_wrt_depth_of_sea_water_practical_salinity - - downwelling_radiative_flux_per_unit_wavelength_in_sea_water + + northward_ocean_heat_transport_due_to_parameterized_eddy_advection - - downwelling_radiative_flux_per_unit_wavelength_in_air + + tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection - - downwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - downwelling_photon_spherical_irradiance_per_unit_wavelength_in_sea_water + + 
ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection - - downwelling_radiance_per_unit_wavelength_in_air + + upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - downwelling_photon_radiance_per_unit_wavelength_in_sea_water + + sea_water_y_velocity_due_to_parameterized_mesoscale_eddies - - surface_upwelling_shortwave_flux_in_air_assuming_clear_sky + + sea_water_x_velocity_due_to_parameterized_mesoscale_eddies - - surface_upwelling_longwave_flux_in_air_assuming_clear_sky + + eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies - - upwelling_radiative_flux_per_unit_wavelength_in_air + + tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection - - upwelling_radiance_per_unit_wavelength_in_air + + tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection - - surface_upwelling_shortwave_flux_in_air_assuming_clear_sky_and_no_aerosol + + ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - surface_upwelling_shortwave_flux_in_air + + ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection - - surface_upwelling_radiance_per_unit_wavelength_in_sea_water + + ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection - - incoming_water_volume_transport_along_river_channel + + ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection - - surface_upwelling_longwave_flux_in_air + + ocean_heat_y_transport_due_to_parameterized_eddy_advection - - surface_upwelling_radiance_per_unit_wavelength_in_air_emerging_from_sea_water + + ocean_heat_x_transport_due_to_parameterized_eddy_advection - - surface_upwelling_radiative_flux_per_unit_wavelength_in_air + + northward_ocean_salt_transport_due_to_parameterized_eddy_advection - - surface_upwelling_radiance_per_unit_wavelength_in_air + + 
northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection - - surface_upwelling_radiance_per_unit_wavelength_in_air_reflected_by_sea_water + + integral_wrt_time_of_toa_outgoing_longwave_flux - - wood_debris_mass_content_of_carbon + + integral_wrt_time_of_toa_net_downward_shortwave_flux - - water_flux_into_sea_water_from_rivers + + integral_wrt_time_of_surface_net_downward_shortwave_flux - - integral_wrt_depth_of_sea_water_temperature + + integral_wrt_time_of_surface_net_downward_longwave_flux - - integral_wrt_depth_of_sea_water_temperature + + integral_wrt_time_of_surface_downward_sensible_heat_flux - - integral_wrt_depth_of_sea_water_temperature + + integral_wrt_time_of_surface_downward_latent_heat_flux - - integral_wrt_depth_of_sea_water_temperature + + integral_wrt_time_of_air_temperature_excess - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_ambient_aerosol_particles + + integral_wrt_time_of_air_temperature_deficit - - volume_scattering_coefficient_of_radiative_flux_in_air_due_to_dried_aerosol_particles + + tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation - - integral_wrt_height_of_product_of_northward_wind_and_specific_humidity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition - - integral_wrt_depth_of_sea_water_practical_salinity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition - - integral_wrt_height_of_product_of_eastward_wind_and_specific_humidity + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling - - platform_yaw + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal - - platform_roll + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires - 
- water_vapor_partial_pressure_in_air + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion - - platform_name + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport - - platform_id + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport - - platform_pitch + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion - - tendency_of_specific_humidity_due_to_stratiform_precipitation + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires - - tendency_of_air_temperature_due_to_stratiform_precipitation + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution - - water_evaporation_amount_from_canopy + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling + + mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission + + atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles - - atmosphere_mass_content_of_cloud_ice + + mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air - - stratiform_precipitation_amount + + lagrangian_tendency_of_air_pressure - - tendency_of_atmosphere_moles_of_nitrous_oxide + + lagrangian_tendency_of_air_pressure - - 
tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition + + air_pressure_at_mean_sea_level - - medium_soil_pool_mass_content_of_carbon + + sea_floor_depth_below_geoid - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration + + sea_surface_height_above_geoid - - surface_downward_mass_flux_of_14C_dioxide_abiotic_analogue_expressed_as_carbon + + sea_surface_height_above_geoid - - mole_concentration_of_dissolved_inorganic_13C_in_sea_water + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - surface_litter_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_heterotrophic_respiration + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - fast_soil_pool_mass_content_of_carbon + + atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - soil_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition - - slow_soil_pool_mass_content_of_carbon + + tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition - - root_mass_content_of_carbon + + surface_geostrophic_eastward_sea_water_velocity - - miscellaneous_living_matter_mass_content_of_carbon + + surface_geostrophic_northward_sea_water_velocity - - carbon_mass_content_of_forestry_and_agricultural_products + + tendency_of_sea_surface_height_above_mean_sea_level + + + + surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid + + + + surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid + + + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + + + surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + + + 
surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + + + surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + + + sea_surface_height_above_mean_sea_level - - carbon_mass_content_of_forestry_and_agricultural_products + + sea_surface_height_above_mean_sea_level - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_maintenance + + sea_floor_depth_below_mean_sea_level - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_plant_respiration_for_biomass_growth + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - surface_upward_mass_flux_of_carbon_dioxide_expressed_as_carbon_due_to_respiration_in_soil + + mass_fraction_of_pm10_ambient_aerosol_particles_in_air - - northward_transformed_eulerian_mean_air_velocity + + mass_concentration_of_pm10_ambient_aerosol_particles_in_air - - eastward_transformed_eulerian_mean_air_velocity + + atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles - - mass_flux_of_carbon_into_litter_from_vegetation + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - subsurface_litter_mass_content_of_carbon + + mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air - - litter_mass_content_of_carbon + + mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air - - stem_mass_content_of_carbon + + atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles - - mole_concentration_of_dissolved_inorganic_14C_in_sea_water + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - surface_downward_mass_flux_of_13C_dioxide_abiotic_analogue_expressed_as_13C + + mass_fraction_of_pm1_ambient_aerosol_particles_in_air - - stratiform_precipitation_flux + + mass_concentration_of_pm1_ambient_aerosol_particles_in_air - - lwe_stratiform_precipitation_rate + + atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles - - surface_water_evaporation_flux + + 
tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - water_evapotranspiration_flux + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - water_volume_transport_into_sea_water_from_rivers + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - stratiform_graupel_flux + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition - - toa_outgoing_shortwave_flux_assuming_clear_sky_and_no_aerosol + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - ocean_y_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling - - ocean_tracer_laplacian_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - sea_water_x_velocity_due_to_parameterized_mesoscale_eddies + + tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_sea_water_temperature_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - northward_ocean_heat_transport_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition - - upward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission - - tendency_of_sea_water_salinity_due_to_parameterized_eddy_advection + + tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - integral_wrt_time_of_surface_net_downward_shortwave_flux + + 
mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - tendency_of_ocean_eddy_kinetic_energy_content_due_to_parameterized_eddy_advection + + mass_fraction_of_sea_salt_dry_aerosol_particles_in_air - - sea_water_y_velocity_due_to_parameterized_mesoscale_eddies + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - ocean_tracer_biharmonic_diffusivity_due_to_parameterized_mesoscale_eddy_advection + + mass_concentration_of_sea_salt_dry_aerosol_particles_in_air - - eastward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - northward_sea_water_velocity_due_to_parameterized_mesoscale_eddies + + atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles - - ocean_heat_y_transport_due_to_parameterized_eddy_advection + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - ocean_meridional_overturning_mass_streamfunction_due_to_parameterized_eddy_advection + + atmosphere_mass_content_of_sea_salt_dry_aerosol_particles - - ocean_mass_y_transport_due_to_advection_and_parameterized_eddy_advection + + ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit - - ocean_mass_x_transport_due_to_advection_and_parameterized_eddy_advection + + sea_surface_swell_wave_mean_period - - ocean_heat_x_transport_due_to_parameterized_eddy_advection + + sea_surface_wind_wave_mean_period - - northward_ocean_freshwater_transport_due_to_parameterized_eddy_advection + + sea_surface_wave_mean_period - - northward_ocean_salt_transport_due_to_parameterized_eddy_advection + + sea_surface_wind_wave_to_direction - - integral_wrt_time_of_toa_outgoing_longwave_flux + + sea_surface_swell_wave_to_direction - - integral_wrt_time_of_toa_net_downward_shortwave_flux + + mass_content_of_water_in_soil - - integral_wrt_time_of_surface_net_downward_longwave_flux + + mass_content_of_water_in_soil_layer - - integral_wrt_time_of_surface_downward_sensible_heat_flux + + 
sea_surface_wave_significant_height - - integral_wrt_time_of_surface_downward_latent_heat_flux + + sea_surface_wind_wave_significant_height - - integral_wrt_time_of_air_temperature_excess + + sea_surface_swell_wave_significant_height - - integral_wrt_time_of_air_temperature_deficit + + tendency_of_atmosphere_moles_of_sulfate_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition - - atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_gravitational_settling - - angstrom_exponent_of_ambient_aerosol_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - atmosphere_convective_available_potential_energy + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_dry_deposition - - atmosphere_convective_available_potential_energy + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air_due_to_emission_from_aviation + + 
tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_waste_treatment_and_disposal + + tendency_of_atmosphere_mass_content_of_nitrate_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_savanna_and_grassland_fires + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_residential_and_commercial_combustion + + tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_maritime_transport + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_land_transport + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_turbulent_deposition - - 
tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_industrial_processes_and_combustion + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_gravitational_settling - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_forest_fires + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission_from_energy_production_and_distribution + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_wet_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_ammonium_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles_due_to_dry_deposition + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - mass_fraction_of_elemental_carbon_dry_aerosol_particles_in_air + + optical_thickness_of_atmosphere_layer_due_to_ambient_aerosol_particles - - atmosphere_mass_content_of_elemental_carbon_dry_aerosol_particles + + number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air - - mass_concentration_of_elemental_carbon_dry_aerosol_particles_in_air + + number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - lagrangian_tendency_of_air_pressure + + number_concentration_of_ambient_aerosol_particles_in_air - - lagrangian_tendency_of_air_pressure + + mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_height_above_geoid + + mole_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_height_above_geoid + + mass_fraction_of_water_in_ambient_aerosol_particles_in_air - - 
surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_sulfate_dry_aerosol_particles_in_air - - surface_geostrophic_eastward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - sea_surface_height_above_mean_sea_level + + mass_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - sea_surface_height_above_mean_sea_level + + mass_fraction_of_nitrate_dry_aerosol_particles_in_air - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_dust_dry_aerosol_particles_in_air - - surface_geostrophic_northward_sea_water_velocity_assuming_mean_sea_level_for_geoid + + mass_fraction_of_ammonium_dry_aerosol_particles_in_air - - surface_geostrophic_sea_water_y_velocity_assuming_mean_sea_level_for_geoid + + mass_concentration_of_water_in_ambient_aerosol_particles_in_air - - sea_floor_depth_below_geoid + + mass_concentration_of_sulfate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_emission + + mass_concentration_of_sulfate_ambient_aerosol_particles_in_air - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + mass_concentration_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air - - atmosphere_absorption_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_emission + + mass_concentration_of_nitrate_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_deposition + + 
mass_concentration_of_mercury_dry_aerosol_particles_in_air - - tendency_of_atmosphere_mass_content_of_nitrogen_compounds_expressed_as_nitrogen_due_to_dry_deposition + + atmosphere_optical_thickness_due_to_water_in_ambient_aerosol_particles - - surface_geostrophic_northward_sea_water_velocity + + mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air - - surface_geostrophic_sea_water_x_velocity_assuming_mean_sea_level_for_geoid + + mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air - - air_pressure_at_mean_sea_level + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - sea_floor_depth_below_mean_sea_level + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur - - ocean_mixed_layer_thickness_defined_by_vertical_tracer_diffusivity_deficit + + mass_concentration_of_ammonium_dry_aerosol_particles_in_air - - sea_surface_wind_wave_mean_period + + mass_concentration_of_coarse_mode_ambient_aerosol_particles_in_air - - sea_surface_wave_mean_period + + mass_concentration_of_dust_dry_aerosol_particles_in_air - - sea_surface_swell_wave_mean_period + + atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - sea_surface_wind_wave_to_direction + + atmosphere_optical_thickness_due_to_dust_dry_aerosol_particles - - sea_surface_swell_wave_to_direction + + atmosphere_optical_thickness_due_to_dust_ambient_aerosol_particles - - mass_content_of_water_in_soil_layer + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles - - mass_content_of_water_in_soil + + atmosphere_optical_thickness_due_to_ambient_aerosol_particles - - sea_surface_wind_wave_significant_height + + atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles - - sea_surface_swell_wave_significant_height + + atmosphere_mass_content_of_water_in_ambient_aerosol_particles - - 
tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + atmosphere_mass_content_of_sulfate_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_expressed_as_sulfur_due_to_turbulent_deposition + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission + + atmosphere_mass_content_of_sulfate_ambient_aerosol_particles - - atmosphere_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles + + atmosphere_mass_content_of_secondary_particulate_organic_matter_dry_aerosol_particles - - mass_concentration_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + atmosphere_mass_content_of_nitric_acid_trihydrate_ambient_aerosol_particles - - atmosphere_mass_content_of_water_in_ambient_aerosol_particles + + atmosphere_mass_content_of_nitrate_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion + + atmosphere_mass_content_of_mercury_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_wet_deposition + + atmosphere_mass_content_of_dust_dry_aerosol_particles - - tendency_of_atmosphere_mass_content_of_mercury_dry_aerosol_particles_due_to_dry_deposition + + atmosphere_mass_content_of_ammonium_dry_aerosol_particles - - mass_fraction_of_nitrate_dry_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_sulfate_ambient_aerosol_particles - - mass_concentration_of_sulfate_dry_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_particulate_organic_matter_ambient_aerosol_particles - - mass_fraction_of_water_in_ambient_aerosol_particles_in_air + + atmosphere_absorption_optical_thickness_due_to_dust_ambient_aerosol_particles - - 
mass_fraction_of_secondary_particulate_organic_matter_dry_aerosol_particles_in_air + + angstrom_exponent_of_ambient_aerosol_in_air - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion + + atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles - - tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition - - mass_concentration_of_sulfate_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_wet_deposition - - mass_concentration_of_dust_dry_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_turbulent_deposition - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_gravitational_settling - - tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_dry_deposition @@ -33188,528 +33366,532 @@ mass_fraction_of_particulate_organic_matter_dry_aerosol_particles_in_air - - number_concentration_of_coarse_mode_ambient_aerosol_particles_in_air + + atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles - - sea_surface_wave_significant_height + + 
atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles - - tendency_of_atmosphere_moles_of_nitric_acid_trihydrate_ambient_aerosol_particles + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + + + tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition - - tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_dry_deposition + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_wood - - tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_wet_deposition + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots - - number_concentration_of_nucleation_mode_ambient_aerosol_particles_in_air + + net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_leaves - - number_concentration_of_ambient_aerosol_particles_in_air + + net_primary_productivity_of_biomass_expressed_as_carbon - - mole_fraction_of_nitric_acid_trihydrate_ambient_aerosol_particles_in_air + + gross_primary_productivity_of_biomass_expressed_as_carbon - - mass_fraction_of_dust_dry_aerosol_particles_in_air + + atmosphere_convective_available_potential_energy - - mass_concentration_of_water_in_ambient_aerosol_particles_in_air + + atmosphere_convective_available_potential_energy - - mass_concentration_of_nitrate_dry_aerosol_particles_in_air + + mass_concentration_of_chlorophyll_in_sea_water - - mass_concentration_of_particulate_organic_matter_dry_aerosol_particles_in_air + + mass_concentration_of_chlorophyll_in_sea_water - - mass_concentration_of_ammonium_dry_aerosol_particles_in_air + + omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + isotropic_radiance_per_unit_wavelength_in_air - - atmosphere_mass_content_of_sulfate_ambient_aerosol_particles + + isotropic_radiance_per_unit_wavelength_in_air - - 
atmosphere_mass_content_of_dust_dry_aerosol_particles + + land_ice_lwe_surface_specific_mass_balance_rate - - atmosphere_absorption_optical_thickness_due_to_ambient_aerosol_particles + + land_ice_surface_specific_mass_balance_rate - - atmosphere_mass_content_of_sulfate_dry_aerosol_particles + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_advection - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence + + equivalent_thickness_at_stp_of_atmosphere_ozone_content - - surface_upward_mole_flux_of_carbon_dioxide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_industrial_processes_and_combustion - - surface_downward_mole_flux_of_carbon_dioxide + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_forest_fires - - atmosphere_mass_content_of_cloud_condensed_water + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - northward_water_vapor_flux_in_air + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_due_to_net_chemical_production_and_emission - - lwe_stratiform_snowfall_rate + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_maritime_transport - - stratiform_snowfall_amount + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_energy_production_and_distribution - - stratiform_rainfall_rate + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_agricultural_waste_burning - - stratiform_rainfall_flux + + 
tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_land_transport - - stratiform_rainfall_amount + + tendency_of_atmosphere_mass_content_of_primary_particulate_organic_matter_dry_aerosol_particles_due_to_emission - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_emission + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_savanna_and_grassland_fires - - tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_waste_treatment_and_disposal - - tendency_of_sea_surface_height_above_mean_sea_level + + tendency_of_atmosphere_mass_content_of_particulate_organic_matter_dry_aerosol_particles_expressed_as_carbon_due_to_emission_from_residential_and_commercial_combustion - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_dust_dry_aerosol_particles_due_to_emission - - mass_fraction_of_pm10_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_mass_content_of_sulfate_dry_aerosol_particles_due_to_emission - - mass_concentration_of_pm10_ambient_aerosol_particles_in_air + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence - - atmosphere_optical_thickness_due_to_pm10_ambient_aerosol_particles + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_shallow_convection - - surface_geostrophic_eastward_sea_water_velocity + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + atmosphere_net_upward_convective_mass_flux - - mass_fraction_of_pm2p5_ambient_aerosol_particles_in_air + + 
tendency_of_troposphere_moles_of_molecular_hydrogen - - mass_concentration_of_pm2p5_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_methyl_chloride - - atmosphere_optical_thickness_due_to_pm2p5_ambient_aerosol_particles + + tendency_of_troposphere_moles_of_methyl_bromide - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_methane - - mass_fraction_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_troposphere_moles_of_carbon_monoxide - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_middle_atmosphere_moles_of_molecular_hydrogen - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + tendency_of_middle_atmosphere_moles_of_methyl_chloride - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_middle_atmosphere_moles_of_methyl_bromide - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_gravitational_settling + + tendency_of_middle_atmosphere_moles_of_methane - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_middle_atmosphere_moles_of_carbon_monoxide - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_turbulent_deposition + + tendency_of_atmosphere_moles_of_nitrous_oxide - - mass_concentration_of_pm1_ambient_aerosol_particles_in_air + + tendency_of_atmosphere_moles_of_molecular_hydrogen - - atmosphere_optical_thickness_due_to_pm1_ambient_aerosol_particles + + tendency_of_atmosphere_moles_of_methyl_chloride - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + tendency_of_atmosphere_moles_of_methyl_bromide - - tendency_of_atmosphere_mass_content_of_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + y_wind - - 
tendency_of_atmosphere_mass_content_of_pm2p5_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + x_wind - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_wet_deposition + + sea_water_y_velocity - - tendency_of_atmosphere_mass_content_of_pm10_sea_salt_dry_aerosol_particles_due_to_dry_deposition + + sea_water_x_velocity - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water - - mass_fraction_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_microzooplankton_expressed_as_nitrogen_in_sea_water - - mass_concentration_of_sea_salt_dry_aerosol_particles_in_air + + mole_concentration_of_mesozooplankton_expressed_as_nitrogen_in_sea_water - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + atmosphere_moles_of_nitrous_oxide - - atmosphere_optical_thickness_due_to_sea_salt_ambient_aerosol_particles + + atmosphere_moles_of_molecular_hydrogen - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + atmosphere_moles_of_methyl_chloride - - atmosphere_mass_content_of_sea_salt_dry_aerosol_particles + + atmosphere_moles_of_methyl_bromide - - surface_upward_sensible_heat_flux + + atmosphere_moles_of_methane - - surface_temperature + + atmosphere_moles_of_carbon_monoxide - - surface_temperature + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection - - surface_temperature + + tendency_of_mass_content_of_water_vapor_in_atmosphere_layer - - surface_net_downward_radiative_flux + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_turbulence - - wind_mixing_energy_flux_into_sea_water + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection - - water_flux_into_sea_water + + 
tendency_of_atmosphere_mass_content_of_water_vapor_due_to_deep_convection - - upward_eliassen_palm_flux_in_air + + tendency_of_atmosphere_mass_content_of_water_vapor_due_to_convection - - upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves + + tendency_of_atmosphere_mass_content_of_water_vapor - - upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves + + tendency_of_atmosphere_mass_content_of_water_due_to_advection - - specific_gravitational_potential_energy + + mass_content_of_water_vapor_in_atmosphere_layer - - product_of_northward_wind_and_specific_humidity + + mass_content_of_water_in_atmosphere_layer - - mole_fraction_of_ozone_in_air + + mass_content_of_cloud_ice_in_atmosphere_layer - - isotropic_shortwave_radiance_in_air + + mass_content_of_cloud_condensed_water_in_atmosphere_layer - - isotropic_longwave_radiance_in_air + + lwe_thickness_of_atmosphere_mass_content_of_water_vapor - - mass_concentration_of_primary_particulate_organic_matter_dry_aerosol_particles_in_air + + change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - atmosphere_mass_content_of_ammonium_dry_aerosol_particles + + change_over_time_in_atmosphere_mass_content_of_water_due_to_advection - - stratiform_snowfall_flux + + atmosphere_mass_content_of_sulfate - - thickness_of_stratiform_rainfall_amount + + atmosphere_mass_content_of_sulfate - - sea_surface_wind_wave_period + + surface_upward_mole_flux_of_carbon_dioxide - - omnidirectional_spherical_irradiance_per_unit_wavelength_in_sea_water + + surface_downward_mole_flux_of_carbon_dioxide - - tendency_of_middle_atmosphere_moles_of_molecular_hydrogen + + atmosphere_mass_content_of_water_vapor - - tendency_of_middle_atmosphere_moles_of_methyl_chloride + + atmosphere_mass_content_of_convective_cloud_condensed_water - - tendency_of_middle_atmosphere_moles_of_methane + + atmosphere_mass_content_of_cloud_ice - - sea_water_y_velocity + + atmosphere_mass_content_of_cloud_condensed_water - - 
sea_water_x_velocity + + thickness_of_stratiform_snowfall_amount - - mole_fraction_of_hypochlorous_acid_in_air + + thickness_of_stratiform_rainfall_amount - - tendency_of_troposphere_moles_of_molecular_hydrogen + + stratiform_snowfall_flux - - tendency_of_troposphere_moles_of_methyl_chloride + + stratiform_snowfall_amount - - mass_content_of_water_vapor_in_atmosphere_layer + + stratiform_rainfall_rate - - mass_content_of_water_in_atmosphere_layer + + stratiform_rainfall_flux - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_turbulence + + stratiform_rainfall_amount - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_deep_convection + + northward_water_vapor_flux_in_air - - tendency_of_troposphere_moles_of_methyl_bromide + + lwe_thickness_of_stratiform_snowfall_amount - - tendency_of_mass_content_of_water_vapor_in_atmosphere_layer_due_to_convection + + lwe_stratiform_snowfall_rate - - tendency_of_atmosphere_mass_content_of_water_vapor_due_to_shallow_convection + + kinetic_energy_dissipation_in_atmosphere_boundary_layer - - radiation_wavelength + + eastward_water_vapor_flux_in_air - - tendency_of_troposphere_moles_of_methane + + surface_upward_sensible_heat_flux - - tendency_of_atmosphere_mass_content_of_water_due_to_advection + + surface_temperature - - mole_fraction_of_chlorine_monoxide_in_air + + surface_temperature - - mole_fraction_of_chlorine_dioxide_in_air + + surface_temperature - - mass_fraction_of_ozone_in_air + + surface_net_downward_radiative_flux - - mass_fraction_of_convective_cloud_condensed_water_in_air + + mole_fraction_of_hypochlorous_acid_in_air - - sea_surface_swell_wave_period + + mole_fraction_of_chlorine_monoxide_in_air - - surface_drag_coefficient_in_air + + mole_fraction_of_chlorine_dioxide_in_air - - mass_content_of_cloud_condensed_water_in_atmosphere_layer + + wind_mixing_energy_flux_into_sea_water - - mole_concentration_of_organic_detritus_expressed_as_silicon_in_sea_water + + 
water_flux_into_sea_water - - mole_concentration_of_organic_detritus_expressed_as_nitrogen_in_sea_water + + upward_eastward_momentum_flux_in_air_due_to_orographic_gravity_waves - - y_wind + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_westward_gravity_waves - - kinetic_energy_dissipation_in_atmosphere_boundary_layer + + upward_eastward_momentum_flux_in_air_due_to_nonorographic_eastward_gravity_waves - - mass_concentration_of_suspended_matter_in_sea_water + + upward_eliassen_palm_flux_in_air - - x_wind + + northward_heat_flux_in_air_due_to_eddy_advection - - isotropic_radiance_per_unit_wavelength_in_air + + northward_eliassen_palm_flux_in_air - - isotropic_radiance_per_unit_wavelength_in_air + + wave_frequency - - atmosphere_moles_of_nitrous_oxide + + sea_surface_wind_wave_period - - atmosphere_moles_of_molecular_hydrogen + + sea_surface_swell_wave_period - - net_primary_productivity_of_biomass_expressed_as_carbon_accumulated_in_roots + + mass_concentration_of_suspended_matter_in_sea_water - - atmosphere_moles_of_methyl_chloride + + surface_drag_coefficient_in_air - - land_ice_surface_specific_mass_balance_rate + + surface_drag_coefficient_for_momentum_in_air - - land_ice_lwe_surface_specific_mass_balance_rate + + surface_drag_coefficient_for_heat_in_air - - tendency_of_atmosphere_moles_of_molecular_hydrogen + + specific_gravitational_potential_energy - - atmosphere_moles_of_carbon_monoxide + + radiation_wavelength - - tendency_of_atmosphere_moles_of_methyl_chloride + + product_of_northward_wind_and_specific_humidity - - surface_drag_coefficient_for_momentum_in_air + + mole_fraction_of_ozone_in_air - - surface_drag_coefficient_for_heat_in_air + + isotropic_shortwave_radiance_in_air - - leaf_mass_content_of_carbon + + isotropic_longwave_radiance_in_air - - mass_concentration_of_chlorophyll_in_sea_water + + mass_fraction_of_ozone_in_air - - mass_concentration_of_chlorophyll_in_sea_water + + mass_fraction_of_convective_cloud_condensed_water_in_air diff 
--git a/lib/iris/__init__.py b/lib/iris/__init__.py index 26f03c0566..38465472ee 100644 --- a/lib/iris/__init__.py +++ b/lib/iris/__init__.py @@ -44,6 +44,10 @@ standard library function :func:`os.path.expanduser` and module :mod:`fnmatch` for more details. + .. warning:: + + If supplying a URL, only OPeNDAP Data Sources are supported. + * constraints: Either a single constraint, or an iterable of constraints. Each constraint can be either a string, an instance of @@ -87,25 +91,30 @@ def callback(cube, field, filename): import contextlib import glob +import importlib import itertools import os.path import pathlib import threading import iris._constraints -from iris._deprecation import IrisDeprecation, warn_deprecated import iris.config import iris.io +from ._deprecation import IrisDeprecation, warn_deprecated + +try: + from ._version import version as __version__ # noqa: F401 +except ModuleNotFoundError: + __version__ = "unknown" + + try: import iris_sample_data except ImportError: iris_sample_data = None -# Iris revision. -__version__ = "3.2.dev0" - # Restrict the names imported when using "from iris import *" __all__ = [ "AttributeConstraint", @@ -121,6 +130,7 @@ def callback(cube, field, filename): "sample_data_path", "save", "site_configuration", + "use_plugin", ] @@ -132,37 +142,45 @@ def callback(cube, field, filename): class Future(threading.local): """Run-time configuration controller.""" - def __init__(self): + def __init__(self, datum_support=False, pandas_ndim=False): """ A container for run-time options controls. To adjust the values simply update the relevant attribute from within your code. For example:: + # example_future_flag is a fictional example. iris.FUTURE.example_future_flag = False If Iris code is executed with multiple threads, note the values of these options are thread-specific. - .. note:: - - iris.FUTURE.example_future_flag does not exist. It is provided - as an example because there are currently no flags in - iris.Future. 
+ Parameters + ---------- + datum_support : bool, default=False + Opts in to loading coordinate system datum information from NetCDF + files into :class:`~iris.coord_systems.CoordSystem`\\ s, wherever + this information is present. + pandas_ndim : bool, default=False + See :func:`iris.pandas.as_data_frame` for details - opts in to the + newer n-dimensional behaviour. """ - # The flag 'example_future_flag' is provided as a future reference - # for the structure of this class. + # The flag 'example_future_flag' is provided as a reference for the + # structure of this class. + # + # Note that self.__dict__ is used explicitly due to the manner in which + # __setattr__ is overridden. # # self.__dict__['example_future_flag'] = example_future_flag - pass + self.__dict__["datum_support"] = datum_support + self.__dict__["pandas_ndim"] = pandas_ndim def __repr__(self): - # msg = ('Future(example_future_flag={})') # return msg.format(self.example_future_flag) - msg = "Future()" - return msg.format() + msg = "Future(datum_support={}, pandas_ndim={})" + return msg.format(self.datum_support, self.pandas_ndim) # deprecated_options = {'example_future_flag': 'warning',} deprecated_options = {} @@ -201,15 +219,11 @@ def context(self, **kwargs): statement, the previous state is restored. For example:: + + # example_future_flag is a fictional example. with iris.FUTURE.context(example_future_flag=False): # ... code that expects some past behaviour - .. note:: - - iris.FUTURE.example_future_flag does not exist and is - provided only as an example since there are currently no - flags in Future. - """ # Save the current context current_state = self.__dict__.copy() @@ -287,6 +301,7 @@ def load(uris, constraints=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. 
Kwargs: @@ -315,6 +330,7 @@ def load_cube(uris, constraint=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -354,6 +370,7 @@ def load_cubes(uris, constraints=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -399,6 +416,7 @@ def load_raw(uris, constraints=None, callback=None): * uris: One or more filenames/URIs, as a string or :class:`pathlib.PurePath`. + If supplying a URL, only OPeNDAP Data Sources are supported. Kwargs: @@ -454,3 +472,22 @@ def sample_data_path(*path_to_join): "appropriate for general file access.".format(target) ) return target + + +def use_plugin(plugin_name): + """ + Convenience function to import a plugin + + For example:: + + use_plugin("my_plugin") + + is equivalent to:: + + import iris.plugins.my_plugin + + This is useful for plugins that are not used directly, but instead do all + their setup on import. In this case, style checkers would not know the + significance of the import statement and warn that it is an unused import. + """ + importlib.import_module(f"iris.plugins.{plugin_name}") diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 4e23793e1d..bfd4865f56 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -131,6 +131,30 @@ def latitude_bands(cell): _CoordConstraint(coord_name, coord_thing) ) + def __eq__(self, other): + # Equivalence is defined, but is naturally limited for any Constraints + # based on callables, i.e. "cube_func", or value functions for + # attributes/names/coords : These can only be == if they contain the + # *same* callable object (i.e. same object identity). 
+ eq = ( + type(other) == Constraint + and self._name == other._name + and self._cube_func == other._cube_func + and self._coord_constraints == other._coord_constraints + ) + # NOTE: theoretically, you could compare coord constraints as a *set*, + # as order should not affect matching. + # Not totally sure, so for now let's not. + return eq + + def __hash__(self): + # We want constraints to have hashes, so they can act as e.g. + # dictionary keys or tuple elements. + # So, we *must* provide this, as overloading '__eq__' automatically + # disables it. + # Just use basic object identity. + return id(self) + def __repr__(self): args = [] if self._name: @@ -218,6 +242,19 @@ def __init__(self, lhs, rhs, operator): self.rhs = rhs_constraint self.operator = operator + def __eq__(self, other): + eq = ( + type(other) == ConstraintCombination + and self.lhs == other.lhs + and self.rhs == other.rhs + and self.operator == other.operator + ) + return eq + + def __hash__(self): + # Must re-define if you overload __eq__ : Use object identity. + return id(self) + def _coordless_match(self, cube): return self.operator( self.lhs._coordless_match(cube), self.rhs._coordless_match(cube) @@ -261,6 +298,18 @@ def __repr__(self): self._coord_thing, ) + def __eq__(self, other): + eq = ( + type(other) == _CoordConstraint + and self.coord_name == other.coord_name + and self._coord_thing == other._coord_thing + ) + return eq + + def __hash__(self): + # Must re-define if you overload __eq__ : Use object identity. + return id(self) + def extract(self, cube): """ Returns the the column based indices of the given cube which @@ -493,6 +542,17 @@ def __init__(self, **attributes): self._attributes = attributes super().__init__(cube_func=self._cube_func) + def __eq__(self, other): + eq = ( + type(other) == AttributeConstraint + and self._attributes == other._attributes + ) + return eq + + def __hash__(self): + # Must re-define if you overload __eq__ : Use object identity. 
+ return id(self) + def _cube_func(self, cube): match = True for name, value in self._attributes.items(): @@ -577,6 +637,17 @@ def __init__( self._names = ("standard_name", "long_name", "var_name", "STASH") super().__init__(cube_func=self._cube_func) + def __eq__(self, other): + eq = type(other) == NameConstraint and all( + getattr(self, attname) == getattr(other, attname) + for attname in self._names + ) + return eq + + def __hash__(self): + # Must re-define if you overload __eq__ : Use object identity. + return id(self) + def _cube_func(self, cube): def matcher(target, value): if callable(value): diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 038f9d9337..e0566fc8f2 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -10,7 +10,7 @@ """ -from functools import wraps +from functools import lru_cache, wraps import dask import dask.array as da @@ -39,7 +39,7 @@ def is_lazy_data(data): """ Return whether the argument is an Iris 'lazy' data array. - At present, this means simply a Dask array. + At present, this means simply a :class:`dask.array.Array`. We determine this by checking for a "compute" property. """ @@ -47,7 +47,14 @@ def is_lazy_data(data): return result -def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")): +@lru_cache +def _optimum_chunksize_internals( + chunks, + shape, + limit=None, + dtype=np.dtype("f4"), + dask_array_chunksize=dask.config.get("array.chunk-size"), +): """ Reduce or increase an initial chunk shape to get close to a chosen ideal size, while prioritising the splitting of the earlier (outer) dimensions @@ -60,7 +67,8 @@ def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")): * shape (tuple of int): The full array shape of the target data. * limit (int): - The 'ideal' target chunk size, in bytes. Default from dask.config. + The 'ideal' target chunk size, in bytes. Default from + :mod:`dask.config`. * dtype (np.dtype): Numpy dtype of target data. 
@@ -70,7 +78,7 @@ def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")): .. note:: The purpose of this is very similar to - `dask.array.core.normalize_chunks`, when called as + :func:`dask.array.core.normalize_chunks`, when called as `(chunks='auto', shape, dtype=dtype, previous_chunks=chunks, ...)`. Except, the operation here is optimised specifically for a 'c-like' dimension order, i.e. outer dimensions first, as for netcdf variables. @@ -86,7 +94,7 @@ def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")): # Set the chunksize limit. if limit is None: # Fetch the default 'optimal' chunksize from the dask config. - limit = dask.config.get("array.chunk-size") + limit = dask_array_chunksize # Convert to bytes limit = dask.utils.parse_bytes(limit) @@ -146,15 +154,34 @@ def _optimum_chunksize(chunks, shape, limit=None, dtype=np.dtype("f4")): return tuple(result) +@wraps(_optimum_chunksize_internals) +def _optimum_chunksize( + chunks, + shape, + limit=None, + dtype=np.dtype("f4"), +): + # By providing dask_array_chunksize as an argument, we make it so that the + # output of _optimum_chunksize_internals depends only on its arguments (and + # thus we can use lru_cache) + return _optimum_chunksize_internals( + tuple(chunks), + tuple(shape), + limit=limit, + dtype=dtype, + dask_array_chunksize=dask.config.get("array.chunk-size"), + ) + + def as_lazy_data(data, chunks=None, asarray=False): """ - Convert the input array `data` to a dask array. + Convert the input array `data` to a :class:`dask.array.Array`. Args: * data (array-like): An indexable object with 'shape', 'dtype' and 'ndim' properties. - This will be converted to a dask array. + This will be converted to a :class:`dask.array.Array`. Kwargs: @@ -166,7 +193,7 @@ def as_lazy_data(data, chunks=None, asarray=False): Set to False (default) to pass passed chunks through unchanged. Returns: - The input array converted to a dask array. 
+ The input array converted to a :class:`dask.array.Array`. .. note:: The result chunk size is a multiple of 'chunks', if given, up to the @@ -258,15 +285,16 @@ def multidim_lazy_stack(stack): """ Recursively build a multidimensional stacked dask array. - This is needed because dask.array.stack only accepts a 1-dimensional list. + This is needed because :meth:`dask.array.Array.stack` only accepts a + 1-dimensional list. Args: * stack: - An ndarray of dask arrays. + An ndarray of :class:`dask.array.Array`. Returns: - The input array converted to a lazy dask array. + The input array converted to a lazy :class:`dask.array.Array`. """ if stack.ndim == 0: @@ -359,7 +387,7 @@ def map_complete_blocks(src, func, dims, out_sizes): Args: - * src (:class:`~iris.cube.Cube`): + * src (:class:`~iris.cube.Cube` or array-like): Source cube that function is applied to. * func: Function to apply. @@ -369,10 +397,15 @@ def map_complete_blocks(src, func, dims, out_sizes): Output size of dimensions that cannot be chunked. """ - if not src.has_lazy_data(): + if is_lazy_data(src): + data = src + elif not hasattr(src, "has_lazy_data"): + # Not a lazy array and not a cube. So treat as ordinary numpy array. + return func(src) + elif not src.has_lazy_data(): return func(src.data) - - data = src.lazy_data() + else: + data = src.lazy_data() # Ensure dims are not chunked in_chunks = list(data.chunks) diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index bc12080523..5ca5f31a8e 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -1418,6 +1418,7 @@ def _define_space(self, space, positions, indexes, function_matrix): participates in a functional relationship. """ + # Heuristic reordering of coordinate defintion indexes into # preferred dimension order. 
def axis_and_name(name): diff --git a/lib/iris/_representation/cube_printout.py b/lib/iris/_representation/cube_printout.py index 81d46bb29f..ea32fc5126 100644 --- a/lib/iris/_representation/cube_printout.py +++ b/lib/iris/_representation/cube_printout.py @@ -252,15 +252,18 @@ def add_scalar_row(name, value=""): # Add a row for each item # NOTE: different section types need different handling title = sect_name.lower() - if "scalar coordinate" in title: + if title == "scalar coordinates:": for item in sect.contents: add_scalar_row(item.name, item.content) if item.extra: add_scalar_row(item_to_extra_indent + item.extra) - elif "attribute" in title or "cell method" in title: + elif title in ("attributes:", "cell methods:", "mesh:"): for title, value in zip(sect.names, sect.values): add_scalar_row(title, value) - elif "scalar cell measure" in title: + elif title in ( + "scalar ancillary variables:", + "scalar cell measures:", + ): # These are just strings: nothing in the 'value' column. for name in sect.contents: add_scalar_row(name) diff --git a/lib/iris/_representation/cube_summary.py b/lib/iris/_representation/cube_summary.py index 1e78a92fd1..6b0d4cf0f3 100644 --- a/lib/iris/_representation/cube_summary.py +++ b/lib/iris/_representation/cube_summary.py @@ -48,11 +48,25 @@ def __init__(self, cube, name_padding=35): self.dimension_header = DimensionHeader(cube) -def string_repr(text, quote_strings=False): +def string_repr(text, quote_strings=False, clip_strings=False): """Produce a one-line printable form of a text string.""" - if re.findall("[\n\t]", text) or quote_strings: + force_quoted = re.findall("[\n\t]", text) or quote_strings + if force_quoted: # Replace the string with its repr (including quotes). text = repr(text) + if clip_strings: + # First check for quotes. + # N.B. not just 'quote_strings', but also array values-as-strings + has_quotes = text[0] in "\"'" + if has_quotes: + # Strip off (and store) any outer quotes before clipping. 
+ pre_quote, post_quote = text[0], text[-1] + text = text[1:-1] + # clipping : use 'rider' with extra space in case it ends in a '.' + text = iris.util.clip_string(text, rider=" ...") + if has_quotes: + # Replace in original quotes + text = pre_quote + text + post_quote return text @@ -62,17 +76,20 @@ def array_repr(arr): text = repr(arr) # ..then reduce any multiple spaces and newlines. text = re.sub("[ \t\n]+", " ", text) + text = string_repr(text, quote_strings=False, clip_strings=True) return text -def value_repr(value, quote_strings=False): +def value_repr(value, quote_strings=False, clip_strings=False): """ Produce a single-line printable version of an attribute or scalar value. """ if hasattr(value, "dtype"): value = array_repr(value) elif isinstance(value, str): - value = string_repr(value, quote_strings=quote_strings) + value = string_repr( + value, quote_strings=quote_strings, clip_strings=clip_strings + ) value = str(value) return value @@ -132,7 +149,7 @@ def __init__(self, cube, vector, iscoord): self.extra = "" -class ScalarSummary(CoordSummary): +class ScalarCoordSummary(CoordSummary): def __init__(self, cube, coord): self.name = coord.name() if ( @@ -188,10 +205,12 @@ def __init__(self, title, cube, vectors, iscoord): ] -class ScalarSection(Section): +class ScalarCoordSection(Section): def __init__(self, title, cube, scalars): self.title = title - self.contents = [ScalarSummary(cube, scalar) for scalar in scalars] + self.contents = [ + ScalarCoordSummary(cube, scalar) for scalar in scalars + ] class ScalarCellMeasureSection(Section): @@ -200,6 +219,12 @@ def __init__(self, title, cell_measures): self.contents = [cm.name() for cm in cell_measures] +class ScalarAncillaryVariableSection(Section): + def __init__(self, title, ancillary_variables): + self.title = title + self.contents = [av.name() for av in ancillary_variables] + + class AttributeSection(Section): def __init__(self, title, attributes): self.title = title @@ -207,14 +232,32 @@ def 
__init__(self, title, attributes): self.values = [] self.contents = [] for name, value in sorted(attributes.items()): - value = value_repr(value, quote_strings=True) - value = iris.util.clip_string(value) + value = value_repr(value, quote_strings=True, clip_strings=True) self.names.append(name) self.values.append(value) content = "{}: {}".format(name, value) self.contents.append(content) +class ScalarMeshSection(AttributeSection): + # This happens to behave just like an attribute sections, but it + # initialises direct from the cube. + def __init__(self, title, cube): + self.title = title + self.names = [] + self.values = [] + self.contents = [] + if cube.mesh is not None: + self.names.extend(["name", "location"]) + self.values.extend([cube.mesh.name(), cube.location]) + self.contents.extend( + [ + "{}: {}".format(name, value) + for name, value in zip(self.names, self.values) + ] + ) + + class CellMethodSection(Section): def __init__(self, title, cell_methods): self.title = title @@ -237,7 +280,7 @@ class CubeSummary: """ - def __init__(self, cube, shorten=False, name_padding=35): + def __init__(self, cube, name_padding=35): self.header = FullHeader(cube, name_padding) # Cache the derived coords so we can rely on consistent @@ -277,13 +320,23 @@ def __init__(self, cube, shorten=False, name_padding=35): if id(coord) not in scalar_coord_ids ] - # cell measures - vector_cell_measures = [ - cm for cm in cube.cell_measures() if cm.shape != (1,) - ] - # Ancillary Variables - vector_ancillary_variables = [av for av in cube.ancillary_variables()] + vector_ancillary_variables = [] + scalar_ancillary_variables = [] + for av, av_dims in cube._ancillary_variables_and_dims: + if av_dims: + vector_ancillary_variables.append(av) + else: + scalar_ancillary_variables.append(av) + + # Cell Measures + vector_cell_measures = [] + scalar_cell_measures = [] + for cm, cm_dims in cube._cell_measures_and_dims: + if cm_dims: + vector_cell_measures.append(cm) + else: + 
scalar_cell_measures.append(cm) # Sort scalar coordinates by name. scalar_coords.sort(key=lambda coord: coord.name()) @@ -297,9 +350,6 @@ def __init__(self, cube, shorten=False, name_padding=35): vector_derived_coords.sort( key=lambda coord: (cube.coord_dims(coord), coord.name()) ) - scalar_cell_measures = [ - cm for cm in cube.cell_measures() if cm.shape == (1,) - ] self.vector_sections = {} @@ -322,14 +372,21 @@ def add_vector_section(title, contents, iscoord=True): def add_scalar_section(section_class, title, *args): self.scalar_sections[title] = section_class(title, *args) + add_scalar_section(ScalarMeshSection, "Mesh:", cube) + add_scalar_section( - ScalarSection, "Scalar coordinates:", cube, scalar_coords + ScalarCoordSection, "Scalar coordinates:", cube, scalar_coords ) add_scalar_section( ScalarCellMeasureSection, "Scalar cell measures:", scalar_cell_measures, ) + add_scalar_section( + ScalarAncillaryVariableSection, + "Scalar ancillary variables:", + scalar_ancillary_variables, + ) add_scalar_section( CellMethodSection, "Cell methods:", cube.cell_methods ) diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 465a521065..f34cda1402 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -37,7 +37,9 @@ from collections import OrderedDict from collections.abc import Iterable +import functools from functools import wraps +import warnings import dask.array as da import numpy as np @@ -63,6 +65,7 @@ "HMEAN", "Linear", "MAX", + "MAX_RUN", "MEAN", "MEDIAN", "MIN", @@ -79,6 +82,7 @@ "WPERCENTILE", "WeightedAggregator", "clear_phenomenon_identity", + "create_weighted_aggregator_fn", ) @@ -292,7 +296,6 @@ def _dimensional_metadata_comparison(*cubes, object_get=None): # for coordinate groups for cube, coords in zip(cubes, all_coords): for coord in coords: - # if this coordinate has already been processed, then continue on # to the next one if id(coord) in processed_coords: @@ -582,9 +585,19 @@ def aggregate(self, 
data, axis, **kwargs): mdtol = kwargs.pop("mdtol", None) result = self.call_func(data, axis=axis, **kwargs) - if mdtol is not None and ma.isMaskedArray(data): + if ( + mdtol is not None + and ma.is_masked(data) + and result is not ma.masked + ): fraction_not_missing = data.count(axis=axis) / data.shape[axis] - mask_update = 1 - mdtol > fraction_not_missing + mask_update = np.array(1 - mdtol > fraction_not_missing) + if np.array(result).ndim > mask_update.ndim: + # call_func created trailing dimension. + mask_update = np.broadcast_to( + mask_update.reshape(mask_update.shape + (1,)), + np.array(result).shape, + ) if ma.isMaskedArray(result): result.mask = result.mask | mask_update else: @@ -679,7 +692,7 @@ class PercentileAggregator(_Aggregator): """ - def __init__(self, units_func=None, lazy_func=None, **kwargs): + def __init__(self, units_func=None, **kwargs): """ Create a percentile aggregator. @@ -692,11 +705,6 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): Returns an :class:`cf_units.Unit`, or a value that can be made into one. - * lazy_func (callable or None): - An alternative to :data:`call_func` implementing a lazy - aggregation. Note that, it need not support all features of the - main operation, but should raise an error in unhandled cases. - Additional kwargs:: Passed through to :data:`call_func` and :data:`lazy_func`. @@ -714,10 +722,29 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): None, _percentile, units_func=units_func, - lazy_func=lazy_func, + lazy_func=_build_dask_mdtol_function(_percentile), **kwargs, ) + def _base_aggregate(self, data, axis, lazy, **kwargs): + """ + Method to avoid duplication of checks in aggregate and lazy_aggregate. + """ + msg = "{} aggregator requires the mandatory keyword argument {!r}." 
+ for arg in self._args: + if arg not in kwargs: + raise ValueError(msg.format(self.name(), arg)) + + if kwargs.get("fast_percentile_method", False) and ( + kwargs.get("mdtol", 1) != 0 + ): + kwargs["error_on_masked"] = True + + if lazy: + return _Aggregator.lazy_aggregate(self, data, axis, **kwargs) + else: + return _Aggregator.aggregate(self, data, axis, **kwargs) + def aggregate(self, data, axis, **kwargs): """ Perform the percentile aggregation over the given data. @@ -753,12 +780,41 @@ def aggregate(self, data, axis, **kwargs): """ - msg = "{} aggregator requires the mandatory keyword argument {!r}." - for arg in self._args: - if arg not in kwargs: - raise ValueError(msg.format(self.name(), arg)) + return self._base_aggregate(data, axis, lazy=False, **kwargs) + + def lazy_aggregate(self, data, axis, **kwargs): + """ + Perform aggregation over the data with a lazy operation, analogous to + the 'aggregate' result. + + Keyword arguments are passed through to the data aggregation function + (for example, the "percent" keyword for a percentile aggregator). + This function is usually used in conjunction with update_metadata(), + which should be passed the same keyword arguments. + + Args: + + * data (array): + A lazy array (:class:`dask.array.Array`). - return _Aggregator.aggregate(self, data, axis, **kwargs) + * axis (int or list of int): + The dimensions to aggregate over -- note that this is defined + differently to the 'aggregate' method 'axis' argument, which only + accepts a single dimension index. + + Kwargs: + + * kwargs: + All keyword arguments are passed through to the data aggregation + function. + + Returns: + A lazy array representing the result of the aggregation operation + (:class:`dask.array.Array`). 
+ + """ + + return self._base_aggregate(data, axis, lazy=True, **kwargs) def post_process(self, collapsed_cube, data_result, coords, **kwargs): """ @@ -1004,9 +1060,10 @@ def update_metadata(self, cube, coords, **kwargs): coord_names.append(coord.name()) # Add a cell method. - method_name = self.cell_method.format(**kwargs) - cell_method = iris.coords.CellMethod(method_name, coord_names) - cube.add_cell_method(cell_method) + if self.cell_method is not None: + method_name = self.cell_method.format(**kwargs) + cell_method = iris.coords.CellMethod(method_name, coord_names) + cube.add_cell_method(cell_method) class WeightedAggregator(Aggregator): @@ -1115,6 +1172,43 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): return result +def create_weighted_aggregator_fn(aggregator_fn, axis, **kwargs): + """Return an aggregator function that can explicitely handle weights. + + Args: + + * aggregator_fn (callable): + An aggregator function, i.e., a callable that takes arguments ``data``, + ``axis`` and ``**kwargs`` and returns an array. Examples: + :meth:`Aggregator.aggregate`, :meth:`Aggregator.lazy_aggregate`. + This function should accept the keyword argument ``weights``. + * axis (int): + Axis to aggregate over. This argument is directly passed to + ``aggregator_fn``. + + Kwargs: + + * Arbitrary keyword arguments passed to ``aggregator_fn``. Should not + include ``weights`` (this will be removed if present). + + Returns: + A function that takes two arguments ``data_arr`` and ``weights`` (both + should be an array of the same shape) and returns an array. 
+ + """ + kwargs_copy = dict(kwargs) + kwargs_copy.pop("weights", None) + aggregator_fn = functools.partial(aggregator_fn, axis=axis, **kwargs_copy) + + def new_aggregator_fn(data_arr, weights): + """Weighted aggregation.""" + if weights is None: + return aggregator_fn(data_arr) + return aggregator_fn(data_arr, weights=weights) + + return new_aggregator_fn + + def _build_dask_mdtol_function(dask_stats_function): """ Make a wrapped dask statistic function that supports the 'mdtol' keyword. @@ -1123,7 +1217,9 @@ def _build_dask_mdtol_function(dask_stats_function): call signature : "dask_stats_function(data, axis=axis, **kwargs)". It must be masked-data tolerant, i.e. it ignores masked input points and performs a calculation on only the unmasked points. - For example, mean([1, --, 2]) = (1 + 2) / 2 = 1.5. + For example, mean([1, --, 2]) = (1 + 2) / 2 = 1.5. If an additional + dimension is created by 'dask_function', it is assumed to be the trailing + one (as for '_percentile'). The returned value is a new function operating on dask arrays. It has the call signature `stat(data, axis=-1, mdtol=None, **kwargs)`. @@ -1143,6 +1239,12 @@ def inner_stat(array, axis=-1, mdtol=None, **kwargs): points_per_calc = array.size / dask_result.size masked_point_fractions = point_mask_counts / points_per_calc boolean_mask = masked_point_fractions > mdtol + if dask_result.ndim > boolean_mask.ndim: + # dask_stats_function created trailing dimension. + boolean_mask = da.broadcast_to( + boolean_mask.reshape(boolean_mask.shape + (1,)), + dask_result.shape, + ) # Return an mdtol-masked version of the basic result. 
result = da.ma.masked_array( da.ma.getdata(dask_result), boolean_mask @@ -1152,7 +1254,78 @@ def inner_stat(array, axis=-1, mdtol=None, **kwargs): return inner_stat -def _percentile(data, axis, percent, fast_percentile_method=False, **kwargs): +def _axis_to_single_trailing(stats_function): + """ + Given a statistical function that acts on the trailing axis of a 1D or 2D + array, wrap it so that higher dimension arrays can be passed, as well as any + axis as int or tuple. + + """ + + @wraps(stats_function) + def inner_stat(data, axis, *args, **kwargs): + # Get data as a 1D or 2D view with the target axis as the trailing one. + if not isinstance(axis, Iterable): + axis = (axis,) + end = range(-len(axis), 0) + + data = np.moveaxis(data, axis, end) + shape = data.shape[: -len(axis)] # Shape of dims we won't collapse. + if shape: + data = data.reshape(np.prod(shape), -1) + else: + data = data.flatten() + + result = stats_function(data, *args, **kwargs) + + # Ensure to unflatten any leading dimensions. + if shape: + # Account for the additive dimension if necessary. + if result.size > np.prod(shape): + shape += (-1,) + result = result.reshape(shape) + + return result + + return inner_stat + + +def _calc_percentile(data, percent, fast_percentile_method=False, **kwargs): + """ + Calculate percentiles along the trailing axis of a 1D or 2D array. + + """ + if fast_percentile_method: + if kwargs.pop("error_on_masked", False): + msg = ( + "Cannot use fast np.percentile method with masked array unless" + " mdtol is 0." 
+ ) + if ma.is_masked(data): + raise TypeError(msg) + with warnings.catch_warnings(): + warnings.filterwarnings( + "ignore", + "Warning: 'partition' will ignore the 'mask' of the MaskedArray.", + ) + result = np.percentile(data, percent, axis=-1, **kwargs) + + result = result.T + else: + quantiles = percent / 100.0 + for key in ["alphap", "betap"]: + kwargs.setdefault(key, 1) + result = scipy.stats.mstats.mquantiles( + data, quantiles, axis=-1, **kwargs + ) + if not ma.isMaskedArray(data) and not ma.is_masked(result): + return np.asarray(result) + else: + return ma.MaskedArray(result) + + +@_axis_to_single_trailing +def _percentile(data, percent, fast_percentile_method=False, **kwargs): """ The percentile aggregator is an additive operation. This means that it *may* introduce a new dimension to the data for the statistic being @@ -1161,50 +1334,43 @@ def _percentile(data, axis, percent, fast_percentile_method=False, **kwargs): If a new additive dimension is formed, then it will always be the last dimension of the resulting percentile data payload. + Args: + + * data (array-like) + array from which percentiles are to be calculated + Kwargs: - * fast_percentile_method (boolean) : + * fast_percentile_method (boolean) When set to True, uses the numpy.percentiles method as a faster alternative to the scipy.mstats.mquantiles method. Does not handle masked arrays. + **kwargs : dict, optional + passed to scipy.stats.mstats.mquantiles if fast_percentile_method is + False. Otherwise passed to numpy.percentile. + """ - # Ensure that the target axis is the last dimension. - data = np.rollaxis(data, axis, start=data.ndim) - shape = data.shape[:-1] - # Flatten any leading dimensions. - if shape: - data = data.reshape([np.prod(shape), data.shape[-1]]) + if not isinstance(percent, Iterable): + percent = [percent] + percent = np.array(percent) + # Perform the percentile calculation. - if fast_percentile_method: - msg = "Cannot use fast np.percentile method with masked array." 
- if ma.is_masked(data): - raise TypeError(msg) - result = np.percentile(data, percent, axis=-1) - result = result.T - else: - quantiles = np.array(percent) / 100.0 - result = scipy.stats.mstats.mquantiles( - data, quantiles, axis=-1, **kwargs - ) - if not ma.isMaskedArray(data) and not ma.is_masked(result): - result = np.asarray(result) - else: - result = ma.MaskedArray(result) + _partial_percentile = functools.partial( + _calc_percentile, + percent=percent, + fast_percentile_method=fast_percentile_method, + **kwargs, + ) + + result = iris._lazy_data.map_complete_blocks( + data, _partial_percentile, (-1,), percent.shape + ) - # Ensure to unflatten any leading dimensions. - if shape: - if not isinstance(percent, Iterable): - percent = [percent] - percent = np.array(percent) - # Account for the additive dimension. - if percent.shape > (1,): - shape += percent.shape - result = result.reshape(shape) # Check whether to reduce to a scalar result, as per the behaviour # of other aggregators. - if result.shape == (1,) and quantiles.ndim == 0: - result = result[0] + if result.shape == (1,): + result = np.squeeze(result) return result @@ -1332,18 +1498,21 @@ def _weighted_percentile( return result -@_build_dask_mdtol_function -def _lazy_count(array, **kwargs): - array = iris._lazy_data.as_lazy_data(array) +def _count(array, **kwargs): + """ + Counts the number of points along the axis that satisfy the condition + specified by ``function``. Uses Dask's support for NEP13/18 to work as + either a lazy or a real function. + + """ func = kwargs.pop("function", None) if not callable(func): emsg = "function must be a callable. Got {}." 
raise TypeError(emsg.format(type(func))) - return da.sum(func(array), **kwargs) + return np.sum(func(array), **kwargs) def _proportion(array, function, axis, **kwargs): - count = iris._lazy_data.non_lazy(_lazy_count) # if the incoming array is masked use that to count the total number of # values if ma.isMaskedArray(array): @@ -1354,7 +1523,7 @@ def _proportion(array, function, axis, **kwargs): # case pass the array shape instead of the mask: total_non_masked = array.shape[axis] else: - total_non_masked = count( + total_non_masked = _count( array.mask, axis=axis, function=np.logical_not, **kwargs ) total_non_masked = ma.masked_equal(total_non_masked, 0) @@ -1367,12 +1536,52 @@ def _proportion(array, function, axis, **kwargs): # a dtype for its data that is different to the dtype of the fill-value, # which can cause issues outside this function. # Reference - tests/unit/analyis/test_PROPORTION.py Test_masked.test_ma - numerator = count(array, axis=axis, function=function, **kwargs) + numerator = _count(array, axis=axis, function=function, **kwargs) result = ma.asarray(numerator / total_non_masked) return result +def _lazy_max_run(array, axis=-1, **kwargs): + """ + Lazily perform the calculation of maximum run lengths along the given axis + """ + array = iris._lazy_data.as_lazy_data(array) + func = kwargs.pop("function", None) + if not callable(func): + emsg = "function must be a callable. Got {}." 
+ raise TypeError(emsg.format(type(func))) + bool_array = da.ma.getdata(func(array)) + bool_array = da.logical_and( + bool_array, da.logical_not(da.ma.getmaskarray(array)) + ) + padding = [(0, 0)] * array.ndim + padding[axis] = (0, 1) + ones_zeros = da.pad(bool_array, padding).astype(int) + cum_sum = da.cumsum(ones_zeros, axis=axis) + run_totals = da.where(ones_zeros == 0, cum_sum, 0) + stepped_run_lengths = da.reductions.cumreduction( + np.maximum.accumulate, + np.maximum, + np.NINF, + run_totals, + axis=axis, + dtype=cum_sum.dtype, + out=None, + method="sequential", + preop=None, + ) + run_lengths = da.diff(stepped_run_lengths, axis=axis) + result = da.max(run_lengths, axis=axis) + + # Check whether to reduce to a scalar result, as per the behaviour + # of other aggregators. + if result.shape == (1,): + result = da.squeeze(result) + + return result + + def _rms(array, axis, **kwargs): # XXX due to the current limitations in `da.average` (see below), maintain # an explicit non-lazy aggregation function for now. @@ -1394,26 +1603,36 @@ def _lazy_rms(array, axis, **kwargs): # all. Thus trying to use this aggregator with weights will currently # raise an error in dask due to the unexpected keyword `weights`, # rather than silently returning the wrong answer. - return da.sqrt(da.mean(array ** 2, axis=axis, **kwargs)) + return da.sqrt(da.mean(array**2, axis=axis, **kwargs)) -@_build_dask_mdtol_function -def _lazy_sum(array, **kwargs): - array = iris._lazy_data.as_lazy_data(array) - # weighted or scaled sum +def _sum(array, **kwargs): + """ + Weighted or scaled sum. Uses Dask's support for NEP13/18 to work as either + a lazy or a real function. 
+ + """ axis_in = kwargs.get("axis", None) weights_in = kwargs.pop("weights", None) returned_in = kwargs.pop("returned", False) if weights_in is not None: - wsum = da.sum(weights_in * array, **kwargs) + wsum = np.sum(weights_in * array, **kwargs) else: - wsum = da.sum(array, **kwargs) + wsum = np.sum(array, **kwargs) if returned_in: + al = da if iris._lazy_data.is_lazy_data(array) else np if weights_in is None: - weights = iris._lazy_data.as_lazy_data(np.ones_like(array)) + weights = al.ones_like(array) + if al is da: + # Dask version of ones_like does not preserve masks. See dask#9301. + weights = da.ma.masked_array( + weights, da.ma.getmaskarray(array) + ) else: - weights = weights_in - rvalue = (wsum, da.sum(weights, axis=axis_in)) + weights = al.ma.masked_array( + weights_in, mask=al.ma.getmaskarray(array) + ) + rvalue = (wsum, np.sum(weights, axis=axis_in)) else: rvalue = wsum return rvalue @@ -1533,9 +1752,9 @@ def interp_order(length): # COUNT = Aggregator( "count", - iris._lazy_data.non_lazy(_lazy_count), + _count, units_func=lambda units: 1, - lazy_func=_lazy_count, + lazy_func=_build_dask_mdtol_function(_count), ) """ An :class:`~iris.analysis.Aggregator` instance that counts the number @@ -1558,9 +1777,40 @@ def interp_order(length): .. seealso:: The :func:`~iris.analysis.PROPORTION` aggregator. -This aggregator handles masked data. +This aggregator handles masked data and lazy data. + +""" + +MAX_RUN = Aggregator( + None, + iris._lazy_data.non_lazy(_lazy_max_run), + units_func=lambda units: 1, + lazy_func=_build_dask_mdtol_function(_lazy_max_run), +) """ +An :class:`~iris.analysis.Aggregator` instance that finds the longest run of +:class:`~iris.cube.Cube` data occurrences that satisfy a particular criterion, +as defined by a user supplied *function*, along the given axis. 
+ +**Required** kwargs associated with the use of this aggregator: + +* function (callable): + A function which converts an array of data values into a corresponding array + of True/False values. + +**For example**: + +The longest run of days with precipitation exceeding 10 (in cube data units) at +each grid location could be calculated with:: + + result = precip_cube.collapsed('time', iris.analysis.MAX_RUN, + function=lambda values: values > 10) + +This aggregator handles masked data, which it treats as interrupting a run, and lazy data. + +""" +MAX_RUN.name = lambda: "max_run" GMEAN = Aggregator("geometric_mean", scipy.stats.mstats.gmean) @@ -1575,7 +1825,7 @@ def interp_order(length): result = cube.collapsed('longitude', iris.analysis.GMEAN) -This aggregator handles masked data. +This aggregator handles masked data, but NOT lazy data. """ @@ -1597,7 +1847,7 @@ def interp_order(length): The harmonic mean is only valid if all data values are greater than zero. -This aggregator handles masked data. +This aggregator handles masked data, but NOT lazy data. """ @@ -1663,7 +1913,8 @@ def interp_order(length): result = cube.collapsed('longitude', iris.analysis.MEDIAN) -This aggregator handles masked data. +This aggregator handles masked data, but NOT lazy data. For lazy aggregation, +please try :obj:`~.PERCENTILE`. """ @@ -1682,7 +1933,7 @@ def interp_order(length): result = cube.collapsed('longitude', iris.analysis.MIN) -This aggregator handles masked data. +This aggregator handles masked data and lazy data. """ @@ -1701,7 +1952,7 @@ def interp_order(length): result = cube.collapsed('longitude', iris.analysis.MAX) -This aggregator handles masked data. +This aggregator handles masked data and lazy data. """ @@ -1727,38 +1978,53 @@ def interp_order(length): result = cube.collapsed('time', iris.analysis.PEAK) -This aggregator handles masked data. +This aggregator handles masked data but NOT lazy data. 
""" -PERCENTILE = PercentileAggregator(alphap=1, betap=1) +PERCENTILE = PercentileAggregator() """ -An :class:`~iris.analysis.PercentileAggregator` instance that calculates the +A :class:`~iris.analysis.PercentileAggregator` instance that calculates the percentile over a :class:`~iris.cube.Cube`, as computed by -:func:`scipy.stats.mstats.mquantiles`. +:func:`scipy.stats.mstats.mquantiles` (default) or :func:`numpy.percentile` (if +``fast_percentile_method`` is True). -**Required** kwargs associated with the use of this aggregator: +Parameters +---------- -* percent (float or sequence of floats): +percent : float or sequence of floats Percentile rank/s at which to extract value/s. -Additional kwargs associated with the use of this aggregator: - -* alphap (float): +alphap : float, default=1 Plotting positions parameter, see :func:`scipy.stats.mstats.mquantiles`. - Defaults to 1. -* betap (float): +betap : float, default=1 Plotting positions parameter, see :func:`scipy.stats.mstats.mquantiles`. - Defaults to 1. +fast_percentile_method : bool, default=False + When set to True, uses :func:`numpy.percentile` method as a faster + alternative to the :func:`scipy.stats.mstats.mquantiles` method. An + exception is raised if the data are masked and the missing data tolerance + is not 0. -**For example**: +**kwargs : dict, optional + Passed to :func:`scipy.stats.mstats.mquantiles` or :func:`numpy.percentile`. + +Example +------- To compute the 10th and 90th percentile over *time*:: result = cube.collapsed('time', iris.analysis.PERCENTILE, percent=[10, 90]) -This aggregator handles masked data. +This aggregator handles masked data and lazy data. + +.. note:: + + Performance of this aggregator on lazy data is particularly sensitive to + the dask array chunking, so it may be useful to test with various chunk + sizes for a given application. Any chunking along the dimensions to be + aggregated is removed by the aggregator prior to calculating the + percentiles. 
""" @@ -1792,7 +2058,7 @@ def interp_order(length): .. seealso:: The :func:`~iris.analysis.COUNT` aggregator. -This aggregator handles masked data. +This aggregator handles masked data, but NOT lazy data. """ @@ -1818,7 +2084,7 @@ def interp_order(length): result = cube.collapsed('longitude', iris.analysis.RMS) -This aggregator handles masked data. +This aggregator handles masked data and lazy data. """ @@ -1852,7 +2118,7 @@ def interp_order(length): .. note:: - Lazy operation is supported, via :func:`dask.array.nanstd`. + Lazy operation is supported, via :func:`dask.array.std`. This aggregator handles masked data. @@ -1861,8 +2127,8 @@ def interp_order(length): SUM = WeightedAggregator( "sum", - iris._lazy_data.non_lazy(_lazy_sum), - lazy_func=_build_dask_mdtol_function(_lazy_sum), + _sum, + lazy_func=_build_dask_mdtol_function(_sum), ) """ An :class:`~iris.analysis.Aggregator` instance that calculates @@ -1891,7 +2157,7 @@ def interp_order(length): result = cube.rolling_window('time', iris.analysis.SUM, len(weights), weights=weights) -This aggregator handles masked data. +This aggregator handles masked data and lazy data. """ @@ -1926,9 +2192,9 @@ def interp_order(length): .. note:: - Lazy operation is supported, via :func:`dask.array.nanvar`. + Lazy operation is supported, via :func:`dask.array.var`. -This aggregator handles masked data. +This aggregator handles masked data and lazy data. """ @@ -1960,6 +2226,11 @@ def interp_order(length): :func:`scipy.interpolate.interp1d` Defaults to "linear", which is equivalent to alphap=0.5, betap=0.5 in `iris.analysis.PERCENTILE` +Notes +------ +This function does not maintain laziness when called; it realises data. +See more at :doc:`/userguide/real_and_lazy_data`. + """ @@ -1985,7 +2256,9 @@ class _Groupby: """ - def __init__(self, groupby_coords, shared_coords=None): + def __init__( + self, groupby_coords, shared_coords=None, climatological=False + ): """ Determine the group slices over the group-by coordinates. 
@@ -2001,6 +2274,12 @@ def __init__(self, groupby_coords, shared_coords=None): that share the same group-by coordinate axis. The `int` identifies which dimension of the coord is on the group-by coordinate axis. + * climatological (bool): + Indicates whether the output is expected to be climatological. For + any aggregated time coord(s), this causes the climatological flag to + be set and the point for each cell to equal its first bound, thereby + preserving the time of year. + """ #: Group-by and shared coordinates that have been grouped. self.coords = [] @@ -2029,6 +2308,13 @@ def __init__(self, groupby_coords, shared_coords=None): for coord, dim in shared_coords: self._add_shared_coord(coord, dim) + # Aggregation is climatological in nature + self.climatological = climatological + + # Stores mapping from original cube coords to new ones, as metadata may + # not match + self.coord_replacement_mapping = [] + def _add_groupby_coord(self, coord): if coord.ndim != 1: raise iris.exceptions.CoordinateMultiDimError(coord) @@ -2187,6 +2473,9 @@ def _compute_shared_coords(self): # Create new shared bounded coordinates. for coord, dim in self._shared_coords: + climatological_coord = ( + self.climatological and coord.units.is_time_reference() + ) if coord.points.dtype.kind in "SU": if coord.bounds is None: new_points = [] @@ -2225,6 +2514,7 @@ def _compute_shared_coords(self): maxmin_axis = (dim, -1) first_choices = coord.bounds.take(0, -1) last_choices = coord.bounds.take(1, -1) + else: # Derive new coord's bounds from points. item = coord.points @@ -2277,7 +2567,11 @@ def _compute_shared_coords(self): # Now create the new bounded group shared coordinate. 
try: - new_points = new_bounds.mean(-1) + if climatological_coord: + # Use the first bound as the point + new_points = new_bounds[..., 0] + else: + new_points = new_bounds.mean(-1) except TypeError: msg = ( "The {0!r} coordinate on the collapsing dimension" @@ -2286,17 +2580,19 @@ def _compute_shared_coords(self): raise ValueError(msg) try: - self.coords.append( - coord.copy(points=new_points, bounds=new_bounds) - ) + new_coord = coord.copy(points=new_points, bounds=new_bounds) except ValueError: # non monotonic points/bounds - self.coords.append( - iris.coords.AuxCoord.from_coord(coord).copy( - points=new_points, bounds=new_bounds - ) + new_coord = iris.coords.AuxCoord.from_coord(coord).copy( + points=new_points, bounds=new_bounds ) + if climatological_coord: + new_coord.climatological = True + self.coord_replacement_mapping.append((coord, new_coord)) + + self.coords.append(new_coord) + def __len__(self): """Calculate the number of groups given the group-by coordinates.""" @@ -2328,6 +2624,11 @@ def clear_phenomenon_identity(cube): Helper function to clear the standard_name, attributes, and cell_methods of a cube. + + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ cube.rename(None) cube.attributes.clear() @@ -2421,6 +2722,10 @@ def interpolator(self, cube, coords): dimensions in the result cube caused by scalar values in `sample_points`. + The N arrays of values within `sample_points` will be used to + create an N-d grid of points that will then be sampled (rather than + just N points) + The values for coordinates that correspond to date/times may optionally be supplied as datetime.datetime or cftime.datetime instances. 
diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index 8381185e58..3b728e9a43 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -11,7 +11,7 @@ from iris._lazy_data import map_complete_blocks from iris.analysis._interpolation import get_xy_dim_coords, snapshot_grid -from iris.analysis._regrid import RectilinearRegridder +from iris.analysis._regrid import RectilinearRegridder, _create_cube import iris.analysis.cartography import iris.coord_systems from iris.util import _meshgrid @@ -853,7 +853,7 @@ def _calculate_regrid_area_weighted_weights( cached_x_bounds = [] cached_x_indices = [] max_x_indices = 0 - for (x_0, x_1) in grid_x_bounds: + for x_0, x_1 in grid_x_bounds: if grid_x_decreasing: x_0, x_1 = x_1, x_0 x_bounds, x_indices = _cropped_bounds(src_x_bounds, x_0, x_1) @@ -1111,18 +1111,32 @@ def _regrid_area_weighted_rectilinear_src_and_grid__perform( ) # Wrap up the data as a Cube. - regrid_callback = RectilinearRegridder._regrid - new_cube = RectilinearRegridder._create_cube( + + _regrid_callback = functools.partial( + RectilinearRegridder._regrid, + src_x_coord=src_x, + src_y_coord=src_y, + sample_grid_x=meshgrid_x, + sample_grid_y=meshgrid_y, + ) + # TODO: investigate if an area weighted callback would be more appropriate. 
+ # _regrid_callback = functools.partial( + # _regrid_area_weighted_array, + # weights_info=weights_info, + # index_info=index_info, + # mdtol=mdtol, + # ) + + def regrid_callback(*args, **kwargs): + _data, dims = args + return _regrid_callback(_data, *dims, **kwargs) + + new_cube = _create_cube( new_data, src_cube, - src_x_dim, - src_y_dim, - src_x, - src_y, - grid_x, - grid_y, - meshgrid_x, - meshgrid_y, + [src_x_dim, src_y_dim], + [grid_x, grid_y], + 2, regrid_callback, ) diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 127aec7c1e..4cb449ae51 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -120,7 +120,7 @@ def _angle(p, q, r): mid_lons = np.deg2rad(q[0]) pr = _3d_xyz_from_latlon(r[0], r[1]) - _3d_xyz_from_latlon(p[0], p[1]) - pr_norm = np.sqrt(np.sum(pr ** 2, axis=0)) + pr_norm = np.sqrt(np.sum(pr**2, axis=0)) pr_top = pr[1] * np.cos(mid_lons) - pr[0] * np.sin(mid_lons) index = pr_norm == 0 @@ -449,6 +449,11 @@ def rotate_grid_vectors( Vector magnitudes will always be the same as the inputs. + .. note:: + + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. 
+ """ u_out, v_out = (cube.copy() for cube in (u_cube, v_cube)) if not grid_angles_cube: diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index 2a7dfa6e62..f5e89a9e51 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -268,7 +268,7 @@ def _account_for_circular(self, points, data): """ from iris.analysis.cartography import wrap_lons - for (circular, modulus, index, dim, offset) in self._circulars: + for circular, modulus, index, dim, offset in self._circulars: if modulus: # Map all the requested values into the range of the source # data (centred over the centre of the source data to allow diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index 5c7439b0ce..f1891a48e4 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -11,7 +11,6 @@ import numpy as np import numpy.ma as ma from scipy.sparse import csc_matrix -from scipy.sparse import diags as sparse_diags from iris._lazy_data import map_complete_blocks from iris.analysis._interpolation import ( @@ -21,7 +20,7 @@ snapshot_grid, ) from iris.analysis._scipy_interpolate import _RegularGridInterpolator -from iris.util import _meshgrid +from iris.util import _meshgrid, guess_coord_axis def _transform_xy_arrays(crs_from, x, y, crs_to): @@ -52,18 +51,20 @@ def _regrid_weighted_curvilinear_to_rectilinear__prepare( First (setup) part of 'regrid_weighted_curvilinear_to_rectilinear'. Check inputs and calculate the sparse regrid matrix and related info. - The 'regrid info' returned can be re-used over many 2d slices. + The 'regrid info' returned can be re-used over many cubes. """ - if src_cube.aux_factories: - msg = "All source cube derived coordinates will be ignored." - warnings.warn(msg) # Get the source cube x and y 2D auxiliary coordinates. sx, sy = src_cube.coord(axis="x"), src_cube.coord(axis="y") # Get the target grid cube x and y dimension coordinates. 
tx, ty = get_xy_dim_coords(grid_cube) + sl = [0] * grid_cube.ndim + sl[grid_cube.coord_dims(tx)[0]] = np.s_[:] + sl[grid_cube.coord_dims(ty)[0]] = np.s_[:] + grid_cube = grid_cube[tuple(sl)] + if sx.units != sy.units: msg = ( "The source cube x ({!r}) and y ({!r}) coordinates must " @@ -287,83 +288,108 @@ def _regrid_indices(cells, depth, points): return regrid_info -def _regrid_weighted_curvilinear_to_rectilinear__perform( - src_cube, regrid_info +def _curvilinear_to_rectilinear_regrid_data( + data, + dims, + regrid_info, ): """ - Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'. + Part of 'regrid_weighted_curvilinear_to_rectilinear' which acts on the data. - Perform the prepared regrid calculation on a single 2d cube. + Perform the prepared regrid calculation on an array. """ - from iris.cube import Cube - sparse_matrix, sum_weights, rows, grid_cube = regrid_info + inds = list(range(-len(dims), 0)) + data = np.moveaxis(data, dims, inds) + data_shape = data.shape + grid_size = np.prod([data_shape[ind] for ind in inds]) + # Calculate the numerator of the weighted mean (M, 1). - is_masked = ma.isMaskedArray(src_cube.data) + is_masked = ma.isMaskedArray(data) + sum_weights = None if not is_masked: - data = src_cube.data + data = data else: # Use raw data array - data = src_cube.data.data + r_data = data.data # Check if there are any masked source points to take account of. - is_masked = np.ma.is_masked(src_cube.data) + is_masked = ma.is_masked(data) if is_masked: # Zero any masked source points so they add nothing in output sums. - mask = src_cube.data.mask - data[mask] = 0.0 + mask = data.mask + r_data[mask] = 0.0 # Calculate a new 'sum_weights' to allow for missing source points. # N.B. it is more efficient to use the original once-calculated # sparse matrix, but in this case we can't. # Hopefully, this post-multiplying by the validities is less costly # than repeating the whole sparse calculation. 
- valid_src_cells = ~mask.flat[:] - src_cell_validity_factors = sparse_diags( - np.array(valid_src_cells, dtype=int), 0 - ) - valid_weights = sparse_matrix * src_cell_validity_factors - sum_weights = valid_weights.sum(axis=1).getA() - # Work out where output cells are missing all contributions. - # This allows for where 'rows' contains output cells that have no - # data because of missing input points. - zero_sums = sum_weights == 0.0 - # Make sure we can still divide by sum_weights[rows]. - sum_weights[zero_sums] = 1.0 + valid_src_cells = ~mask.reshape(-1, grid_size) + sum_weights = valid_src_cells @ sparse_matrix.T + data = r_data + if sum_weights is None: + sum_weights = ( + np.ones(data_shape).reshape(-1, grid_size) @ sparse_matrix.T + ) + # Work out where output cells are missing all contributions. + # This allows for where 'rows' contains output cells that have no + # data because of missing input points. + zero_sums = sum_weights == 0.0 + # Make sure we can still divide by sum_weights[rows]. + sum_weights[zero_sums] = 1.0 # Calculate sum in each target cell, over contributions from each source # cell. - numerator = sparse_matrix * data.reshape(-1, 1) - - # Create a template for the weighted mean result. - weighted_mean = ma.masked_all(numerator.shape, dtype=numerator.dtype) - - # Calculate final results in all relevant places. - weighted_mean[rows] = numerator[rows] / sum_weights[rows] - if is_masked: - # Ensure masked points where relevant source cells were all missing. - if np.any(zero_sums): - # Make masked if it wasn't. - weighted_mean = np.ma.asarray(weighted_mean) - # Mask where contributing sums were zero. - weighted_mean[zero_sums] = np.ma.masked - - # Construct the final regridded weighted mean cube. + numerator = data.reshape(-1, grid_size) @ sparse_matrix.T + + weighted_mean = numerator / sum_weights + # Ensure masked points where relevant source cells were all missing. 
+ weighted_mean = ma.asarray(weighted_mean) + if np.any(zero_sums): + # Mask where contributing sums were zero. + weighted_mean[zero_sums] = ma.masked + + new_data_shape = list(data_shape) + for dim, length in zip(inds, grid_cube.shape): + new_data_shape[dim] = length + if len(dims) == 1: + new_data_shape.append(grid_cube.shape[1]) + dims = (dims[0], dims[0] + 1) + if len(dims) > 2: + new_data_shape = new_data_shape[: 2 - len(dims)] + dims = dims[:2] + + result = weighted_mean.reshape(new_data_shape) + result = np.moveaxis(result, [-2, -1], dims) + return result + + +def _regrid_weighted_curvilinear_to_rectilinear__perform( + src_cube, regrid_info +): + """ + Second (regrid) part of 'regrid_weighted_curvilinear_to_rectilinear'. + + Perform the prepared regrid calculation on a single cube. + + """ + dims = src_cube.coord_dims( + CurvilinearRegridder._get_horizontal_coord(src_cube, "x") + ) + result_data = _curvilinear_to_rectilinear_regrid_data( + src_cube.data, dims, regrid_info + ) + grid_cube = regrid_info[-1] tx = grid_cube.coord(axis="x", dim_coords=True) ty = grid_cube.coord(axis="y", dim_coords=True) - (tx_dim,) = grid_cube.coord_dims(tx) - (ty_dim,) = grid_cube.coord_dims(ty) - dim_coords_and_dims = list(zip((ty.copy(), tx.copy()), (ty_dim, tx_dim))) - cube = Cube( - weighted_mean.reshape(grid_cube.shape), - dim_coords_and_dims=dim_coords_and_dims, + regrid_callback = functools.partial( + _curvilinear_to_rectilinear_regrid_data, regrid_info=regrid_info ) - cube.metadata = copy.deepcopy(src_cube.metadata) - - for coord in src_cube.coords(dimensions=()): - cube.add_aux_coord(coord.copy()) - - return cube + result = _create_cube( + result_data, src_cube, dims, (ty.copy(), tx.copy()), 2, regrid_callback + ) + return result class CurvilinearRegridder: @@ -457,7 +483,7 @@ def __call__(self, src): point-in-cell regridding. """ - from iris.cube import Cube, CubeList + from iris.cube import Cube # Validity checks. 
if not isinstance(src, Cube): @@ -473,30 +499,18 @@ def __call__(self, src): "The given cube is not defined on the same " "source grid as this regridder." ) - - # Call the regridder function. - # This includes repeating over any non-XY dimensions, because the - # underlying routine does not support this. - # FOR NOW: we will use cube.slices and merge to achieve this, - # though that is not a terribly efficient method ... - # TODO: create a template result cube and paste data slices into it, - # which would be more efficient. - result_slices = CubeList([]) - for slice_cube in src.slices(sx): - if self._regrid_info is None: - # Calculate the basic regrid info just once. - self._regrid_info = ( - _regrid_weighted_curvilinear_to_rectilinear__prepare( - slice_cube, self.weights, self._target_cube - ) - ) - slice_result = ( - _regrid_weighted_curvilinear_to_rectilinear__perform( - slice_cube, self._regrid_info + slice_cube = next(src.slices(sx)) + if self._regrid_info is None: + # Calculate the basic regrid info just once. 
+ self._regrid_info = ( + _regrid_weighted_curvilinear_to_rectilinear__prepare( + slice_cube, self.weights, self._target_cube ) ) - result_slices.append(slice_result) - result = result_slices.merge_cube() + result = _regrid_weighted_curvilinear_to_rectilinear__perform( + src, self._regrid_info + ) + return result @@ -688,11 +702,23 @@ def _regrid( # Prepare the result data array shape = list(src_data.shape) - assert shape[x_dim] == src_x_coord.shape[0] - assert shape[y_dim] == src_y_coord.shape[0] - - shape[y_dim] = sample_grid_x.shape[0] - shape[x_dim] = sample_grid_x.shape[1] + final_shape = shape.copy() + if x_dim is not None: + assert shape[x_dim] == src_x_coord.shape[0] + shape[x_dim] = sample_grid_x.shape[1] + final_shape[x_dim] = shape[x_dim] + else: + shape.append(1) + x_dim = len(shape) - 1 + src_data = np.expand_dims(src_data, -1) + if y_dim is not None: + assert shape[y_dim] == src_y_coord.shape[0] + shape[y_dim] = sample_grid_x.shape[0] + final_shape[y_dim] = shape[y_dim] + else: + shape.append(1) + y_dim = len(shape) - 1 + src_data = np.expand_dims(src_data, -1) dtype = src_data.dtype if method == "linear": @@ -714,7 +740,11 @@ def _regrid( if src_x_coord.points.size > 1 else False ) - reverse_y = src_y_coord.points[0] > src_y_coord.points[1] + reverse_y = ( + src_y_coord.points[0] > src_y_coord.points[1] + if src_y_coord.points.size > 1 + else False + ) flip_index = [slice(None)] * src_data.ndim if reverse_x: src_x_coord = src_x_coord[::-1] @@ -733,7 +763,7 @@ def _regrid( # Slice out the first full 2D piece of data for construction of the # interpolator. - index = [0] * src_data.ndim + index = [0] * len(shape) index[x_dim] = index[y_dim] = slice(None) initial_data = src_data[tuple(index)] if y_dim < x_dim: @@ -808,166 +838,21 @@ def interpolate(data): if ma.isMaskedArray(data) or mode.force_mask: # NB. np.ma.getmaskarray returns an array of `False` if # `src_subset` is not a masked array. 
- src_mask = np.ma.getmaskarray(src_subset) + src_mask = ma.getmaskarray(src_subset) interpolator.fill_value = mode.mask_fill_value mask_fraction = interpolate(src_mask) new_mask = mask_fraction > 0 - if np.ma.isMaskedArray(data): + if ma.isMaskedArray(data): data.mask[tuple(index)] = new_mask elif np.any(new_mask): # Set mask=False to ensure we have an expanded mask array. - data = np.ma.MaskedArray(data, mask=False) + data = ma.MaskedArray(data, mask=False) data.mask[tuple(index)] = new_mask + data = data.reshape(final_shape) return data - @staticmethod - def _create_cube( - data, - src, - x_dim, - y_dim, - src_x_coord, - src_y_coord, - grid_x_coord, - grid_y_coord, - sample_grid_x, - sample_grid_y, - regrid_callback, - ): - """ - Return a new Cube for the result of regridding the source Cube onto - the new grid. - - All the metadata and coordinates of the result Cube are copied from - the source Cube, with two exceptions: - - Grid dimension coordinates are copied from the grid Cube. - - Auxiliary coordinates which span the grid dimensions are - ignored, except where they provide a reference surface for an - :class:`iris.aux_factory.AuxCoordFactory`. - - Args: - - * data: - The regridded data as an N-dimensional NumPy array. - * src: - The source Cube. - * x_dim: - The X dimension within the source Cube. - * y_dim: - The Y dimension within the source Cube. - * src_x_coord: - The X :class:`iris.coords.DimCoord`. - * src_y_coord: - The Y :class:`iris.coords.DimCoord`. - * grid_x_coord: - The :class:`iris.coords.DimCoord` for the new grid's X - coordinate. - * grid_y_coord: - The :class:`iris.coords.DimCoord` for the new grid's Y - coordinate. - * sample_grid_x: - A 2-dimensional array of sample X values. - * sample_grid_y: - A 2-dimensional array of sample Y values. - * regrid_callback: - The routine that will be used to calculate the interpolated - values of any reference surfaces. - - Returns: - The new, regridded Cube. 
- - """ - from iris.cube import Cube - - # - # XXX: At the moment requires to be a static method as used by - # experimental regrid_area_weighted_rectilinear_src_and_grid - # - # Create a result cube with the appropriate metadata - result = Cube(data) - result.metadata = copy.deepcopy(src.metadata) - - # Copy across all the coordinates which don't span the grid. - # Record a mapping from old coordinate IDs to new coordinates, - # for subsequent use in creating updated aux_factories. - coord_mapping = {} - - def copy_coords(src_coords, add_method): - for coord in src_coords: - dims = src.coord_dims(coord) - if coord == src_x_coord: - coord = grid_x_coord - elif coord == src_y_coord: - coord = grid_y_coord - elif x_dim in dims or y_dim in dims: - continue - result_coord = coord.copy() - add_method(result_coord, dims) - coord_mapping[id(coord)] = result_coord - - copy_coords(src.dim_coords, result.add_dim_coord) - copy_coords(src.aux_coords, result.add_aux_coord) - - def regrid_reference_surface( - src_surface_coord, - surface_dims, - x_dim, - y_dim, - src_x_coord, - src_y_coord, - sample_grid_x, - sample_grid_y, - regrid_callback, - ): - # Determine which of the reference surface's dimensions span the X - # and Y dimensions of the source cube. - surface_x_dim = surface_dims.index(x_dim) - surface_y_dim = surface_dims.index(y_dim) - surface = regrid_callback( - src_surface_coord.points, - surface_x_dim, - surface_y_dim, - src_x_coord, - src_y_coord, - sample_grid_x, - sample_grid_y, - ) - surface_coord = src_surface_coord.copy(surface) - return surface_coord - - # Copy across any AuxFactory instances, and regrid their reference - # surfaces where required. 
- for factory in src.aux_factories: - for coord in factory.dependencies.values(): - if coord is None: - continue - dims = src.coord_dims(coord) - if x_dim in dims and y_dim in dims: - result_coord = regrid_reference_surface( - coord, - dims, - x_dim, - y_dim, - src_x_coord, - src_y_coord, - sample_grid_x, - sample_grid_y, - regrid_callback, - ) - result.add_aux_coord(result_coord, dims) - coord_mapping[id(coord)] = result_coord - try: - result.add_aux_factory(factory.updated(coord_mapping)) - except KeyError: - msg = ( - "Cannot update aux_factory {!r} because of dropped" - " coordinates.".format(factory.name()) - ) - warnings.warn(msg) - return result - def _check_units(self, coord): from iris.coord_systems import GeogCS, RotatedGeogCS @@ -1089,20 +974,168 @@ def __call__(self, src): ) # Wrap up the data as a Cube. - regrid_callback = functools.partial( - self._regrid, method=self._method, extrapolation_mode="nan" + _regrid_callback = functools.partial( + self._regrid, + src_x_coord=src_x_coord, + src_y_coord=src_y_coord, + sample_grid_x=sample_grid_x, + sample_grid_y=sample_grid_y, + method=self._method, + extrapolation_mode="nan", ) - result = self._create_cube( + + def regrid_callback(*args, **kwargs): + _data, dims = args + return _regrid_callback(_data, *dims, **kwargs) + + result = _create_cube( data, src, - x_dim, - y_dim, - src_x_coord, - src_y_coord, - grid_x_coord, - grid_y_coord, - sample_grid_x, - sample_grid_y, + [x_dim, y_dim], + [grid_x_coord, grid_y_coord], + 2, regrid_callback, ) return result + + +def _create_cube( + data, src, src_dims, tgt_coords, num_tgt_dims, regrid_callback +): + r""" + Return a new cube for the result of regridding. + Returned cube represents the result of regridding the source cube + onto the horizontal coordinates (e.g. latitude) of the target cube. + All the metadata and coordinates of the result cube are copied from + the source cube, with two exceptions: + - Horizontal coordinates are copied from the target cube. 
+ - Auxiliary coordinates which span the grid dimensions are + ignored. + Parameters + ---------- + data : array + The regridded data as an N-dimensional NumPy array. + src : cube + The source Cube. + src_dims : tuple of int + The dimensions of the X and Y coordinate within the source Cube. + tgt_coords : tuple of :class:`iris.coords.Coord`\\ 's + Either two 1D :class:`iris.coords.DimCoord`\\ 's, two 1D + :class:`iris.experimental.ugrid.DimCoord`\\ 's or two ND + :class:`iris.coords.AuxCoord`\\ 's representing the new grid's + X and Y coordinates. + num_tgt_dims : int + The number of dimensions that the `tgt_coords` span. + regrid_callback : callable + The routine that will be used to calculate the interpolated + values of any reference surfaces. + Returns + ------- + cube + A new iris.cube.Cube instance. + """ + from iris.coords import DimCoord + from iris.cube import Cube + + result = Cube(data) + + if len(src_dims) >= 2: + grid_dim_x, grid_dim_y = src_dims[:2] + elif len(src_dims) == 1: + grid_dim_x = src_dims[0] + grid_dim_y = grid_dim_x + 1 + + if num_tgt_dims == 1: + grid_dim_x = grid_dim_y = min(src_dims) + for tgt_coord, dim in zip(tgt_coords, (grid_dim_x, grid_dim_y)): + if len(tgt_coord.shape) == 1: + if isinstance(tgt_coord, DimCoord) and dim is not None: + result.add_dim_coord(tgt_coord, dim) + else: + result.add_aux_coord(tgt_coord, dim) + else: + result.add_aux_coord(tgt_coord, (grid_dim_y, grid_dim_x)) + + result.metadata = copy.deepcopy(src.metadata) + + # Copy across all the coordinates which don't span the grid. + # Record a mapping from old coordinate IDs to new coordinates, + # for subsequent use in creating updated aux_factories. 
+ + coord_mapping = {} + + def copy_coords(src_coords, add_method): + for coord in src_coords: + dims = src.coord_dims(coord) + if set(src_dims).intersection(set(dims)): + continue + if guess_coord_axis(coord) in ["X", "Y"]: + continue + + def dim_offset(dim): + offset = sum( + [ + d <= dim + for d in (grid_dim_x, grid_dim_y) + if d is not None + ] + ) + if offset and num_tgt_dims == 1: + offset -= 1 + offset -= sum([d <= dim for d in src_dims if d is not None]) + return dim + offset + + dims = [dim_offset(dim) for dim in dims] + result_coord = coord.copy() + # Add result_coord to the owner of add_method. + add_method(result_coord, dims) + coord_mapping[id(coord)] = result_coord + + copy_coords(src.dim_coords, result.add_dim_coord) + copy_coords(src.aux_coords, result.add_aux_coord) + + def regrid_reference_surface( + src_surface_coord, + surface_dims, + src_dims, + regrid_callback, + ): + # Determine which of the reference surface's dimensions span the X + # and Y dimensions of the source cube. + relative_surface_dims = [ + surface_dims.index(dim) if dim is not None else None + for dim in src_dims + ] + surface = regrid_callback( + src_surface_coord.points, + relative_surface_dims, + ) + surface_coord = src_surface_coord.copy(surface) + return surface_coord + + # Copy across any AuxFactory instances, and regrid their reference + # surfaces where required. 
+ for factory in src.aux_factories: + for coord in factory.dependencies.values(): + if coord is None: + continue + dims = src.coord_dims(coord) + if set(src_dims).intersection(dims): + result_coord = regrid_reference_surface( + coord, + dims, + src_dims, + regrid_callback, + ) + result.add_aux_coord(result_coord, dims) + coord_mapping[id(coord)] = result_coord + try: + result.add_aux_factory(factory.updated(coord_mapping)) + except KeyError: + msg = ( + "Cannot update aux_factory {!r} because of dropped" + " coordinates.".format(factory.name()) + ) + warnings.warn(msg) + + return result diff --git a/lib/iris/analysis/_scipy_interpolate.py b/lib/iris/analysis/_scipy_interpolate.py index c6b33c56a4..bfa070c7c7 100644 --- a/lib/iris/analysis/_scipy_interpolate.py +++ b/lib/iris/analysis/_scipy_interpolate.py @@ -225,11 +225,10 @@ def compute_interp_weights(self, xi, method=None): prepared = (xi_shape, method) + self._find_indices(xi.T) if method == "linear": - xi_shape, method, indices, norm_distances, out_of_bounds = prepared # Allocate arrays for describing the sparse matrix. - n_src_values_per_result_value = 2 ** ndim + n_src_values_per_result_value = 2**ndim n_result_values = len(indices[0]) n_non_zero = n_result_values * n_src_values_per_result_value weights = np.ones(n_non_zero, dtype=norm_distances[0].dtype) diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 409782f256..75b7d86406 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -147,6 +147,12 @@ def cube_delta(cube, coord): .. note:: Missing data support not yet implemented. + .. note:: + + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ # handle the case where a user passes a coordinate name if isinstance(coord, str): @@ -251,6 +257,11 @@ def differentiate(cube, coord_to_differentiate): .. note:: Spherical differentiation does not occur in this routine. + .. 
note:: + + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ # Get the delta cube in the required differential direction. # This operation results in a copy of the original cube. @@ -532,6 +543,12 @@ def curl(i_cube, j_cube, k_cube=None): where phi is longitude, theta is latitude. + .. note:: + + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + + """ # Get the vector quantity names. # (i.e. ['easterly', 'northerly', 'vertical']) @@ -577,7 +594,6 @@ def curl(i_cube, j_cube, k_cube=None): horiz_cs, (iris.coord_systems.GeogCS, iris.coord_systems.RotatedGeogCS) ) if not spherical_coords: - # TODO Implement some mechanism for conforming to a common grid dj_dx = _curl_differentiate(j_cube, x_coord) prototype_diff = dj_dx @@ -629,14 +645,10 @@ def curl(i_cube, j_cube, k_cube=None): # (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi) # phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta) # theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube) - if ( - y_coord.name() - not in [ - "latitude", - "grid_latitude", - ] - or x_coord.name() not in ["longitude", "grid_longitude"] - ): + if y_coord.name() not in [ + "latitude", + "grid_latitude", + ] or x_coord.name() not in ["longitude", "grid_longitude"]: raise ValueError( "Expecting latitude as the y coord and " "longitude as the x coord for spherical curl." @@ -745,6 +757,12 @@ def spatial_vectors_with_phenom_name(i_cube, j_cube, k_cube=None): #doctest: +SKIP (['u', 'v', 'w'], 'wind') + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. 
+ + """ directional_names = ( ("u", "v", "w"), diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index 373487af53..a8e90a63ad 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -66,6 +66,10 @@ def wrap_lons(lons, base, period): >>> print(wrap_lons(np.array([185, 30, -200, 75]), -180, 360)) [-175. 30. 160. 75.] + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ # It is important to use 64bit floating precision when changing a floats # numbers range. @@ -169,20 +173,25 @@ def rotate_pole(lons, lats, pole_lon, pole_lat): def _get_lon_lat_coords(cube): - lat_coords = [ - coord for coord in cube.coords() if "latitude" in coord.name() - ] - lon_coords = [ - coord for coord in cube.coords() if "longitude" in coord.name() - ] + def search_for_coord(coord_iterable, coord_name): + return [ + coord for coord in coord_iterable if coord_name in coord.name() + ] + + lat_coords = search_for_coord( + cube.dim_coords, "latitude" + ) or search_for_coord(cube.coords(), "latitude") + lon_coords = search_for_coord( + cube.dim_coords, "longitude" + ) or search_for_coord(cube.coords(), "longitude") if len(lat_coords) > 1 or len(lon_coords) > 1: raise ValueError( - "Calling `_get_lon_lat_coords` with multiple lat or lon coords" + "Calling `_get_lon_lat_coords` with multiple same-type (i.e. dim/aux) lat or lon coords" " is currently disallowed" ) lat_coord = lat_coords[0] lon_coord = lon_coords[0] - return (lon_coord, lat_coord) + return lon_coord, lat_coord def _xy_range(cube, mode=None): @@ -266,6 +275,10 @@ def get_xy_grids(cube): x, y = get_xy_grids(cube) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. 
""" x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") @@ -294,6 +307,11 @@ def get_xy_contiguous_bounded_grids(cube): xs, ys = get_xy_contiguous_bounded_grids(cube) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ x_coord, y_coord = cube.coord(axis="X"), cube.coord(axis="Y") @@ -335,7 +353,7 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): raise ValueError("Bounds must be [n,2] array") # fill in a new array of areas - radius_sqr = radius_of_earth ** 2 + radius_sqr = radius_of_earth**2 radian_lat_64 = radian_lat_bounds.astype(np.float64) radian_lon_64 = radian_lon_bounds.astype(np.float64) @@ -493,6 +511,10 @@ def cosine_latitude_weights(cube): cube = iris.load_cube(iris.sample_data_path('air_temp.pp')) weights = np.sqrt(cosine_latitude_weights(cube)) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ # Find all latitude coordinates, we want one and only one. lat_coords = [ @@ -578,6 +600,11 @@ def project(cube, target_proj, nx=None, ny=None): An instance of :class:`iris.cube.Cube` and a list describing the extent of the projection. + .. note:: + + If there are both dim and aux latitude-longitude coordinates, only + the dim coordinates will be used. + .. note:: This function assumes global data and will if necessary extrapolate @@ -591,6 +618,11 @@ def project(cube, target_proj, nx=None, ny=None): resulting nearest neighbour values. If masked, the value in the resulting cube is set to 0. + .. note:: + + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + .. 
warning:: This function uses a nearest neighbour approach rather than any form @@ -927,7 +959,7 @@ def _crs_distance_differentials(crs, x, y): """ # Make a true-latlon coordinate system for distance calculations. - crs_latlon = ccrs.Geodetic(globe=ccrs.Globe(ellipse="sphere")) + crs_latlon = ccrs.Geodetic(globe=crs.globe) # Transform points to true-latlon (just to get the true latitudes). _, true_lat = _transform_xy(crs, x, y, crs_latlon) # Get coordinate differentials w.r.t. true-latlon. @@ -1008,10 +1040,10 @@ def _transform_distance_vectors_tolerance_mask( u_one_t, v_zero_t = _transform_distance_vectors(ones, zeros, ds, dx2, dy2) u_zero_t, v_one_t = _transform_distance_vectors(zeros, ones, ds, dx2, dy2) # Squared magnitudes should be equal to one within acceptable tolerance. - # A value of atol=2e-3 is used, which corresponds to a change in magnitude - # of approximately 0.1%. - sqmag_1_0 = u_one_t ** 2 + v_zero_t ** 2 - sqmag_0_1 = u_zero_t ** 2 + v_one_t ** 2 + # A value of atol=2e-3 is used, which masks any magnitude changes >0.5% + # (approx percentage - based on experimenting). + sqmag_1_0 = u_one_t**2 + v_zero_t**2 + sqmag_0_1 = u_zero_t**2 + v_one_t**2 mask = np.logical_not( np.logical_and( np.isclose(sqmag_1_0, ones, atol=2e-3), @@ -1065,6 +1097,11 @@ def rotate_winds(u_cube, v_cube, target_cs): The names of the output cubes are those of the inputs, prefixed with 'transformed\_' (e.g. 'transformed_x_wind'). + .. note:: + + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + .. warning:: Conversion between rotated-pole and non-rotated systems can be diff --git a/lib/iris/analysis/geometry.py b/lib/iris/analysis/geometry.py index a412a26ebc..b246b518d4 100644 --- a/lib/iris/analysis/geometry.py +++ b/lib/iris/analysis/geometry.py @@ -160,6 +160,11 @@ def geometry_area_weights(cube, geometry, normalize=False): calculation might be wrong. In this case, a UserWarning will be issued. 
+ .. note:: + + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. + Args: * cube (:class:`iris.cube.Cube`): diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index 107d964ed4..09a02ad51c 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -115,6 +115,11 @@ def abs(cube, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype(np.abs, cube.dtype, in_place=in_place) @@ -160,6 +165,11 @@ def intersection_of_cubes(cube, other_cube): intersections = cubes.extract_overlapping(coords) cube1, cube2 = (intersections[0], intersections[1]) + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ wmsg = ( "iris.analysis.maths.intersection_of_cubes has been deprecated and will " @@ -243,6 +253,11 @@ def add(cube, other, dim=None, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype( @@ -292,6 +307,11 @@ def subtract(cube, other, dim=None, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype( @@ -383,6 +403,10 @@ def multiply(cube, other, dim=None, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. 
""" _assert_is_cube(cube) @@ -456,6 +480,10 @@ def divide(cube, other, dim=None, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ _assert_is_cube(cube) @@ -519,6 +547,10 @@ def exponentiate(cube, exponent, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ _assert_is_cube(cube) new_dtype = _output_dtype( @@ -540,7 +572,7 @@ def power(data, out=None): return _math_op_common( cube, power, - cube.units ** exponent, + cube.units**exponent, new_dtype=new_dtype, in_place=in_place, ) @@ -567,6 +599,11 @@ def exp(cube, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype(np.exp, cube.dtype, in_place=in_place) @@ -593,6 +630,11 @@ def log(cube, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype(np.log, cube.dtype, in_place=in_place) @@ -623,6 +665,11 @@ def log2(cube, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype(np.log2, cube.dtype, in_place=in_place) @@ -649,6 +696,11 @@ def log10(cube, in_place=False): Returns: An instance of :class:`iris.cube.Cube`. 
+ Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) new_dtype = _output_dtype(np.log10, cube.dtype, in_place=in_place) @@ -703,6 +755,12 @@ def apply_ufunc( cube = apply_ufunc(numpy.sin, cube, in_place=True) + .. note:: + + This function maintains laziness when called; it does not realise data. This is dependent on `ufunc` argument + being a numpy operation that is compatible with lazy operation. + See more at :doc:`/userguide/real_and_lazy_data`. + """ if not isinstance(ufunc, np.ufunc): @@ -774,6 +832,7 @@ def _binary_op_common( new_dtype=None, dim=None, in_place=False, + sanitise_metadata=True, ): """ Function which shares common code between binary operations. @@ -792,6 +851,8 @@ def _binary_op_common( coordinate that is not found in `cube` in_place - whether or not to apply the operation in place to `cube` and `cube.data` + sanitise_metadata - whether or not to remove metadata using + _sanitise_metadata function """ from iris.cube import Cube @@ -837,6 +898,20 @@ def unary_func(lhs): raise TypeError(emsg) return data + if in_place and not cube.has_lazy_data(): + # In-place arithmetic doesn't work if array type of LHS is less complex + # than RHS. + if iris._lazy_data.is_lazy_data(rhs): + cube.data = cube.lazy_data() + elif ma.is_masked(rhs) and not isinstance(cube.data, ma.MaskedArray): + cube.data = ma.array(cube.data) + + elif isinstance( + cube.core_data(), ma.MaskedArray + ) and iris._lazy_data.is_lazy_data(rhs): + # Workaround for #2987. numpy#15200 discusses the general problem. + cube = cube.copy(cube.lazy_data()) + result = _math_op_common( cube, unary_func, @@ -844,13 +919,15 @@ def unary_func(lhs): new_dtype=new_dtype, in_place=in_place, skeleton_cube=skeleton_cube, + sanitise_metadata=sanitise_metadata, ) if isinstance(other, Cube): # Insert the resultant data from the maths operation # within the resolved cube. 
result = resolver.cube(result.core_data(), in_place=in_place) - _sanitise_metadata(result, new_unit) + if sanitise_metadata: + _sanitise_metadata(result, new_unit) return result @@ -932,6 +1009,7 @@ def _math_op_common( new_dtype=None, in_place=False, skeleton_cube=False, + sanitise_metadata=True, ): from iris.cube import Cube @@ -965,7 +1043,8 @@ def _math_op_common( ): new_cube.data = ma.masked_array(0, 1, dtype=new_dtype) - _sanitise_metadata(new_cube, new_unit) + if sanitise_metadata: + _sanitise_metadata(new_cube, new_unit) return new_cube diff --git a/lib/iris/analysis/stats.py b/lib/iris/analysis/stats.py index 89dde1818b..711e3c5bfb 100644 --- a/lib/iris/analysis/stats.py +++ b/lib/iris/analysis/stats.py @@ -168,10 +168,10 @@ def _ones_like(cube): covar = (s1 * s2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1, mdtol=mdtol ) - var_1 = (s1 ** 2).collapsed( + var_1 = (s1**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_1 ) - var_2 = (s2 ** 2).collapsed( + var_2 = (s2**2).collapsed( corr_coords, iris.analysis.SUM, weights=weights_2 ) diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index d5fac9d108..24f7a9dede 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -85,7 +85,6 @@ def __init__(self, waypoints, sample_count=10): cur_seg = segments[cur_seg_i] len_accum = cur_seg.length for p in range(self.sample_count): - # calculate the sample position along our total length sample_at_len = p * sample_step @@ -216,6 +215,10 @@ def interpolate(cube, sample_points, method=None): ('longitude', [-60, -50, -40])] interpolated_cube = interpolate(cube, sample_points) + Notes + ------ + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. 
""" from iris.analysis import Linear @@ -320,20 +323,59 @@ def interpolate(cube, sample_points, method=None): break if method in ["linear", None]: - for i in range(trajectory_size): - point = [(coord, values[i]) for coord, values in sample_points] - column = cube.interpolate(point, Linear()) - new_cube.data[..., i] = column.data - # Fill in the empty squashed (non derived) coords. - for column_coord in column.dim_coords + column.aux_coords: - src_dims = cube.coord_dims(column_coord) - if not squish_my_dims.isdisjoint(src_dims): - if len(column_coord.points) != 1: - msg = "Expected to find exactly one point. Found {}." - raise Exception(msg.format(column_coord.points)) - new_cube.coord(column_coord.name()).points[ - i - ] = column_coord.points[0] + # Using cube.interpolate will generate extra values that we don't need + # as it makes a grid from the provided coordinates (like a meshgrid) + # and then does interpolation for all of them. This is memory + # inefficient, but significantly more time efficient than calling + # cube.interpolate (or the underlying method on the interpolator) + # repeatedly, so using this approach for now. In future, it would be + # ideal if we only interpolated at the points we care about + columns = cube.interpolate(sample_points, Linear()) + # np.einsum(a, [0, 0], [0]) is like np.diag(a) + # We're using einsum here to do an n-dimensional diagonal, leaving the + # other dimensions unaffected and putting the diagonal's direction on + # the final axis + initial_inds = list(range(1, columns.ndim + 1)) + for ind in squish_my_dims: + initial_inds[ind] = 0 + final_inds = list(filter(lambda x: x != 0, initial_inds)) + [0] + new_cube.data = np.einsum(columns.data, initial_inds, final_inds) + + # Fill in the empty squashed (non derived) coords. 
+ # We're using the same einstein summation plan as for the cube, but + # redoing those indices to match the indices in the coordinates + for columns_coord in columns.dim_coords + columns.aux_coords: + src_dims = cube.coord_dims(columns_coord) + if not squish_my_dims.isdisjoint(src_dims): + # Mapping the cube indicies onto the coord + initial_coord_inds = [initial_inds[ind] for ind in src_dims] + # Making the final ones the same way as for the cube + # 0 will always appear in the initial ones because we know this + # coord overlaps the squish dims + final_coord_inds = list( + filter(lambda x: x != 0, initial_coord_inds) + ) + [0] + new_coord_points = np.einsum( + columns_coord.points, initial_coord_inds, final_coord_inds + ) + # Check we're not overwriting coord.points with the wrong shape + if ( + not new_cube.coord(columns_coord.name()).points.shape + == new_coord_points.shape + ): + msg = ( + "Coord {} was expected to have new points of shape {}. " + "Found shape of {}." + ) + raise ValueError( + msg.format( + columns_coord.name(), + new_cube.coord(columns_coord.name()).points.shape, + new_coord_points.shape, + ) + ) + # Replace the points + new_cube.coord(columns_coord.name()).points = new_coord_points elif method == "nearest": # Use a cache with _nearest_neighbour_indices_ndcoords() @@ -404,21 +446,7 @@ def interpolate(cube, sample_points, method=None): ] # Apply the fancy indexing to get all the result data points. - source_data = source_data[tuple(fancy_source_indices)] - - # "Fix" problems with missing datapoints producing odd values - # when copied from a masked into an unmasked array. - # TODO: proper masked data handling. - if np.ma.isMaskedArray(source_data): - # This is **not** proper mask handling, because we cannot produce a - # masked result, but it ensures we use a "filled" version of the - # input in this case. 
- source_data = source_data.filled() - new_cube.data[:] = source_data - # NOTE: we assign to "new_cube.data[:]" and *not* just "new_cube.data", - # because the existing code produces a default dtype from 'np.empty' - # instead of preserving the input dtype. - # TODO: maybe this should be fixed -- i.e. to preserve input dtype ?? + new_cube.data = source_data[tuple(fancy_source_indices)] # Fill in the empty squashed (non derived) coords. column_coords = [ diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index cb5f53f5f4..cb3149fe58 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -44,8 +44,6 @@ # https://www.unidata.ucar.edu/software/netcdf/docs/netcdf_data_set_components.html#object_name -from ..util import guess_coord_axis - _TOKEN_PARSE = re.compile(r"""^[a-zA-Z0-9][\w\.\+\-@]*$""") # Configure the logger. @@ -971,6 +969,7 @@ def _combine_lenient(self, other): A list of combined metadata member values. """ + # Perform "strict" combination for "coord_system" and "climatological". def func(field): left = getattr(self, field) @@ -1026,6 +1025,7 @@ def _difference_lenient(self, other): A list of difference metadata member values. """ + # Perform "strict" difference for "coord_system" and "climatological". def func(field): left = getattr(self, field) @@ -1413,6 +1413,8 @@ def metadata_filter( to only those that matched the given criteria. 
""" + from ..util import guess_coord_axis + name = None obj = None diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 12db64cafe..a0c97dfc00 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -13,7 +13,9 @@ from collections import namedtuple from collections.abc import Iterable +from dataclasses import dataclass import logging +from typing import Any from dask.array.core import broadcast_shapes import numpy as np @@ -56,10 +58,42 @@ _PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"]) -_PreparedItem = namedtuple( - "PreparedItem", - ["metadata", "points", "bounds", "dims", "container"], -) + +@dataclass +class _PreparedItem: + metadata: Any + points: Any + bounds: Any + dims: Any + container: Any + mesh: Any = None + location: Any = None + axis: Any = None + + def create_coord(self, metadata): + from iris.experimental.ugrid.mesh import MeshCoord + + if issubclass(self.container, MeshCoord): + # Make a MeshCoord, for which we have mesh/location/axis. + result = MeshCoord( + mesh=self.mesh, + location=self.location, + axis=self.axis, + ) + # Note: in this case we do also have "prepared metadata", but we + # do *not* assign it as we do for an 'ordinary' Coord. + # Instead, MeshCoord name/units/attributes are immutable, and set at + # create time to those of the underlying mesh node coordinate. + # cf https://github.com/SciTools/iris/issues/4670 + + else: + # make a regular coord, for which we have points/bounds/metadata. + result = self.container(self.points, bounds=self.bounds) + # Also assign prepared metadata. 
+ result.metadata = metadata + + return result + _PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"]) @@ -646,7 +680,13 @@ def _categorise_items(cube): @staticmethod def _create_prepared_item( - coord, dims, src_metadata=None, tgt_metadata=None + coord, + dims, + src_metadata=None, + tgt_metadata=None, + points=None, + bounds=None, + container=None, ): """ Convenience method that creates a :class:`~iris.common.resolve._PreparedItem` @@ -658,8 +698,10 @@ def _create_prepared_item( * coord: The coordinate with the ``points`` and ``bounds`` to be extracted. - * dims: - The dimensions that the ``coord`` spans on the resulting resolved :class:`~iris.cube.Cube`. + * dims (int or tuple): + The dimensions that the ``coord`` spans on the resulting resolved + :class:`~iris.cube.Cube`. + (Can also be a single dimension number). * src_metadata: The coordinate metadata from the ``src`` :class:`~iris.cube.Cube`. @@ -667,26 +709,85 @@ def _create_prepared_item( * tgt_metadata: The coordinate metadata from the ``tgt`` :class:`~iris.cube.Cube`. + * points: + Override points array. When not given, use coord.points. + + * bounds: + Override bounds array. When not given, use coord.bounds. + + * container: + Override coord type (class constructor). + When not given, use type(coord). + Returns: The :class:`~iris.common.resolve._PreparedItem`. + .. note:: + + If container or type(coord) is DimCoord/AuxCoord (i.e. not + MeshCoord), then points+bounds define the built AuxCoord/DimCoord. + Theses points+bounds come either from those args, or the 'coord'. + Alternatively, when container or type(coord) is MeshCoord, then + points==bounds==None and the preparted item contains + mesh/location/axis properties for the resulting MeshCoord. + These don't have override args: they *always* come from 'coord'. 
+ """ + if not isinstance(dims, Iterable): + dims = (dims,) + if src_metadata is not None and tgt_metadata is not None: combined = src_metadata.combine(tgt_metadata) else: combined = src_metadata or tgt_metadata - if not isinstance(dims, Iterable): - dims = (dims,) prepared_metadata = _PreparedMetadata( combined=combined, src=src_metadata, tgt=tgt_metadata ) - bounds = coord.bounds + + if container is None: + container = type(coord) + + from iris.experimental.ugrid.mesh import MeshCoord + + if issubclass(container, MeshCoord): + # Build a prepared-item to make a MeshCoord. + # This case does *NOT* use points + bounds, so alternatives to the + # coord content should not have been specified by the caller. + assert points is None and bounds is None + mesh = coord.mesh + location = coord.location + axis = coord.axis + + else: + # Build a prepared-item to make a DimCoord or AuxCoord. + + # mesh/location/axis are not used. + mesh = None + location = None + axis = None + + # points + bounds default to those from the coordinate, but + # alternative values may be specified. + if points is None: + points = coord.points + bounds = coord.bounds + # 'ELSE' points was passed: both points+bounds come from the args + + # Always *copy* points+bounds, to avoid any possible direct (shared) + # references to existing coord arrays. 
+ points = points.copy() + if bounds is not None: + bounds = bounds.copy() + result = _PreparedItem( metadata=prepared_metadata, - points=coord.points.copy(), - bounds=bounds if bounds is None else bounds.copy(), dims=dims, - container=type(coord), + points=points, + bounds=bounds, + mesh=mesh, + location=location, + axis=axis, + container=container, ) return result @@ -1422,30 +1523,64 @@ def _prepare_common_aux_payload( (tgt_item,) = tgt_items src_coord = src_item.coord tgt_coord = tgt_item.coord - points, bounds = self._prepare_points_and_bounds( - src_coord, - tgt_coord, - src_item.dims, - tgt_item.dims, - ignore_mismatch=ignore_mismatch, - ) - if points is not None: - src_type = type(src_coord) - tgt_type = type(tgt_coord) - # Downcast to aux if there are mixed container types. - container = src_type if src_type is tgt_type else AuxCoord - prepared_metadata = _PreparedMetadata( - combined=src_metadata.combine(tgt_item.metadata), - src=src_metadata, - tgt=tgt_item.metadata, - ) - prepared_item = _PreparedItem( - metadata=prepared_metadata, - points=points.copy(), - bounds=bounds if bounds is None else bounds.copy(), - dims=tgt_item.dims, - container=container, + + prepared_item = None + src_is_mesh, tgt_is_mesh = [ + hasattr(coord, "mesh") for coord in (src_coord, tgt_coord) + ] + if src_is_mesh and tgt_is_mesh: + # MeshCoords are a bit "special" ... + # In this case, we may need to produce an alternative form + # to the 'ordinary' _PreparedItem + # However, this only works if they have identical meshes.. + if src_coord == tgt_coord: + prepared_item = self._create_prepared_item( + src_coord, + tgt_item.dims, + src_metadata=src_metadata, + tgt_metadata=tgt_item.metadata, + ) + else: + emsg = ( + f"Mesh coordinate {src_coord.name()!r} does not match between the " + f"LHS cube {self.lhs_cube.name()!r} and " + f"RHS cube {self.rhs_cube.name()!r}." 
+ ) + raise ValueError(emsg) + + if prepared_item is None: + # Make a "normal" _PreparedItem, which is specified using + # points + bounds arrays. + # First, convert any un-matching MeshCoords to AuxCoord + if src_is_mesh: + src_coord = AuxCoord.from_coord(src_coord) + if tgt_is_mesh: + tgt_coord = AuxCoord.from_coord(tgt_coord) + points, bounds = self._prepare_points_and_bounds( + src_coord, + tgt_coord, + src_item.dims, + tgt_item.dims, + ignore_mismatch=ignore_mismatch, ) + if points is not None: + src_type = type(src_coord) + tgt_type = type(tgt_coord) + # Downcast to aux if there are mixed container types. + container = ( + src_type if src_type is tgt_type else AuxCoord + ) + prepared_item = self._create_prepared_item( + src_coord, + tgt_item.dims, + src_metadata=src_metadata, + tgt_metadata=tgt_item.metadata, + points=points, + bounds=bounds, + container=container, + ) + + if prepared_item is not None: prepared_items.append(prepared_item) def _prepare_common_dim_payload( @@ -1499,16 +1634,13 @@ def _prepare_common_dim_payload( ) if points is not None: - prepared_metadata = _PreparedMetadata( - combined=src_metadata.combine(tgt_metadata), - src=src_metadata, - tgt=tgt_metadata, - ) - prepared_item = _PreparedItem( - metadata=prepared_metadata, - points=points.copy(), - bounds=bounds if bounds is None else bounds.copy(), - dims=(tgt_dim,), + prepared_item = self._create_prepared_item( + src_coord, + tgt_dim, + src_metadata=src_metadata, + tgt_metadata=tgt_metadata, + points=points, + bounds=bounds, container=DimCoord, ) self.prepared_category.items_dim.append(prepared_item) @@ -2333,8 +2465,7 @@ def cube(self, data, in_place=False): # Add the prepared dim coordinates. for item in self.prepared_category.items_dim: - coord = item.container(item.points, bounds=item.bounds) - coord.metadata = item.metadata.combined + coord = item.create_coord(metadata=item.metadata.combined) result.add_dim_coord(coord, item.dims) # Add the prepared aux and scalar coordinates. 
@@ -2343,8 +2474,8 @@ def cube(self, data, in_place=False): + self.prepared_category.items_scalar ) for item in prepared_aux_coords: - coord = item.container(item.points, bounds=item.bounds) - coord.metadata = item.metadata.combined + # These items are "special" + coord = item.create_coord(metadata=item.metadata.combined) try: result.add_aux_coord(coord, item.dims) except ValueError as err: diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index 72019b4b87..698b4828f1 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -90,6 +90,7 @@ def vectorised_fn(*args): # coordinates only # + # Private "helper" function def _pt_date(coord, time): """ diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 2f875bb159..edf0c1871b 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -9,6 +9,7 @@ """ from abc import ABCMeta, abstractmethod +from functools import cached_property import warnings import cartopy.crs as ccrs @@ -53,10 +54,28 @@ class CoordSystem(metaclass=ABCMeta): grid_mapping_name = None def __eq__(self, other): - return ( - self.__class__ == other.__class__ - and self.__dict__ == other.__dict__ - ) + """ + Override equality + + The `_globe` and `_crs` attributes are not compared because they are + cached properties and completely derived from other attributes. The + nature of caching means that they can appear on one object and not on + another despite the objects being identical, and them being completely + derived from other attributes means they will only differ if other + attributes that are being tested for equality differ. 
+ """ + if self.__class__ != other.__class__: + return False + self_keys = set(self.__dict__.keys()) + other_keys = set(other.__dict__.keys()) + check_keys = (self_keys | other_keys) - {"_globe", "_crs"} + for key in check_keys: + try: + if self.__dict__[key] != other.__dict__[key]: + return False + except KeyError: + return False + return True def __ne__(self, other): # Must supply __ne__, Python does not defer to __eq__ for @@ -122,11 +141,17 @@ def as_cartopy_projection(self): pass +_short_datum_names = { + "OSGB 1936": "OSGB36", + "OSGB_1936": "OSGB36", + "WGS 84": "WGS84", +} + + class GeogCS(CoordSystem): """ A geographic (ellipsoidal) coordinate system, defined by the shape of the Earth and a prime meridian. - """ grid_mapping_name = "latitude_longitude" @@ -139,34 +164,35 @@ def __init__( longitude_of_prime_meridian=None, ): """ - Creates a new GeogCS. - - Kwargs: + Create a new GeogCS. + Parameters + ---------- * semi_major_axis, semi_minor_axis: - Axes of ellipsoid, in metres. At least one must be given - (see note below). - + Axes of ellipsoid, in metres. At least one must be given (see note + below). * inverse_flattening: - Can be omitted if both axes given (see note below). - Defaults to 0.0 . - + Can be omitted if both axes given (see note below). Default 0.0 * longitude_of_prime_meridian: - Specifies the prime meridian on the ellipsoid, in degrees. - Defaults to 0.0 . + Specifies the prime meridian on the ellipsoid, in degrees. Default 0.0 + Notes + ----- If just semi_major_axis is set, with no semi_minor_axis or inverse_flattening, then a perfect sphere is created from the given radius. 
- If just two of semi_major_axis, semi_minor_axis, and - inverse_flattening are given the missing element is calculated from the - formula: + If just two of semi_major_axis, semi_minor_axis, and inverse_flattening + are given the missing element is calculated from the formula: :math:`flattening = (major - minor) / major` Currently, Iris will not allow over-specification (all three ellipsoid parameters). + After object creation, altering any of these properties will not update + the others. semi_major_axis and semi_minor_axis are used when creating + Cartopy objects. + Examples:: cs = GeogCS(6371229) @@ -233,13 +259,15 @@ def __init__( raise ValueError("Insufficient ellipsoid specification") #: Major radius of the ellipsoid in metres. - self.semi_major_axis = float(semi_major_axis) + self._semi_major_axis = float(semi_major_axis) #: Minor radius of the ellipsoid in metres. - self.semi_minor_axis = float(semi_minor_axis) + self._semi_minor_axis = float(semi_minor_axis) #: :math:`1/f` where :math:`f = (a-b)/a`. - self.inverse_flattening = float(inverse_flattening) + self._inverse_flattening = float(inverse_flattening) + + self._datum = None #: Describes 'zero' on the ellipsoid in degrees. self.longitude_of_prime_meridian = _arg_default( @@ -257,6 +285,14 @@ def _pretty_attrs(self): self.longitude_of_prime_meridian, ) ) + # An unknown crs datum will be treated as None + if self.datum is not None and self.datum != "unknown": + attrs.append( + ( + "datum", + self.datum, + ) + ) return attrs def __repr__(self): @@ -294,7 +330,7 @@ def xml_element(self, doc): return CoordSystem.xml_element(self, doc, attrs) def as_cartopy_crs(self): - return ccrs.Geodetic(self.as_cartopy_globe()) + return self._crs def as_cartopy_projection(self): return ccrs.PlateCarree( @@ -303,14 +339,160 @@ def as_cartopy_projection(self): ) def as_cartopy_globe(self): - # Explicitly set `ellipse` to None as a workaround for - # Cartopy setting WGS84 as the default. 
- return ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, + return self._globe + + @cached_property + def _globe(self): + """ + A representation of this CRS as a Cartopy Globe. + + Note + ---- + This property is created when required and then cached for speed. That + cached value is cleared when an assignment is made to a property of the + class that invalidates the cache. + """ + if self._datum is not None: + short_datum = _short_datum_names.get(self._datum, self._datum) + # Cartopy doesn't actually enact datums unless they're provided without + # ellipsoid axes, so only provide the datum + return ccrs.Globe(short_datum, ellipse=None) + else: + return ccrs.Globe( + ellipse=None, + semimajor_axis=self._semi_major_axis, + semiminor_axis=self._semi_minor_axis, + ) + + @cached_property + def _crs(self): + """ + A representation of this CRS as a Cartopy CRS. + + Note + ---- + This property is created when required and then cached for speed. That + cached value is cleared when an assignment is made to a property of the + class that invalidates the cache. + """ + return ccrs.Geodetic(self._globe) + + def _wipe_cached_properties(self): + """ + Wipes the cached properties on the object as part of any update to a + value that invalidates the cache. + """ + try: + delattr(self, "_crs") + except AttributeError: + pass + try: + delattr(self, "_globe") + except AttributeError: + pass + + @property + def semi_major_axis(self): + if self._semi_major_axis is not None: + return self._semi_major_axis + else: + return self._crs.ellipsoid.semi_major_metre + + @semi_major_axis.setter + def semi_major_axis(self, value): + """ + Setting this property to a different value invalidates the current datum + (if any) because a datum encodes a specific semi-major axis. This also + invalidates the cached `cartopy.Globe` and `cartopy.CRS`. 
+ """ + value = float(value) + if not np.isclose(self.semi_major_axis, value): + self._datum = None + self._wipe_cached_properties() + self._semi_major_axis = value + + @property + def semi_minor_axis(self): + if self._semi_minor_axis is not None: + return self._semi_minor_axis + else: + return self._crs.ellipsoid.semi_minor_metre + + @semi_minor_axis.setter + def semi_minor_axis(self, value): + """ + Setting this property to a different value invalidates the current datum + (if any) because a datum encodes a specific semi-minor axis. This also + invalidates the cached `cartopy.Globe` and `cartopy.CRS`. + """ + value = float(value) + if not np.isclose(self.semi_minor_axis, value): + self._datum = None + self._wipe_cached_properties() + self._semi_minor_axis = value + + @property + def inverse_flattening(self): + if self._inverse_flattening is not None: + return self._inverse_flattening + else: + self._crs.ellipsoid.inverse_flattening + + @inverse_flattening.setter + def inverse_flattening(self, value): + """ + Setting this property to a different value does not affect the behaviour + of this object any further than the value of this property. + """ + wmsg = ( + "Setting inverse_flattening does not affect other properties of " + "the GeogCS object. To change other properties set them explicitly" + " or create a new GeogCS instance." + ) + warnings.warn(wmsg, UserWarning) + value = float(value) + self._inverse_flattening = value + + @property + def datum(self): + if self._datum is None: + return None + else: + datum = self._datum + return datum + + @datum.setter + def datum(self, value): + """ + Setting this property to a different value invalidates the current + values of the ellipsoid measurements because a datum encodes its own + ellipse. This also invalidates the cached `cartopy.Globe` and + `cartopy.CRS`. 
+ """ + if self._datum != value: + self._semi_major_axis = None + self._semi_minor_axis = None + self._inverse_flattening = None + self._wipe_cached_properties() + self._datum = value + + @classmethod + def from_datum(cls, datum, longitude_of_prime_meridian=None): + crs = super().__new__(cls) + + crs._semi_major_axis = None + crs._semi_minor_axis = None + crs._inverse_flattening = None + + #: Describes 'zero' on the ellipsoid in degrees. + crs.longitude_of_prime_meridian = _arg_default( + longitude_of_prime_meridian, 0 ) + crs._datum = datum + + return crs + class RotatedGeogCS(CoordSystem): """ @@ -766,7 +948,6 @@ def __init__( false_northing=None, ellipsoid=None, ): - """ Constructs a Geostationary coord system. @@ -878,32 +1059,39 @@ def __init__( false_northing=None, true_scale_lat=None, ellipsoid=None, + scale_factor_at_projection_origin=None, ): """ Constructs a Stereographic coord system. - Args: + Parameters + ---------- - * central_lat: + central_lat : float The latitude of the pole. - * central_lon: + central_lon : float The central longitude, which aligns with the y axis. - Kwargs: - - * false_easting: - X offset from planar origin in metres. Defaults to 0.0 . + false_easting : float, optional + X offset from planar origin in metres. - * false_northing: - Y offset from planar origin in metres. Defaults to 0.0 . + false_northing : float, optional + Y offset from planar origin in metres. - * true_scale_lat: + true_scale_lat : float, optional Latitude of true scale. - * ellipsoid (:class:`GeogCS`): + scale_factor_at_projection_origin : float, optional + Scale factor at the origin of the projection + + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. + Notes + ----- + It is only valid to provide one of true_scale_lat and scale_factor_at_projection_origin + """ #: True latitude of planar origin in degrees. @@ -922,27 +1110,42 @@ def __init__( self.true_scale_lat = _arg_default( true_scale_lat, None, cast_as=_float_or_None ) - # N.B. 
the way we use this parameter, we need it to default to None, + #: Scale factor at projection origin. + self.scale_factor_at_projection_origin = _arg_default( + scale_factor_at_projection_origin, None, cast_as=_float_or_None + ) + # N.B. the way we use these parameters, we need them to default to None, # and *not* to 0.0 . + if ( + self.true_scale_lat is not None + and self.scale_factor_at_projection_origin is not None + ): + raise ValueError( + "It does not make sense to provide both " + '"scale_factor_at_projection_origin" and "true_scale_latitude". ' + ) + #: Ellipsoid definition (:class:`GeogCS` or None). self.ellipsoid = ellipsoid - def __repr__(self): - return ( - "Stereographic(central_lat={!r}, central_lon={!r}, " - "false_easting={!r}, false_northing={!r}, " - "true_scale_lat={!r}, " - "ellipsoid={!r})".format( - self.central_lat, - self.central_lon, - self.false_easting, - self.false_northing, - self.true_scale_lat, - self.ellipsoid, + def _repr_attributes(self): + if self.scale_factor_at_projection_origin is None: + scale_info = "true_scale_lat={!r}, ".format(self.true_scale_lat) + else: + scale_info = "scale_factor_at_projection_origin={!r}, ".format( + self.scale_factor_at_projection_origin ) + return ( + f"(central_lat={self.central_lat}, central_lon={self.central_lon}, " + f"false_easting={self.false_easting}, false_northing={self.false_northing}, " + f"{scale_info}" + f"ellipsoid={self.ellipsoid})" ) + def __repr__(self): + return "Stereographic" + self._repr_attributes() + def as_cartopy_crs(self): globe = self._ellipsoid_to_globe(self.ellipsoid, ccrs.Globe()) @@ -952,6 +1155,7 @@ def as_cartopy_crs(self): self.false_easting, self.false_northing, self.true_scale_lat, + self.scale_factor_at_projection_origin, globe=globe, ) @@ -959,6 +1163,73 @@ def as_cartopy_projection(self): return self.as_cartopy_crs() +class PolarStereographic(Stereographic): + """ + A subclass of the stereographic map projection centred on a pole. 
+ + """ + + grid_mapping_name = "polar_stereographic" + + def __init__( + self, + central_lat, + central_lon, + false_easting=None, + false_northing=None, + true_scale_lat=None, + scale_factor_at_projection_origin=None, + ellipsoid=None, + ): + """ + Construct a Polar Stereographic coord system. + + Parameters + ---------- + + central_lat : {90, -90} + The latitude of the pole. + + central_lon : float + The central longitude, which aligns with the y axis. + + false_easting : float, optional + X offset from planar origin in metres. + + false_northing : float, optional + Y offset from planar origin in metres. + + true_scale_lat : float, optional + Latitude of true scale. + + scale_factor_at_projection_origin : float, optional + Scale factor at the origin of the projection + + ellipsoid : :class:`GeogCS`, optional + If given, defines the ellipsoid. + + Notes + ----- + It is only valid to provide at most one of `true_scale_lat` and + `scale_factor_at_projection_origin`. + + + """ + + super().__init__( + central_lat=central_lat, + central_lon=central_lon, + false_easting=false_easting, + false_northing=false_northing, + true_scale_lat=true_scale_lat, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + ellipsoid=ellipsoid, + ) + + def __repr__(self): + return "PolarStereographic" + self._repr_attributes() + + class LambertConformal(CoordSystem): """ A coordinate system in the Lambert Conformal conic projection. @@ -1083,6 +1354,9 @@ def __init__( longitude_of_projection_origin=None, ellipsoid=None, standard_parallel=None, + scale_factor_at_projection_origin=None, + false_easting=None, + false_northing=None, ): """ Constructs a Mercator coord system. @@ -1098,6 +1372,22 @@ def __init__( * standard_parallel: The latitude where the scale is 1. Defaults to 0.0 . + * scale_factor_at_projection_origin: + Scale factor at natural origin. Defaults to unused. + + * false_easting: + X offset from the planar origin in metres. Defaults to 0.0. 
+
+        * false_northing:
+            Y offset from the planar origin in metres. Defaults to 0.0.
+
+        * datum:
+            If given, specifies the datum of the coordinate system. Only
+            respected if iris.Future.datum_support is set.
+
+        Note: Only one of ``standard_parallel`` and
+        ``scale_factor_at_projection_origin`` should be included.
+
         """
         #: True longitude of planar origin in degrees.
         self.longitude_of_projection_origin = _arg_default(
@@ -1107,15 +1397,41 @@
         #: Ellipsoid definition (:class:`GeogCS` or None).
         self.ellipsoid = ellipsoid
 
+        # Initialise to None, then set based on arguments
         #: The latitude where the scale is 1.
-        self.standard_parallel = _arg_default(standard_parallel, 0)
+        self.standard_parallel = None
+        # The scale factor at the origin of the projection
+        self.scale_factor_at_projection_origin = None
+        if scale_factor_at_projection_origin is None:
+            self.standard_parallel = _arg_default(standard_parallel, 0)
+        else:
+            if standard_parallel is None:
+                self.scale_factor_at_projection_origin = _arg_default(
+                    scale_factor_at_projection_origin, 0
+                )
+            else:
+                raise ValueError(
+                    "It does not make sense to provide both "
+                    '"scale_factor_at_projection_origin" and '
+                    '"standard_parallel".'
+                )
+
+        #: X offset from the planar origin in metres.
+        self.false_easting = _arg_default(false_easting, 0)
+
+        #: Y offset from the planar origin in metres.
+ self.false_northing = _arg_default(false_northing, 0) def __repr__(self): res = ( "Mercator(longitude_of_projection_origin=" "{self.longitude_of_projection_origin!r}, " "ellipsoid={self.ellipsoid!r}, " - "standard_parallel={self.standard_parallel!r})" + "standard_parallel={self.standard_parallel!r}, " + "scale_factor_at_projection_origin=" + "{self.scale_factor_at_projection_origin!r}, " + "false_easting={self.false_easting!r}, " + "false_northing={self.false_northing!r})" ) return res.format(self=self) @@ -1126,6 +1442,9 @@ def as_cartopy_crs(self): central_longitude=self.longitude_of_projection_origin, globe=globe, latitude_true_scale=self.standard_parallel, + scale_factor=self.scale_factor_at_projection_origin, + false_easting=self.false_easting, + false_northing=self.false_northing, ) def as_cartopy_projection(self): diff --git a/lib/iris/coords.py b/lib/iris/coords.py index b236d407da..91bb786ae8 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -12,12 +12,12 @@ from collections import namedtuple from collections.abc import Container, Iterator import copy +from functools import lru_cache from itertools import chain, zip_longest import operator import warnings import zlib -import cftime import dask.array as da import numpy as np import numpy.ma as ma @@ -1344,7 +1344,14 @@ def __add__(self, mod): return Cell(point, bound) def __hash__(self): - return super().__hash__() + # See __eq__ for the definition of when two cells are equal. 
+ if self.bound is None: + return hash(self.point) + bound = self.bound + rbound = bound[::-1] + if rbound < bound: + bound = rbound + return hash((self.point, bound)) def __eq__(self, other): """ @@ -1360,7 +1367,9 @@ def __eq__(self, other): else: return self.point == other elif isinstance(other, Cell): - return (self.point == other.point) and (self.bound == other.bound) + return (self.point == other.point) and ( + self.bound == other.bound or self.bound == other.bound[::-1] + ) elif ( isinstance(other, str) and self.bound is None @@ -1408,16 +1417,6 @@ def __common_cmp__(self, other, operator_method): ): raise ValueError("Unexpected operator_method") - # Prevent silent errors resulting from missing cftime - # behaviour. - if isinstance(other, cftime.datetime) or ( - isinstance(self.point, cftime.datetime) - and not isinstance(other, iris.time.PartialDateTime) - ): - raise TypeError( - "Cannot determine the order of " "cftime.datetime objects" - ) - if isinstance(other, Cell): # Cell vs Cell comparison for providing a strict sort order if self.bound is None: @@ -1482,19 +1481,7 @@ def __common_cmp__(self, other, operator_method): else: me = max(self.bound) - # Work around to handle cftime.datetime comparison, which - # doesn't return NotImplemented on failure in some versions of the - # library - try: - result = operator_method(me, other) - except TypeError: - rop = { - operator.lt: operator.gt, - operator.gt: operator.lt, - operator.le: operator.ge, - operator.ge: operator.le, - }[operator_method] - result = rop(other, me) + result = operator_method(me, other) return result @@ -1892,7 +1879,22 @@ def cells(self): ... 
""" - return _CellIterator(self) + if self.ndim != 1: + raise iris.exceptions.CoordinateMultiDimError(self) + + points = self.points + bounds = self.bounds + if self.units.is_time_reference(): + points = self.units.num2date(points) + if self.has_bounds(): + bounds = self.units.num2date(bounds) + + if self.has_bounds(): + for point, bound in zip(points, bounds): + yield Cell(point, bound) + else: + for point in points: + yield Cell(point) def _sanity_check_bounds(self): if self.ndim == 1: @@ -2212,12 +2214,24 @@ def serialize(x): "Metadata may not be fully descriptive for {!r}." ) warnings.warn(msg.format(self.name())) - elif not self.is_contiguous(): - msg = ( - "Collapsing a non-contiguous coordinate. " - "Metadata may not be fully descriptive for {!r}." - ) - warnings.warn(msg.format(self.name())) + else: + try: + self._sanity_check_bounds() + except ValueError as exc: + msg = ( + "Cannot check if coordinate is contiguous: {} " + "Metadata may not be fully descriptive for {!r}. " + "Ignoring bounds." + ) + warnings.warn(msg.format(str(exc), self.name())) + self.bounds = None + else: + if not self.is_contiguous(): + msg = ( + "Collapsing a non-contiguous coordinate. " + "Metadata may not be fully descriptive for {!r}." + ) + warnings.warn(msg.format(self.name())) if self.has_bounds(): item = self.core_bounds() @@ -2367,18 +2381,16 @@ def intersect(self, other, return_indices=False): ) raise ValueError(msg) - # Cache self.cells for speed. We can also use the index operation on a - # list conveniently. - self_cells = [cell for cell in self.cells()] + # Cache self.cells for speed. We can also use the dict for fast index + # lookup. + self_cells = {cell: idx for idx, cell in enumerate(self.cells())} # Maintain a list of indices on self for which cells exist in both self # and other. 
self_intersect_indices = [] for cell in other.cells(): - try: - self_intersect_indices.append(self_cells.index(cell)) - except ValueError: - pass + if cell in self_cells: + self_intersect_indices.append(self_cells[cell]) if return_indices is False and self_intersect_indices == []: raise ValueError( @@ -2440,7 +2452,9 @@ def nearest_neighbour_index(self, point): if self.has_bounds(): # make bounds ranges complete+separate, so point is in at least one increasing = self.bounds[0, 1] > self.bounds[0, 0] - bounds = bounds.copy() + # identify data type that bounds and point can safely cast to + dtype = np.result_type(bounds, point) + bounds = bounds.astype(dtype) # sort the bounds cells by their centre values sort_inds = np.argsort(np.mean(bounds, axis=1)) bounds = bounds[sort_inds] @@ -2519,6 +2533,10 @@ def _xml_id_extra(self, unique_value): return unique_value +_regular_points = lru_cache(iris.util.regular_points) +"""Caching version of iris.util.regular_points""" + + class DimCoord(Coord): """ A coordinate that is 1D, and numeric, with values that have a strict monotonic ordering. Missing values are not @@ -2566,12 +2584,9 @@ def from_regular( bounds values will be defined. Defaults to False. """ - points = (zeroth + step) + step * np.arange(count, dtype=np.float32) - _, regular = iris.util.points_step(points) - if not regular: - points = (zeroth + step) + step * np.arange( - count, dtype=np.float64 - ) + # Use lru_cache because this is done repeatedly with the same arguments + # (particularly in field-based file loading). + points = _regular_points(zeroth, step, count).copy() points.flags.writeable = False if with_bounds: @@ -2803,6 +2818,10 @@ def _new_bounds_requirements(self, bounds): * bounds are not masked, and * bounds are monotonic in the first dimension. + Also reverse the order of the second dimension if necessary to match the + first dimension's direction. I.e. both should increase or both should + decrease. 
+ """ # Ensure the bounds are a compatible shape. if self.shape != bounds.shape[:-1] and not ( @@ -2827,7 +2846,6 @@ def _new_bounds_requirements(self, bounds): n_bounds = bounds.shape[-1] n_points = bounds.shape[0] if n_points > 1: - directions = set() for b_index in range(n_bounds): monotonic, direction = iris.util.monotonic( @@ -2852,6 +2870,16 @@ def _new_bounds_requirements(self, bounds): emsg.format(self.name(), self.__class__.__name__) ) + if n_bounds == 2: + # Make ordering of bounds consistent with coord's direction + # if possible. + (direction,) = directions + diffs = bounds[:, 0] - bounds[:, 1] + if np.all(np.sign(diffs) == direction): + bounds = np.flip(bounds, axis=1) + + return bounds + @Coord.bounds.setter def bounds(self, bounds): if bounds is not None: @@ -2860,8 +2888,9 @@ def bounds(self, bounds): # Make sure we have an array (any type of array). bounds = np.asanyarray(bounds) - # Check validity requirements for dimension-coordinate bounds. - self._new_bounds_requirements(bounds) + # Check validity requirements for dimension-coordinate bounds and reverse + # trailing dimension if necessary. + bounds = self._new_bounds_requirements(bounds) # Cast to a numpy array for masked arrays with no mask. bounds = np.array(bounds) @@ -3104,22 +3133,6 @@ def xml_element(self, doc): return cellMethod_xml_element -# See Coord.cells() for the description/context. -class _CellIterator(Iterator): - def __init__(self, coord): - self._coord = coord - if coord.ndim != 1: - raise iris.exceptions.CoordinateMultiDimError(coord) - self._indices = iter(range(coord.shape[0])) - - def __next__(self): - # NB. When self._indices runs out it will raise StopIteration for us. - i = next(self._indices) - return self._coord.cell(i) - - next = __next__ - - # See ExplicitCoord._group() for the description/context. 
class _GroupIterator(Iterator): def __init__(self, points): diff --git a/lib/iris/cube.py b/lib/iris/cube.py index b456bd9663..abe37c35fb 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -152,19 +152,13 @@ class CubeList(list): """ - def __new__(cls, list_of_cubes=None): - """Given a :class:`list` of cubes, return a CubeList instance.""" - cube_list = list.__new__(cls, list_of_cubes) - - # Check that all items in the incoming list are cubes. Note that this - # checking does not guarantee that a CubeList instance *always* has - # just cubes in its list as the append & __getitem__ methods have not - # been overridden. - if not all([isinstance(cube, Cube) for cube in cube_list]): - raise ValueError( - "All items in list_of_cubes must be Cube " "instances." - ) - return cube_list + def __init__(self, *args, **kwargs): + """Given an iterable of cubes, return a CubeList instance.""" + # Do whatever a list does, to initialise ourself "as a list" + super().__init__(*args, **kwargs) + # Check that all items in the list are cubes. + for cube in self: + self._assert_is_cube(cube) def __str__(self): """Runs short :meth:`Cube.summary` on every cube.""" @@ -182,6 +176,15 @@ def __repr__(self): """Runs repr on every cube.""" return "[%s]" % ",\n".join([repr(cube) for cube in self]) + @staticmethod + def _assert_is_cube(obj): + if not hasattr(obj, "add_aux_coord"): + msg = ( + r"Object {obj} cannot be put in a cubelist, " + "as it is not a Cube." + ) + raise ValueError(msg) + def _repr_html_(self): from iris.experimental.representation import CubeListRepresentation @@ -189,6 +192,7 @@ def _repr_html_(self): return representer.repr_html() # TODO #370 Which operators need overloads? + def __add__(self, other): return CubeList(list.__add__(self, other)) @@ -210,6 +214,48 @@ def __getslice__(self, start, stop): result = CubeList(result) return result + def __iadd__(self, other_cubes): + """ + Add a sequence of cubes to the cubelist in place. 
+ """ + return super(CubeList, self).__iadd__(CubeList(other_cubes)) + + def __setitem__(self, key, cube_or_sequence): + """Set self[key] to cube or sequence of cubes""" + if isinstance(key, int): + # should have single cube. + self._assert_is_cube(cube_or_sequence) + else: + # key is a slice (or exception will come from list method). + cube_or_sequence = CubeList(cube_or_sequence) + + super(CubeList, self).__setitem__(key, cube_or_sequence) + + def append(self, cube): + """ + Append a cube. + """ + self._assert_is_cube(cube) + super(CubeList, self).append(cube) + + def extend(self, other_cubes): + """ + Extend cubelist by appending the cubes contained in other_cubes. + + Args: + + * other_cubes: + A cubelist or other sequence of cubes. + """ + super(CubeList, self).extend(CubeList(other_cubes)) + + def insert(self, index, cube): + """ + Insert a cube before index. + """ + self._assert_is_cube(cube) + super(CubeList, self).insert(index, cube) + def xml(self, checksum=False, order=True, byteorder=True): """Return a string of the XML that this list of cubes represents.""" @@ -838,7 +884,8 @@ def __init__( This object defines the shape of the cube and the phenomenon value in each cell. - ``data`` can be a dask array, a NumPy array, a NumPy array + ``data`` can be a :class:`dask.array.Array`, a + :class:`numpy.ndarray`, a NumPy array subclass (such as :class:`numpy.ma.MaskedArray`), or array_like (as described in :func:`numpy.asarray`). @@ -972,6 +1019,30 @@ def _names(self): """ return self._metadata_manager._names + def _dimensional_metadata(self, name_or_dimensional_metadata): + """ + Return a single _DimensionalMetadata instance that matches the given + name_or_dimensional_metadata. If one is not found, raise an error. 
+ + """ + found_item = None + for cube_method in [ + self.coord, + self.cell_measure, + self.ancillary_variable, + ]: + try: + found_item = cube_method(name_or_dimensional_metadata) + if found_item: + break + except KeyError: + pass + if not found_item: + raise KeyError( + f"{name_or_dimensional_metadata} was not found in {self}." + ) + return found_item + def is_compatible(self, other, ignore=None): """ Return whether the cube is compatible with another. @@ -1088,7 +1159,9 @@ def add_aux_coord(self, coord, data_dims=None): """ if self.coords(coord): # TODO: just fail on duplicate object - raise ValueError("Duplicate coordinates are not permitted.") + raise iris.exceptions.CannotAddError( + "Duplicate coordinates are not permitted." + ) self._add_unique_aux_coord(coord, data_dims) def _check_multi_dim_metadata(self, metadata, data_dims): @@ -1108,7 +1181,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims): len(data_dims), metadata.ndim, metadata.name() ) ) - raise ValueError(msg) + raise iris.exceptions.CannotAddError(msg) # Check compatibility with the shape of the data for i, dim in enumerate(data_dims): if metadata.shape[i] != self.shape[dim]: @@ -1116,7 +1189,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims): "Unequal lengths. Cube dimension {} => {};" " metadata {!r} dimension {} => {}." ) - raise ValueError( + raise iris.exceptions.CannotAddError( msg.format( dim, self.shape[dim], @@ -1128,7 +1201,7 @@ def _check_multi_dim_metadata(self, metadata, data_dims): elif metadata.shape != (1,): msg = "Missing data dimensions for multi-valued {} {!r}" msg = msg.format(metadata.__class__.__name__, metadata.name()) - raise ValueError(msg) + raise iris.exceptions.CannotAddError(msg) return data_dims def _add_unique_aux_coord(self, coord, data_dims): @@ -1142,7 +1215,7 @@ def _add_unique_aux_coord(self, coord, data_dims): "cube {item} of {ownval!r}." 
) if coord.mesh != mesh: - raise ValueError( + raise iris.exceptions.CannotAddError( msg.format( item="mesh", coord=coord, @@ -1152,7 +1225,7 @@ def _add_unique_aux_coord(self, coord, data_dims): ) location = self.location if coord.location != location: - raise ValueError( + raise iris.exceptions.CannotAddError( msg.format( item="location", coord=coord, @@ -1162,7 +1235,7 @@ def _add_unique_aux_coord(self, coord, data_dims): ) mesh_dims = (self.mesh_dim(),) if data_dims != mesh_dims: - raise ValueError( + raise iris.exceptions.CannotAddError( msg.format( item="mesh dimension", coord=coord, @@ -1188,12 +1261,23 @@ def add_aux_factory(self, aux_factory): "Factory must be a subclass of " "iris.aux_factory.AuxCoordFactory." ) - cube_coords = self.coords() + + # Get all 'real' coords (i.e. not derived ones) : use private data + # rather than cube.coords(), as that is quite slow. + def coordsonly(coords_and_dims): + return [coord for coord, dims in coords_and_dims] + + cube_coords = coordsonly(self._dim_coords_and_dims) + coordsonly( + self._aux_coords_and_dims + ) + for dependency in aux_factory.dependencies: ref_coord = aux_factory.dependencies[dependency] if ref_coord is not None and ref_coord not in cube_coords: msg = "{} coordinate for factory is not present on cube {}" - raise ValueError(msg.format(ref_coord.name(), self.name())) + raise iris.exceptions.CannotAddError( + msg.format(ref_coord.name(), self.name()) + ) self._aux_factories.append(aux_factory) def add_cell_measure(self, cell_measure, data_dims=None): @@ -1220,7 +1304,9 @@ def add_cell_measure(self, cell_measure, data_dims=None): """ if self.cell_measures(cell_measure): - raise ValueError("Duplicate cell_measures are not permitted.") + raise iris.exceptions.CannotAddError( + "Duplicate cell_measures are not permitted." 
+ ) data_dims = self._check_multi_dim_metadata(cell_measure, data_dims) self._cell_measures_and_dims.append((cell_measure, data_dims)) self._cell_measures_and_dims.sort( @@ -1248,7 +1334,9 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): """ if self.ancillary_variables(ancillary_variable): - raise ValueError("Duplicate ancillary variables not permitted") + raise iris.exceptions.CannotAddError( + "Duplicate ancillary variables not permitted" + ) data_dims = self._check_multi_dim_metadata( ancillary_variable, data_dims @@ -1279,13 +1367,13 @@ def add_dim_coord(self, dim_coord, data_dim): """ if self.coords(dim_coord): - raise ValueError( + raise iris.exceptions.CannotAddError( "The coordinate already exists on the cube. " "Duplicate coordinates are not permitted." ) # Check dimension is available if self.coords(dimensions=data_dim, dim_coords=True): - raise ValueError( + raise iris.exceptions.CannotAddError( "A dim_coord is already associated with " "dimension %d." % data_dim ) @@ -1293,12 +1381,14 @@ def add_dim_coord(self, dim_coord, data_dim): def _add_unique_dim_coord(self, dim_coord, data_dim): if isinstance(dim_coord, iris.coords.AuxCoord): - raise ValueError("The dim_coord may not be an AuxCoord instance.") + raise iris.exceptions.CannotAddError( + "The dim_coord may not be an AuxCoord instance." + ) # Convert data_dim to a single integer if isinstance(data_dim, Container): if len(data_dim) != 1: - raise ValueError( + raise iris.exceptions.CannotAddError( "The supplied data dimension must be a" " single number." 
) data_dim = int(list(data_dim)[0]) @@ -1307,7 +1397,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim): # Check data_dim value is valid if data_dim < 0 or data_dim >= self.ndim: - raise ValueError( + raise iris.exceptions.CannotAddError( "The cube does not have the specified dimension " "(%d)" % data_dim ) @@ -1315,7 +1405,7 @@ def _add_unique_dim_coord(self, dim_coord, data_dim): # Check compatibility with the shape of the data if dim_coord.shape[0] != self.shape[data_dim]: msg = "Unequal lengths. Cube dimension {} => {}; coord {!r} => {}." - raise ValueError( + raise iris.exceptions.CannotAddError( msg.format( data_dim, self.shape[data_dim], @@ -1911,6 +2001,12 @@ def coord( if name_or_coord is not None: if not isinstance(name_or_coord, str): _name = name_or_coord.name() + emsg = ( + "Expected to find exactly 1 coordinate matching the given " + f"{_name!r} coordinate's metadata, but found none." + ) + raise iris.exceptions.CoordinateNotFoundError(emsg) + bad_name = _name or standard_name or long_name or "" emsg = ( f"Expected to find exactly 1 {bad_name!r} coordinate, " @@ -2115,9 +2211,15 @@ def cell_measure(self, name_or_cell_measure=None): bad_name = ( name_or_cell_measure and name_or_cell_measure.name() ) or "" + if name_or_cell_measure is not None: + emsg = ( + "Expected to find exactly 1 cell measure matching the given " + f"{bad_name!r} cell measure's metadata, but found none." + ) + raise iris.exceptions.CellMeasureNotFoundError(emsg) msg = ( - "Expected to find exactly 1 %s cell_measure, but found " - "none." % bad_name + f"Expected to find exactly 1 {bad_name!r} cell measure, " + "but found none." 
) raise iris.exceptions.CellMeasureNotFoundError(msg) @@ -2202,9 +2304,16 @@ def ancillary_variable(self, name_or_ancillary_variable=None): name_or_ancillary_variable and name_or_ancillary_variable.name() ) or "" + if name_or_ancillary_variable is not None: + emsg = ( + "Expected to find exactly 1 ancillary_variable matching the " + f"given {bad_name!r} ancillary_variable's metadata, but found " + "none." + ) + raise iris.exceptions.AncillaryVariableNotFoundError(emsg) msg = ( - "Expected to find exactly 1 {!s} ancillary_variable, but " - "found none.".format(bad_name) + f"Expected to find exactly 1 {bad_name!r} ancillary_variable, " + "but found none." ) raise iris.exceptions.AncillaryVariableNotFoundError(msg) @@ -2220,10 +2329,23 @@ def cell_methods(self): return self._metadata_manager.cell_methods @cell_methods.setter - def cell_methods(self, cell_methods): - self._metadata_manager.cell_methods = ( - tuple(cell_methods) if cell_methods else tuple() - ) + def cell_methods(self, cell_methods: Iterable): + if not cell_methods: + # For backwards compatibility: Empty or null value is equivalent to (). + cell_methods = () + else: + # Can supply any iterable, which is converted (copied) to a tuple. + cell_methods = tuple(cell_methods) + for cell_method in cell_methods: + # All contents should be CellMethods. Requiring class membership is + # somewhat non-Pythonic, but simple, and not a problem for now. + if not isinstance(cell_method, iris.coords.CellMethod): + msg = ( + f"Cube.cell_methods assigned value includes {cell_method}, " + "which is not an iris.coords.CellMethod." 
+ ) + raise ValueError(msg) + self._metadata_manager.cell_methods = cell_methods def core_data(self): """ @@ -2566,7 +2688,6 @@ def subset(self, coord): coord_to_extract in self.aux_coords and len(coord_to_extract.points) == 1 ): - # Default to returning None result = None @@ -3713,6 +3834,10 @@ def collapsed(self, coords, aggregator, **kwargs): for coord in coords: dims_to_collapse.update(self.coord_dims(coord)) + if aggregator.name() == "max_run" and len(dims_to_collapse) > 1: + msg = "Not possible to calculate runs over more than one dimension" + raise ValueError(msg) + if not dims_to_collapse: msg = ( "Cannot collapse a dimension which does not describe any " @@ -3818,6 +3943,7 @@ def collapsed(self, coords, aggregator, **kwargs): data_result = aggregator.aggregate( unrolled_data, axis=-1, **kwargs ) + aggregator.update_metadata( collapsed_cube, coords, axis=collapse_axis, **kwargs ) @@ -3826,36 +3952,53 @@ def collapsed(self, coords, aggregator, **kwargs): ) return result - def aggregated_by(self, coords, aggregator, **kwargs): + def aggregated_by( + self, coords, aggregator, climatological=False, **kwargs + ): """ - Perform aggregation over the cube given one or more "group - coordinates". + Perform aggregation over the cube given one or more "group coordinates". A "group coordinate" is a coordinate where repeating values represent a - single group, such as a month coordinate on a daily time slice. - Repeated values will form a group even if they are not consecutive. + single group, such as a month coordinate on a daily time slice. Repeated + values will form a group even if they are not consecutive. The group coordinates must all be over the same cube dimension. Each common value group identified over all the group-by coordinates is collapsed using the provided aggregator. - Args: + Weighted aggregations (:class:`iris.analysis.WeightedAggregator`) may + also be supplied. These include :data:`~iris.analysis.MEAN` and + :data:`~iris.analysis.SUM`. 
+ + Weighted aggregations support an optional *weights* keyword argument. If + set, this should be supplied as an array of weights whose shape matches + the cube or as 1D array whose length matches the dimension over which is + aggregated. - * coords (list of coord names or :class:`iris.coords.Coord` instances): + Parameters + ---------- + coords : (list of coord names or :class:`iris.coords.Coord` instances) One or more coordinates over which group aggregation is to be performed. - * aggregator (:class:`iris.analysis.Aggregator`): + aggregator : :class:`iris.analysis.Aggregator` Aggregator to be applied to each group. - - Kwargs: - - * kwargs: + climatological : bool + Indicates whether the output is expected to be climatological. For + any aggregated time coord(s), this causes the climatological flag to + be set and the point for each cell to equal its first bound, thereby + preserving the time of year. + + Returns + ------- + :class:`iris.cube.Cube` + + Other Parameters + ---------------- + kwargs: Aggregator and aggregation function keyword arguments. - Returns: - :class:`iris.cube.Cube`. - - For example: + Examples + -------- >>> import iris >>> import iris.analysis @@ -3892,14 +4035,6 @@ def aggregated_by(self, coords, aggregator, **kwargs): groupby_coords = [] dimension_to_groupby = None - # We can't handle weights - if isinstance( - aggregator, iris.analysis.WeightedAggregator - ) and aggregator.uses_weighting(**kwargs): - raise ValueError( - "Invalid Aggregation, aggregated_by() cannot use" " weights." - ) - coords = self._as_list_of_coords(coords) for coord in sorted(coords, key=lambda coord: coord.metadata): if coord.ndim > 1: @@ -3922,12 +4057,38 @@ def aggregated_by(self, coords, aggregator, **kwargs): raise iris.exceptions.CoordinateCollapseError(msg) groupby_coords.append(coord) + # Check shape of weights. 
These must either match the shape of the cube + # or be 1D (in this case, their length must be equal to the length of the + # dimension we are aggregating over). + weights = kwargs.get("weights") + return_weights = kwargs.get("returned", False) + if weights is not None: + if weights.ndim == 1: + if len(weights) != self.shape[dimension_to_groupby]: + raise ValueError( + f"1D weights must have the same length as the dimension " + f"that is aggregated, got {len(weights):d}, expected " + f"{self.shape[dimension_to_groupby]:d}" + ) + weights = iris.util.broadcast_to_shape( + weights, + self.shape, + (dimension_to_groupby,), + ) + if weights.shape != self.shape: + raise ValueError( + f"Weights must either be 1D or have the same shape as the " + f"cube, got shape {weights.shape} for weights, " + f"{self.shape} for cube" + ) + # Determine the other coordinates that share the same group-by # coordinate dimension. shared_coords = list( filter( - lambda coord_: coord_ not in groupby_coords, - self.coords(contains_dimension=dimension_to_groupby), + lambda coord_: coord_ not in groupby_coords + and dimension_to_groupby in self.coord_dims(coord_), + self.dim_coords + self.aux_coords, ) ) @@ -3941,7 +4102,9 @@ def aggregated_by(self, coords, aggregator, **kwargs): # Create the aggregation group-by instance. groupby = iris.analysis._Groupby( - groupby_coords, shared_coords_and_dims + groupby_coords, + shared_coords_and_dims, + climatological=climatological, ) # Create the resulting aggregate-by cube and remove the original @@ -3957,6 +4120,11 @@ def aggregated_by(self, coords, aggregator, **kwargs): for coord in groupby_coords + shared_coords: aggregateby_cube.remove_coord(coord) + coord_mapping = {} + for coord in aggregateby_cube.coords(): + orig_id = id(self.coord(coord)) + coord_mapping[orig_id] = coord + # Determine the group-by cube data shape. 
data_shape = list(self.shape + aggregator.aggregate_shape(**kwargs)) data_shape[dimension_to_groupby] = len(groupby) @@ -3967,16 +4135,41 @@ def aggregated_by(self, coords, aggregator, **kwargs): back_slice = (slice(None, None),) * ( len(data_shape) - dimension_to_groupby - 1 ) + + # Create cube and weights slices groupby_subcubes = map( lambda groupby_slice: self[ front_slice + (groupby_slice,) + back_slice ].lazy_data(), groupby.group(), ) - agg = partial( + if weights is not None: + groupby_subweights = map( + lambda groupby_slice: weights[ + front_slice + (groupby_slice,) + back_slice + ], + groupby.group(), + ) + else: + groupby_subweights = (None for _ in range(len(groupby))) + + agg = iris.analysis.create_weighted_aggregator_fn( aggregator.lazy_aggregate, axis=dimension_to_groupby, **kwargs ) - result = list(map(agg, groupby_subcubes)) + result = list(map(agg, groupby_subcubes, groupby_subweights)) + + # If weights are returned, "result" is a list of tuples (each tuple + # contains two elements; the first is the aggregated data, the + # second is the aggregated weights). Convert these to two lists + # (one for the aggregated data and one for the aggregated weights) + # before combining the different slices. + if return_weights: + result, weights_result = list(zip(*result)) + aggregateby_weights = da.stack( + weights_result, axis=dimension_to_groupby + ) + else: + aggregateby_weights = None aggregateby_data = da.stack(result, axis=dimension_to_groupby) else: cube_slice = [slice(None, None)] * len(data_shape) @@ -3985,13 +4178,23 @@ def aggregated_by(self, coords, aggregator, **kwargs): # sub-cube. cube_slice[dimension_to_groupby] = groupby_slice groupby_sub_cube = self[tuple(cube_slice)] + + # Slice the weights + if weights is not None: + groupby_sub_weights = weights[tuple(cube_slice)] + kwargs["weights"] = groupby_sub_weights + # Perform the aggregation over the group-by sub-cube and - # repatriate the aggregated data into the aggregate-by - # cube data. 
- cube_slice[dimension_to_groupby] = i + # repatriate the aggregated data into the aggregate-by cube + # data. If weights are also returned, handle them separately. result = aggregator.aggregate( groupby_sub_cube.data, axis=dimension_to_groupby, **kwargs ) + if return_weights: + weights_result = result[1] + result = result[0] + else: + weights_result = None # Determine aggregation result data type for the aggregate-by # cube data on first pass. @@ -4004,7 +4207,20 @@ def aggregated_by(self, coords, aggregator, **kwargs): aggregateby_data = np.zeros( data_shape, dtype=result.dtype ) + if weights_result is not None: + aggregateby_weights = np.zeros( + data_shape, dtype=weights_result.dtype + ) + else: + aggregateby_weights = None + cube_slice[dimension_to_groupby] = i aggregateby_data[tuple(cube_slice)] = result + if weights_result is not None: + aggregateby_weights[tuple(cube_slice)] = weights_result + + # Restore original weights. + if weights is not None: + kwargs["weights"] = weights # Add the aggregation meta data to the aggregate-by cube. aggregator.update_metadata( @@ -4015,22 +4231,41 @@ def aggregated_by(self, coords, aggregator, **kwargs): dimensions=dimension_to_groupby, dim_coords=True ) or [None] for coord in groupby.coords: + new_coord = coord.copy() + + # The metadata may have changed (e.g. 
climatology), so check if + # there's a better coord to pass to self.coord_dims + lookup_coord = coord + for ( + cube_coord, + groupby_coord, + ) in groupby.coord_replacement_mapping: + if coord == groupby_coord: + lookup_coord = cube_coord + if ( dim_coord is not None - and dim_coord.metadata == coord.metadata + and dim_coord.metadata == lookup_coord.metadata and isinstance(coord, iris.coords.DimCoord) ): - aggregateby_cube.add_dim_coord( - coord.copy(), dimension_to_groupby - ) + aggregateby_cube.add_dim_coord(new_coord, dimension_to_groupby) else: aggregateby_cube.add_aux_coord( - coord.copy(), self.coord_dims(coord) + new_coord, self.coord_dims(lookup_coord) ) + coord_mapping[id(self.coord(lookup_coord))] = new_coord + + aggregateby_cube._aux_factories = [] + for factory in self.aux_factories: + aggregateby_cube.add_aux_factory(factory.updated(coord_mapping)) # Attach the aggregate-by data into the aggregate-by cube. + if aggregateby_weights is None: + data_result = aggregateby_data + else: + data_result = (aggregateby_data, aggregateby_weights) aggregateby_cube = aggregator.post_process( - aggregateby_cube, aggregateby_data, coords, **kwargs + aggregateby_cube, data_result, coords, **kwargs ) return aggregateby_cube @@ -4247,6 +4482,8 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): interpolate. The values for coordinates that correspond to dates or times may optionally be supplied as datetime.datetime or cftime.datetime instances. + The N pairs supplied will be used to create an N-d grid of points + that will then be sampled (rather than just N points). * scheme: An instance of the type of interpolation to use to interpolate from this :class:`~iris.cube.Cube` to the given sample points. 
The @@ -4277,7 +4514,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (time: 3; model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(cube.coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [2009-11-19 10:00:00, 2009-11-19 11:00:00, 2009-11-19 12:00:00] shape: (3,) dtype: float64 @@ -4290,7 +4527,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result.coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [2009-11-19 10:30:00] shape: (1,) dtype: float64 @@ -4305,7 +4542,7 @@ def interpolate(self, sample_points, scheme, collapse_scalar=True): air_potential_temperature / (K) \ (model_level_number: 7; grid_latitude: 204; grid_longitude: 187) >>> print(result2.coord('time')) - DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) + DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [2009-11-19 10:30:00] shape: (1,) dtype: float64 diff --git a/lib/iris/exceptions.py b/lib/iris/exceptions.py index 12d24ef70f..5d3da3349e 100644 --- a/lib/iris/exceptions.py +++ b/lib/iris/exceptions.py @@ -174,3 +174,9 @@ class UnitConversionError(IrisError): """Raised when Iris is unable to convert a unit.""" pass + + +class CannotAddError(ValueError): + """Raised when an object (e.g. 
coord) cannot be added to a :class:`~iris.cube.Cube`.""" + + pass diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index fb2e2af590..1b6c2d46be 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -6,118 +6,32 @@ """ Wrapper for animating iris cubes using iris or matplotlib plotting functions -""" - -import warnings +Notes +----- +.. deprecated:: 3.4.0 -import matplotlib.animation as animation -import matplotlib.pyplot as plt +``iris.experimental.animate.animate()`` has been moved to +:func:`iris.plot.animate`. This module will therefore be removed in a future +release. -import iris +""" def animate(cube_iterator, plot_func, fig=None, **kwargs): """ Animates the given cube iterator. - Args: - - * cube_iterator (iterable of :class:`iris.cube.Cube` objects): - Each animation frame corresponds to each :class:`iris.cube.Cube` - object. See :meth:`iris.cube.Cube.slices`. - - * plot_func (:mod:`iris.plot` or :mod:`iris.quickplot` plotting function): - Plotting function used to animate. Must accept the signature - ``plot_func(cube, vmin=vmin, vmax=vmax, coords=coords)``. - :func:`~iris.plot.contourf`, :func:`~iris.plot.contour`, - :func:`~iris.plot.pcolor` and :func:`~iris.plot.pcolormesh` - all conform to this signature. - - Kwargs: - - * fig (:class:`matplotlib.figure.Figure` instance): - By default, the current figure will be used or a new figure instance - created if no figure is available. See :func:`matplotlib.pyplot.gcf`. - - * coords (list of :class:`~iris.coords.Coord` objects or coordinate names): - Use the given coordinates as the axes for the plot. The order of the - given coordinates indicates which axis to use for each, where the first - element is the horizontal axis of the plot and the second element is - the vertical axis of the plot. - - * interval (int, float or long): - Defines the time interval in milliseconds between successive frames. - A default interval of 100ms is set. 
- - * vmin, vmax (int, float or long): - Color scaling values, see :class:`matplotlib.colors.Normalize` for - further details. Default values are determined by the min-max across - the data set over the entire sequence. - - See :class:`matplotlib.animation.FuncAnimation` for details of other valid - keyword arguments. + Warnings + -------- + This function is now **disabled**. - Returns: - :class:`~matplotlib.animation.FuncAnimation` object suitable for - saving and or plotting. - - For example, to animate along a set of cube slices:: - - cube_iter = cubes.slices(('grid_longitude', 'grid_latitude')) - ani = animate(cube_iter, qplt.contourf) - plt.show() + The functionality has been moved to :func:`iris.plot.animate`. """ - kwargs.setdefault("interval", 100) - coords = kwargs.pop("coords", None) - - if fig is None: - fig = plt.gcf() - - def update_animation_iris(i, cubes, vmin, vmax, coords): - # Clearing the figure is currently necessary for compatibility with - # the iris quickploting module - due to the colorbar. - plt.gcf().clf() - plot_func(cubes[i], vmin=vmin, vmax=vmax, coords=coords) - - # Turn cube iterator into a list to determine plot ranges. - # NOTE: we check that we are not providing a cube as this has a deprecated - # iter special method. - if hasattr(cube_iterator, "__iter__") and not isinstance( - cube_iterator, iris.cube.Cube - ): - cubes = iris.cube.CubeList(cube_iterator) - else: - msg = "iterable type object required for animation, {} given".format( - type(cube_iterator) - ) - raise TypeError(msg) - - supported = ["iris.plot", "iris.quickplot"] - if plot_func.__module__ not in supported: - msg = ( - 'Given plotting module "{}" may not be supported, intended ' - "use: {}." 
- ) - msg = msg.format(plot_func.__module__, supported) - warnings.warn(msg, UserWarning) - - supported = ["contour", "contourf", "pcolor", "pcolormesh"] - if plot_func.__name__ not in supported: - msg = ( - 'Given plotting function "{}" may not be supported, intended ' - "use: {}." - ) - msg = msg.format(plot_func.__name__, supported) - warnings.warn(msg, UserWarning) - - # Determine plot range. - vmin = kwargs.pop("vmin", min([cc.data.min() for cc in cubes])) - vmax = kwargs.pop("vmax", max([cc.data.max() for cc in cubes])) - - update = update_animation_iris - frames = range(len(cubes)) - - return animation.FuncAnimation( - fig, update, frames=frames, fargs=(cubes, vmin, vmax, coords), **kwargs + msg = ( + "The function 'iris.experimental.animate.animate()' has been moved, " + "and is now at 'iris.plot.animate()'.\n" + "Please replace 'iris.experimental.animate.animate' with " + "'iris.plot.animate'." ) + raise Exception(msg) diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index bfa048ddf0..fdc23c7bc4 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -17,13 +17,15 @@ """ +import functools + import cartopy.crs as ccrs import numpy as np import iris from iris._deprecation import warn_deprecated from iris.analysis._interpolation import get_xy_dim_coords -from iris.analysis._regrid import RectilinearRegridder +from iris.analysis._regrid import RectilinearRegridder, _create_cube from iris.util import _meshgrid wmsg = ( @@ -329,16 +331,23 @@ def _valid_units(coord): # Return result as a new cube based on the source. # TODO: please tidy this interface !!! 
- return RectilinearRegridder._create_cube( - fullcube_data, - src=source_cube, - x_dim=src_dims_xy[0], - y_dim=src_dims_xy[1], + _regrid_callback = functools.partial( + RectilinearRegridder._regrid, src_x_coord=src_coords[0], src_y_coord=src_coords[1], - grid_x_coord=dst_coords[0], - grid_y_coord=dst_coords[1], sample_grid_x=sample_grid_x, sample_grid_y=sample_grid_y, - regrid_callback=RectilinearRegridder._regrid, + ) + + def regrid_callback(*args, **kwargs): + _data, dims = args + return _regrid_callback(_data, *dims, **kwargs) + + return _create_cube( + fullcube_data, + source_cube, + [src_dims_xy[0], src_dims_xy[1]], + [dst_coords[0], dst_coords[1]], + 2, + regrid_callback, ) diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 48e11e1fb0..116b340592 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -85,28 +85,32 @@ def __init__(self, cube): self.cube_id = id(self.cube) self.cube_str = escape(str(self.cube)) - self.str_headings = { - "Dimension coordinates:": None, - "Auxiliary coordinates:": None, - "Mesh coordinates:": None, - "Derived coordinates:": None, - "Cell measures:": None, - "Ancillary variables:": None, - "Scalar coordinates:": None, - "Scalar cell measures:": None, - "Cell methods:": None, - "Attributes:": None, - } - self.dim_desc_coords = [ + # Define the expected vector and scalar sections in output, in expected + # order of appearance. + # NOTE: if we recoded this to use a CubeSummary, these section titles + # would be available from that. 
+ self.vector_section_names = [ "Dimension coordinates:", - "Auxiliary coordinates:", "Mesh coordinates:", + "Auxiliary coordinates:", "Derived coordinates:", "Cell measures:", "Ancillary variables:", ] - - self.two_cell_headers = ["Scalar coordinates:", "Attributes:"] + self.scalar_section_names = [ + "Mesh:", + "Scalar coordinates:", + "Scalar cell measures:", + "Cell methods:", + "Attributes:", + ] + self.sections_data = { + name: None + for name in self.vector_section_names + self.scalar_section_names + } + # 'Scalar-cell-measures' is currently alone amongst the scalar sections, + # in displaying only a 'name' and no 'value' field. + self.single_cell_section_names = ["Scalar cell measures:"] # Important content that summarises a cube is defined here. self.shapes = self.cube.shape @@ -160,7 +164,7 @@ def _get_bits(self, bits): # Get heading indices within the printout. start_inds = [] - for hdg in self.str_headings.keys(): + for hdg in self.sections_data.keys(): heading = "{}{}".format(left_indent, hdg) try: start_ind = bits.index(heading) @@ -178,7 +182,7 @@ def _get_bits(self, bits): content = bits[i0 + 1 : i1] else: content = bits[i0 + 1 :] - self.str_headings[str_heading_name] = content + self.sections_data[str_heading_name] = content def _make_header(self): """ @@ -272,47 +276,29 @@ def _make_row(self, title, body=None, col_span=0): row.append("") return row - def _expand_last_cell(self, element, body): - """Expand an element containing a cell by adding a new line.""" - split_point = element.index("") - element = element[:split_point] + "
" + body + element[split_point:] - return element - def _make_content(self): elements = [] - for k, v in self.str_headings.items(): + for k, v in self.sections_data.items(): if v is not None: # Add the sub-heading title. elements.extend(self._make_row(k)) for line in v: # Add every other row in the sub-heading. - if k in self.dim_desc_coords: + if k in self.vector_section_names: body = re.findall(r"[\w-]+", line) title = body.pop(0) colspan = 0 - elif k in self.two_cell_headers: - try: - split_point = line.index(":") - except ValueError: - # When a line exists in v without a ':', we expect - # that this is due to the value of some attribute - # containing multiple lines. We collect all these - # lines in the same cell. - body = line.strip() - # We choose the element containing the last cell - # in the last row. - element = elements[-2] - element = self._expand_last_cell(element, body) - elements[-2] = element - continue + else: + colspan = self.ndims + if k in self.single_cell_section_names: + title = line.strip() + body = "" else: + line = line.strip() + split_point = line.index(" ") title = line[:split_point].strip() body = line[split_point + 2 :].strip() - colspan = self.ndims - else: - title = line.strip() - body = "" - colspan = self.ndims + elements.extend( self._make_row(title, body=body, col_span=colspan) ) diff --git a/lib/iris/experimental/ugrid/load.py b/lib/iris/experimental/ugrid/load.py index 6c802e00d4..cfa3935991 100644 --- a/lib/iris/experimental/ugrid/load.py +++ b/lib/iris/experimental/ugrid/load.py @@ -8,8 +8,7 @@ Extensions to Iris' NetCDF loading to allow the construction of :class:`~iris.experimental.ugrid.mesh.Mesh`\\ es from UGRID data in the file. -Eventual destination: :mod:`iris.fileformats.netcdf` (plan to split that module -into ``load`` and ``save`` in future). +Eventual destination: :mod:`iris.fileformats.netcdf`. 
""" from contextlib import contextmanager @@ -19,8 +18,8 @@ from ...config import get_logger from ...coords import AuxCoord -from ...fileformats import netcdf from ...fileformats._nc_load_rules.helpers import get_attr_units, get_names +from ...fileformats.netcdf import loader as nc_loader from ...io import decode_uri, expand_filespecs from ...util import guess_coord_axis from .cf import ( @@ -202,7 +201,7 @@ def load_meshes(uris, var_name=None): else: handling_format_spec = FORMAT_AGENT.get_spec(source, None) - if handling_format_spec.handler == netcdf.load_cubes: + if handling_format_spec.handler == nc_loader.load_cubes: valid_sources.append(source) else: message = f"Ignoring non-NetCDF file: {source}" @@ -210,7 +209,8 @@ def load_meshes(uris, var_name=None): result = {} for source in valid_sources: - meshes_dict = _meshes_from_cf(CFUGridReader(source)) + with CFUGridReader(source) as cf_reader: + meshes_dict = _meshes_from_cf(cf_reader) meshes = list(meshes_dict.values()) if var_name is not None: meshes = list(filter(lambda m: m.var_name == var_name, meshes)) @@ -239,7 +239,7 @@ def _build_aux_coord(coord_var, file_path): assert isinstance(coord_var, CFUGridAuxiliaryCoordinateVariable) attributes = {} attr_units = get_attr_units(coord_var, attributes) - points_data = netcdf._get_cf_var_data(coord_var, file_path) + points_data = nc_loader._get_cf_var_data(coord_var, file_path) # Bounds will not be loaded: # Bounds may be present, but the UGRID conventions state this would @@ -293,7 +293,7 @@ def _build_connectivity(connectivity_var, file_path, element_dims): assert isinstance(connectivity_var, CFUGridConnectivityVariable) attributes = {} attr_units = get_attr_units(connectivity_var, attributes) - indices_data = netcdf._get_cf_var_data(connectivity_var, file_path) + indices_data = nc_loader._get_cf_var_data(connectivity_var, file_path) cf_role = connectivity_var.cf_role start_index = connectivity_var.start_index @@ -462,7 +462,7 @@ def _build_mesh(cf, mesh_var, 
file_path): ) mesh_elements = filter(None, mesh_elements) for iris_object in mesh_elements: - netcdf._add_unused_attributes( + nc_loader._add_unused_attributes( iris_object, cf.cf_group[iris_object.var_name] ) diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index 974a563046..0d566da73f 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -131,7 +131,7 @@ def __init__( Args: - * indices (numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array): + * indices (:class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array`): 2D array giving the topological connection relationship between :attr:`location` elements and :attr:`connected` elements. The :attr:`location_axis` dimension indexes over the @@ -501,7 +501,7 @@ def core_indices(self): NumPy array or a Dask array. Returns: - numpy.ndarray or numpy.ma.core.MaskedArray or dask.array.Array + :class:`numpy.ndarray` or :class:`numpy.ma.core.MaskedArray` or :class:`dask.array.Array` """ return super()._core_values() @@ -2841,16 +2841,60 @@ def __init__( # Get the 'coord identity' metadata from the relevant node-coordinate. node_coord = self.mesh.coord(include_nodes=True, axis=self.axis) + node_metadict = node_coord.metadata._asdict() + # Use node metadata, unless location is face/edge. + use_metadict = node_metadict.copy() + if location != "node": + # Location is either "edge" or "face" - get the relevant coord. + kwargs = {f"include_{location}s": True, "axis": axis} + location_coord = self.mesh.coord(**kwargs) + + # Take the MeshCoord metadata from the 'location' coord. + use_metadict = location_coord.metadata._asdict() + unit_unknown = Unit(None) + + # N.B. at present, coords in a Mesh are stored+accessed by 'axis', which + # means they must have a standard_name. So ... + # (a) the 'location' (face/edge) coord *always* has a useable phenomenon + # identity. 
+ # (b) we still want to check that location+node coords have the same + # phenomenon (i.e. physical meaning identity + units), **but** ... + # (c) we will accept/ignore some differences : not just "var_name", but + # also "long_name" *and* "attributes". So it is *only* "standard_name" + # and "units" that cause an error if they differ. + for key in ("standard_name", "units"): + bounds_value = use_metadict[key] + nodes_value = node_metadict[key] + if key == "units" and ( + bounds_value == unit_unknown or nodes_value == unit_unknown + ): + # Allow "any" unit to match no-units (for now) + continue + if bounds_value != nodes_value: + + def fix_repr(val): + # Tidy values appearance by converting Unit to string, and + # wrapping strings in '', but leaving other types as a + # plain str() representation. + if isinstance(val, Unit): + val = str(val) + if isinstance(val, str): + val = repr(val) + return val + + nodes_value, bounds_value = [ + fix_repr(val) for val in (nodes_value, bounds_value) + ] + msg = ( + f"Node coordinate {node_coord!r} disagrees with the " + f"{location} coordinate {location_coord!r}, " + f'in having a "{key}" value of {nodes_value} ' + f"instead of {bounds_value}." + ) + raise ValueError(msg) + # Call parent constructor to handle the common constructor args. - super().__init__( - points, - bounds=bounds, - standard_name=node_coord.standard_name, - long_name=node_coord.long_name, - var_name=None, # We *don't* "represent" the underlying node var - units=node_coord.units, - attributes=node_coord.attributes, - ) + super().__init__(points, bounds=bounds, **use_metadict) # Define accessors for MeshCoord-specific properties mesh/location/axis. # These are all read-only. @@ -3083,9 +3127,7 @@ def _construct_access_arrays(self): flat_inds_safe = al.where(missing_inds, 0, flat_inds_nomask) # Here's the core indexing operation. # The comma applies all inds-array values to the *first* dimension. 
- bounds = node_points[ - flat_inds_safe, - ] + bounds = node_points[flat_inds_safe,] # Fix 'missing' locations, and restore the proper shape. bounds = al.ma.masked_array(bounds, missing_inds) bounds = bounds.reshape(indices.shape) diff --git a/lib/iris/experimental/ugrid/metadata.py b/lib/iris/experimental/ugrid/metadata.py index ae0b787908..44bbe04fe9 100644 --- a/lib/iris/experimental/ugrid/metadata.py +++ b/lib/iris/experimental/ugrid/metadata.py @@ -53,6 +53,7 @@ def _combine_lenient(self, other): A list of combined metadata member values. """ + # Perform "strict" combination for "cf_role", "start_index", "location_axis". def func(field): left = getattr(self, field) @@ -113,6 +114,7 @@ def _difference_lenient(self, other): A list of difference metadata member values. """ + # Perform "strict" difference for "cf_role", "start_index", "location_axis". def func(field): left = getattr(self, field) @@ -233,6 +235,7 @@ def _difference_lenient(self, other): A list of difference metadata member values. """ + # Perform "strict" difference for "topology_dimension", # "node_dimension", "edge_dimension" and "face_dimension". def func(field): @@ -297,6 +300,7 @@ def _combine_lenient(self, other): A list of combined metadata member values. """ + # It is actually "strict" : return None except where members are equal. def func(field): left = getattr(self, field) @@ -352,6 +356,7 @@ def _difference_lenient(self, other): A list of different metadata member values. """ + # Perform "strict" difference for location / axis. def func(field): left = getattr(self, field) diff --git a/lib/iris/experimental/ugrid/save.py b/lib/iris/experimental/ugrid/save.py index 8a5934b939..3c42137905 100644 --- a/lib/iris/experimental/ugrid/save.py +++ b/lib/iris/experimental/ugrid/save.py @@ -8,8 +8,7 @@ Extensions to Iris' NetCDF saving to allow :class:`~iris.experimental.ugrid.mesh.Mesh` saving in UGRID format. 
-Eventual destination: :mod:`iris.fileformats.netcdf` (plan to split that module -into ``load`` and ``save`` in future). +Eventual destination: :mod:`iris.fileformats.netcdf`. """ from collections.abc import Iterable diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d286abbf3d..09237d3f11 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -18,7 +18,7 @@ 3) Iris-specific info is (still) stored in additional properties created on the engine object : - engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename + engine.cf_var, .cube, .cube_parts, .requires, .rules_triggered, .filename Our "rules" are just action routines. The top-level 'run_actions' routine decides which actions to call, based on the @@ -78,7 +78,7 @@ def inner(engine, *args, **kwargs): # but also may vary depending on whether it successfully # triggered, and if so what it matched. rule_name = _default_rulenamesfunc(func.__name__) - engine.rule_triggered.add(rule_name) + engine.rules_triggered.add(rule_name) func._rulenames_func = _default_rulenamesfunc return inner @@ -110,9 +110,13 @@ def action_default(engine): hh.build_transverse_mercator_coordinate_system, ), hh.CF_GRID_MAPPING_STEREO: ( - hh.has_supported_stereographic_parameters, + None, hh.build_stereographic_coordinate_system, ), + hh.CF_GRID_MAPPING_POLAR: ( + hh.has_supported_polar_stereographic_parameters, + hh.build_polar_stereographic_coordinate_system, + ), hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL: ( None, hh.build_lambert_conformal_coordinate_system, diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index a5b507d583..35163c47d5 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -13,13 +13,14 @@ build routines, and which it does not use. 
""" - import warnings import cf_units import numpy as np import numpy.ma as ma +import pyproj +import iris import iris.aux_factory from iris.common.mixin import _get_valid_standard_name import iris.coord_systems @@ -29,12 +30,14 @@ import iris.fileformats.netcdf from iris.fileformats.netcdf import ( UnknownCellMethodWarning, - _get_cf_var_data, parse_cell_methods, ) +from iris.fileformats.netcdf.loader import _get_cf_var_data import iris.std_names import iris.util +# TODO: should un-addable coords / cell measures / etcetera be skipped? iris#5068. + # # UD Units Constants (based on Unidata udunits.dat definition file) # @@ -131,6 +134,8 @@ CF_ATTR_BOUNDS = "bounds" CF_ATTR_CALENDAR = "calendar" CF_ATTR_CLIMATOLOGY = "climatology" +CF_ATTR_GRID_CRS_WKT = "crs_wkt" +CF_ATTR_GRID_DATUM = "horizontal_datum_name" CF_ATTR_GRID_INVERSE_FLATTENING = "inverse_flattening" CF_ATTR_GRID_EARTH_RADIUS = "earth_radius" CF_ATTR_GRID_MAPPING_NAME = "grid_mapping_name" @@ -141,6 +146,7 @@ CF_ATTR_GRID_SEMI_MINOR_AXIS = "semi_minor_axis" CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN = "latitude_of_projection_origin" CF_ATTR_GRID_LON_OF_PROJ_ORIGIN = "longitude_of_projection_origin" +CF_ATTR_GRID_STRAIGHT_VERT_LON = "straight_vertical_longitude_from_pole" CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" CF_ATTR_GRID_FALSE_EASTING = "false_easting" CF_ATTR_GRID_FALSE_NORTHING = "false_northing" @@ -233,7 +239,10 @@ def build_cube_metadata(engine): ################################################################################ def _get_ellipsoid(cf_grid_var): - """Return the ellipsoid definition.""" + """ + Return a :class:`iris.coord_systems.GeogCS` using the relevant properties of + `cf_grid_var`. Returns None if no relevant properties are specified. 
+ """ major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) inverse_flattening = getattr( @@ -248,21 +257,51 @@ def _get_ellipsoid(cf_grid_var): if major is None and minor is None and inverse_flattening is None: major = getattr(cf_grid_var, CF_ATTR_GRID_EARTH_RADIUS, None) - return major, minor, inverse_flattening + datum = getattr(cf_grid_var, CF_ATTR_GRID_DATUM, None) + # Check crs_wkt if no datum + if datum is None: + crs_wkt = getattr(cf_grid_var, CF_ATTR_GRID_CRS_WKT, None) + if crs_wkt is not None: + proj_crs = pyproj.crs.CRS.from_wkt(crs_wkt) + if proj_crs.datum is not None: + datum = proj_crs.datum.name + + # An unknown crs datum will be treated as None + if datum == "unknown": + datum = None + + if not iris.FUTURE.datum_support: + wmsg = ( + "Ignoring a datum in netCDF load for consistency with existing " + "behaviour. In a future version of Iris, this datum will be " + "applied. To apply the datum when loading, use the " + "iris.FUTURE.datum_support flag." 
+ ) + warnings.warn(wmsg, FutureWarning, stacklevel=14) + datum = None + + if datum is not None: + return iris.coord_systems.GeogCS.from_datum(datum) + elif major is None and minor is None and inverse_flattening is None: + return None + else: + return iris.coord_systems.GeogCS(major, minor, inverse_flattening) ################################################################################ def build_coordinate_system(engine, cf_grid_var): """Create a coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - return iris.coord_systems.GeogCS(major, minor, inverse_flattening) + coord_system = _get_ellipsoid(cf_grid_var) + if coord_system is None: + raise ValueError("No ellipsoid specified") + else: + return coord_system ################################################################################ def build_rotated_coordinate_system(engine, cf_grid_var): """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) north_pole_latitude = getattr( cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0 @@ -277,14 +316,6 @@ def build_rotated_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - rcs = iris.coord_systems.RotatedGeogCS( north_pole_latitude, north_pole_longitude, @@ -302,7 +333,7 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): grid mapping variable. 
""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -327,14 +358,6 @@ def build_transverse_mercator_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.TransverseMercator( latitude_of_projection_origin, longitude_of_central_meridian, @@ -354,7 +377,7 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -368,14 +391,6 @@ def build_lambert_conformal_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.LambertConformal( latitude_of_projection_origin, longitude_of_central_meridian, @@ -395,7 +410,7 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): grid mapping variable. 
""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -403,18 +418,12 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): longitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None ) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - # Iris currently only supports Stereographic projections with a scale - # factor of 1.0. This is checked elsewhere. - - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) cs = iris.coord_systems.Stereographic( latitude_of_projection_origin, @@ -422,6 +431,43 @@ def build_stereographic_coordinate_system(engine, cf_grid_var): false_easting, false_northing, true_scale_lat=None, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + ellipsoid=ellipsoid, + ) + + return cs + + +################################################################################ +def build_polar_stereographic_coordinate_system(engine, cf_grid_var): + """ + Create a polar stereographic coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + ellipsoid = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_STRAIGHT_VERT_LON, None + ) + true_scale_lat = getattr(cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + cs = iris.coord_systems.PolarStereographic( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + true_scale_lat, + scale_factor_at_projection_origin, ellipsoid=ellipsoid, ) @@ -435,26 +481,27 @@ def build_mercator_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) longitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None ) - # Iris currently only supports Mercator projections with specific - # values for false_easting, false_northing, - # scale_factor_at_projection_origin and standard_parallel. These are - # checked elsewhere. 
- - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) cs = iris.coord_systems.Mercator( - longitude_of_projection_origin, ellipsoid=ellipsoid + longitude_of_projection_origin, + ellipsoid=ellipsoid, + standard_parallel=standard_parallel, + scale_factor_at_projection_origin=scale_factor_at_projection_origin, + false_easting=false_easting, + false_northing=false_northing, ) return cs @@ -467,7 +514,7 @@ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -478,14 +525,6 @@ def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.LambertAzimuthalEqualArea( latitude_of_projection_origin, longitude_of_projection_origin, @@ -504,7 +543,7 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): grid mapping variable. 
""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -518,14 +557,6 @@ def build_albers_equal_area_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.AlbersEqualArea( latitude_of_projection_origin, longitude_of_central_meridian, @@ -545,7 +576,7 @@ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): grid mapping variable. """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -559,14 +590,6 @@ def build_vertical_perspective_coordinate_system(engine, cf_grid_var): false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.VerticalPerspective( latitude_of_projection_origin, longitude_of_projection_origin, @@ -586,7 +609,7 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): grid mapping variable. 
""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + ellipsoid = _get_ellipsoid(cf_grid_var) latitude_of_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None @@ -603,14 +626,6 @@ def build_geostationary_coordinate_system(engine, cf_grid_var): cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None ) - ellipsoid = None - if ( - major is not None - or minor is not None - or inverse_flattening is not None - ): - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - cs = iris.coord_systems.Geostationary( latitude_of_projection_origin, longitude_of_projection_origin, @@ -839,6 +854,12 @@ def build_dimension_coordinate( cf_coord_var, coord_name, attributes ) + coord_skipped_msg = ( + f"{cf_coord_var.cf_name} coordinate not added to Cube: " + ) + coord_skipped_msg += "{error}" + coord_skipped = False + # Create the coordinate. try: coord = iris.coords.DimCoord( @@ -855,6 +876,11 @@ def build_dimension_coordinate( ) except ValueError as e_msg: # Attempt graceful loading. + msg = ( + "Failed to create {name!r} dimension coordinate: {error}\n" + "Gracefully creating {name!r} auxiliary coordinate instead." + ) + warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) coord = iris.coords.AuxCoord( points_data, standard_name=standard_name, @@ -866,22 +892,26 @@ def build_dimension_coordinate( coord_system=coord_system, climatological=climatological, ) - cube.add_aux_coord(coord, data_dims) - msg = ( - "Failed to create {name!r} dimension coordinate: {error}\n" - "Gracefully creating {name!r} auxiliary coordinate instead." - ) - warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) + try: + cube.add_aux_coord(coord, data_dims) + except iris.exceptions.CannotAddError as e_msg: + warnings.warn(coord_skipped_msg.format(error=e_msg)) + coord_skipped = True else: # Add the dimension coordinate to the cube. 
- if data_dims: - cube.add_dim_coord(coord, data_dims) - else: - # Scalar coords are placed in the aux_coords container. - cube.add_aux_coord(coord, data_dims) + try: + if data_dims: + cube.add_dim_coord(coord, data_dims) + else: + # Scalar coords are placed in the aux_coords container. + cube.add_aux_coord(coord, data_dims) + except iris.exceptions.CannotAddError as e_msg: + warnings.warn(coord_skipped_msg.format(error=e_msg)) + coord_skipped = True - # Update the coordinate to CF-netCDF variable mapping. - engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + if not coord_skipped: + # Update the coordinate to CF-netCDF variable mapping. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) ################################################################################ @@ -950,10 +980,14 @@ def build_auxiliary_coordinate( ) # Add it to the cube - cube.add_aux_coord(coord, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + try: + cube.add_aux_coord(coord, data_dims) + except iris.exceptions.CannotAddError as e_msg: + msg = "{name!r} coordinate not added to Cube: {error}" + warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) + else: + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) ################################################################################ @@ -997,12 +1031,16 @@ def build_cell_measures(engine, cf_cm_var): ) # Add it to the cube - cube.add_cell_measure(cell_measure, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. 
- engine.cube_parts["cell_measures"].append( - (cell_measure, cf_cm_var.cf_name) - ) + try: + cube.add_cell_measure(cell_measure, data_dims) + except iris.exceptions.CannotAddError as e_msg: + msg = "{name!r} cell measure not added to Cube: {error}" + warnings.warn(msg.format(name=str(cf_cm_var.cf_name), error=e_msg)) + else: + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["cell_measures"].append( + (cell_measure, cf_cm_var.cf_name) + ) ################################################################################ @@ -1042,10 +1080,16 @@ def build_ancil_var(engine, cf_av_var): ) # Add it to the cube - cube.add_ancillary_variable(av, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts["ancillary_variables"].append((av, cf_av_var.cf_name)) + try: + cube.add_ancillary_variable(av, data_dims) + except iris.exceptions.CannotAddError as e_msg: + msg = "{name!r} ancillary variable not added to Cube: {error}" + warnings.warn(msg.format(name=str(cf_av_var.cf_name), error=e_msg)) + else: + # Make a list with names, stored on the engine, so we can find them all later. 
+ engine.cube_parts["ancillary_variables"].append( + (av, cf_av_var.cf_name) + ) ################################################################################ @@ -1244,40 +1288,20 @@ def has_supported_mercator_parameters(engine, cf_name): is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] - false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - scale_factor_at_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None - ) standard_parallel = getattr( cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None ) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) - if false_easting is not None and false_easting != 0: - warnings.warn( - "False eastings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False - if false_northing is not None and false_northing != 0: - warnings.warn( - "False northings other than 0.0 not yet supported " - "for Mercator projections" - ) - is_valid = False if ( scale_factor_at_projection_origin is not None - and scale_factor_at_projection_origin != 1 + and standard_parallel is not None ): warnings.warn( - "Scale factors other than 1.0 not yet supported for " - "Mercator projections" - ) - is_valid = False - if standard_parallel is not None and standard_parallel != 0: - warnings.warn( - "Standard parallels other than 0.0 not yet " - "supported for Mercator projections" + "It does not make sense to provide both " + '"scale_factor_at_projection_origin" and "standard_parallel".' 
) is_valid = False @@ -1285,24 +1309,45 @@ def has_supported_mercator_parameters(engine, cf_name): ################################################################################ -def has_supported_stereographic_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has a value of 1.0 - for the scale_factor_at_projection_origin attribute.""" +def has_supported_polar_stereographic_parameters(engine, cf_name): + """Determine whether the CF grid mapping variable has the supported + values for the parameters of the Polar Stereographic projection.""" is_valid = True cf_grid_var = engine.cf_var.cf_group[cf_name] + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) scale_factor_at_projection_origin = getattr( cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None ) + if ( + latitude_of_projection_origin != 90 + and latitude_of_projection_origin != -90 + ): + warnings.warn('"latitude_of_projection_origin" must be +90 or -90.') + is_valid = False + if ( scale_factor_at_projection_origin is not None - and scale_factor_at_projection_origin != 1 + and standard_parallel is not None ): warnings.warn( - "Scale factors other than 1.0 not yet supported for " - "stereographic projections" + "It does not make sense to provide both " + '"scale_factor_at_projection_origin" and "standard_parallel".' + ) + is_valid = False + + if scale_factor_at_projection_origin is None and standard_parallel is None: + warnings.warn( + 'One of "scale_factor_at_projection_origin" and ' + '"standard_parallel" is required.' 
) is_valid = False diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 5c70c5acf2..4dcd5ce6aa 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -219,7 +219,6 @@ def load_cubes(filespecs, callback=None): for filespec in filespecs: for filename in glob.glob(filespec): - field = ABFField(filename) cube = field.to_cube() diff --git a/lib/iris/fileformats/cf.py b/lib/iris/fileformats/cf.py index b22fbd3b51..a21e1d975f 100644 --- a/lib/iris/fileformats/cf.py +++ b/lib/iris/fileformats/cf.py @@ -20,10 +20,10 @@ import re import warnings -import netCDF4 import numpy as np import numpy.ma as ma +from iris.fileformats.netcdf import _thread_safe_nc import iris.util # @@ -1044,12 +1044,15 @@ class CFReader: CFGroup = CFGroup def __init__(self, filename, warn=False, monotonic=False): + self._dataset = None self._filename = os.path.expanduser(filename) #: Collection of CF-netCDF variables associated with this netCDF file self.cf_group = self.CFGroup() - self._dataset = netCDF4.Dataset(self._filename, mode="r") + self._dataset = _thread_safe_nc.DatasetWrapper( + self._filename, mode="r" + ) # Issue load optimisation warning. if warn and self._dataset.file_format in [ @@ -1067,6 +1070,19 @@ def __init__(self, filename, warn=False, monotonic=False): self._build_cf_groups() self._reset() + def __enter__(self): + # Enable use as a context manager + # N.B. this **guarantees* closure of the file, when the context is exited. + # Note: ideally, the class would not do so much work in the __init__ call, and + # would do all that here, after acquiring necessary permissions/locks. + # But for legacy reasons, we can't do that. So **effectively**, the context + # (in terms of access control) alreday started, when we created the object. + return self + + def __exit__(self, exc_type, exc_value, traceback): + # When used as a context-manager, **always** close the file on exit. 
+ self._close() + @property def filename(self): """The file that the CFReader is reading.""" @@ -1293,9 +1309,15 @@ def _reset(self): for nc_var_name in self._dataset.variables.keys(): self.cf_group[nc_var_name].cf_attrs_reset() - def __del__(self): + def _close(self): # Explicitly close dataset to prevent file remaining open. - self._dataset.close() + if self._dataset is not None: + self._dataset.close() + self._dataset = None + + def __del__(self): + # Be sure to close dataset when CFReader is destroyed / garbage-collected. + self._close() def _getncattr(dataset, attr, default=None): diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 2fb628bebf..50c02e4d04 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -59,14 +59,18 @@ def _dot_path(): def save(cube, target): - """Save a dot representation of the cube. - - Args: + """ + Save a dot representation of the cube. - * cube - A :class:`iris.cube.Cube`. - * target - A filename or open file handle. + Args + ---- + cube: :class:`iris.cube.Cube`. + target + A filename or open file handle. - See also :func:`iris.io.save`. + See Also + -------- + :func:`iris.io.save`. """ if isinstance(target, str): @@ -87,19 +91,23 @@ def save(cube, target): def save_png(source, target, launch=False): """ - Produces a "dot" instance diagram by calling dot and optionally launching the resulting image. - - Args: + Produce a "dot" instance diagram by calling dot and optionally launching + the resulting image. - * source - A :class:`iris.cube.Cube`, or dot filename. - * target - A filename or open file handle. - If passing a file handle, take care to open it for binary output. + Args + ---- + source: :class:`iris.cube.Cube`, or dot filename. + target + A filename or open file handle. + If passing a file handle, take care to open it for binary output. - Kwargs: + **kwargs + * launch + Display the image. Default is False. - * launch - Display the image. Default is False. 
- - See also :func:`iris.io.save`. + See Also + -------- + :func:`iris.io.save`. """ # From cube or dot file? @@ -152,11 +160,13 @@ def save_png(source, target, launch=False): def cube_text(cube): - """Return a DOT text representation a `iris.cube.Cube`. - - Args: + """ + Return a DOT text representation a `iris.cube.Cube`. - * cube - The cube for which to create DOT text. + Args + ---- + cube + The cube for which to create DOT text. """ # We use r'' type string constructor as when we type \n in a string without the r'' constructor @@ -283,13 +293,14 @@ def cube_text(cube): def _coord_text(label, coord): """ - Returns a string containing the dot representation for a single coordinate node. - - Args: + Return a string containing the dot representation for a single coordinate + node. - * label + Args + ---- + label The dot ID of the coordinate node. - * coord + coord The coordinate to convert. """ @@ -315,14 +326,16 @@ def _coord_text(label, coord): def _coord_system_text(cs, uid): """ - Returns a string containing the dot representation for a single coordinate system node. + Return a string containing the dot representation for a single coordinate + system node. - Args: - - * cs + Args + ---- + cs The coordinate system to convert. - * uid - The uid allows/distinguishes non-identical CoordSystems of the same type. + uid + The uid allows/distinguishes non-identical CoordSystems of the same + type. """ attrs = [] @@ -341,15 +354,15 @@ def _coord_system_text(cs, uid): def _dot_node(indent, id, name, attributes): """ - Returns a string containing the dot representation for a single node. - - Args: + Return a string containing the dot representation for a single node. - * id + Args + ---- + id The ID of the node. - * name + name The visual name of the node. - * attributes + attributes An iterable of (name, value) attribute pairs. 
""" diff --git a/lib/iris/fileformats/name_loaders.py b/lib/iris/fileformats/name_loaders.py index 34e88aff80..b9b64a343e 100644 --- a/lib/iris/fileformats/name_loaders.py +++ b/lib/iris/fileformats/name_loaders.py @@ -456,7 +456,7 @@ def _generate_cubes( # Define the time unit and use it to serialise the datetime for # the time coordinate. time_unit = cf_units.Unit( - "hours since epoch", calendar=cf_units.CALENDAR_GREGORIAN + "hours since epoch", calendar=cf_units.CALENDAR_STANDARD ) # Build time, height, latitude and longitude coordinates. @@ -571,7 +571,9 @@ def _generate_cubes( cube.attributes[key] = value if cell_methods is not None: - cube.add_cell_method(cell_methods[i]) + cell_method = cell_methods[i] + if cell_method is not None: + cube.add_cell_method(cell_method) yield cube @@ -610,7 +612,7 @@ def _build_cell_methods(av_or_ints, coord): cell_method = None msg = "Unknown {} statistic: {!r}. Unable to create cell method." warnings.warn(msg.format(coord, av_or_int)) - cell_methods.append(cell_method) + cell_methods.append(cell_method) # NOTE: this can be a None return cell_methods @@ -992,7 +994,6 @@ def load_NAMEIII_version2(filename): # using the next() method. This will come in handy as we wish to # progress through the file line by line. 
with open(filename, "r") as file_handle: - # define a dictionary to hold the header metadata about this file header = read_header(file_handle) @@ -1003,7 +1004,6 @@ def load_NAMEIII_version2(filename): column_headings = {} datacol1 = header["Number of preliminary cols"] for line in file_handle: - data = [col.strip() for col in line.split(",")][:-1] # If first column is not zero we have reached the end @@ -1212,7 +1212,7 @@ def load_NAMEIII_trajectory(filename): """ time_unit = cf_units.Unit( - "hours since epoch", calendar=cf_units.CALENDAR_GREGORIAN + "hours since epoch", calendar=cf_units.CALENDAR_STANDARD ) with open(filename, "r") as infile: diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py new file mode 100644 index 0000000000..505e173b0b --- /dev/null +++ b/lib/iris/fileformats/netcdf/__init__.py @@ -0,0 +1,49 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Module to support the loading and saving of NetCDF files, also using the CF conventions +for metadata interpretation. + +See : `NetCDF User's Guide `_ +and `netCDF4 python module `_. + +Also : `CF Conventions `_. + +""" +import iris.config + +# Note: *must* be done before importing from submodules, as they also use this ! +logger = iris.config.get_logger(__name__) + +from .loader import DEBUG, NetCDFDataProxy, load_cubes +from .saver import ( + CF_CONVENTIONS_VERSION, + MESH_ELEMENTS, + SPATIO_TEMPORAL_AXES, + CFNameCoordMap, + Saver, + UnknownCellMethodWarning, + parse_cell_methods, + save, +) + +# Export all public elements from the loader and saver submodules. +# NOTE: the separation is purely for neatness and developer convenience; from +# the user point of view, it is still all one module. 
+__all__ = ( + "CFNameCoordMap", + "CF_CONVENTIONS_VERSION", + "DEBUG", + "MESH_ELEMENTS", + "NetCDFDataProxy", + "SPATIO_TEMPORAL_AXES", + "Saver", + "UnknownCellMethodWarning", + "load_cubes", + "logger", + "parse_cell_methods", + "save", +) diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py new file mode 100644 index 0000000000..decca1535f --- /dev/null +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -0,0 +1,342 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Module to ensure all calls to the netCDF4 library are thread-safe. + +Intention is that no other Iris module should import the netCDF4 module. + +""" +from abc import ABC +from threading import Lock +import typing + +import netCDF4 +import numpy as np + +_GLOBAL_NETCDF4_LOCK = Lock() + +# Doesn't need thread protection, but this allows all netCDF4 refs to be +# replaced with thread_safe refs. +default_fillvals = netCDF4.default_fillvals + + +class _ThreadSafeWrapper(ABC): + """ + Contains a netCDF4 class instance, ensuring wrapping all API calls within _GLOBAL_NETCDF4_LOCK. + + Designed to 'gate keep' all the instance's API calls, but allowing the + same API as if working directly with the instance itself. + + Using a contained object instead of inheritance, as we cannot successfully + subclass or monkeypatch netCDF4 classes, because they are only wrappers for + the C-layer. + """ + + CONTAINED_CLASS = NotImplemented + + # Allows easy type checking, avoiding difficulties with isinstance and mocking. 
+ THREAD_SAFE_FLAG = True + + @classmethod + def _from_existing(cls, instance): + """Pass an existing instance to __init__, where it is contained.""" + assert isinstance(instance, cls.CONTAINED_CLASS) + return cls(instance) + + def __init__(self, *args, **kwargs): + """Contain an existing instance, or generate a new one from arguments.""" + if isinstance(args[0], self.CONTAINED_CLASS): + instance = args[0] + else: + with _GLOBAL_NETCDF4_LOCK: + instance = self.CONTAINED_CLASS(*args, **kwargs) + + self._contained_instance = instance + + def __getattr__(self, item): + if item == "_contained_instance": + # Special behaviour when accessing the _contained_instance itself. + return object.__getattribute__(self, item) + else: + with _GLOBAL_NETCDF4_LOCK: + return getattr(self._contained_instance, item) + + def __setattr__(self, key, value): + if key == "_contained_instance": + # Special behaviour when accessing the _contained_instance itself. + object.__setattr__(self, key, value) + else: + with _GLOBAL_NETCDF4_LOCK: + return setattr(self._contained_instance, key, value) + + def __getitem__(self, item): + with _GLOBAL_NETCDF4_LOCK: + return self._contained_instance.__getitem__(item) + + def __setitem__(self, key, value): + with _GLOBAL_NETCDF4_LOCK: + return self._contained_instance.__setitem__(key, value) + + +class DimensionWrapper(_ThreadSafeWrapper): + """ + Accessor for a netCDF4.Dimension, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Dimension. + """ + + CONTAINED_CLASS = netCDF4.Dimension + + +class VariableWrapper(_ThreadSafeWrapper): + """ + Accessor for a netCDF4.Variable, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Variable. + """ + + CONTAINED_CLASS = netCDF4.Variable + + def setncattr(self, *args, **kwargs) -> None: + """ + Calls netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK. + + Only defined explicitly in order to get some mocks to work. 
+ """ + with _GLOBAL_NETCDF4_LOCK: + return self._contained_instance.setncattr(*args, **kwargs) + + @property + def dimensions(self) -> typing.List[str]: + """ + Calls netCDF4.Variable.dimensions within _GLOBAL_NETCDF4_LOCK. + + Only defined explicitly in order to get some mocks to work. + """ + with _GLOBAL_NETCDF4_LOCK: + # Return value is a list of strings so no need for + # DimensionWrapper, unlike self.get_dims(). + return self._contained_instance.dimensions + + # All Variable API that returns Dimension(s) is wrapped to instead return + # DimensionWrapper(s). + + def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]: + """ + Calls netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. + + The original returned netCDF4.Dimensions are simply replaced with their + respective DimensionWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + dimensions_ = list( + self._contained_instance.get_dims(*args, **kwargs) + ) + return tuple([DimensionWrapper._from_existing(d) for d in dimensions_]) + + +class GroupWrapper(_ThreadSafeWrapper): + """ + Accessor for a netCDF4.Group, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Group. + """ + + CONTAINED_CLASS = netCDF4.Group + + # All Group API that returns Dimension(s) is wrapped to instead return + # DimensionWrapper(s). + + @property + def dimensions(self) -> typing.Dict[str, DimensionWrapper]: + """ + Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. + + The original returned netCDF4.Dimensions are simply replaced with their + respective DimensionWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. 
+ """ + with _GLOBAL_NETCDF4_LOCK: + dimensions_ = self._contained_instance.dimensions + return { + k: DimensionWrapper._from_existing(v) + for k, v in dimensions_.items() + } + + def createDimension(self, *args, **kwargs) -> DimensionWrapper: + """ + Calls createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. + + The original returned netCDF4.Dimension is simply replaced with its + respective DimensionWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + new_dimension = self._contained_instance.createDimension( + *args, **kwargs + ) + return DimensionWrapper._from_existing(new_dimension) + + # All Group API that returns Variable(s) is wrapped to instead return + # VariableWrapper(s). + + @property + def variables(self) -> typing.Dict[str, VariableWrapper]: + """ + Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. + + The original returned netCDF4.Variables are simply replaced with their + respective VariableWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + variables_ = self._contained_instance.variables + return { + k: VariableWrapper._from_existing(v) for k, v in variables_.items() + } + + def createVariable(self, *args, **kwargs) -> VariableWrapper: + """ + Calls createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. + + The original returned netCDF4.Variable is simply replaced with its + respective VariableWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. 
+ """ + with _GLOBAL_NETCDF4_LOCK: + new_variable = self._contained_instance.createVariable( + *args, **kwargs + ) + return VariableWrapper._from_existing(new_variable) + + def get_variables_by_attributes( + self, *args, **kwargs + ) -> typing.List[VariableWrapper]: + """ + Calls get_variables_by_attributes() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. + + The original returned netCDF4.Variables are simply replaced with their + respective VariableWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + variables_ = list( + self._contained_instance.get_variables_by_attributes( + *args, **kwargs + ) + ) + return [VariableWrapper._from_existing(v) for v in variables_] + + # All Group API that returns Group(s) is wrapped to instead return + # GroupWrapper(s). + + @property + def groups(self): + """ + Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers. + + The original returned netCDF4.Groups are simply replaced with their + respective GroupWrappers, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + groups_ = self._contained_instance.groups + return {k: GroupWrapper._from_existing(v) for k, v in groups_.items()} + + @property + def parent(self): + """ + Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper. + + The original returned netCDF4.Group is simply replaced with its + respective GroupWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + parent_ = self._contained_instance.parent + return GroupWrapper._from_existing(parent_) + + def createGroup(self, *args, **kwargs): + """ + Calls createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. 
+ + The original returned netCDF4.Group is simply replaced with its + respective GroupWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + new_group = self._contained_instance.createGroup(*args, **kwargs) + return GroupWrapper._from_existing(new_group) + + +class DatasetWrapper(GroupWrapper): + """ + Accessor for a netCDF4.Dataset, always acquiring _GLOBAL_NETCDF4_LOCK. + + All API calls should be identical to those for netCDF4.Dataset. + """ + + CONTAINED_CLASS = netCDF4.Dataset + + @classmethod + def fromcdl(cls, *args, **kwargs): + """ + Calls netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper. + + The original returned netCDF4.Dataset is simply replaced with its + respective DatasetWrapper, ensuring that downstream calls are + also performed within _GLOBAL_NETCDF4_LOCK. + """ + with _GLOBAL_NETCDF4_LOCK: + instance = cls.CONTAINED_CLASS.fromcdl(*args, **kwargs) + return cls._from_existing(instance) + + +class NetCDFDataProxy: + """A reference to the data payload of a single NetCDF file variable.""" + + __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") + + def __init__(self, shape, dtype, path, variable_name, fill_value): + self.shape = shape + self.dtype = dtype + self.path = path + self.variable_name = variable_name + self.fill_value = fill_value + + @property + def ndim(self): + return len(self.shape) + + def __getitem__(self, keys): + # Using a DatasetWrapper causes problems with invalid ID's and the + # netCDF4 library, presumably because __getitem__ gets called so many + # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead. + with _GLOBAL_NETCDF4_LOCK: + dataset = netCDF4.Dataset(self.path) + try: + variable = dataset.variables[self.variable_name] + # Get the NetCDF variable data and slice. 
+ var = variable[keys] + finally: + dataset.close() + return np.asanyarray(var) + + def __repr__(self): + fmt = ( + "<{self.__class__.__name__} shape={self.shape}" + " dtype={self.dtype!r} path={self.path!r}" + " variable_name={self.variable_name!r}>" + ) + return fmt.format(self=self) + + def __getstate__(self): + return {attr: getattr(self, attr) for attr in self.__slots__} + + def __setstate__(self, state): + for key, value in state.items(): + setattr(self, key, value) diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py new file mode 100644 index 0000000000..8fcab61d17 --- /dev/null +++ b/lib/iris/fileformats/netcdf/loader.py @@ -0,0 +1,559 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Module to support the loading of Iris cubes from NetCDF files, also using the CF +conventions for metadata interpretation. + +See : `NetCDF User's Guide `_ +and `netCDF4 python module `_. + +Also : `CF Conventions `_. + +""" +import warnings + +import numpy as np + +from iris._lazy_data import as_lazy_data +from iris.aux_factory import ( + AtmosphereSigmaFactory, + HybridHeightFactory, + HybridPressureFactory, + OceanSFactory, + OceanSg1Factory, + OceanSg2Factory, + OceanSigmaFactory, + OceanSigmaZFactory, +) +import iris.config +import iris.coord_systems +import iris.coords +import iris.exceptions +import iris.fileformats.cf +from iris.fileformats.netcdf import _thread_safe_nc +from iris.fileformats.netcdf.saver import _CF_ATTRS +import iris.io +import iris.util + +# Show actions activation statistics. +DEBUG = False + +# Get the logger : shared logger for all in 'iris.fileformats.netcdf'. +from . import logger + +# An expected part of the public loader API, but includes thread safety +# concerns so is housed in _thread_safe_nc. 
+NetCDFDataProxy = _thread_safe_nc.NetCDFDataProxy + + +def _actions_engine(): + # Return an 'actions engine', which provides a pyke-rules-like interface to + # the core cf translation code. + # Deferred import to avoid circularity. + import iris.fileformats._nc_load_rules.engine as nc_actions_engine + + engine = nc_actions_engine.Engine() + return engine + + +def _assert_case_specific_facts(engine, cf, cf_group): + # Initialise a data store for built cube elements. + # This is used to patch element attributes *not* setup by the actions + # process, after the actions code has run. + engine.cube_parts["coordinates"] = [] + engine.cube_parts["cell_measures"] = [] + engine.cube_parts["ancillary_variables"] = [] + + # Assert facts for CF coordinates. + for cf_name in cf_group.coordinates.keys(): + engine.add_case_specific_fact("coordinate", (cf_name,)) + + # Assert facts for CF auxiliary coordinates. + for cf_name in cf_group.auxiliary_coordinates.keys(): + engine.add_case_specific_fact("auxiliary_coordinate", (cf_name,)) + + # Assert facts for CF cell measures. + for cf_name in cf_group.cell_measures.keys(): + engine.add_case_specific_fact("cell_measure", (cf_name,)) + + # Assert facts for CF ancillary variables. + for cf_name in cf_group.ancillary_variables.keys(): + engine.add_case_specific_fact("ancillary_variable", (cf_name,)) + + # Assert facts for CF grid_mappings. + for cf_name in cf_group.grid_mappings.keys(): + engine.add_case_specific_fact("grid_mapping", (cf_name,)) + + # Assert facts for CF labels. + for cf_name in cf_group.labels.keys(): + engine.add_case_specific_fact("label", (cf_name,)) + + # Assert facts for CF formula terms associated with the cf_group + # of the CF data variable. + + # Collect varnames of formula-root variables as we go. 
+ # NOTE: use dictionary keys as an 'OrderedSet' + # - see: https://stackoverflow.com/a/53657523/2615050 + # This is to ensure that we can handle the resulting facts in a definite + # order, as using a 'set' led to indeterminate results. + formula_root = {} + for cf_var in cf.cf_group.formula_terms.values(): + for cf_root, cf_term in cf_var.cf_terms_by_root.items(): + # Only assert this fact if the formula root variable is + # defined in the CF group of the CF data variable. + if cf_root in cf_group: + formula_root[cf_root] = True + engine.add_case_specific_fact( + "formula_term", + (cf_var.cf_name, cf_root, cf_term), + ) + + for cf_root in formula_root.keys(): + engine.add_case_specific_fact("formula_root", (cf_root,)) + + +def _actions_activation_stats(engine, cf_name): + print("-" * 80) + print("CF Data Variable: %r" % cf_name) + + engine.print_stats() + + print("Rules Triggered:") + + for rule in sorted(list(engine.rules_triggered)): + print("\t%s" % rule) + + print("Case Specific Facts:") + kb_facts = engine.get_kb() + + for key in kb_facts.entity_lists.keys(): + for arg in kb_facts.entity_lists[key].case_specific_facts: + print("\t%s%s" % (key, arg)) + + +def _set_attributes(attributes, key, value): + """Set attributes dictionary, converting unicode strings appropriately.""" + + if isinstance(value, str): + try: + attributes[str(key)] = str(value) + except UnicodeEncodeError: + attributes[str(key)] = value + else: + attributes[str(key)] = value + + +def _add_unused_attributes(iris_object, cf_var): + """ + Populate the attributes of a cf element with the "unused" attributes + from the associated CF-netCDF variable. That is, all those that aren't CF + reserved terms. 
+ + """ + + def attribute_predicate(item): + return item[0] not in _CF_ATTRS + + tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) + for attr_name, attr_value in tmpvar: + _set_attributes(iris_object.attributes, attr_name, attr_value) + + +def _get_actual_dtype(cf_var): + # Figure out what the eventual data type will be after any scale/offset + # transforms. + dummy_data = np.zeros(1, dtype=cf_var.dtype) + if hasattr(cf_var, "scale_factor"): + dummy_data = cf_var.scale_factor * dummy_data + if hasattr(cf_var, "add_offset"): + dummy_data = cf_var.add_offset + dummy_data + return dummy_data.dtype + + +def _get_cf_var_data(cf_var, filename): + # Get lazy chunked data out of a cf variable. + dtype = _get_actual_dtype(cf_var) + + # Create cube with deferred data, but no metadata + fill_value = getattr( + cf_var.cf_data, + "_FillValue", + _thread_safe_nc.default_fillvals[cf_var.dtype.str[1:]], + ) + proxy = NetCDFDataProxy( + cf_var.shape, dtype, filename, cf_var.cf_name, fill_value + ) + # Get the chunking specified for the variable : this is either a shape, or + # maybe the string "contiguous". + chunks = cf_var.cf_data.chunking() + # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. + if chunks == "contiguous": + chunks = None + return as_lazy_data(proxy, chunks=chunks) + + +class _OrderedAddableList(list): + """ + A custom container object for actions recording. + + Used purely in actions debugging, to accumulate a record of which actions + were activated. + + It replaces a set, so as to preserve the ordering of operations, with + possible repeats, and it also numbers the entries. + + The actions routines invoke an 'add' method, so this effectively replaces + a set.add with a list.append. 
+ + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._n_add = 0 + + def add(self, msg): + self._n_add += 1 + n_add = self._n_add + self.append(f"#{n_add:03d} : {msg}") + + +def _load_cube(engine, cf, cf_var, filename): + from iris.cube import Cube + + """Create the cube associated with the CF-netCDF data variable.""" + data = _get_cf_var_data(cf_var, filename) + cube = Cube(data) + + # Reset the actions engine. + engine.reset() + + # Initialise engine rule processing hooks. + engine.cf_var = cf_var + engine.cube = cube + engine.cube_parts = {} + engine.requires = {} + engine.rules_triggered = _OrderedAddableList() + engine.filename = filename + + # Assert all the case-specific facts. + # This extracts 'facts' specific to this data-variable (aka cube), from + # the info supplied in the CFGroup object. + _assert_case_specific_facts(engine, cf, cf_var.cf_group) + + # Run the actions engine. + # This creates various cube elements and attaches them to the cube. + # It also records various other info on the engine, to be processed later. + engine.activate() + + # Having run the rules, now add the "unused" attributes to each cf element. + def fix_attributes_all_elements(role_name): + elements_and_names = engine.cube_parts.get(role_name, []) + + for iris_object, cf_var_name in elements_and_names: + _add_unused_attributes(iris_object, cf.cf_group[cf_var_name]) + + # Populate the attributes of all coordinates, cell-measures and ancillary-vars. + fix_attributes_all_elements("coordinates") + fix_attributes_all_elements("ancillary_variables") + fix_attributes_all_elements("cell_measures") + + # Also populate attributes of the top-level cube itself. + _add_unused_attributes(cube, cf_var) + + # Work out reference names for all the coords. + names = { + coord.var_name: coord.standard_name or coord.var_name or "unknown" + for coord in cube.coords() + } + + # Add all the cube cell methods. 
+ cube.cell_methods = [ + iris.coords.CellMethod( + method=method.method, + intervals=method.intervals, + comments=method.comments, + coords=[ + names[coord_name] if coord_name in names else coord_name + for coord_name in method.coord_names + ], + ) + for method in cube.cell_methods + ] + + if DEBUG: + # Show activation statistics for this data-var (i.e. cube). + _actions_activation_stats(engine, cf_var.cf_name) + + return cube + + +def _load_aux_factory(engine, cube): + """ + Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory. + + """ + formula_type = engine.requires.get("formula_type") + if formula_type in [ + "atmosphere_sigma_coordinate", + "atmosphere_hybrid_height_coordinate", + "atmosphere_hybrid_sigma_pressure_coordinate", + "ocean_sigma_z_coordinate", + "ocean_sigma_coordinate", + "ocean_s_coordinate", + "ocean_s_coordinate_g1", + "ocean_s_coordinate_g2", + ]: + + def coord_from_term(term): + # Convert term names to coordinates (via netCDF variable names). + name = engine.requires["formula_terms"].get(term, None) + if name is not None: + for coord, cf_var_name in engine.cube_parts["coordinates"]: + if cf_var_name == name: + return coord + warnings.warn( + "Unable to find coordinate for variable " + "{!r}".format(name) + ) + + if formula_type == "atmosphere_sigma_coordinate": + pressure_at_top = coord_from_term("ptop") + sigma = coord_from_term("sigma") + surface_air_pressure = coord_from_term("ps") + factory = AtmosphereSigmaFactory( + pressure_at_top, sigma, surface_air_pressure + ) + elif formula_type == "atmosphere_hybrid_height_coordinate": + delta = coord_from_term("a") + sigma = coord_from_term("b") + orography = coord_from_term("orog") + factory = HybridHeightFactory(delta, sigma, orography) + elif formula_type == "atmosphere_hybrid_sigma_pressure_coordinate": + # Hybrid pressure has two valid versions of its formula terms: + # "p0: var1 a: var2 b: var3 ps: var4" or + # "ap: var1 b: var2 ps: var3" where "ap = p0 * a" + # Attempt to 
get the "ap" term. + delta = coord_from_term("ap") + if delta is None: + # The "ap" term is unavailable, so try getting terms "p0" + # and "a" terms in order to derive an "ap" equivalent term. + coord_p0 = coord_from_term("p0") + if coord_p0 is not None: + if coord_p0.shape != (1,): + msg = ( + "Expecting {!r} to be a scalar reference " + "pressure coordinate, got shape {!r}".format( + coord_p0.var_name, coord_p0.shape + ) + ) + raise ValueError(msg) + if coord_p0.has_bounds(): + msg = ( + "Ignoring atmosphere hybrid sigma pressure " + "scalar coordinate {!r} bounds.".format( + coord_p0.name() + ) + ) + warnings.warn(msg) + coord_a = coord_from_term("a") + if coord_a is not None: + if coord_a.units.is_unknown(): + # Be graceful, and promote unknown to dimensionless units. + coord_a.units = "1" + delta = coord_a * coord_p0.points[0] + delta.units = coord_a.units * coord_p0.units + delta.rename("vertical pressure") + delta.var_name = "ap" + cube.add_aux_coord(delta, cube.coord_dims(coord_a)) + + sigma = coord_from_term("b") + surface_air_pressure = coord_from_term("ps") + factory = HybridPressureFactory(delta, sigma, surface_air_pressure) + elif formula_type == "ocean_sigma_z_coordinate": + sigma = coord_from_term("sigma") + eta = coord_from_term("eta") + depth = coord_from_term("depth") + depth_c = coord_from_term("depth_c") + nsigma = coord_from_term("nsigma") + zlev = coord_from_term("zlev") + factory = OceanSigmaZFactory( + sigma, eta, depth, depth_c, nsigma, zlev + ) + elif formula_type == "ocean_sigma_coordinate": + sigma = coord_from_term("sigma") + eta = coord_from_term("eta") + depth = coord_from_term("depth") + factory = OceanSigmaFactory(sigma, eta, depth) + elif formula_type == "ocean_s_coordinate": + s = coord_from_term("s") + eta = coord_from_term("eta") + depth = coord_from_term("depth") + a = coord_from_term("a") + depth_c = coord_from_term("depth_c") + b = coord_from_term("b") + factory = OceanSFactory(s, eta, depth, a, b, depth_c) + elif 
formula_type == "ocean_s_coordinate_g1": + s = coord_from_term("s") + c = coord_from_term("c") + eta = coord_from_term("eta") + depth = coord_from_term("depth") + depth_c = coord_from_term("depth_c") + factory = OceanSg1Factory(s, c, eta, depth, depth_c) + elif formula_type == "ocean_s_coordinate_g2": + s = coord_from_term("s") + c = coord_from_term("c") + eta = coord_from_term("eta") + depth = coord_from_term("depth") + depth_c = coord_from_term("depth_c") + factory = OceanSg2Factory(s, c, eta, depth, depth_c) + cube.add_aux_factory(factory) + + +def _translate_constraints_to_var_callback(constraints): + """ + Translate load constraints into a simple data-var filter function, if possible. + + Returns: + * function(cf_var:CFDataVariable): --> bool, + or None. + + For now, ONLY handles a single NameConstraint with no 'STASH' component. + + """ + import iris._constraints + + constraints = iris._constraints.list_of_constraints(constraints) + result = None + if len(constraints) == 1: + (constraint,) = constraints + if ( + isinstance(constraint, iris._constraints.NameConstraint) + and constraint.STASH == "none" + ): + # As long as it doesn't use a STASH match, then we can treat it as + # a testing against name properties of cf_var. + # That's just like testing against name properties of a cube, except that they may not all exist. + def inner(cf_datavar): + match = True + for name in constraint._names: + expected = getattr(constraint, name) + if name != "STASH" and expected != "none": + attr_name = "cf_name" if name == "var_name" else name + # Fetch property : N.B. CFVariable caches the property values + # The use of a default here is the only difference from the code in NameConstraint. 
+ if not hasattr(cf_datavar, attr_name): + continue + actual = getattr(cf_datavar, attr_name, "") + if actual != expected: + match = False + break + return match + + result = inner + return result + + +def load_cubes(filenames, callback=None, constraints=None): + """ + Loads cubes from a list of NetCDF filenames/OPeNDAP URLs. + + Args: + + * filenames (string/list): + One or more NetCDF filenames/OPeNDAP URLs to load from. + + Kwargs: + + * callback (callable function): + Function which can be passed on to :func:`iris.io.run_callback`. + + Returns: + Generator of loaded NetCDF :class:`iris.cube.Cube`. + + """ + # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded + # into standard behaviour. + # Deferred import to avoid circular imports. + from iris.experimental.ugrid.cf import CFUGridReader + from iris.experimental.ugrid.load import ( + PARSE_UGRID_ON_LOAD, + _build_mesh_coords, + _meshes_from_cf, + ) + from iris.io import run_callback + + # Create a low-level data-var filter from the original load constraints, if they are suitable. + var_callback = _translate_constraints_to_var_callback(constraints) + + # Create an actions engine. + engine = _actions_engine() + + if isinstance(filenames, str): + filenames = [filenames] + + for filename in filenames: + # Ingest the netCDF file. + meshes = {} + if PARSE_UGRID_ON_LOAD: + cf_reader_class = CFUGridReader + else: + cf_reader_class = iris.fileformats.cf.CFReader + + with cf_reader_class(filename) as cf: + if PARSE_UGRID_ON_LOAD: + meshes = _meshes_from_cf(cf) + + # Process each CF data variable. + data_variables = list(cf.cf_group.data_variables.values()) + list( + cf.cf_group.promoted.values() + ) + for cf_var in data_variables: + if var_callback and not var_callback(cf_var): + # Deliver only selected results. + continue + + # cf_var-specific mesh handling, if a mesh is present. 
+ # Build the mesh_coords *before* loading the cube - avoids + # mesh-related attributes being picked up by + # _add_unused_attributes(). + mesh_name = None + mesh = None + mesh_coords, mesh_dim = [], None + if PARSE_UGRID_ON_LOAD: + mesh_name = getattr(cf_var, "mesh", None) + if mesh_name is not None: + try: + mesh = meshes[mesh_name] + except KeyError: + message = ( + f"File does not contain mesh: '{mesh_name}' - " + f"referenced by variable: '{cf_var.cf_name}' ." + ) + logger.debug(message) + if mesh is not None: + mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var) + + cube = _load_cube(engine, cf, cf_var, filename) + + # Attach the mesh (if present) to the cube. + for mesh_coord in mesh_coords: + cube.add_aux_coord(mesh_coord, mesh_dim) + + # Process any associated formula terms and attach + # the corresponding AuxCoordFactory. + try: + _load_aux_factory(engine, cube) + except ValueError as e: + warnings.warn("{}".format(e)) + + # Perform any user registered callback function. + cube = run_callback(callback, cube, cf_var, filename) + + # Callback mechanism may return None, which must not be yielded + if cube is None: + continue + + yield cube diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf/saver.py similarity index 81% rename from lib/iris/fileformats/netcdf.py rename to lib/iris/fileformats/netcdf/saver.py index 100ab29daa..e5d3bf2cc7 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -4,30 +4,30 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Module to support the loading of a NetCDF file into an Iris cube. +Module to support the saving of Iris cubes to a NetCDF file, also using the CF +conventions for metadata interpretation. -See also: `netCDF4 python `_ +See : `NetCDF User's Guide `_ +and `netCDF4 python module `_. -Also refer to document 'NetCDF Climate and Forecast (CF) Metadata Conventions'. +Also : `CF Conventions `_. 
""" - import collections -import collections.abc from itertools import repeat, zip_longest import os import os.path import re import string +from typing import List import warnings import cf_units import dask.array as da -import netCDF4 import numpy as np import numpy.ma as ma -from iris._lazy_data import _co_realise_lazy_arrays, as_lazy_data, is_lazy_data +from iris._lazy_data import _co_realise_lazy_arrays, is_lazy_data from iris.aux_factory import ( AtmosphereSigmaFactory, HybridHeightFactory, @@ -44,31 +44,21 @@ from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord import iris.exceptions import iris.fileformats.cf +from iris.fileformats.netcdf import _thread_safe_nc import iris.io import iris.util -# Show actions activation statistics. -DEBUG = False +# Get the logger : shared logger for all in 'iris.fileformats.netcdf'. +from . import logger -# Configure the logger. -logger = iris.config.get_logger(__name__) +# Avoid warning about unused import. +# We could use an __all__, but we don't want to maintain one here +logger # Standard CML spatio-temporal axis names. SPATIO_TEMPORAL_AXES = ["t", "z", "y", "x"] -# Pass through CF attributes: -# - comment -# - Conventions -# - flag_masks -# - flag_meanings -# - flag_values -# - history -# - institution -# - reference -# - source -# - title -# - positive -# +# The CF-meaningful attributes which may appear on a data variable. _CF_ATTRS = [ "add_offset", "ancillary_variables", @@ -185,13 +175,14 @@ _CM_INTERVAL = "interval" _CM_METHOD = "method" _CM_NAME = "name" +_CM_PARSE_NAME = re.compile(r"([\w_]+\s*?:\s+)+") _CM_PARSE = re.compile( r""" (?P([\w_]+\s*?:\s+)+) (?P[\w_\s]+(?![\w_]*\s*?:))\s* (?: \(\s* - (?P[^\)]+) + (?P.+) \)\s* )? 
""", @@ -203,6 +194,69 @@ class UnknownCellMethodWarning(Warning): pass +def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: + """ + Split a CF cell_methods attribute string into a list of zero or more cell + methods, each of which is then parsed with a regex to return a list of match + objects. + + Args: + + * nc_cell_methods: The value of the cell methods attribute to be split. + + Returns: + + * nc_cell_methods_matches: A list of the re.Match objects associated with + each parsed cell method + + Splitting is done based on words followed by colons outside of any brackets. + Validation of anything other than being laid out in the expected format is + left to the calling function. + """ + + # Find name candidates + name_start_inds = [] + for m in _CM_PARSE_NAME.finditer(nc_cell_methods): + name_start_inds.append(m.start()) + + # Remove those that fall inside brackets + bracket_depth = 0 + for ind, cha in enumerate(nc_cell_methods): + if cha == "(": + bracket_depth += 1 + elif cha == ")": + bracket_depth -= 1 + if bracket_depth < 0: + msg = ( + "Cell methods may be incorrectly parsed due to mismatched " + "brackets" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + if bracket_depth > 0 and ind in name_start_inds: + name_start_inds.remove(ind) + + # List tuples of indices of starts and ends of the cell methods in the string + method_indices = [] + for ii in range(len(name_start_inds) - 1): + method_indices.append((name_start_inds[ii], name_start_inds[ii + 1])) + method_indices.append((name_start_inds[-1], len(nc_cell_methods))) + + # Index the string and match against each substring + nc_cell_methods_matches = [] + for start_ind, end_ind in method_indices: + nc_cell_method_str = nc_cell_methods[start_ind:end_ind] + nc_cell_method_match = _CM_PARSE.match(nc_cell_method_str.strip()) + if not nc_cell_method_match: + msg = ( + f"Failed to fully parse cell method string: {nc_cell_methods}" + ) + warnings.warn(msg, UserWarning, stacklevel=2) + continue + 
nc_cell_methods_matches.append(nc_cell_method_match) + + return nc_cell_methods_matches + + def parse_cell_methods(nc_cell_methods): """ Parse a CF cell_methods attribute string into a tuple of zero or @@ -226,7 +280,7 @@ def parse_cell_methods(nc_cell_methods): cell_methods = [] if nc_cell_methods is not None: - for m in _CM_PARSE.finditer(nc_cell_methods): + for m in _split_cell_methods(nc_cell_methods): d = m.groupdict() method = d[_CM_METHOD] method = method.strip() @@ -382,547 +436,6 @@ def coord(self, name): return result -def _actions_engine(): - # Return an 'actions engine', which provides a pyke-rules-like interface to - # the core cf translation code. - # Deferred import to avoid circularity. - import iris.fileformats._nc_load_rules.engine as nc_actions_engine - - engine = nc_actions_engine.Engine() - return engine - - -class NetCDFDataProxy: - """A reference to the data payload of a single NetCDF file variable.""" - - __slots__ = ("shape", "dtype", "path", "variable_name", "fill_value") - - def __init__(self, shape, dtype, path, variable_name, fill_value): - self.shape = shape - self.dtype = dtype - self.path = path - self.variable_name = variable_name - self.fill_value = fill_value - - @property - def ndim(self): - return len(self.shape) - - def __getitem__(self, keys): - dataset = netCDF4.Dataset(self.path) - try: - variable = dataset.variables[self.variable_name] - # Get the NetCDF variable data and slice. 
- var = variable[keys] - finally: - dataset.close() - return np.asanyarray(var) - - def __repr__(self): - fmt = ( - "<{self.__class__.__name__} shape={self.shape}" - " dtype={self.dtype!r} path={self.path!r}" - " variable_name={self.variable_name!r}>" - ) - return fmt.format(self=self) - - def __getstate__(self): - return {attr: getattr(self, attr) for attr in self.__slots__} - - def __setstate__(self, state): - for key, value in state.items(): - setattr(self, key, value) - - -def _assert_case_specific_facts(engine, cf, cf_group): - # Initialise a data store for built cube elements. - # This is used to patch element attributes *not* setup by the actions - # process, after the actions code has run. - engine.cube_parts["coordinates"] = [] - engine.cube_parts["cell_measures"] = [] - engine.cube_parts["ancillary_variables"] = [] - - # Assert facts for CF coordinates. - for cf_name in cf_group.coordinates.keys(): - engine.add_case_specific_fact("coordinate", (cf_name,)) - - # Assert facts for CF auxiliary coordinates. - for cf_name in cf_group.auxiliary_coordinates.keys(): - engine.add_case_specific_fact("auxiliary_coordinate", (cf_name,)) - - # Assert facts for CF cell measures. - for cf_name in cf_group.cell_measures.keys(): - engine.add_case_specific_fact("cell_measure", (cf_name,)) - - # Assert facts for CF ancillary variables. - for cf_name in cf_group.ancillary_variables.keys(): - engine.add_case_specific_fact("ancillary_variable", (cf_name,)) - - # Assert facts for CF grid_mappings. - for cf_name in cf_group.grid_mappings.keys(): - engine.add_case_specific_fact("grid_mapping", (cf_name,)) - - # Assert facts for CF labels. - for cf_name in cf_group.labels.keys(): - engine.add_case_specific_fact("label", (cf_name,)) - - # Assert facts for CF formula terms associated with the cf_group - # of the CF data variable. - - # Collect varnames of formula-root variables as we go. 
- # NOTE: use dictionary keys as an 'OrderedSet' - # - see: https://stackoverflow.com/a/53657523/2615050 - # This is to ensure that we can handle the resulting facts in a definite - # order, as using a 'set' led to indeterminate results. - formula_root = {} - for cf_var in cf.cf_group.formula_terms.values(): - for cf_root, cf_term in cf_var.cf_terms_by_root.items(): - # Only assert this fact if the formula root variable is - # defined in the CF group of the CF data variable. - if cf_root in cf_group: - formula_root[cf_root] = True - engine.add_case_specific_fact( - "formula_term", - (cf_var.cf_name, cf_root, cf_term), - ) - - for cf_root in formula_root.keys(): - engine.add_case_specific_fact("formula_root", (cf_root,)) - - -def _actions_activation_stats(engine, cf_name): - print("-" * 80) - print("CF Data Variable: %r" % cf_name) - - engine.print_stats() - - print("Rules Triggered:") - - for rule in sorted(list(engine.rule_triggered)): - print("\t%s" % rule) - - print("Case Specific Facts:") - kb_facts = engine.get_kb() - - for key in kb_facts.entity_lists.keys(): - for arg in kb_facts.entity_lists[key].case_specific_facts: - print("\t%s%s" % (key, arg)) - - -def _set_attributes(attributes, key, value): - """Set attributes dictionary, converting unicode strings appropriately.""" - - if isinstance(value, str): - try: - attributes[str(key)] = str(value) - except UnicodeEncodeError: - attributes[str(key)] = value - else: - attributes[str(key)] = value - - -def _add_unused_attributes(iris_object, cf_var): - """ - Populate the attributes of a cf element with the "unused" attributes - from the associated CF-netCDF variable. That is, all those that aren't CF - reserved terms. 
- - """ - - def attribute_predicate(item): - return item[0] not in _CF_ATTRS - - tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) - for attr_name, attr_value in tmpvar: - _set_attributes(iris_object.attributes, attr_name, attr_value) - - -def _get_actual_dtype(cf_var): - # Figure out what the eventual data type will be after any scale/offset - # transforms. - dummy_data = np.zeros(1, dtype=cf_var.dtype) - if hasattr(cf_var, "scale_factor"): - dummy_data = cf_var.scale_factor * dummy_data - if hasattr(cf_var, "add_offset"): - dummy_data = cf_var.add_offset + dummy_data - return dummy_data.dtype - - -def _get_cf_var_data(cf_var, filename): - # Get lazy chunked data out of a cf variable. - dtype = _get_actual_dtype(cf_var) - - # Create cube with deferred data, but no metadata - fill_value = getattr( - cf_var.cf_data, - "_FillValue", - netCDF4.default_fillvals[cf_var.dtype.str[1:]], - ) - proxy = NetCDFDataProxy( - cf_var.shape, dtype, filename, cf_var.cf_name, fill_value - ) - # Get the chunking specified for the variable : this is either a shape, or - # maybe the string "contiguous". - chunks = cf_var.cf_data.chunking() - # In the "contiguous" case, pass chunks=None to 'as_lazy_data'. - if chunks == "contiguous": - chunks = None - return as_lazy_data(proxy, chunks=chunks) - - -class OrderedAddableList(list): - # Used purely in actions debugging, to accumulate a record of which actions - # were activated. - # It replaces a set, so as to record the ordering of operations, with - # possible repeats, and it also numbers the entries. - # Actions routines invoke the 'add' method, which thus effectively converts - # a set.add into a list.append. 
- def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._n_add = 0 - - def add(self, msg): - self._n_add += 1 - n_add = self._n_add - self.append(f"#{n_add:03d} : {msg}") - - -def _load_cube(engine, cf, cf_var, filename): - from iris.cube import Cube - - """Create the cube associated with the CF-netCDF data variable.""" - data = _get_cf_var_data(cf_var, filename) - cube = Cube(data) - - # Reset the actions engine. - engine.reset() - - # Initialise engine rule processing hooks. - engine.cf_var = cf_var - engine.cube = cube - engine.cube_parts = {} - engine.requires = {} - engine.rule_triggered = OrderedAddableList() - engine.filename = filename - - # Assert all the case-specific facts. - # This extracts 'facts' specific to this data-variable (aka cube), from - # the info supplied in the CFGroup object. - _assert_case_specific_facts(engine, cf, cf_var.cf_group) - - # Run the actions engine. - # This creates various cube elements and attaches them to the cube. - # It also records various other info on the engine, to be processed later. - engine.activate() - - # Having run the rules, now add the "unused" attributes to each cf element. - def fix_attributes_all_elements(role_name): - elements_and_names = engine.cube_parts.get(role_name, []) - - for iris_object, cf_var_name in elements_and_names: - _add_unused_attributes(iris_object, cf.cf_group[cf_var_name]) - - # Populate the attributes of all coordinates, cell-measures and ancillary-vars. - fix_attributes_all_elements("coordinates") - fix_attributes_all_elements("ancillary_variables") - fix_attributes_all_elements("cell_measures") - - # Also populate attributes of the top-level cube itself. - _add_unused_attributes(cube, cf_var) - - # Work out reference names for all the coords. - names = { - coord.var_name: coord.standard_name or coord.var_name or "unknown" - for coord in cube.coords() - } - - # Add all the cube cell methods. 
- cube.cell_methods = [ - iris.coords.CellMethod( - method=method.method, - intervals=method.intervals, - comments=method.comments, - coords=[ - names[coord_name] if coord_name in names else coord_name - for coord_name in method.coord_names - ], - ) - for method in cube.cell_methods - ] - - if DEBUG: - # Show activation statistics for this data-var (i.e. cube). - _actions_activation_stats(engine, cf_var.cf_name) - - return cube - - -def _load_aux_factory(engine, cube): - """ - Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory. - - """ - formula_type = engine.requires.get("formula_type") - if formula_type in [ - "atmosphere_sigma_coordinate", - "atmosphere_hybrid_height_coordinate", - "atmosphere_hybrid_sigma_pressure_coordinate", - "ocean_sigma_z_coordinate", - "ocean_sigma_coordinate", - "ocean_s_coordinate", - "ocean_s_coordinate_g1", - "ocean_s_coordinate_g2", - ]: - - def coord_from_term(term): - # Convert term names to coordinates (via netCDF variable names). - name = engine.requires["formula_terms"].get(term, None) - if name is not None: - for coord, cf_var_name in engine.cube_parts["coordinates"]: - if cf_var_name == name: - return coord - warnings.warn( - "Unable to find coordinate for variable " - "{!r}".format(name) - ) - - if formula_type == "atmosphere_sigma_coordinate": - pressure_at_top = coord_from_term("ptop") - sigma = coord_from_term("sigma") - surface_air_pressure = coord_from_term("ps") - factory = AtmosphereSigmaFactory( - pressure_at_top, sigma, surface_air_pressure - ) - elif formula_type == "atmosphere_hybrid_height_coordinate": - delta = coord_from_term("a") - sigma = coord_from_term("b") - orography = coord_from_term("orog") - factory = HybridHeightFactory(delta, sigma, orography) - elif formula_type == "atmosphere_hybrid_sigma_pressure_coordinate": - # Hybrid pressure has two valid versions of its formula terms: - # "p0: var1 a: var2 b: var3 ps: var4" or - # "ap: var1 b: var2 ps: var3" where "ap = p0 * a" - # Attempt to 
get the "ap" term. - delta = coord_from_term("ap") - if delta is None: - # The "ap" term is unavailable, so try getting terms "p0" - # and "a" terms in order to derive an "ap" equivalent term. - coord_p0 = coord_from_term("p0") - if coord_p0 is not None: - if coord_p0.shape != (1,): - msg = ( - "Expecting {!r} to be a scalar reference " - "pressure coordinate, got shape {!r}".format( - coord_p0.var_name, coord_p0.shape - ) - ) - raise ValueError(msg) - if coord_p0.has_bounds(): - msg = ( - "Ignoring atmosphere hybrid sigma pressure " - "scalar coordinate {!r} bounds.".format( - coord_p0.name() - ) - ) - warnings.warn(msg) - coord_a = coord_from_term("a") - if coord_a is not None: - if coord_a.units.is_unknown(): - # Be graceful, and promote unknown to dimensionless units. - coord_a.units = "1" - delta = coord_a * coord_p0.points[0] - delta.units = coord_a.units * coord_p0.units - delta.rename("vertical pressure") - delta.var_name = "ap" - cube.add_aux_coord(delta, cube.coord_dims(coord_a)) - - sigma = coord_from_term("b") - surface_air_pressure = coord_from_term("ps") - factory = HybridPressureFactory(delta, sigma, surface_air_pressure) - elif formula_type == "ocean_sigma_z_coordinate": - sigma = coord_from_term("sigma") - eta = coord_from_term("eta") - depth = coord_from_term("depth") - depth_c = coord_from_term("depth_c") - nsigma = coord_from_term("nsigma") - zlev = coord_from_term("zlev") - factory = OceanSigmaZFactory( - sigma, eta, depth, depth_c, nsigma, zlev - ) - elif formula_type == "ocean_sigma_coordinate": - sigma = coord_from_term("sigma") - eta = coord_from_term("eta") - depth = coord_from_term("depth") - factory = OceanSigmaFactory(sigma, eta, depth) - elif formula_type == "ocean_s_coordinate": - s = coord_from_term("s") - eta = coord_from_term("eta") - depth = coord_from_term("depth") - a = coord_from_term("a") - depth_c = coord_from_term("depth_c") - b = coord_from_term("b") - factory = OceanSFactory(s, eta, depth, a, b, depth_c) - elif 
formula_type == "ocean_s_coordinate_g1": - s = coord_from_term("s") - c = coord_from_term("c") - eta = coord_from_term("eta") - depth = coord_from_term("depth") - depth_c = coord_from_term("depth_c") - factory = OceanSg1Factory(s, c, eta, depth, depth_c) - elif formula_type == "ocean_s_coordinate_g2": - s = coord_from_term("s") - c = coord_from_term("c") - eta = coord_from_term("eta") - depth = coord_from_term("depth") - depth_c = coord_from_term("depth_c") - factory = OceanSg2Factory(s, c, eta, depth, depth_c) - cube.add_aux_factory(factory) - - -def _translate_constraints_to_var_callback(constraints): - """ - Translate load constraints into a simple data-var filter function, if possible. - - Returns: - * function(cf_var:CFDataVariable): --> bool, - or None. - - For now, ONLY handles a single NameConstraint with no 'STASH' component. - - """ - import iris._constraints - - constraints = iris._constraints.list_of_constraints(constraints) - result = None - if len(constraints) == 1: - (constraint,) = constraints - if ( - isinstance(constraint, iris._constraints.NameConstraint) - and constraint.STASH == "none" - ): - # As long as it doesn't use a STASH match, then we can treat it as - # a testing against name properties of cf_var. - # That's just like testing against name properties of a cube, except that they may not all exist. - def inner(cf_datavar): - match = True - for name in constraint._names: - expected = getattr(constraint, name) - if name != "STASH" and expected != "none": - attr_name = "cf_name" if name == "var_name" else name - # Fetch property : N.B. CFVariable caches the property values - # The use of a default here is the only difference from the code in NameConstraint. 
- if not hasattr(cf_datavar, attr_name): - continue - actual = getattr(cf_datavar, attr_name, "") - if actual != expected: - match = False - break - return match - - result = inner - return result - - -def load_cubes(filenames, callback=None, constraints=None): - """ - Loads cubes from a list of NetCDF filenames/URLs. - - Args: - - * filenames (string/list): - One or more NetCDF filenames/DAP URLs to load from. - - Kwargs: - - * callback (callable function): - Function which can be passed on to :func:`iris.io.run_callback`. - - Returns: - Generator of loaded NetCDF :class:`iris.cube.Cube`. - - """ - # TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded - # into standard behaviour. - # Deferred import to avoid circular imports. - from iris.experimental.ugrid.cf import CFUGridReader - from iris.experimental.ugrid.load import ( - PARSE_UGRID_ON_LOAD, - _build_mesh_coords, - _meshes_from_cf, - ) - from iris.io import run_callback - - # Create a low-level data-var filter from the original load constraints, if they are suitable. - var_callback = _translate_constraints_to_var_callback(constraints) - - # Create an actions engine. - engine = _actions_engine() - - if isinstance(filenames, str): - filenames = [filenames] - - for filename in filenames: - # Ingest the netCDF file. - meshes = {} - if PARSE_UGRID_ON_LOAD: - cf = CFUGridReader(filename) - meshes = _meshes_from_cf(cf) - else: - cf = iris.fileformats.cf.CFReader(filename) - - # Process each CF data variable. - data_variables = list(cf.cf_group.data_variables.values()) + list( - cf.cf_group.promoted.values() - ) - for cf_var in data_variables: - if var_callback and not var_callback(cf_var): - # Deliver only selected results. - continue - - # cf_var-specific mesh handling, if a mesh is present. - # Build the mesh_coords *before* loading the cube - avoids - # mesh-related attributes being picked up by - # _add_unused_attributes(). 
- mesh_name = None - mesh = None - mesh_coords, mesh_dim = [], None - if PARSE_UGRID_ON_LOAD: - mesh_name = getattr(cf_var, "mesh", None) - if mesh_name is not None: - try: - mesh = meshes[mesh_name] - except KeyError: - message = ( - f"File does not contain mesh: '{mesh_name}' - " - f"referenced by variable: '{cf_var.cf_name}' ." - ) - logger.debug(message) - if mesh is not None: - mesh_coords, mesh_dim = _build_mesh_coords(mesh, cf_var) - - cube = _load_cube(engine, cf, cf_var, filename) - - # Attach the mesh (if present) to the cube. - for mesh_coord in mesh_coords: - cube.add_aux_coord(mesh_coord, mesh_dim) - - # Process any associated formula terms and attach - # the corresponding AuxCoordFactory. - try: - _load_aux_factory(engine, cube) - except ValueError as e: - warnings.warn("{}".format(e)) - - # Perform any user registered callback function. - cube = run_callback(callback, cube, cf_var, filename) - - # Callback mechanism may return None, which must not be yielded - if cube is None: - continue - - yield cube - - def _bytes_if_ascii(string): """ Convert the given string to a byte string (str in py2k, bytes in py3k) @@ -946,7 +459,10 @@ def _setncattr(variable, name, attribute): Put the given attribute on the given netCDF4 Data type, casting attributes as we go to bytes rather than unicode. + NOTE: variable needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. + """ + assert hasattr(variable, "THREAD_SAFE_FLAG") attribute = _bytes_if_ascii(attribute) return variable.setncattr(name, attribute) @@ -957,9 +473,12 @@ class _FillValueMaskCheckAndStoreTarget: given value and whether it was masked, before passing the chunk to the given target. + NOTE: target needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. 
+ """ def __init__(self, target, fill_value=None): + assert hasattr(target, "THREAD_SAFE_FLAG") self.target = target self.fill_value = fill_value self.contains_value = False @@ -1031,7 +550,7 @@ def __init__(self, filename, netcdf_format): self._formula_terms_cache = {} #: NetCDF dataset try: - self._dataset = netCDF4.Dataset( + self._dataset = _thread_safe_nc.DatasetWrapper( filename, mode="w", format=netcdf_format ) except RuntimeError: @@ -1368,6 +887,8 @@ def _create_cf_dimensions( unlimited_dim_names.append(dim_name) for dim_name in dimension_names: + # NOTE: these dim-names have been chosen by _get_dim_names, and + # were already checked+fixed to avoid any name collisions. if dim_name not in self._dataset.dimensions: if dim_name in unlimited_dim_names: size = None @@ -1460,6 +981,10 @@ def _add_mesh(self, cube_or_mesh): last_dim = f"{cf_mesh_name}_{loc_from}_N_{loc_to}s" # Create if it does not already exist. if last_dim not in self._dataset.dimensions: + while last_dim in self._dataset.variables: + # Also avoid collision with variable names. + # See '_get_dim_names' for reason. + last_dim = self._increment_name(last_dim) length = conn.shape[1 - conn.location_axis] self._dataset.createDimension(last_dim, length) @@ -1758,7 +1283,9 @@ def _get_dim_names(self, cube_or_mesh): """ - def record_dimension(names_list, dim_name, length, matching_coords=[]): + def record_dimension( + names_list, dim_name, length, matching_coords=None + ): """ Record a file dimension, its length and associated "coordinates" (which may in fact also be connectivities). @@ -1767,6 +1294,8 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): matches the earlier finding. """ + if matching_coords is None: + matching_coords = [] if dim_name not in self._existing_dim: self._existing_dim[dim_name] = length else: @@ -1861,8 +1390,19 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): assert dim_name is not None # Ensure it is a valid variable name. 
dim_name = self.cf_valid_var_name(dim_name) - # Disambiguate if it matches an existing one. - while dim_name in self._existing_dim: + # Disambiguate if it has the same name as an existing + # dimension. + # NOTE: *OR* if it matches the name of an existing file + # variable. Because there is a bug ... + # See https://github.com/Unidata/netcdf-c/issues/1772 + # N.B. the workarounds here *ONLY* function because the + # caller (write) will not create any more variables + # in between choosing dim names (here), and creating + # the new dims (via '_create_cf_dimensions'). + while ( + dim_name in self._existing_dim + or dim_name in self._dataset.variables + ): dim_name = self._increment_name(dim_name) # Record the new dimension. @@ -1907,9 +1447,15 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): dim_name = self._get_coord_variable_name( cube, coord ) + # Disambiguate if it has the same name as an + # existing dimension. + # OR if it matches an existing file variable name. + # NOTE: check against variable names is needed + # because of a netcdf bug ... see note in the + # mesh dimensions block above. while ( dim_name in self._existing_dim - or dim_name in self._name_coord_map.names + or dim_name in self._dataset.variables ): dim_name = self._increment_name(dim_name) @@ -1917,16 +1463,18 @@ def record_dimension(names_list, dim_name, length, matching_coords=[]): # No CF-netCDF coordinates describe this data dimension. # Make up a new, distinct dimension name dim_name = f"dim{dim}" - if dim_name in self._existing_dim: - # Increment name if conflicted with one already existing. 
- if self._existing_dim[dim_name] != cube.shape[dim]: - while ( - dim_name in self._existing_dim - and self._existing_dim[dim_name] - != cube.shape[dim] - or dim_name in self._name_coord_map.names - ): - dim_name = self._increment_name(dim_name) + # Increment name if conflicted with one already existing + # (or planned) + # NOTE: check against variable names is needed because + # of a netcdf bug ... see note in the mesh dimensions + # block above. + while ( + dim_name in self._existing_dim + and ( + self._existing_dim[dim_name] != cube.shape[dim] + ) + ) or dim_name in self._dataset.variables: + dim_name = self._increment_name(dim_name) # Record the dimension. record_dimension( @@ -2057,6 +1605,12 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): if bounds_dimension_name not in self._dataset.dimensions: # Create the bounds dimension with the appropriate extent. + while bounds_dimension_name in self._dataset.variables: + # Also avoid collision with variable names. + # See '_get_dim_names' for reason. + bounds_dimension_name = self._increment_name( + bounds_dimension_name + ) self._dataset.createDimension(bounds_dimension_name, n_bounds) boundsvar_name = "{}_{}".format(cf_name, varname_extra) @@ -2337,6 +1891,12 @@ def _create_generic_cf_array_var( # Determine whether to create the string length dimension. if string_dimension_name not in self._dataset.dimensions: + while string_dimension_name in self._dataset.variables: + # Also avoid collision with variable names. + # See '_get_dim_names' for reason. 
+ string_dimension_name = self._increment_name( + string_dimension_name + ) self._dataset.createDimension( string_dimension_name, string_dimension_depth ) @@ -2511,6 +2071,8 @@ def add_ellipsoid(ellipsoid): else: cf_var_grid.semi_major_axis = semi_major cf_var_grid.semi_minor_axis = semi_minor + if ellipsoid.datum is not None: + cf_var_grid.horizontal_datum_name = ellipsoid.datum # latlon if isinstance(cs, iris.coord_systems.GeogCS): @@ -2553,11 +2115,15 @@ def add_ellipsoid(ellipsoid): cf_var_grid.longitude_of_projection_origin = ( cs.longitude_of_projection_origin ) - # The Mercator class has implicit defaults for certain - # parameters - cf_var_grid.false_easting = 0.0 - cf_var_grid.false_northing = 0.0 - cf_var_grid.scale_factor_at_projection_origin = 1.0 + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + # Only one of these should be set + if cs.standard_parallel is not None: + cf_var_grid.standard_parallel = cs.standard_parallel + elif cs.scale_factor_at_projection_origin is not None: + cf_var_grid.scale_factor_at_projection_origin = ( + cs.scale_factor_at_projection_origin + ) # lcc elif isinstance(cs, iris.coord_systems.LambertConformal): @@ -2569,27 +2135,46 @@ def add_ellipsoid(ellipsoid): cf_var_grid.false_easting = cs.false_easting cf_var_grid.false_northing = cs.false_northing - # stereo - elif isinstance(cs, iris.coord_systems.Stereographic): + # polar stereo (have to do this before Stereographic because it subclasses it) + elif isinstance(cs, iris.coord_systems.PolarStereographic): + if cs.ellipsoid: + add_ellipsoid(cs.ellipsoid) + cf_var_grid.latitude_of_projection_origin = cs.central_lat + cf_var_grid.straight_vertical_longitude_from_pole = ( + cs.central_lon + ) + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + # Only one of these should be set if cs.true_scale_lat is not None: - warnings.warn( - "Stereographic coordinate systems with " - "true scale 
latitude specified are not " - "yet handled" + cf_var_grid.true_scale_lat = cs.true_scale_lat + elif cs.scale_factor_at_projection_origin is not None: + cf_var_grid.scale_factor_at_projection_origin = ( + cs.scale_factor_at_projection_origin ) else: - if cs.ellipsoid: - add_ellipsoid(cs.ellipsoid) - cf_var_grid.longitude_of_projection_origin = ( - cs.central_lon + cf_var_grid.scale_factor_at_projection_origin = 1.0 + + # stereo + elif isinstance(cs, iris.coord_systems.Stereographic): + if cs.ellipsoid: + add_ellipsoid(cs.ellipsoid) + cf_var_grid.longitude_of_projection_origin = cs.central_lon + cf_var_grid.latitude_of_projection_origin = cs.central_lat + cf_var_grid.false_easting = cs.false_easting + cf_var_grid.false_northing = cs.false_northing + # Only one of these should be set + if cs.true_scale_lat is not None: + msg = ( + "It is not valid CF to save a true_scale_lat for " + "a Stereographic grid mapping." ) - cf_var_grid.latitude_of_projection_origin = ( - cs.central_lat + raise ValueError(msg) + elif cs.scale_factor_at_projection_origin is not None: + cf_var_grid.scale_factor_at_projection_origin = ( + cs.scale_factor_at_projection_origin ) - cf_var_grid.false_easting = cs.false_easting - cf_var_grid.false_northing = cs.false_northing - # The Stereographic class has an implicit scale - # factor + else: cf_var_grid.scale_factor_at_projection_origin = 1.0 # osgb (a specific tmerc) @@ -2738,9 +2323,9 @@ def _create_cf_data_variable( cmin, cmax = _co_realise_lazy_arrays([cmin, cmax]) n = dtype.itemsize * 8 if masked: - scale_factor = (cmax - cmin) / (2 ** n - 2) + scale_factor = (cmax - cmin) / (2**n - 2) else: - scale_factor = (cmax - cmin) / (2 ** n - 1) + scale_factor = (cmax - cmin) / (2**n - 1) if dtype.kind == "u": add_offset = cmin elif dtype.kind == "i": @@ -2752,7 +2337,13 @@ def _create_cf_data_variable( dtype = data.dtype.newbyteorder("=") def set_packing_ncattrs(cfvar): - """Set netCDF packing attributes.""" + """ + Set netCDF packing attributes. 
+ + NOTE: cfvar needs to be a _thread_safe_nc._ThreadSafeWrapper subclass. + + """ + assert hasattr(cfvar, "THREAD_SAFE_FLAG") if packing: if scale_factor: _setncattr(cfvar, "scale_factor", scale_factor) @@ -2865,6 +2456,14 @@ def _increment_name(self, varname): @staticmethod def _lazy_stream_data(data, fill_value, fill_warn, cf_var): + if hasattr(data, "shape") and data.shape == (1,) + cf_var.shape: + # (Don't do this check for string data). + # Reduce dimensionality where the data array has an extra dimension + # versus the cf_var - to avoid a broadcasting ambiguity. + # Happens when bounds data is for a scalar point - array is 2D but + # contains just 1 row, so the cf_var is 1D. + data = data.squeeze(axis=0) + if is_lazy_data(data): def store(data, cf_var, fill_value): @@ -2891,7 +2490,9 @@ def store(data, cf_var, fill_value): if fill_value is not None: fill_value_to_check = fill_value else: - fill_value_to_check = netCDF4.default_fillvals[dtype.str[1:]] + fill_value_to_check = _thread_safe_nc.default_fillvals[ + dtype.str[1:] + ] else: fill_value_to_check = None diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index b0e0f6e864..fd1ccb0e95 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -24,7 +24,7 @@ NIMROD_DEFAULT = -32767.0 TIME_UNIT = cf_units.Unit( - "seconds since 1970-01-01 00:00:00", calendar=cf_units.CALENDAR_GREGORIAN + "seconds since 1970-01-01 00:00:00", calendar=cf_units.CALENDAR_STANDARD ) @@ -33,7 +33,7 @@ class TranslationWarning(Warning): def is_missing(field, value): - """Returns True if value matches an "is-missing" number.""" + """Return True if value matches an "is-missing" number.""" return any( np.isclose(value, [field.int_mdi, field.float32_mdi, NIMROD_DEFAULT]) ) @@ -86,7 +86,8 @@ def name(cube, field, handle_metadata_errors): def remove_unprintable_chars(input_str): """ - Removes unprintable characters from a string and returns 
the result. + Remove unprintable characters from a string and return the result. + """ return "".join( c if c in string.printable else " " for c in input_str @@ -278,7 +279,7 @@ def forecast_period(cube): def mask_cube(cube, field): """ - Updates cube.data to be a masked array if appropriate. + Update cube.data to be a masked array if appropriate. """ dtype = cube.dtype @@ -307,16 +308,17 @@ def experiment(cube, field): def proj_biaxial_ellipsoid(field, handle_metadata_errors): """ - Returns the correct dictionary of arguments needed to define an + Return the correct dictionary of arguments needed to define an iris.coord_systems.GeogCS. Based firstly on the value given by ellipsoid, then by grid if ellipsoid is missing, select the right pre-defined ellipsoid dictionary (Airy_1830 or international_1924). - References: - Airy 1830: https://georepository.com/ellipsoid_7001/Airy-1830.html - International 1924: https://georepository.com/ellipsoid_7022/International-1924.html + References + ---------- + Airy 1830: https://georepository.com/ellipsoid_7001/Airy-1830.html + International 1924: https://georepository.com/ellipsoid_7022/International-1924.html """ airy_1830 = { @@ -357,10 +359,12 @@ def proj_biaxial_ellipsoid(field, handle_metadata_errors): def set_british_national_grid_defaults(field, handle_metadata_errors): - """Check for missing coord-system meta-data and set default values for + """ + Check for missing coord-system meta-data and set default values for the Ordnance Survey GB Transverse Mercator projection. Some Radarnet - files are missing these.""" + files are missing these. + """ if handle_metadata_errors: if is_missing(field, field.true_origin_latitude): field.true_origin_latitude = 49.0 @@ -472,8 +476,12 @@ def horizontal_grid(cube, field, handle_metadata_errors): def vertical_coord(cube, field): - """Add a vertical coord to the cube, with bounds, if appropriate. 
- Handles special numbers for "at-sea-level" (8888) and "at-ground-level" (9999).""" + """ + Add a vertical coord to the cube, with bounds, if appropriate. + Handles special numbers for "at-sea-level" (8888) and "at-ground-level" + (9999). + + """ # vertical_codes contains conversions from the Nimrod Documentation for the # header entry 20 for the vertical coordinate type # Unhandled vertical_codes values (no use case identified): @@ -670,7 +678,10 @@ def add_attr(item): def known_threshold_coord(field): """ Supplies known threshold coord meta-data for known use cases. - threshold_value_alt exists because some meta-data are mis-assigned in the Nimrod data. + + threshold_value_alt exists because some meta-data are mis-assigned in the + Nimrod data. + """ coord_keys = {} if ( @@ -715,9 +726,11 @@ def known_threshold_coord(field): def probability_coord(cube, field, handle_metadata_errors): """ - Adds a coord relating to probability meta-data from the header to the + Add a coord relating to probability meta-data from the header to the cube if appropriate. + Must be run after the name method. + """ probtype_lookup = { 1: { @@ -848,7 +861,7 @@ def probability_coord(cube, field, handle_metadata_errors): def soil_type_coord(cube, field): - """Add soil type as a coord if appropriate""" + """Add soil type as a coord if appropriate.""" soil_type_codes = { 1: "broadleaf_tree", 2: "needleleaf_tree", @@ -905,17 +918,18 @@ def run(field, handle_metadata_errors=True): """ Convert a NIMROD field to an Iris cube. - Args: - - * field - a :class:`~iris.fileformats.nimrod.NimrodField` - - * handle_metadata_errors - Set to False to omit handling of known meta-data deficiencies - in Nimrod-format data - - Returns: + Args + ---- + field: :class:`~iris.fileformats.nimrod.NimrodField` - * A new :class:`~iris.cube.Cube`, created from the NimrodField. 
+ handle_metadata_errors + Set to False to omit handling of known meta-data deficiencies + in Nimrod-format data + Returns + ------- + :class:`~iris.cube.Cube` + A new :class:`~iris.cube.Cube`, created from the NimrodField. """ cube = iris.cube.Cube(field.data) diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index 9f213ec4db..cff088cf89 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -253,14 +253,13 @@ class STASH(collections.namedtuple("STASH", "model section item")): def __new__(cls, model, section, item): """ - - Args: - - * model + Args + ---- + model A positive integer less than 100, or None. - * section + section A non-negative integer less than 100, or None. - * item + item A positive integer less than 1000, or None. """ @@ -350,7 +349,9 @@ class SplittableInt: >>> print(three_six_two[2]) 3 - .. note:: No support for negative numbers + Notes + ----- + No support for negative numbers """ @@ -358,11 +359,12 @@ def __init__(self, value, name_mapping_dict=None): """ Build a SplittableInt given the positive integer value provided. 
- Kwargs: - - * name_mapping_dict - (dict) - A special mapping to provide name based access to specific integer - positions: + Args + ---- + **kwargs + * name_mapping_dict - (dict) + A special mapping to provide name based access to specific + integer positions: >>> a = SplittableInt(1234, {'hundreds': 2}) >>> print(a.hundreds) @@ -373,6 +375,7 @@ def __init__(self, value, name_mapping_dict=None): >>> print(a) 1934 + """ if value < 0: raise ValueError( @@ -403,7 +406,7 @@ def _calculate_str_value_from_value(self): def _calculate_value_from_str_value(self): self._value = np.sum( - [10 ** i * val for i, val in enumerate(self._strvalue)] + [10**i * val for i, val in enumerate(self._strvalue)] ) def __len__(self): @@ -418,7 +421,7 @@ def __getitem__(self, key): # if the key returns a list of values, then combine them together # to an integer if isinstance(val, list): - val = sum([10 ** i * val for i, val in enumerate(val)]) + val = sum([10**i * val for i, val in enumerate(val)]) return val @@ -622,7 +625,7 @@ def __getstate__(self): def __setstate__(self, state): # Because we have __slots__, this is needed to support Pickle.load() # (Use setattr, as there is no object dictionary.) - for (key, value) in state: + for key, value in state: setattr(self, key, value) def __eq__(self, other): @@ -764,6 +767,18 @@ def _data_bytes_to_shaped_array( else: # Reform in row-column order + actual_length = np.prod(data.shape) + if (expected_length := np.prod(data_shape)) != actual_length: + if (expected_length < actual_length) and (data.ndim == 1): + # known use case where mule adds padding to data payload + # for a collapsed field. + data = data[:expected_length] + else: + emsg = ( + f"PP field data containing {actual_length} words does not " + f"match expected length of {expected_length} words." 
+ ) + raise ValueError(emsg) data.shape = data_shape # Mask the array @@ -789,7 +804,7 @@ def _data_bytes_to_shaped_array( def _header_defn(release_number): """ - Returns the zero-indexed header definition for a particular release of + Return the zero-indexed header definition for a particular release of a PPField. """ @@ -803,7 +818,7 @@ def _header_defn(release_number): def _pp_attribute_names(header_defn): """ - Returns the allowed attributes of a PPField: + Return the allowed attributes of a PPField: all of the normal headers (i.e. not the _SPECIAL_HEADERS), the _SPECIAL_HEADERS with '_' prefixed, the possible extra data headers. @@ -860,7 +875,7 @@ def __init__(self, header=None): def __getattr__(self, key): """ - This method supports deferred attribute creation, which offers a + Method supports deferred attribute creation, which offers a significant loading optimisation, particularly when not all attributes are referenced and therefore created on the instance. @@ -922,7 +937,6 @@ def t2(self): def __repr__(self): """Return a string representation of the PP field.""" - # Define an ordering on the basic header names attribute_priority_lookup = { name: loc[0] for name, loc in self.HEADER_DEFN @@ -960,7 +974,7 @@ def __repr__(self): @property def stash(self): """ - A stash property giving access to the associated STASH object, + Stash property giving access to the associated STASH object, now supporting __eq__ """ @@ -1044,7 +1058,7 @@ def lbproc(self, value): @property def data(self): """ - The :class:`numpy.ndarray` representing the multidimensional data + :class:`numpy.ndarray` representing the multidimensional data of the pp file """ @@ -1064,7 +1078,7 @@ def core_data(self): def calendar(self): """Return the calendar of the field.""" # TODO #577 What calendar to return when ibtim.ic in [0, 3] - calendar = cf_units.CALENDAR_GREGORIAN + calendar = cf_units.CALENDAR_STANDARD if self.lbtim.ic == 2: calendar = cf_units.CALENDAR_360_DAY elif self.lbtim.ic == 4: @@ 
-1075,7 +1089,6 @@ def _read_extra_data( self, pp_file, file_reader, extra_len, little_ended=False ): """Read the extra data section and update the self appropriately.""" - dtype_endian_char = "<" if little_ended else ">" # While there is still extra data to decode run this loop while extra_len > 0: @@ -1124,10 +1137,8 @@ def y_bounds(self): def save(self, file_handle): """ - Save the PPField to the given file object - (typically created with :func:`open`). - - :: + Save the PPField to the given file object. + (typically created with :func:`open`):: # to append the field to a file with open(filename, 'ab') as fh: @@ -1137,15 +1148,13 @@ def save(self, file_handle): with open(filename, 'wb') as fh: a_pp_field.save(fh) - - .. note:: - - The fields which are automatically calculated are: 'lbext', - 'lblrec' and 'lbuser[0]'. Some fields are not currently - populated, these are: 'lbegin', 'lbnrec', 'lbuser[1]'. + Notes + ----- + The fields which are automatically calculated are: 'lbext', + 'lblrec' and 'lbuser[0]'. Some fields are not currently + populated, these are: 'lbegin', 'lbnrec', 'lbuser[1]'. """ - # Get the actual data content. data = self.data mdi = self.bmdi @@ -1361,9 +1370,9 @@ def time_unit(self, time_unit, epoch="epoch"): def coord_system(self): """Return a CoordSystem for this PPField. - Returns: - Currently, a :class:`~iris.coord_systems.GeogCS` or - :class:`~iris.coord_systems.RotatedGeogCS`. + Returns + ------- + :class:`~iris.coord_systems.GeogCS` or class:`~iris.coord_systems.RotatedGeogCS`. """ geog_cs = iris.coord_systems.GeogCS(EARTH_RADIUS) @@ -1408,9 +1417,11 @@ def _y_coord_name(self): def copy(self): """ - Returns a deep copy of this PPField. + Return a deep copy of this PPField. - Returns: + Returns + ------- + :class:`PPField`: A copy instance of the :class:`PPField`. 
""" @@ -1470,7 +1481,7 @@ class PPField2(PPField): @property def t1(self): """ - A cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, and lbmin attributes. """ @@ -1504,7 +1515,7 @@ def t1(self, dt): @property def t2(self): """ - A cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, and lbmind attributes. """ @@ -1551,7 +1562,7 @@ class PPField3(PPField): @property def t1(self): """ - A cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, + cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, lbmin, and lbsec attributes. """ @@ -1586,7 +1597,7 @@ def t1(self, dt): @property def t2(self): """ - A cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, + cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, lbmind, and lbsecd attributes. """ @@ -1638,20 +1649,20 @@ def load(filename, read_data=False, little_ended=False): """ Return an iterator of PPFields given a filename. - Args: - - * filename - string of the filename to load. - - Kwargs: - - * read_data - boolean - Flag whether or not the data should be read, if False an empty - data manager will be provided which can subsequently load the data - on demand. Default False. - - * little_ended - boolean - If True, file contains all little-ended words (header and data). - + Args + ---- + filename + string of the filename to load. + **kwargs + * read_data - boolean + Flag whether or not the data should be read, if False an empty + data manager will be provided which can subsequently load the data + on demand. Default False. + * little_ended - boolean + If True, file contains all little-ended words (header and data). 
+ + Notes + ----- To iterate through all of the fields in a pp file:: for field in iris.fileformats.pp.load(filename): @@ -1737,7 +1748,7 @@ def _interpret_fields(fields): def _create_field_data(field, data_shape, land_mask_field=None): """ - Modifies a field's ``_data`` attribute either by: + Modify a field's ``_data`` attribute either by: * converting a 'deferred array bytes' tuple into a lazy array, * converting LoadedArrayBytes into an actual numpy array. @@ -1834,7 +1845,7 @@ def calc_array(mask, values): def _field_gen(filename, read_data_bytes, little_ended=False): """ - Returns a generator of "half-formed" PPField instances derived from + Return a generator of "half-formed" PPField instances derived from the given filename. A field returned by the generator is only "half-formed" because its @@ -1966,7 +1977,7 @@ def _field_gen(filename, read_data_bytes, little_ended=False): def _convert_constraints(constraints): """ - Converts known constraints from Iris semantics to PP semantics + Convert known constraints from Iris semantics to PP semantics ignoring all unknown constraints. """ @@ -1976,8 +1987,9 @@ def _convert_constraints(constraints): def _make_func(stashobj): """ - Provides unique name-space for each lambda function's stashobj + Provide unique name-space for each lambda function's stashobj variable. + """ return lambda stash: stash == stashobj @@ -2010,17 +2022,15 @@ def _make_func(stashobj): def pp_filter(field): """ - return True if field is to be kept, + Return True if field is to be kept, False if field does not match filter """ res = True if field.stash not in _STASH_ALLOW: if pp_constraints.get("stash"): - res = False for call_func in pp_constraints["stash"]: - if call_func(str(field.stash)): res = True break @@ -2035,24 +2045,23 @@ def pp_filter(field): def load_cubes(filenames, callback=None, constraints=None): """ - Loads cubes from a list of pp filenames. 
- - Args: - - * filenames - list of pp filenames to load - - Kwargs: - - * constraints - a list of Iris constraints - - * callback - a function which can be passed on to - :func:`iris.io.run_callback` - - .. note:: - - The resultant cubes may not be in the order that they are in the file - (order is not preserved when there is a field with orography - references) + Load cubes from a list of pp filenames. + + Args + ---- + filenames + list of pp filenames to load + **kwargs + * constraints + a list of Iris constraints + * callback + a function which can be passed on to :func:`iris.io.run_callback` + + Notes + ----- + The resultant cubes may not be in the order that they are in the file + (order is not preserved when there is a field with orography + references) """ return _load_cubes_variable_loader( @@ -2062,24 +2071,23 @@ def load_cubes(filenames, callback=None, constraints=None): def load_cubes_little_endian(filenames, callback=None, constraints=None): """ - Loads cubes from a list of pp filenames containing little-endian data. - - Args: - - * filenames - list of pp filenames to load - - Kwargs: - - * constraints - a list of Iris constraints - - * callback - a function which can be passed on to - :func:`iris.io.run_callback` - - .. note:: - - The resultant cubes may not be in the order that they are in the file - (order is not preserved when there is a field with orography - references) + Load cubes from a list of pp filenames containing little-endian data. 
+ + Args + ---- + filenames + list of pp filenames to load + **kwargs + * constraints + a list of Iris constraints + * callback + a function which can be passed on to :func:`iris.io.run_callback` + + Notes + ----- + The resultant cubes may not be in the order that they are in the file + (order is not preserved when there is a field with orography + references) """ return _load_cubes_variable_loader( @@ -2096,14 +2104,18 @@ def load_pairs_from_fields(pp_fields): Convert an iterable of PP fields into an iterable of tuples of (Cubes, PPField). - Args: - - * pp_fields: + Args + ---- + pp_fields: An iterable of :class:`iris.fileformats.pp.PPField`. - Returns: + Returns + ------- + :class:`iris.cube.Cube` An iterable of :class:`iris.cube.Cube`\ s. + Notes + ----- This capability can be used to filter out fields before they are passed to the load pipeline, and amend the cubes once they are created, using PP metadata conditions. Where this filtering @@ -2192,26 +2204,30 @@ def save(cube, target, append=False, field_coords=None): """ Use the PP saving rules (and any user rules) to save a cube to a PP file. - Args: - - * cube - A :class:`iris.cube.Cube` - * target - A filename or open file handle. - - Kwargs: + Args + ---- + cube: :class:`iris.cube.Cube` - * append - Whether to start a new file afresh or add the cube(s) - to the end of the file. - Only applicable when target is a filename, not a file - handle. - Default is False. - - * field_coords - list of 2 coords or coord names which are to be used - for reducing the given cube into 2d slices, - which will ultimately determine the x and y - coordinates of the resulting fields. - If None, the final two dimensions are chosen - for slicing. + target + A filename or open file handle. + **kwargs + * append + Whether to start a new file afresh or add the cube(s) + to the end of the file. + Only applicable when target is a filename, not a file + handle. + Default is False. 
+ * field_coords + list of 2 coords or coord names which are to be used + for reducing the given cube into 2d slices, + which will ultimately determine the x and y + coordinates of the resulting fields. + If None, the final two dimensions are chosen + for slicing. + + Notes + ----- See also :func:`iris.io.save`. Note that :func:`iris.save` is the preferred method of saving. This allows a :class:`iris.cube.CubeList` or a sequence of cubes to be saved to a PP file. @@ -2226,21 +2242,18 @@ def save_pairs_from_cube(cube, field_coords=None, target=None): Use the PP saving rules to convert a cube or iterable of cubes to an iterable of (2D cube, PP field) pairs. - Args: - - * cube: + Args + ---- + cube: A :class:`iris.cube.Cube` - - Kwargs: - - * field_coords: - List of 2 coords or coord names which are to be used for - reducing the given cube into 2d slices, which will ultimately - determine the x and y coordinates of the resulting fields. - If None, the final two dimensions are chosen for slicing. - - * target: - A filename or open file handle. + **kwargs + * field_coords: + List of 2 coords or coord names which are to be used for + reducing the given cube into 2d slices, which will ultimately + determine the x and y coordinates of the resulting fields. + If None, the final two dimensions are chosen for slicing. + * target: + A filename or open file handle. """ # Open issues @@ -2348,21 +2361,18 @@ def as_fields(cube, field_coords=None, target=None): Use the PP saving rules (and any user rules) to convert a cube to an iterable of PP fields. - Args: - - * cube: + Args + ---- + cube A :class:`iris.cube.Cube` - - Kwargs: - - * field_coords: - List of 2 coords or coord names which are to be used for - reducing the given cube into 2d slices, which will ultimately - determine the x and y coordinates of the resulting fields. - If None, the final two dimensions are chosen for slicing. - - * target: - A filename or open file handle. 
+ **kwargs + * field_coords: + List of 2 coords or coord names which are to be used for + reducing the given cube into 2d slices, which will ultimately + determine the x and y coordinates of the resulting fields. + If None, the final two dimensions are chosen for slicing. + * target: + A filename or open file handle. """ return ( @@ -2377,22 +2387,22 @@ def save_fields(fields, target, append=False): """ Save an iterable of PP fields to a PP file. - Args: - - * fields: + Args + ---- + fields: An iterable of PP fields. - * target: + target: A filename or open file handle. - - Kwargs: - - * append: - Whether to start a new file afresh or add the cube(s) to the end - of the file. - Only applicable when target is a filename, not a file handle. - Default is False. - - See also :func:`iris.io.save`. + **kwargs + * append: + Whether to start a new file afresh or add the cube(s) to the end + of the file. + Only applicable when target is a filename, not a file handle. + Default is False. + + See Also + -------- + :func:`iris.io.save`. """ # Open issues diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 82f40dbf14..11d03e978a 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -9,6 +9,7 @@ # SciTools/iris-code-generators:tools/gen_rules.py import calendar +from functools import wraps import cf_units import numpy as np @@ -514,7 +515,7 @@ def _new_coord_and_dims( _HOURS_UNIT = cf_units.Unit("hours") -def _epoch_date_hours(epoch_hours_unit, datetime): +def _epoch_date_hours_internals(epoch_hours_unit, datetime): """ Return an 'hours since epoch' number for a date. @@ -547,7 +548,7 @@ def _epoch_date_hours(epoch_hours_unit, datetime): if m == 0: # Add a 'January', by changing month=0 to 1. 
m = 1 - if calendar == cf_units.CALENDAR_GREGORIAN: + if calendar == cf_units.CALENDAR_STANDARD: days_offset += 31 elif calendar == cf_units.CALENDAR_360_DAY: days_offset += 30 @@ -560,7 +561,7 @@ def _epoch_date_hours(epoch_hours_unit, datetime): if y == 0: # Add a 'Year 0', by changing year=0 to 1. y = 1 - if calendar == cf_units.CALENDAR_GREGORIAN: + if calendar == cf_units.CALENDAR_STANDARD: days_in_year_0 = 366 elif calendar == cf_units.CALENDAR_360_DAY: days_in_year_0 = 360 @@ -589,6 +590,30 @@ def _epoch_date_hours(epoch_hours_unit, datetime): return epoch_hours +_epoch_date_hours_cache = {} +_epoch_date_hours_cache_max_size = 128 # lru_cache default + + +@wraps(_epoch_date_hours_internals) +def _epoch_date_hours(epoch_hours_unit, datetime): + # Not using functools.lru_cache because it does an equality check that fails + # on datetime objects from different calendars. + + key = (epoch_hours_unit, hash(datetime)) + + if key not in _epoch_date_hours_cache: + _epoch_date_hours_cache[key] = _epoch_date_hours_internals( + epoch_hours_unit, datetime + ) + + # Limit cache size + while len(_epoch_date_hours_cache) > _epoch_date_hours_cache_max_size: + oldest_item = next(iter(_epoch_date_hours_cache)) + _epoch_date_hours_cache.pop(oldest_item, None) + + return _epoch_date_hours_cache[key] + + def _convert_time_coords( lbcode, lbtim, @@ -731,7 +756,6 @@ def date2year(t_in): ) ) ): - coords_and_dims.append( _new_coord_and_dims( do_vector, diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index ed156b5a05..0369fc9fd0 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -398,7 +398,7 @@ def _calendar_rules(cube, pp): if time_coord is not None: if time_coord.units.calendar == "360_day": pp.lbtim.ic = 2 - elif time_coord.units.calendar == "gregorian": + elif time_coord.units.calendar == "standard": pp.lbtim.ic = 1 elif time_coord.units.calendar == "365_day": pp.lbtim.ic = 4 @@ -422,7 
+422,16 @@ def _grid_and_pole_rules(cube, pp): lat_coord = vector_coord(cube, "latitude") grid_lat_coord = vector_coord(cube, "grid_latitude") - if lon_coord and not is_regular(lon_coord): + scalar_lon_coord = scalar_coord(cube, "longitude") + + if lon_coord is None and grid_lon_coord is None and scalar_lon_coord: + # default value of 360.0 degrees to specify a circular wrap of + # the collapsed scalar longitude coordinate, based on examples + # of model output for several different diagnostics + pp.bdx = (unit := scalar_lon_coord.units) and unit.modulus or 360.0 + pp.bzx = scalar_lon_coord.points[0] - pp.bdx + pp.lbnpt = scalar_lon_coord.shape[0] + elif lon_coord and not is_regular(lon_coord): pp.bzx = 0 pp.bdx = 0 pp.lbnpt = lon_coord.shape[0] diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py index 07ed5eb8ce..51940b7c4d 100644 --- a/lib/iris/fileformats/rules.py +++ b/lib/iris/fileformats/rules.py @@ -394,7 +394,7 @@ def _load_pairs_from_fields_and_filenames( yield (cube, field) regrid_cache = {} - for (cube, factories, field) in results_needing_reference: + for cube, factories, field in results_needing_reference: _resolve_factory_references( cube, factories, concrete_reference_targets, regrid_cache ) diff --git a/lib/iris/fileformats/um/_fast_load_structured_fields.py b/lib/iris/fileformats/um/_fast_load_structured_fields.py index d193aa30ce..64b7f8e891 100644 --- a/lib/iris/fileformats/um/_fast_load_structured_fields.py +++ b/lib/iris/fileformats/um/_fast_load_structured_fields.py @@ -133,6 +133,7 @@ def element_arrays_and_dims(self): def _field_vector_element_arrays(self): """Define the field components used in the structure analysis.""" + # Define functions to make t1 and t2 values as date-time tuples. # These depend on header version (PPField2 has no seconds values). 
def t1_fn(fld): diff --git a/lib/iris/fileformats/um_cf_map.py b/lib/iris/fileformats/um_cf_map.py index 8aee67ae3e..01539960a5 100644 --- a/lib/iris/fileformats/um_cf_map.py +++ b/lib/iris/fileformats/um_cf_map.py @@ -16,10 +16,12 @@ LBFC_TO_CF = { 5: CFName('atmosphere_boundary_layer_thickness', None, 'm'), 16: CFName('air_temperature', None, 'K'), + 22: CFName('wet_bulb_potential_temperature', None, 'K'), 23: CFName('soil_temperature', None, 'K'), 27: CFName('air_density', None, 'kg m-3'), 36: CFName('land_area_fraction', None, '1'), 37: CFName('sea_ice_area_fraction', None, '1'), + 42: CFName('upward_air_velocity', None, 'm s-1'), 50: CFName('wind_speed', None, 'm s-1'), 56: CFName('x_wind', None, 'm s-1'), 57: CFName('y_wind', None, 'm s-1'), @@ -28,11 +30,16 @@ 83: CFName('potential_vorticity_of_atmosphere_layer', None, 'Pa-1 s-1'), 94: CFName('convective_rainfall_amount', None, 'kg m-2'), 97: CFName('rainfall_flux', None, 'kg m-2 s-1'), + 98: CFName('convective_rainfall_flux', None, 'kg m-2 s-1'), + 99: CFName('stratiform_rainfall_flux', None, 'kg m-2 s-1'), 102: CFName('stratiform_rainfall_amount', None, 'kg m-2'), + 106: CFName('soil_moisture_content', None, 'kg m-2'), 108: CFName('snowfall_flux', None, 'kg m-2 s-1'), 111: CFName('surface_runoff_amount', None, 'kg m-2'), 116: CFName('stratiform_snowfall_amount', None, 'kg m-2'), 117: CFName('convective_snowfall_amount', None, 'kg m-2'), + 118: CFName('stratiform_snowfall_flux', None, 'kg m-2 s-1'), + 119: CFName('convective_snowfall_flux', None, 'kg m-2 s-1'), 122: CFName('moisture_content_of_soil_layer', None, 'kg m-2'), 183: CFName('wind_speed', None, 'm s-1'), 200: CFName('toa_incoming_shortwave_flux', None, 'W m-2'), @@ -899,7 +906,7 @@ 'm01s30i301': CFName(None, 'Heavyside function on pressure levels', '1'), 'm01s30i302': CFName('virtual_temperature', None, 'K'), 'm01s30i310': CFName('northward_transformed_eulerian_mean_air_velocity', None, 'm s-1'), - 'm01s30i311': 
CFName('northward_transformed_eulerian_mean_air_velocity', None, 'm s-1'), + 'm01s30i311': CFName('upward_transformed_eulerian_mean_air_velocity', None, 'm s-1'), 'm01s30i312': CFName('northward_eliassen_palm_flux_in_air', None, 'kg s-2'), 'm01s30i313': CFName('upward_eliassen_palm_flux_in_air', None, 'kg s-2'), 'm01s30i314': CFName('tendency_of_eastward_wind_due_to_eliassen_palm_flux_divergence', None, 'm s-2'), @@ -1157,7 +1164,9 @@ CFName('cloud_area_fraction_in_atmosphere_layer', None, '1'): 1720, CFName('convective_cloud_area_fraction', None, '1'): 34, CFName('convective_rainfall_amount', None, 'kg m-2'): 94, + CFName('convective_rainfall_flux', None, 'kg m-2 s-1'): 98, CFName('convective_snowfall_amount', None, 'kg m-2'): 117, + CFName('convective_snowfall_flux', None, 'kg m-2 s-1'): 119, CFName('dimensionless_exner_function', None, '1'): 7, CFName('divergence_of_wind', None, 's-1'): 74, CFName('downward_heat_flux_in_sea_ice', None, 'W m-2'): 261, @@ -1203,6 +1212,7 @@ CFName('soil_albedo', None, '1'): 1395, CFName('soil_carbon_content', None, 'kg m-2'): 1397, CFName('soil_hydraulic_conductivity_at_saturation', None, 'm s-1'): 333, + CFName('soil_moisture_content', None, 'kg m-2'): 106, CFName('soil_moisture_content_at_field_capacity', None, 'kg m-2'): 1559, CFName('soil_porosity', None, '1'): 332, CFName('soil_suction_at_saturation', None, 'Pa'): 342, @@ -1212,8 +1222,10 @@ CFName('specific_kinetic_energy_of_air', None, 'm2 s-2'): 60, CFName('stratiform_cloud_area_fraction_in_atmosphere_layer', None, '1'): 220, CFName('stratiform_rainfall_amount', None, 'kg m-2'): 102, + CFName('stratiform_rainfall_flux', None, 'kg m-2 s-1'): 99, CFName('stratiform_rainfall_rate', None, 'kg m-2 s-1'): 99, CFName('stratiform_snowfall_amount', None, 'kg m-2'): 116, + CFName('stratiform_snowfall_flux', None, 'kg m-2 s-1'): 118, CFName('subsurface_runoff_amount', None, 'kg m-2'): 112, CFName('subsurface_runoff_flux', None, 'kg m-2 s-1'): 1533, 
CFName('surface_albedo_assuming_deep_snow', None, '1'): 328, @@ -1260,6 +1272,7 @@ CFName('volume_fraction_of_condensed_water_in_soil_at_critical_point', None, '1'): 330, CFName('volume_fraction_of_condensed_water_in_soil_at_wilting_point', None, '1'): 329, CFName('water_potential_evaporation_flux', None, 'kg m-2 s-1'): 115, + CFName('wet_bulb_potential_temperature', None, 'K'): 22, CFName('wind_mixing_energy_flux_into_sea_water', None, 'W m-2'): 182, CFName('wind_speed', None, 'm s-1'): 50, CFName('x_wind', None, 'm s-1'): 56, diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index 034fa4baab..7dd08c723c 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -59,6 +59,11 @@ def run_callback(callback, cube, field, filename): It is possible that this function returns None for certain callbacks, the caller of this function should handle this case. + .. note:: + + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ from iris.cube import Cube @@ -131,20 +136,26 @@ def decode_uri(uri, default="file"): return scheme, part -def expand_filespecs(file_specs): +def expand_filespecs(file_specs, files_expected=True): """ Find all matching file paths from a list of file-specs. - Args: - - * file_specs (iterable of string): - File paths which may contain '~' elements or wildcards. - - Returns: - A well-ordered list of matching absolute file paths. - If any of the file-specs match no existing files, an - exception is raised. - + Parameters + ---------- + file_specs : iterable of str + File paths which may contain ``~`` elements or wildcards. + files_expected : bool, default=True + Whether file is expected to exist (i.e. for load). + + Returns + ------- + list of str + if files_expected is ``True``: + A well-ordered list of matching absolute file paths. + If any of the file-specs match no existing files, an + exception is raised. 
+ if files_expected is ``False``: + A list of expanded file paths. """ # Remove any hostname component - currently unused filenames = [ @@ -154,26 +165,30 @@ def expand_filespecs(file_specs): for fn in file_specs ] - # Try to expand all filenames as globs - glob_expanded = OrderedDict( - [[fn, sorted(glob.glob(fn))] for fn in filenames] - ) - - # If any of the specs expanded to an empty list then raise an error - all_expanded = glob_expanded.values() - - if not all(all_expanded): - msg = "One or more of the files specified did not exist:" - for pattern, expanded in glob_expanded.items(): - if expanded: - msg += '\n - "{}" matched {} file(s)'.format( - pattern, len(expanded) - ) - else: - msg += '\n * "{}" didn\'t match any files'.format(pattern) - raise IOError(msg) + if files_expected: + # Try to expand all filenames as globs + glob_expanded = OrderedDict( + [[fn, sorted(glob.glob(fn))] for fn in filenames] + ) - return [fname for fnames in all_expanded for fname in fnames] + # If any of the specs expanded to an empty list then raise an error + all_expanded = glob_expanded.values() + if not all(all_expanded): + msg = "One or more of the files specified did not exist:" + for pattern, expanded in glob_expanded.items(): + if expanded: + msg += '\n - "{}" matched {} file(s)'.format( + pattern, len(expanded) + ) + else: + msg += '\n * "{}" didn\'t match any files'.format( + pattern + ) + raise IOError(msg) + result = [fname for fnames in all_expanded for fname in fnames] + else: + result = filenames + return result def load_files(filenames, callback, constraints=None): @@ -216,7 +231,7 @@ def load_files(filenames, callback, constraints=None): def load_http(urls, callback): """ - Takes a list of urls and a callback function, and returns a generator + Takes a list of OPeNDAP URLs and a callback function, and returns a generator of Cubes from the given URLs. .. 
note:: @@ -226,11 +241,11 @@ def load_http(urls, callback): """ # Create default dict mapping iris format handler to its associated filenames + from iris.fileformats import FORMAT_AGENT + handler_map = collections.defaultdict(list) for url in urls: - handling_format_spec = iris.fileformats.FORMAT_AGENT.get_spec( - url, None - ) + handling_format_spec = FORMAT_AGENT.get_spec(url, None) handler_map[handling_format_spec].append(url) # Call each iris format handler with the appropriate filenames @@ -356,65 +371,69 @@ def save(source, target, saver=None, **kwargs): A custom saver can be provided to the function to write to a different file format. - Args: - - * source: - :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or - sequence of cubes. - * target: - A filename (or writeable, depending on file format). + Parameters + ---------- + source : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList` + target : str or pathlib.PurePath or io.TextIOWrapper When given a filename or file, Iris can determine the - file format. Filename can be given as a string or - :class:`pathlib.PurePath`. - - Kwargs: - - * saver: - Optional. Specifies the file format to save. + file format. + saver : str or function, optional + Specifies the file format to save. If omitted, Iris will attempt to determine the format. - If a string, this is the recognised filename extension (where the actual filename may not have it). + Otherwise the value is a saver function, of the form: ``my_saver(cube, target)`` plus any custom keywords. It is assumed that a saver will accept an ``append`` keyword - if it's file format can handle multiple cubes. See also + if its file format can handle multiple cubes. See also :func:`iris.io.add_saver`. + **kwargs : dict, optional + All other keywords are passed through to the saver function; see the + relevant saver documentation for more information on keyword arguments. 
- All other keywords are passed through to the saver function; see the - relevant saver documentation for more information on keyword arguments. + Warnings + -------- + Saving a cube whose data has been loaded lazily + (if `cube.has_lazy_data()` returns `True`) to the same file it expects + to load data from will cause both the data in-memory and the data on + disk to be lost. - Examples:: + .. code-block:: python - # Save a cube to PP - iris.save(my_cube, "myfile.pp") + cube = iris.load_cube("somefile.nc") + # The next line causes data loss in 'somefile.nc' and the cube. + iris.save(cube, "somefile.nc") - # Save a cube list to a PP file, appending to the contents of the file - # if it already exists - iris.save(my_cube_list, "myfile.pp", append=True) + In general, overwriting a file which is the source for any lazily loaded + data can result in corruption. Users should proceed with caution when + attempting to overwrite an existing file. - # Save a cube to netCDF, defaults to NETCDF4 file format - iris.save(my_cube, "myfile.nc") + Examples + -------- + >>> # Setting up + >>> import iris + >>> my_cube = iris.load_cube(iris.sample_data_path('air_temp.pp')) + >>> my_cube_list = iris.load(iris.sample_data_path('space_weather.nc')) - # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option - iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC") + >>> # Save a cube to PP + >>> iris.save(my_cube, "myfile.pp") - .. warning:: + >>> # Save a cube list to a PP file, appending to the contents of the file + >>> # if it already exists + >>> iris.save(my_cube_list, "myfile.pp", append=True) - Saving a cube whose data has been loaded lazily - (if `cube.has_lazy_data()` returns `True`) to the same file it expects - to load data from will cause both the data in-memory and the data on - disk to be lost. + >>> # Save a cube to netCDF, defaults to NETCDF4 file format + >>> iris.save(my_cube, "myfile.nc") - .. 
code-block:: python + >>> # Save a cube list to netCDF, using the NETCDF3_CLASSIC storage option + >>> iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC") - cube = iris.load_cube("somefile.nc") - # The next line causes data loss in 'somefile.nc' and the cube. - iris.save(cube, "somefile.nc") + Notes + ------ - In general, overwriting a file which is the source for any lazily loaded - data can result in corruption. Users should proceed with caution when - attempting to overwrite an existing file. + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ from iris.cube import Cube, CubeList @@ -423,6 +442,8 @@ def save(source, target, saver=None, **kwargs): if isinstance(target, pathlib.PurePath): target = str(target) if isinstance(target, str) and saver is None: + # Converts tilde or wildcards to absolute path + (target,) = expand_filespecs([str(target)], False) saver = find_saver(target) elif hasattr(target, "name") and saver is None: saver = find_saver(target.name) diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index edf448e95b..a8e333c566 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -134,8 +134,9 @@ def get_spec(self, basename, buffer_obj): value = value[:50] + "..." printable_values[key] = value msg = ( - "No format specification could be found for the given buffer." - " File element cache:\n {}".format(printable_values) + "No format specification could be found for the given buffer. " + "Perhaps a plugin is missing or has not been loaded. " + "File element cache:\n {}".format(printable_values) ) raise ValueError(msg) diff --git a/lib/iris/iterate.py b/lib/iris/iterate.py index 636635ee78..d6bac77d3b 100644 --- a/lib/iris/iterate.py +++ b/lib/iris/iterate.py @@ -58,6 +58,10 @@ def izip(*cubes, **kwargs): ... 'grid_longitude']): ... 
pass + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. """ if not cubes: raise TypeError("Expected one or more cubes.") diff --git a/lib/iris/palette.py b/lib/iris/palette.py index 626ae4e341..a1c0a1e878 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -15,6 +15,7 @@ import re import cf_units +from matplotlib import colormaps as mpl_colormaps import matplotlib.cm as mpl_cm import matplotlib.colors as mpl_colors import numpy as np @@ -120,6 +121,11 @@ def cmap_norm(cube): Tuple of :class:`matplotlib.colors.LinearSegmentedColormap` and :class:`iris.palette.SymmetricNormalize` + Notes + ------ + This function maintains laziness when called; it does not realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + """ args, kwargs = _default_cmap_norm((cube,), {}) return kwargs.get("cmap"), kwargs.get("norm") @@ -337,7 +343,7 @@ def _load_palette(): ) # Register the color map for use. - mpl_cm.register_cmap(cmap=cmap) + mpl_colormaps.register(cmap) # Ensure to load the color map palettes. diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 4c421792a7..417b6b11de 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -9,8 +9,9 @@ See also: http://pandas.pydata.org/ """ - import datetime +from itertools import chain, combinations +import warnings import cf_units from cf_units import Unit @@ -25,69 +26,146 @@ from pandas.tseries.index import DatetimeIndex # pandas <0.20 import iris -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube +from iris._deprecation import warn_deprecated +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord +from iris.cube import Cube, CubeList -def _add_iris_coord(cube, name, points, dim, calendar=None): +def _get_dimensional_metadata(name, values, calendar=None, dm_class=None): """ - Add a Coord to a Cube from a Pandas index or columns array. 
+ Create a Coord or other dimensional metadata from a Pandas index or columns array. - If no calendar is specified for a time series, Gregorian is assumed. + If no calendar is specified for a time series, Standard is assumed. """ units = Unit("unknown") if calendar is None: - calendar = cf_units.CALENDAR_GREGORIAN + calendar = cf_units.CALENDAR_STANDARD + + # Getting everything into a single datetime format is hard! - # Convert pandas datetime objects to python datetime obejcts. - if isinstance(points, DatetimeIndex): - points = np.array([i.to_pydatetime() for i in points]) + # Convert out of NumPy's own datetime format. + if np.issubdtype(values.dtype, np.datetime64): + values = pandas.to_datetime(values) + + # Convert pandas datetime objects to python datetime objects. + if isinstance(values, DatetimeIndex): + values = np.array([i.to_pydatetime() for i in values]) # Convert datetime objects to Iris' current datetime representation. - if points.dtype == object: + if values.dtype == object: dt_types = (datetime.datetime, cftime.datetime) - if all([isinstance(i, dt_types) for i in points]): + if all([isinstance(i, dt_types) for i in values]): units = Unit("hours since epoch", calendar=calendar) - points = units.date2num(points) + values = units.date2num(values) - points = np.array(points) - if np.issubdtype(points.dtype, np.number) and iris.util.monotonic( - points, strict=True - ): - coord = DimCoord(points, units=units) - coord.rename(name) + values = np.array(values) + + if dm_class is None: + if np.issubdtype(values.dtype, np.number) and iris.util.monotonic( + values, strict=True + ): + dm_class = DimCoord + else: + dm_class = AuxCoord + + instance = dm_class(values, units=units) + if name is not None: + # Use rename() to attempt standard_name but fall back on long_name. 
+ instance.rename(str(name)) + + return instance + + +def _add_iris_coord(cube, name, points, dim, calendar=None): + """ + Add a Coord or other dimensional metadata to a Cube from a Pandas index or columns array. + """ + # Most functionality has been abstracted to _get_dimensional_metadata, + # allowing re-use in as_cube() and as_cubes(). + coord = _get_dimensional_metadata(name, points, calendar) + + if coord.__class__ == DimCoord: cube.add_dim_coord(coord, dim) else: - coord = AuxCoord(points, units=units) - coord.rename(name) cube.add_aux_coord(coord, dim) -def as_cube(pandas_array, copy=True, calendars=None): +def _series_index_unique(pandas_series: pandas.Series): """ - Convert a Pandas array into an Iris cube. + Find an index grouping of a :class:`pandas.Series` that has just one Series value per group. - Args: + Iterates through grouping single index levels, then combinations of 2 + levels, then 3 etcetera, until single :class:`~pandas.Series` values per + group are found. Returns a ``tuple`` of the index levels that group to + produce single values, as soon as one is found. + + Returns ``None`` if no index level combination produces single values. + + """ + unique_number = pandas_series.nunique() + pandas_index = pandas_series.index + levels_range = range(pandas_index.nlevels) + if unique_number == 1: + # Scalar - identical for all indices. + result = () + else: + result = None + levels_combinations = chain( + *[ + combinations(levels_range, levels + 1) + for levels in levels_range + ] + ) + for lc in levels_combinations: + if pandas_series.groupby(level=lc).nunique().max() == 1: + result = lc + # Escape as early as possible - heavy operation. + break + return result + + +def as_cube( + pandas_array, + copy=True, + calendars=None, +): + """ + Convert a Pandas Series/DataFrame into a 1D/2D Iris Cube. - * pandas_array - A Pandas Series or DataFrame. + .. 
deprecated:: 3.3.0 - Kwargs: + This function is scheduled for removal in a future release, being + replaced by :func:`iris.pandas.as_cubes`, which offers richer + dimensional intelligence. - * copy - Whether to make a copy of the data. - Defaults to True. + Parameters + ---------- + pandas_array : :class:`pandas.Series` or :class:`pandas.DataFrame` + The Pandas object to convert + copy : bool, default=True + Whether to copy `pandas_array`, or to create array views where + possible. Provided in case of memory limit concerns. + calendars : dict, optional + A dict mapping a dimension to a calendar. Required to convert datetime + indices/columns. - * calendars - A dict mapping a dimension to a calendar. - Required to convert datetime indices/columns. + Notes + ----- + This function will copy your data by default. Example usage:: - as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY}) - as_cube(data_frame, calendars={1: cf_units.CALENDAR_GREGORIAN}) - - .. note:: This function will copy your data by default. + as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY}) + as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD}) """ + message = ( + "iris.pandas.as_cube has been deprecated, and will be removed in a " + "future release. Please use iris.pandas.as_cubes instead." + ) + warn_deprecated(message) + calendars = calendars or {} if pandas_array.ndim not in [1, 2]: raise ValueError( @@ -116,6 +194,305 @@ def as_cube(pandas_array, copy=True, calendars=None): return cube +def as_cubes( + pandas_structure, + copy=True, + calendars=None, + aux_coord_cols=None, + cell_measure_cols=None, + ancillary_variable_cols=None, +): + """ + Convert a Pandas Series/DataFrame into n-dimensional Iris Cubes, including dimensional metadata. + + The index of `pandas_structure` will be used for generating the + :class:`~iris.cube.Cube` dimension(s) and :class:`~iris.coords.DimCoord`\\ s. 
+ Other dimensional metadata may span multiple dimensions - based on how the + column values vary with the index values. + + Parameters + ---------- + pandas_structure : :class:`pandas.Series` or :class:`pandas.DataFrame` + The Pandas object to convert + copy : bool, default=True + Whether the Cube :attr:`~iris.cube.Cube.data` is a copy of the + `pandas_structure` column, or a view of the same array. Arrays other than + the data (coords etc.) are always copies. This option is provided to + help with memory size concerns. + calendars : dict, optional + Calendar conversions for individual date-time coordinate + columns/index-levels e.g. ``{"my_column": cf_units.CALENDAR_360_DAY}``. + aux_coord_cols, cell_measure_cols, ancillary_variable_cols : list of str, optional + Names of columns to be converted into :class:`~iris.coords.AuxCoord`, + :class:`~iris.coords.CellMeasure` and + :class:`~iris.coords.AncillaryVariable` objects. + + Returns + -------- + :class:`~iris.cube.CubeList` + One :class:`~iris.cube.Cube` for each column not referenced in + `aux_coord_cols`/`cell_measure_cols`/`ancillary_variable_cols`. + + Notes + ----- + A :class:`~pandas.DataFrame` using columns as a second data dimension will + need to be 'melted' before conversion. See the Examples for how. + + :class:`dask.dataframe.DataFrame`\\ s are not supported. + + Examples + -------- + >>> from iris.pandas import as_cubes + >>> import numpy as np + >>> from pandas import DataFrame, Series + + Converting a simple :class:`~pandas.Series` : + + >>> my_series = Series([300, 301, 302], name="air_temperature") + >>> converted_cubes = as_cubes(my_series) + >>> print(converted_cubes) + 0: air_temperature / (unknown) (unknown: 3) + >>> print(converted_cubes[0]) + air_temperature / (unknown) (unknown: 3) + Dimension coordinates: + unknown x + + A :class:`~pandas.DataFrame`, with a custom index becoming the + :class:`~iris.coords.DimCoord` : + + >>> my_df = DataFrame({ + ... 
"air_temperature": [300, 301, 302], + ... "longitude": [30, 40, 50] + ... }) + >>> my_df = my_df.set_index("longitude") + >>> converted_cubes = as_cubes(my_df) + >>> print(converted_cubes[0]) + air_temperature / (unknown) (longitude: 3) + Dimension coordinates: + longitude x + + A :class:`~pandas.DataFrame` representing two 3-dimensional datasets, + including a 2-dimensional :class:`~iris.coords.AuxCoord` : + + >>> my_df = DataFrame({ + ... "air_temperature": np.arange(300, 312, 1), + ... "air_pressure": np.arange(1000, 1012, 1), + ... "longitude": [0, 10] * 6, + ... "latitude": [25, 25, 35, 35] * 3, + ... "height": ([0] * 4) + ([100] * 4) + ([200] * 4), + ... "in_region": [True, False, False, False] * 3 + ... }) + >>> print(my_df) + air_temperature air_pressure longitude latitude height in_region + 0 300 1000 0 25 0 True + 1 301 1001 10 25 0 False + 2 302 1002 0 35 0 False + 3 303 1003 10 35 0 False + 4 304 1004 0 25 100 True + 5 305 1005 10 25 100 False + 6 306 1006 0 35 100 False + 7 307 1007 10 35 100 False + 8 308 1008 0 25 200 True + 9 309 1009 10 25 200 False + 10 310 1010 0 35 200 False + 11 311 1011 10 35 200 False + >>> my_df = my_df.set_index(["longitude", "latitude", "height"]) + >>> my_df = my_df.sort_index() + >>> converted_cubes = as_cubes(my_df, aux_coord_cols=["in_region"]) + >>> print(converted_cubes) + 0: air_temperature / (unknown) (longitude: 2; latitude: 2; height: 3) + 1: air_pressure / (unknown) (longitude: 2; latitude: 2; height: 3) + >>> print(converted_cubes[0]) + air_temperature / (unknown) (longitude: 2; latitude: 2; height: 3) + Dimension coordinates: + longitude x - - + latitude - x - + height - - x + Auxiliary coordinates: + in_region x x - + + Pandas uses ``NaN`` rather than masking data. Converted + :class:`~iris.cube.Cube`\\s can be masked in downstream user code : + + >>> my_series = Series([300, np.NaN, 302], name="air_temperature") + >>> converted_cube = as_cubes(my_series)[0] + >>> print(converted_cube.data) + [300. nan 302.] 
+ >>> converted_cube.data = np.ma.masked_invalid(converted_cube.data) + >>> print(converted_cube.data) + [300.0 -- 302.0] + + If the :class:`~pandas.DataFrame` uses columns as a second dimension, + :func:`pandas.melt` should be used to convert the data to the expected + n-dimensional format : + + >>> my_df = DataFrame({ + ... "latitude": [35, 25], + ... 0: [300, 301], + ... 10: [302, 303], + ... }) + >>> print(my_df) + latitude 0 10 + 0 35 300 302 + 1 25 301 303 + >>> my_df = my_df.melt( + ... id_vars=["latitude"], + ... value_vars=[0, 10], + ... var_name="longitude", + ... value_name="air_temperature" + ... ) + >>> print(my_df) + latitude longitude air_temperature + 0 35 0 300 + 1 25 0 301 + 2 35 10 302 + 3 25 10 303 + >>> my_df = my_df.set_index(["latitude", "longitude"]) + >>> my_df = my_df.sort_index() + >>> converted_cube = as_cubes(my_df)[0] + >>> print(converted_cube) + air_temperature / (unknown) (latitude: 2; longitude: 2) + Dimension coordinates: + latitude x - + longitude - x + + """ + if pandas_structure.empty: + return CubeList() + + calendars = calendars or {} + aux_coord_cols = aux_coord_cols or [] + cell_measure_cols = cell_measure_cols or [] + ancillary_variable_cols = ancillary_variable_cols or [] + + is_series = isinstance(pandas_structure, pandas.Series) + + if copy: + pandas_structure = pandas_structure.copy() + + pandas_index = pandas_structure.index + if not pandas_index.is_unique: + message = ( + f"DataFrame index ({pandas_index.names}) is not unique per " + "row; cannot be used for DimCoords." + ) + raise ValueError(message) + + if not ( + pandas_index.is_monotonic_increasing + or pandas_index.is_monotonic_decreasing + ): + # Need monotonic index for use in DimCoord(s). + # This function doesn't sort_index itself since that breaks the + # option to return a data view instead of a copy. + message = ( + "Pandas index is not monotonic. Consider using the " + "sort_index() method before passing in." 
+ ) + raise ValueError(message) + + cube_shape = getattr(pandas_index, "levshape", (pandas_index.nunique(),)) + n_rows = len(pandas_structure) + if np.product(cube_shape) > n_rows: + message = ( + f"Not all index values have a corresponding row - {n_rows} rows " + f"cannot be reshaped into {cube_shape}. Consider padding with NaN " + "rows where needed." + ) + raise ValueError(message) + + cube_kwargs = {} + + def format_dimensional_metadata(dm_class_, values_, name_, dimensions_): + # Common convenience to get the right DM in the right format for + # Cube creation. + calendar = calendars.get(name_) + instance = _get_dimensional_metadata( + name_, values_, calendar, dm_class_ + ) + return (instance, dimensions_) + + # DimCoords. + dim_coord_kwarg = [] + for ix, dim_name in enumerate(pandas_index.names): + if hasattr(pandas_index, "levels"): + coord_points = pandas_index.levels[ix] + else: + coord_points = pandas_index + new_dim_coord = format_dimensional_metadata( + DimCoord, coord_points, dim_name, ix + ) + dim_coord_kwarg.append(new_dim_coord) + cube_kwargs["dim_coords_and_dims"] = dim_coord_kwarg + + # Other dimensional metadata. + class_arg_mapping = [ + (AuxCoord, aux_coord_cols, "aux_coords_and_dims"), + (CellMeasure, cell_measure_cols, "cell_measures_and_dims"), + ( + AncillaryVariable, + ancillary_variable_cols, + "ancillary_variables_and_dims", + ), + ] + + if is_series: + columns_ignored = any([len(t[1]) > 0 for t in class_arg_mapping]) + if columns_ignored: + ignored_args = ", ".join([t[2] for t in class_arg_mapping]) + message = f"The input pandas_structure is a Series; ignoring arguments: {ignored_args} ." 
+ warnings.warn(message) + class_arg_mapping = [] + + non_data_names = [] + for dm_class, column_names, kwarg in class_arg_mapping: + class_kwarg = [] + non_data_names.extend(column_names) + for column_name in column_names: + column = pandas_structure[column_name] + + # Should be impossible for None to be returned - would require a + # non-unique index, which we protect against. + dimensions = _series_index_unique(column) + + content = column.to_numpy() + # Remove duplicate entries to get down to the correct dimensions + # for this object. _series_index_unique should have ensured + # that we are indeed removing the duplicates. + shaped = content.reshape(cube_shape) + indices = [0] * len(cube_shape) + for dim in dimensions: + indices[dim] = slice(None) + collapsed = shaped[tuple(indices)] + + new_dm = format_dimensional_metadata( + dm_class, collapsed, column_name, dimensions + ) + class_kwarg.append(new_dm) + + cube_kwargs[kwarg] = class_kwarg + + # Cube creation. + if is_series: + data_series_list = [pandas_structure] + else: + data_series_list = [ + pandas_structure[column_name] + for column_name in pandas_structure.columns + if column_name not in non_data_names + ] + cubes = CubeList() + for data_series in data_series_list: + cube_data = data_series.to_numpy().reshape(cube_shape) + new_cube = Cube(cube_data, **cube_kwargs) + if data_series.name is not None: + # Use rename() to attempt standard_name but fall back on long_name. 
+ new_cube.rename(str(data_series.name)) + cubes.append(new_cube) + + return cubes + + def _as_pandas_coord(coord): """Convert an Iris Coord into a Pandas index or columns array.""" index = coord.points @@ -143,26 +520,92 @@ def _get_base(array): raise AssertionError(msg) +def _make_dim_coord_list(cube): + """Get Dimension coordinates.""" + outlist = [] + for dimn in range(cube.ndim): + dimn_coord = cube.coords(dimensions=dimn, dim_coords=True) + if dimn_coord: + outlist += [ + [dimn_coord[0].name(), _as_pandas_coord(dimn_coord[0])] + ] + else: + outlist += [[f"dim{dimn}", range(cube.shape[dimn])]] + return list(zip(*outlist)) + + +def _make_aux_coord_list(cube): + """Get Auxiliary coordinates.""" + outlist = [] + for aux_coord in cube.coords(dim_coords=False): + outlist += [ + [ + aux_coord.name(), + cube.coord_dims(aux_coord), + _as_pandas_coord(aux_coord), + ] + ] + return list(chain.from_iterable([outlist])) + + +def _make_ancillary_variables_list(cube): + """Get Ancillary variables.""" + outlist = [] + for ancil_var in cube.ancillary_variables(): + outlist += [ + [ + ancil_var.name(), + cube.ancillary_variable_dims(ancil_var), + ancil_var.data, + ] + ] + return list(chain.from_iterable([outlist])) + + +def _make_cell_measures_list(cube): + """Get cell measures.""" + outlist = [] + for cell_measure in cube.cell_measures(): + outlist += [ + [ + cell_measure.name(), + cube.cell_measure_dims(cell_measure), + cell_measure.data, + ] + ] + return list(chain.from_iterable([outlist])) + + def as_series(cube, copy=True): """ Convert a 1D cube to a Pandas Series. - Args: - - * cube - The cube to convert to a Pandas Series. - - Kwargs: - - * copy - Whether to make a copy of the data. - Defaults to True. Must be True for masked data. - - .. note:: - - This function will copy your data by default. - If you have a large array that cannot be copied, - make sure it is not masked and use copy=False. + .. 
deprecated:: 3.4.0 + This function is scheduled for removal in a future release, being + replaced by :func:`iris.pandas.as_data_frame`, which offers improved + multi dimension handling. + + Parameters + ---------- + cube: :class:`Cube` + The cube to convert to a Pandas Series. + copy : bool, default=True + Whether to make a copy of the data. + Defaults to True. Must be True for masked data. + + Notes + ----- + This function will copy your data by default. + If you have a large array that cannot be copied, + make sure it is not masked and use copy=False. """ + message = ( + "iris.pandas.as_series has been deprecated, and will be removed in a " + "future release. Please use iris.pandas.as_data_frame instead." + ) + warn_deprecated(message) + data = cube.data if ma.isMaskedArray(data): if not copy: @@ -170,61 +613,297 @@ def as_series(cube, copy=True): data = data.astype("f").filled(np.nan) elif copy: data = data.copy() - index = None if cube.dim_coords: index = _as_pandas_coord(cube.dim_coords[0]) - series = pandas.Series(data, index) if not copy: _assert_shared(data, series) - return series -def as_data_frame(cube, copy=True): +def as_data_frame( + cube, + copy=True, + add_aux_coords=False, + add_cell_measures=False, + add_ancillary_variables=False, +): """ - Convert a 2D cube to a Pandas DataFrame. - - Args: - - * cube - The cube to convert to a Pandas DataFrame. - - Kwargs: - - * copy - Whether to make a copy of the data. - Defaults to True. Must be True for masked data - and some data types (see notes below). + Convert a :class:`~iris.cube.Cube` to a :class:`pandas.DataFrame`. + + :attr:`~iris.cube.Cube.dim_coords` and :attr:`~iris.cube.Cube.data` are + flattened into a long-style :class:`~pandas.DataFrame`. Other + :attr:`~iris.cube.Cube.aux_coords`, :attr:`~iris.cube.Cube.aux_coords` and :attr:`~iris.cube.Cube.attributes` + may be optionally added as additional :class:`~pandas.DataFrame` columns. 
+ + Parameters + ---------- + cube: :class:`~iris.cube.Cube` + The :class:`~iris.cube.Cube` to be converted to a :class:`pandas.DataFrame`. + copy : bool, default=True + Whether the :class:`pandas.DataFrame` is a copy of the the Cube + :attr:`~iris.cube.Cube.data`. This option is provided to help with memory + size concerns. + add_aux_coords : bool, default=False + If True, add all :attr:`~iris.cube.Cube.aux_coords` (including scalar + coordinates) to the returned :class:`pandas.DataFrame`. + add_cell_measures : bool, default=False + If True, add :attr:`~iris.cube.Cube.cell_measures` to the returned + :class:`pandas.DataFrame`. + add_ancillary_variables: bool, default=False + If True, add :attr:`~iris.cube.Cube.ancillary_variables` to the returned + :class:`pandas.DataFrame`. + + Returns + ------- + :class:`~pandas.DataFrame` + A :class:`~pandas.DataFrame` with :class:`~iris.cube.Cube` dimensions + forming a :class:`~pandas.MultiIndex` + + Warnings + -------- + #. This documentation is for the new ``as_data_frame()`` behaviour, which + is **currently opt-in** to preserve backwards compatibility. The default + legacy behaviour is documented in pre-``v3.4`` documentation (summary: + limited to 2-dimensional :class:`~iris.cube.Cube`\\ s, with only the + :attr:`~iris.cube.Cube.data` and :attr:`~iris.cube.Cube.dim_coords` + being added). The legacy behaviour will be removed in a future version + of Iris, so please opt-in to the new behaviour at your earliest + convenience, via :class:`iris.Future`: + + >>> iris.FUTURE.pandas_ndim = True + + **Breaking change:** to enable the improvements, the new opt-in + behaviour flattens multi-dimensional data into a single + :class:`~pandas.DataFrame` column (the legacy behaviour preserves 2 + dimensions via rows and columns). + + | + + #. Where the :class:`~iris.cube.Cube` contains masked values, these become + :data:`numpy.nan` in the returned :class:`~pandas.DataFrame`. 
+ + Notes + ----- + :class:`dask.dataframe.DataFrame`\\ s are not supported. + + A :class:`~pandas.MultiIndex` :class:`~pandas.DataFrame` is returned by default. + Use the :meth:`~pandas.DataFrame.reset_index` to return a + :class:`~pandas.DataFrame` without :class:`~pandas.MultiIndex` levels. Use + 'inplace=True` to preserve memory object reference. + + :class:`~iris.cube.Cube` data `dtype` is preserved. + + Examples + -------- + >>> import iris + >>> from iris.pandas import as_data_frame + >>> import pandas as pd + >>> pd.set_option('display.width', 1000) + >>> pd.set_option('display.max_columns', 1000) + + Convert a simple :class:`~iris.cube.Cube`: + + >>> path = iris.sample_data_path('ostia_monthly.nc') + >>> cube = iris.load_cube(path) + >>> df = as_data_frame(cube) + >>> print(df) + ... # doctest: +NORMALIZE_WHITESPACE + surface_temperature + time latitude longitude + 2006-04-16 00:00:00 -4.999992 0.000000 301.659271 + 0.833333 301.785004 + 1.666667 301.820984 + 2.500000 301.865234 + 3.333333 301.926819 + ... ... + 2010-09-16 00:00:00 4.444450 355.833313 298.779938 + 356.666656 298.913147 + 357.500000 NaN + 358.333313 NaN + 359.166656 298.995148 + + [419904 rows x 1 columns] + + Using ``add_aux_coords=True`` maps :class:`~iris.coords.AuxCoord` and scalar + coordinate information to the :class:`~pandas.DataFrame`: + + >>> df = as_data_frame(cube, add_aux_coords=True) + >>> print(df) + ... # doctest: +NORMALIZE_WHITESPACE + surface_temperature forecast_period forecast_reference_time + time latitude longitude + 2006-04-16 00:00:00 -4.999992 0.000000 301.659271 0 2006-04-16 12:00:00 + 0.833333 301.785004 0 2006-04-16 12:00:00 + 1.666667 301.820984 0 2006-04-16 12:00:00 + 2.500000 301.865234 0 2006-04-16 12:00:00 + 3.333333 301.926819 0 2006-04-16 12:00:00 + ... ... ... ... 
+ 2010-09-16 00:00:00 4.444450 355.833313 298.779938 0 2010-09-16 12:00:00 + 356.666656 298.913147 0 2010-09-16 12:00:00 + 357.500000 NaN 0 2010-09-16 12:00:00 + 358.333313 NaN 0 2010-09-16 12:00:00 + 359.166656 298.995148 0 2010-09-16 12:00:00 + + [419904 rows x 3 columns] + + To add netCDF global attribution information to the :class:`~pandas.DataFrame`, + add a column directly to the :class:`~pandas.DataFrame`: + + >>> df['STASH'] = str(cube.attributes['STASH']) + >>> print(df) + ... # doctest: +NORMALIZE_WHITESPACE + surface_temperature forecast_period forecast_reference_time STASH + time latitude longitude + 2006-04-16 00:00:00 -4.999992 0.000000 301.659271 0 2006-04-16 12:00:00 m01s00i024 + 0.833333 301.785004 0 2006-04-16 12:00:00 m01s00i024 + 1.666667 301.820984 0 2006-04-16 12:00:00 m01s00i024 + 2.500000 301.865234 0 2006-04-16 12:00:00 m01s00i024 + 3.333333 301.926819 0 2006-04-16 12:00:00 m01s00i024 + ... ... ... ... ... + 2010-09-16 00:00:00 4.444450 355.833313 298.779938 0 2010-09-16 12:00:00 m01s00i024 + 356.666656 298.913147 0 2010-09-16 12:00:00 m01s00i024 + 357.500000 NaN 0 2010-09-16 12:00:00 m01s00i024 + 358.333313 NaN 0 2010-09-16 12:00:00 m01s00i024 + 359.166656 298.995148 0 2010-09-16 12:00:00 m01s00i024 + + [419904 rows x 4 columns] + + To return a :class:`~pandas.DataFrame` without a :class:`~pandas.MultiIndex` + use :meth:`~pandas.DataFrame.reset_index`. Optionally use `inplace=True` keyword + to modify the DataFrame rather than creating a new one: + + >>> df.reset_index(inplace=True) + >>> print(df) + ... 
# doctest: +NORMALIZE_WHITESPACE + time latitude longitude surface_temperature forecast_period forecast_reference_time STASH + 0 2006-04-16 00:00:00 -4.999992 0.000000 301.659271 0 2006-04-16 12:00:00 m01s00i024 + 1 2006-04-16 00:00:00 -4.999992 0.833333 301.785004 0 2006-04-16 12:00:00 m01s00i024 + 2 2006-04-16 00:00:00 -4.999992 1.666667 301.820984 0 2006-04-16 12:00:00 m01s00i024 + 3 2006-04-16 00:00:00 -4.999992 2.500000 301.865234 0 2006-04-16 12:00:00 m01s00i024 + 4 2006-04-16 00:00:00 -4.999992 3.333333 301.926819 0 2006-04-16 12:00:00 m01s00i024 + ... ... ... ... ... ... ... + 419899 2010-09-16 00:00:00 4.444450 355.833313 298.779938 0 2010-09-16 12:00:00 m01s00i024 + 419900 2010-09-16 00:00:00 4.444450 356.666656 298.913147 0 2010-09-16 12:00:00 m01s00i024 + 419901 2010-09-16 00:00:00 4.444450 357.500000 NaN 0 2010-09-16 12:00:00 m01s00i024 + 419902 2010-09-16 00:00:00 4.444450 358.333313 NaN 0 2010-09-16 12:00:00 m01s00i024 + 419903 2010-09-16 00:00:00 4.444450 359.166656 298.995148 0 2010-09-16 12:00:00 m01s00i024 + + [419904 rows x 7 columns] + + To retrieve a :class:`~pandas.Series` from `df` :class:`~pandas.DataFrame`, + subselect a column: + + >>> df['surface_temperature'] + 0 301.659271 + 1 301.785004 + 2 301.820984 + 3 301.865234 + 4 301.926819 + ... + 419899 298.779938 + 419900 298.913147 + 419901 NaN + 419902 NaN + 419903 298.995148 + Name: surface_temperature, Length: 419904, dtype: float32 - .. note:: + """ - This function will copy your data by default. - If you have a large array that cannot be copied, - make sure it is not masked and use copy=False. 
+ def merge_metadata(meta_var_list): + """Add auxiliary cube metadata to the DataFrame""" + nonlocal data_frame + for meta_var_name, meta_var_index, meta_var in meta_var_list: + if not meta_var_index: + # Broadcast any meta var informtation without an associated + # dimension over the whole DataFrame + data_frame[meta_var_name] = meta_var.squeeze() + else: + meta_df = pandas.DataFrame( + meta_var.ravel(), + columns=[meta_var_name], + index=pandas.MultiIndex.from_product( + [coords[i] for i in meta_var_index], + names=[coord_names[i] for i in meta_var_index], + ), + ) + # Merge to main data frame + data_frame = pandas.merge( + data_frame, + meta_df, + left_index=True, + right_index=True, + sort=False, + ) + return data_frame + + if iris.FUTURE.pandas_ndim: + # Checks + if not isinstance(cube, iris.cube.Cube): + raise TypeError( + f"Expected input to be iris.cube.Cube instance, got: {type(cube)}" + ) + if copy: + data = cube.data.copy() + else: + data = cube.data + if ma.isMaskedArray(data): + if not copy: + raise ValueError("Masked arrays must always be copied.") + data = data.astype("f").filled(np.nan) + + # Extract dim coord information: separate lists for dim names and dim values + coord_names, coords = _make_dim_coord_list(cube) + # Make base DataFrame + index = pandas.MultiIndex.from_product(coords, names=coord_names) + data_frame = pandas.DataFrame( + data.ravel(), columns=[cube.name()], index=index + ) - .. note:: + if add_aux_coords: + data_frame = merge_metadata(_make_aux_coord_list(cube)) + if add_ancillary_variables: + data_frame = merge_metadata(_make_ancillary_variables_list(cube)) + if add_cell_measures: + data_frame = merge_metadata(_make_cell_measures_list(cube)) - Pandas will sometimes make a copy of the array, - for example when creating from an int32 array. - Iris will detect this and raise an exception if copy=False. 
+ if copy: + result = data_frame.reorder_levels(coord_names).sort_index() + else: + data_frame.reorder_levels(coord_names).sort_index(inplace=True) + result = data_frame - """ - data = cube.data - if ma.isMaskedArray(data): + else: + message = ( + "You are using legacy 2-dimensional behaviour in" + "'iris.pandas.as_data_frame()'. This will be removed in a future" + "version of Iris. Please opt-in to the improved " + "n-dimensional behaviour at your earliest convenience by setting: " + "'iris.FUTURE.pandas_ndim = True'. More info is in the " + "documentation." + ) + warnings.warn(message, FutureWarning) + + # The legacy behaviour. + data = cube.data + if ma.isMaskedArray(data): + if not copy: + raise ValueError("Masked arrays must always be copied.") + data = data.astype("f").filled(np.nan) + elif copy: + data = data.copy() + + index = columns = None + if cube.coords(dimensions=[0]): + index = _as_pandas_coord(cube.coord(dimensions=[0])) + if cube.coords(dimensions=[1]): + columns = _as_pandas_coord(cube.coord(dimensions=[1])) + + data_frame = pandas.DataFrame(data, index, columns) if not copy: - raise ValueError("Masked arrays must always be copied.") - data = data.astype("f").filled(np.nan) - elif copy: - data = data.copy() + _assert_shared(data, data_frame) - index = columns = None - if cube.coords(dimensions=[0]): - index = _as_pandas_coord(cube.coord(dimensions=[0])) - if cube.coords(dimensions=[1]): - columns = _as_pandas_coord(cube.coord(dimensions=[1])) - - data_frame = pandas.DataFrame(data, index, columns) - if not copy: - _assert_shared(data, data_frame) + result = data_frame - return data_frame + return result diff --git a/lib/iris/plot.py b/lib/iris/plot.py index 0e9645c783..8cd849b716 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -13,11 +13,13 @@ import collections import datetime +import warnings import cartopy.crs as ccrs from cartopy.geodesic import Geodesic import cartopy.mpl.geoaxes import cftime +import matplotlib.animation as animation 
import matplotlib.axes import matplotlib.collections as mpl_collections import matplotlib.dates as mpl_dates @@ -587,14 +589,14 @@ def _fixup_dates(coord, values): # Convert coordinate values into tuples of # (year, month, day, hour, min, sec) dates = [coord.units.num2date(val).timetuple()[0:6] for val in values] - if coord.units.calendar == "gregorian": + if coord.units.calendar == "standard": r = [datetime.datetime(*date) for date in dates] else: try: - import nc_time_axis + import nc_time_axis # noqa: F401 except ImportError: msg = ( - "Cannot plot against time in a non-gregorian " + "Cannot plot against time in a non-standard " 'calendar, because "nc_time_axis" is not available : ' "Install the package from " "https://github.com/SciTools/nc-time-axis to enable " @@ -603,12 +605,10 @@ def _fixup_dates(coord, values): raise IrisError(msg) r = [ - nc_time_axis.CalendarDateTime( - cftime.datetime(*date, calendar=coord.units.calendar), - coord.units.calendar, - ) + cftime.datetime(*date, calendar=coord.units.calendar) for date in dates ] + values = np.empty(len(r), dtype=object) values[:] = r return values @@ -647,20 +647,43 @@ def _u_object_from_v_object(v_object): def _get_plot_objects(args): - if len(args) > 1 and isinstance( + if len(args) > 2 and isinstance( + args[2], (iris.cube.Cube, iris.coords.Coord) + ): + # three arguments + u_object, v_object1, v_object2 = args[:3] + u1, v1 = _uv_from_u_object_v_object(u_object, v_object1) + _, v2 = _uv_from_u_object_v_object(u_object, v_object2) + args = args[3:] + if u1.size != v1.size or u1.size != v2.size: + msg = "The x and y-axis objects are not all compatible. 
They should have equal sizes but got ({}: {}), ({}: {}) and ({}: {})" + raise ValueError( + msg.format( + u_object.name(), + u1.size, + v_object1.name(), + v1.size, + v_object2.name(), + v2.size, + ) + ) + u = u1 + v = (v1, v2) + v_object = (v_object1, v_object2) + elif len(args) > 1 and isinstance( args[1], (iris.cube.Cube, iris.coords.Coord) ): # two arguments u_object, v_object = args[:2] u, v = _uv_from_u_object_v_object(u_object, v_object) args = args[2:] - if len(u) != len(v): + if u.size != v.size: msg = ( "The x and y-axis objects are not compatible. They should " "have equal sizes but got ({}: {}) and ({}: {})." ) raise ValueError( - msg.format(u_object.name(), len(u), v_object.name(), len(v)) + msg.format(u_object.name(), u.size, v_object.name(), v.size) ) else: # single argument @@ -675,7 +698,7 @@ def _get_plot_objects(args): if ( isinstance(v_object, iris.cube.Cube) and isinstance(u_object, iris.coords.Coord) - and iris.util.guess_coord_axis(u_object) in ["Y", "Z"] + and iris.util.guess_coord_axis(u_object) == "Z" ): u_object, v_object = v_object, u_object u, v = v, u @@ -825,6 +848,52 @@ def _draw_1d_from_points(draw_method_name, arg_func, *args, **kwargs): return result +def _draw_two_1d_from_points(draw_method_name, arg_func, *args, **kwargs): + """ + This function is equivalend to _draw_two_1d_from_points but expects two + y-axis variables rather than one (such as is required for .fill_between). It + can't be used where the y-axis variables are string coordinates. The y-axis + variable provided first has precedence where the two differ on whether the + axis should be inverted or whether a map should be drawn. + """ + # NB. In the interests of clarity we use "u" to refer to the horizontal + # axes on the matplotlib plot and "v" for the vertical axes. 
+ + # retrieve the objects that are plotted on the horizontal and vertical + # axes (cubes or coordinates) and their respective values, along with the + # argument tuple with these objects removed + u_object, v_objects, u, vs, args = _get_plot_objects(args) + + v_object1, _ = v_objects + v1, v2 = vs + + # if both u_object and v_object are coordinates then check if a map + # should be drawn + if ( + isinstance(u_object, iris.coords.Coord) + and isinstance(v_object1, iris.coords.Coord) + and _can_draw_map([v_object1, u_object]) + ): + # Replace non-cartopy subplot/axes with a cartopy alternative and set + # the transform keyword. + kwargs = _ensure_cartopy_axes_and_determine_kwargs( + u_object, v_object1, kwargs + ) + + axes = kwargs.pop("axes", None) + draw_method = getattr(axes if axes else plt, draw_method_name) + if arg_func is not None: + args, kwargs = arg_func(u, v1, v2, *args, **kwargs) + result = draw_method(*args, **kwargs) + else: + result = draw_method(u, v1, v2, *args, **kwargs) + + # Invert y-axis if necessary. + _invert_yaxis(v_object1, axes) + + return result + + def _replace_axes_with_cartopy_axes(cartopy_proj): """ Replace non-cartopy subplot/axes with a cartopy alternative @@ -845,7 +914,9 @@ def _replace_axes_with_cartopy_axes(cartopy_proj): ylabel=ax.get_ylabel(), ) else: + position = ax.get_position(original=True) _ = fig.add_axes( + position, projection=cartopy_proj, title=ax.get_title(), xlabel=ax.get_xlabel(), @@ -978,16 +1049,28 @@ def _map_common( # is useful in anywhere other than this plotting routine, it may be better # placed in the CS. 
if getattr(x_coord, "circular", False): + original_length = y.shape[1] _, direction = iris.util.monotonic( x_coord.points, return_direction=True ) y = np.append(y, y[:, 0:1], axis=1) x = np.append(x, x[:, 0:1] + 360 * direction, axis=1) data = ma.concatenate([data, data[:, 0:1]], axis=1) - if "_v_data" in kwargs: - v_data = kwargs["_v_data"] - v_data = ma.concatenate([v_data, v_data[:, 0:1]], axis=1) - kwargs["_v_data"] = v_data + + # Having extended the data, we also need to extend extra kwargs for + # matplotlib (e.g. point colours) + for key, val in kwargs.items(): + try: + val_arr = np.array(val) + except TypeError: + continue + if val_arr.ndim >= 2 and val_arr.shape[1] == original_length: + # Concatenate the first column to the end of the data then + # update kwargs + val_arr = ma.concatenate( + [val_arr, val_arr[:, 0:1, ...]], axis=1 + ) + kwargs[key] = val_arr # Replace non-cartopy subplot/axes with a cartopy alternative and set the # transform keyword. @@ -1263,11 +1346,6 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): axes=axes, ) - # set the _is_stroked property to get a single color grid. - # See https://github.com/matplotlib/matplotlib/issues/1302 - result._is_stroked = False - if hasattr(result, "_wrapped_collection_fix"): - result._wrapped_collection_fix._is_stroked = False return result @@ -1587,6 +1665,45 @@ def scatter(x, y, *args, **kwargs): return _draw_1d_from_points("scatter", _plot_args, *args, **kwargs) +def fill_between(x, y1, y2, *args, **kwargs): + """ + Plots y1 and y2 against x, and fills the space between them. + + Args: + + * x: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + A cube or a coordinate to plot on the x-axis. + + * y1: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + First cube or a coordinate to plot on the y-axis. + + * y2: :class:`~iris.cube.Cube` or :class:`~iris.coords.Coord` + Second cube or a coordinate to plot on the y-axis. 
+ + Kwargs: + + * axes: :class:`matplotlib.axes.Axes` + The axes to use for drawing. Defaults to the current axes if none + provided. + + See :func:`matplotlib.pyplot.fill_between` for details of additional valid + keyword arguments. + + """ + # here we are more specific about argument types than generic 1d plotting + if not isinstance(x, (iris.cube.Cube, iris.coords.Coord)): + raise TypeError("x must be a cube or a coordinate.") + if not isinstance(y1, (iris.cube.Cube, iris.coords.Coord)): + raise TypeError("y1 must be a cube or a coordinate.") + if not isinstance(y1, (iris.cube.Cube, iris.coords.Coord)): + raise TypeError("y2 must be a cube or a coordinate.") + args = (x, y1, y2) + args + _plot_args = None + return _draw_two_1d_from_points( + "fill_between", _plot_args, *args, **kwargs + ) + + # Provide convenience show method from pyplot show = plt.show @@ -1688,3 +1805,114 @@ def citation(text, figure=None, axes=None): anchor.patch.set_boxstyle("round, pad=0, rounding_size=0.2") axes = axes if axes else figure.gca() axes.add_artist(anchor) + + +def animate(cube_iterator, plot_func, fig=None, **kwargs): + """ + Animates the given cube iterator. + + Parameters + ---------- + cube_iterator : iterable of :class:`iris.cube.Cube` objects + Each animation frame corresponds to each :class:`iris.cube.Cube` + object. See :meth:`iris.cube.Cube.slices`. + plot_func : :mod:`iris.plot` or :mod:`iris.quickplot` plotting function + Plotting function used to animate. Must accept the signature + ``plot_func(cube, vmin=vmin, vmax=vmax, coords=coords)``. + :func:`~iris.plot.contourf`, :func:`~iris.plot.contour`, + :func:`~iris.plot.pcolor` and :func:`~iris.plot.pcolormesh` + all conform to this signature. + fig : :class:`matplotlib.figure.Figure` instance, optional + By default, the current figure will be used or a new figure instance + created if no figure is available. See :func:`matplotlib.pyplot.gcf`. 
+ **kwargs : dict, optional + Valid keyword arguments: + + coords: list of :class:`~iris.coords.Coord` objects or coordinate names + Use the given coordinates as the axes for the plot. The order of the + given coordinates indicates which axis to use for each, where the first + element is the horizontal axis of the plot and the second element is + the vertical axis of the plot. + interval: int, float or long + Defines the time interval in milliseconds between successive frames. + A default interval of 100ms is set. + vmin, vmax: int, float or long + Color scaling values, see :class:`matplotlib.colors.Normalize` for + further details. Default values are determined by the min-max across + the data set over the entire sequence. + + See :class:`matplotlib.animation.FuncAnimation` for details of other + valid keyword arguments. + + Returns + ------- + :class:`~matplotlib.animation.FuncAnimation` object suitable for + saving and or plotting. + + Examples + -------- + >>> import iris + >>> from iris import plot as iplt + >>> from iris import quickplot as qplt + >>> my_cube = iris.load_cube(iris.sample_data_path("A1B_north_america.nc")) + + To animate along a set of :class:`~iris.cube.Cube` slices : + + >>> cube_iter = my_cube.slices(("longitude", "latitude")) + >>> ani = iplt.animate(cube_iter, qplt.contourf) + >>> iplt.show() + + """ + kwargs.setdefault("interval", 100) + coords = kwargs.pop("coords", None) + + if fig is None: + fig = plt.gcf() + + def update_animation_iris(i, cubes, vmin, vmax, coords): + # Clearing the figure is currently necessary for compatibility with + # the iris quickploting module - due to the colorbar. + plt.gcf().clf() + plot_func(cubes[i], vmin=vmin, vmax=vmax, coords=coords) + + # Turn cube iterator into a list to determine plot ranges. + # NOTE: we check that we are not providing a cube as this has a deprecated + # iter special method. 
+ if hasattr(cube_iterator, "__iter__") and not isinstance( + cube_iterator, iris.cube.Cube + ): + cubes = iris.cube.CubeList(cube_iterator) + else: + msg = "iterable type object required for animation, {} given".format( + type(cube_iterator) + ) + raise TypeError(msg) + + supported = ["iris.plot", "iris.quickplot"] + if plot_func.__module__ not in supported: + msg = ( + 'Given plotting module "{}" may not be supported, intended ' + "use: {}." + ) + msg = msg.format(plot_func.__module__, supported) + warnings.warn(msg, UserWarning) + + supported = ["contour", "contourf", "pcolor", "pcolormesh"] + if plot_func.__name__ not in supported: + msg = ( + 'Given plotting function "{}" may not be supported, intended ' + "use: {}." + ) + msg = msg.format(plot_func.__name__, supported) + warnings.warn(msg, UserWarning) + + # Determine plot range. + vmin = kwargs.pop("vmin", min([cc.data.min() for cc in cubes])) + vmax = kwargs.pop("vmax", max([cc.data.max() for cc in cubes])) + + update = update_animation_iris + frames = range(len(cubes)) + + return animation.FuncAnimation( + fig, update, frames=frames, fargs=(cubes, vmin, vmax, coords), **kwargs + ) diff --git a/lib/iris/plugins/README.md b/lib/iris/plugins/README.md new file mode 100644 index 0000000000..e8dee1de2c --- /dev/null +++ b/lib/iris/plugins/README.md @@ -0,0 +1,10 @@ +# Iris plugins + +`iris.plugins` is a [namespace package] allowing arbitrary plugins to be +installed alongside Iris. + +See [the Iris documentation][plugins] for more information. 
+ + +[namespace package]: https://packaging.python.org/en/latest/guides/packaging-namespace-packages/ +[plugins]: https://scitools-iris.readthedocs.io/en/latest/community/plugins.html diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 2c4a94b1d0..6006314265 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -45,7 +45,6 @@ def _title(cube_or_coord, with_units): or units.is_no_unit() or units == cf_units.Unit("1") ): - if _use_symbol(units): units = units.symbol elif units.is_time_reference(): @@ -71,7 +70,7 @@ def _label(cube, mode, result=None, ndims=2, coords=None, axes=None): if result is not None: draw_edges = mode == iris.coords.POINT_MODE bar = plt.colorbar( - result, orientation="horizontal", drawedges=draw_edges + result, ax=axes, orientation="horizontal", drawedges=draw_edges ) has_known_units = not ( cube.units.is_unknown() or cube.units.is_no_unit() @@ -311,5 +310,19 @@ def scatter(x, y, *args, **kwargs): return result +def fill_between(x, y1, y2, *args, **kwargs): + """ + Draws a labelled fill_between plot based on the given cubes or coordinates. + + See :func:`iris.plot.fill_between` for details of valid arguments and + keyword arguments. + + """ + axes = kwargs.get("axes") + result = iplt.fill_between(x, y1, y2, *args, **kwargs) + _label_1d_plot(x, y1, axes=axes) + return result + + # Provide a convenience show method from pyplot. show = plt.show diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index c1df4f628b..5529b899c5 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -11,19 +11,11 @@ The primary class for this module is :class:`IrisTest`. -By default, this module sets the matplotlib backend to "agg". But when -this module is imported it checks ``sys.argv`` for the flag "-d". If -found, it is removed from ``sys.argv`` and the matplotlib backend is -switched to "tkagg" to allow the interactive visual inspection of -graphical test results. 
- """ -import codecs import collections from collections.abc import Mapping import contextlib -import datetime import difflib import filecmp import functools @@ -34,44 +26,29 @@ import math import os import os.path +from pathlib import Path import re import shutil import subprocess import sys -import threading -from typing import Dict, List +from typing import AnyStr import unittest from unittest import mock import warnings import xml.dom.minidom import zlib -import filelock import numpy as np import numpy.ma as ma import requests import iris.config import iris.cube +import iris.tests.graphics as graphics import iris.util -# Test for availability of matplotlib. -# (And remove matplotlib as an iris.tests dependency.) -try: - import matplotlib - - # Override any user settings e.g. from matplotlibrc file. - matplotlib.rcdefaults() - # Set backend *after* rcdefaults, as we don't want that overridden (#3846). - matplotlib.use("agg") - # Standardise the figure size across matplotlib versions. - # This permits matplotlib png image comparison. - matplotlib.rcParams["figure.figsize"] = [8.0, 6.0] - import matplotlib.pyplot as plt -except ImportError: - MPL_AVAILABLE = False -else: - MPL_AVAILABLE = True +MPL_AVAILABLE = graphics.MPL_AVAILABLE + try: from osgeo import gdal # noqa @@ -111,10 +88,6 @@ #: Basepath for test results. _RESULT_PATH = os.path.join(os.path.dirname(__file__), "results") -#: Default perceptual hash size. -_HASH_SIZE = 16 -#: Default maximum perceptual hash hamming distance. -_HAMMING_DISTANCE = 2 if "--data-files-used" in sys.argv: sys.argv.remove("--data-files-used") @@ -131,18 +104,6 @@ os.environ["IRIS_TEST_CREATE_MISSING"] = "true" -# Whether to display matplotlib output to the screen. -_DISPLAY_FIGURES = False - -if MPL_AVAILABLE and "-d" in sys.argv: - sys.argv.remove("-d") - plt.switch_backend("tkagg") - _DISPLAY_FIGURES = True - -# Threading non re-entrant blocking lock to ensure thread-safe plotting. 
-_lock = threading.Lock() - - def main(): """A wrapper for unittest.main() which adds iris.test specific options to the help (-h) output.""" if "-h" in sys.argv or "--help" in sys.argv: @@ -179,53 +140,78 @@ def main(): unittest.main() -def get_data_path(relative_path): +def _assert_masked_array(assertion, a, b, strict, **kwargs): + # Compare masks. + a_mask, b_mask = ma.getmaskarray(a), ma.getmaskarray(b) + np.testing.assert_array_equal(a_mask, b_mask) + + if strict: + # Compare all data values. + assertion(a.data, b.data, **kwargs) + else: + # Compare only unmasked data values. + assertion( + ma.compressed(a), + ma.compressed(b), + **kwargs, + ) + + +def assert_masked_array_equal(a, b, strict=False): """ - Return the absolute path to a data file when given the relative path - as a string, or sequence of strings. + Check that masked arrays are equal. This requires the + unmasked values and masks to be identical. + + Args: + + * a, b (array-like): + Two arrays to compare. + + Kwargs: + + * strict (bool): + If True, perform a complete mask and data array equality check. + If False (default), the data array equality considers only unmasked + elements. """ - if not isinstance(relative_path, str): - relative_path = os.path.join(*relative_path) - test_data_dir = iris.config.TEST_DATA_DIR - if test_data_dir is None: - test_data_dir = "" - data_path = os.path.join(test_data_dir, relative_path) - - if _EXPORT_DATAPATHS_FILE is not None: - _EXPORT_DATAPATHS_FILE.write(data_path + "\n") - - if isinstance(data_path, str) and not os.path.exists(data_path): - # if the file is gzipped, ungzip it and return the path of the ungzipped - # file. - gzipped_fname = data_path + ".gz" - if os.path.exists(gzipped_fname): - with gzip.open(gzipped_fname, "rb") as gz_fh: - try: - with open(data_path, "wb") as fh: - fh.writelines(gz_fh) - except IOError: - # Put ungzipped data file in a temporary path, since we - # can't write to the original path (maybe it is owned by - # the system.) 
- _, ext = os.path.splitext(data_path) - data_path = iris.util.create_temp_filename(suffix=ext) - with open(data_path, "wb") as fh: - fh.writelines(gz_fh) - - return data_path - - -class IrisTest_nometa(unittest.TestCase): + _assert_masked_array(np.testing.assert_array_equal, a, b, strict) + + +def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): + """ + Check that masked arrays are almost equal. This requires the + masks to be identical, and the unmasked values to be almost + equal. + + Args: + + * a, b (array-like): + Two arrays to compare. + + Kwargs: + + * strict (bool): + If True, perform a complete mask and data array equality check. + If False (default), the data array equality considers only unmasked + elements. + + * decimal (int): + Equality tolerance level for + :meth:`numpy.testing.assert_array_almost_equal`, with the meaning + 'abs(desired-actual) < 0.5 * 10**(-decimal)' + + """ + _assert_masked_array( + np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal + ) + + +class IrisTest(unittest.TestCase): """A subclass of unittest.TestCase which provides Iris specific testing functionality.""" _assertion_counts = collections.defaultdict(int) - @classmethod - def setUpClass(cls): - # Ensure that the CF profile if turned-off for testing. - iris.site_configuration["cf_profile"] = None - def _assert_str_same( self, reference_str, @@ -250,6 +236,43 @@ def _assert_str_same( % (type_comparison_name, reference_filename, diff) ) + @staticmethod + def get_data_path(relative_path): + """ + Return the absolute path to a data file when given the relative path + as a string, or sequence of strings. 
+ + """ + if not isinstance(relative_path, str): + relative_path = os.path.join(*relative_path) + test_data_dir = iris.config.TEST_DATA_DIR + if test_data_dir is None: + test_data_dir = "" + data_path = os.path.join(test_data_dir, relative_path) + + if _EXPORT_DATAPATHS_FILE is not None: + _EXPORT_DATAPATHS_FILE.write(data_path + "\n") + + if isinstance(data_path, str) and not os.path.exists(data_path): + # if the file is gzipped, ungzip it and return the path of the ungzipped + # file. + gzipped_fname = data_path + ".gz" + if os.path.exists(gzipped_fname): + with gzip.open(gzipped_fname, "rb") as gz_fh: + try: + with open(data_path, "wb") as fh: + fh.writelines(gz_fh) + except IOError: + # Put ungzipped data file in a temporary path, since we + # can't write to the original path (maybe it is owned by + # the system.) + _, ext = os.path.splitext(data_path) + data_path = iris.util.create_temp_filename(suffix=ext) + with open(data_path, "wb") as fh: + fh.writelines(gz_fh) + + return data_path + @staticmethod def get_result_path(relative_path): """ @@ -261,25 +284,6 @@ def get_result_path(relative_path): relative_path = os.path.join(*relative_path) return os.path.abspath(os.path.join(_RESULT_PATH, relative_path)) - def assertStringEqual( - self, reference_str, test_str, type_comparison_name="strings" - ): - if reference_str != test_str: - diff = "\n".join( - difflib.unified_diff( - reference_str.splitlines(), - test_str.splitlines(), - "Reference", - "Test result", - "", - "", - 0, - ) - ) - self.fail( - "{} do not match:\n{}".format(type_comparison_name, diff) - ) - def result_path(self, basename=None, ext=""): """ Return the full path to a test result, generated from the \ @@ -373,8 +377,8 @@ def assertCDL(self, netcdf_filename, reference_filename=None, flags="-h"): flags = list(map(str, flags)) try: - # Python3 only: use subprocess.run() - args = ["ncdump"] + flags + [netcdf_filename] + exe_path = env_bin_path("ncdump") + args = [exe_path] + flags + 
[netcdf_filename] cdl = subprocess.check_output(args, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as exc: print(exc.output) @@ -598,16 +602,6 @@ def _recordWarningMatches(self, expected_regexp=""): expr = re.compile(expected_regexp) matches.extend(message for message in messages if expr.search(message)) - @contextlib.contextmanager - def assertWarnsRegexp(self, expected_regexp=""): - # Check that a warning is raised matching a given expression. - with self._recordWarningMatches(expected_regexp) as matches: - yield - - msg = "Warning matching '{}' not raised." - msg = msg.format(expected_regexp) - self.assertTrue(matches, msg) - @contextlib.contextmanager def assertLogs(self, logger=None, level=None, msg_regex=None): """ @@ -654,85 +648,14 @@ def assertNoWarningsRegexp(self, expected_regexp=""): msg = msg.format(expected_regexp, matches) self.assertFalse(matches, msg) - def _assertMaskedArray(self, assertion, a, b, strict, **kwargs): - # Define helper function to extract unmasked values as a 1d - # array. - def unmasked_data_as_1d_array(array): - array = ma.asarray(array) - if array.ndim == 0: - if array.mask: - data = np.array([]) - else: - data = np.array([array.data]) - else: - data = array.data[~ma.getmaskarray(array)] - return data - - # Compare masks. This will also check that the array shapes - # match, which is not tested when comparing unmasked values if - # strict is False. - a_mask, b_mask = ma.getmaskarray(a), ma.getmaskarray(b) - np.testing.assert_array_equal(a_mask, b_mask) - - if strict: - assertion(a.data, b.data, **kwargs) - else: - assertion( - unmasked_data_as_1d_array(a), - unmasked_data_as_1d_array(b), - **kwargs, - ) - - def assertMaskedArrayEqual(self, a, b, strict=False): - """ - Check that masked arrays are equal. This requires the - unmasked values and masks to be identical. - - Args: - - * a, b (array-like): - Two arrays to compare. 
- - Kwargs: - - * strict (bool): - If True, perform a complete mask and data array equality check. - If False (default), the data array equality considers only unmasked - elements. - - """ - self._assertMaskedArray(np.testing.assert_array_equal, a, b, strict) + assertMaskedArrayEqual = staticmethod(assert_masked_array_equal) def assertArrayAlmostEqual(self, a, b, decimal=6): np.testing.assert_array_almost_equal(a, b, decimal=decimal) - def assertMaskedArrayAlmostEqual(self, a, b, decimal=6, strict=False): - """ - Check that masked arrays are almost equal. This requires the - masks to be identical, and the unmasked values to be almost - equal. - - Args: - - * a, b (array-like): - Two arrays to compare. - - Kwargs: - - * strict (bool): - If True, perform a complete mask and data array equality check. - If False (default), the data array equality considers only unmasked - elements. - - * decimal (int): - Equality tolerance level for - :meth:`numpy.testing.assert_array_almost_equal`, with the meaning - 'abs(desired-actual) < 0.5 * 10**(-decimal)' - - """ - self._assertMaskedArray( - np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal - ) + assertMaskedArrayAlmostEqual = staticmethod( + assert_masked_array_almost_equal + ) def assertArrayAllClose(self, a, b, rtol=1.0e-7, atol=1.0e-8, **kwargs): """ @@ -872,137 +795,10 @@ def check_graphic(self): output directory, and the imagerepo.json file being updated. """ - from PIL import Image - import imagehash - - dev_mode = os.environ.get("IRIS_TEST_CREATE_MISSING") - unique_id = self._unique_id() - repo_fname = os.path.join(_RESULT_PATH, "imagerepo.json") - with open(repo_fname, "rb") as fi: - repo: Dict[str, List[str]] = json.load( - codecs.getreader("utf-8")(fi) - ) - - try: - #: The path where the images generated by the tests should go. 
- image_output_directory = os.path.join( - os.path.dirname(__file__), "result_image_comparison" - ) - if not os.access(image_output_directory, os.W_OK): - if not os.access(os.getcwd(), os.W_OK): - raise IOError( - "Write access to a local disk is required " - "to run image tests. Run the tests from a " - "current working directory you have write " - "access to to avoid this issue." - ) - else: - image_output_directory = os.path.join( - os.getcwd(), "iris_image_test_output" - ) - result_fname = os.path.join( - image_output_directory, "result-" + unique_id + ".png" - ) - - if not os.path.isdir(image_output_directory): - # Handle race-condition where the directories are - # created sometime between the check above and the - # creation attempt below. - try: - os.makedirs(image_output_directory) - except OSError as err: - # Don't care about "File exists" - if err.errno != 17: - raise - - def _create_missing(): - fname = "{}.png".format(phash) - base_uri = ( - "https://scitools.github.io/test-iris-imagehash/" - "images/v4/{}" - ) - uri = base_uri.format(fname) - hash_fname = os.path.join(image_output_directory, fname) - uris = repo.setdefault(unique_id, []) - uris.append(uri) - print("Creating image file: {}".format(hash_fname)) - figure.savefig(hash_fname) - msg = "Creating imagerepo entry: {} -> {}" - print(msg.format(unique_id, uri)) - lock = filelock.FileLock( - os.path.join(_RESULT_PATH, "imagerepo.lock") - ) - # The imagerepo.json file is a critical resource, so ensure - # thread safe read/write behaviour via platform independent - # file locking. - with lock.acquire(timeout=600): - with open(repo_fname, "wb") as fo: - json.dump( - repo, - codecs.getwriter("utf-8")(fo), - indent=4, - sort_keys=True, - ) - - # Calculate the test result perceptual image hash. 
- buffer = io.BytesIO() - figure = plt.gcf() - figure.savefig(buffer, format="png") - buffer.seek(0) - phash = imagehash.phash(Image.open(buffer), hash_size=_HASH_SIZE) - - if unique_id not in repo: - # The unique id might not be fully qualified, e.g. - # expects iris.tests.test_quickplot.TestLabels.test_contour.0, - # but got test_quickplot.TestLabels.test_contour.0 - # if we find single partial match from end of the key - # then use that, else fall back to the unknown id state. - matches = [key for key in repo if key.endswith(unique_id)] - if len(matches) == 1: - unique_id = matches[0] - - if unique_id in repo: - uris = repo[unique_id] - # Extract the hex basename strings from the uris. - hexes = [ - os.path.splitext(os.path.basename(uri))[0] for uri in uris - ] - # Create the expected perceptual image hashes from the uris. - to_hash = imagehash.hex_to_hash - expected = [to_hash(uri_hex) for uri_hex in hexes] - - # Calculate hamming distance vector for the result hash. - distances = [e - phash for e in expected] - - if np.all([hd > _HAMMING_DISTANCE for hd in distances]): - if dev_mode: - _create_missing() - else: - figure.savefig(result_fname) - msg = ( - "Bad phash {} with hamming distance {} " - "for test {}." - ) - msg = msg.format(phash, distances, unique_id) - if _DISPLAY_FIGURES: - emsg = "Image comparison would have failed: {}" - print(emsg.format(msg)) - else: - emsg = "Image comparison failed: {}" - raise AssertionError(emsg.format(msg)) - else: - if dev_mode: - _create_missing() - else: - figure.savefig(result_fname) - emsg = "Missing image test result: {}." 
- raise AssertionError(emsg.format(unique_id)) - - if _DISPLAY_FIGURES: - plt.show() - - finally: - plt.close() + graphics.check_graphic( + self._unique_id(), + _RESULT_PATH, + ) def _remove_testcase_patches(self): """Helper to remove per-testcase patches installed by :meth:`patch`.""" @@ -1140,112 +936,11 @@ def assertEqualAndKind(self, value, expected): ) -# An environment variable controls whether test timings are output. -# -# NOTE: to run tests with timing output, nosetests cannot be used. -# At present, that includes not using "python setup.py test" -# The typically best way is like this : -# $ export IRIS_TEST_TIMINGS=1 -# $ python -m unittest discover -s iris.tests -# and commonly adding ... -# | grep "TIMING TEST" >iris_test_output.txt -# -_PRINT_TEST_TIMINGS = bool(int(os.environ.get("IRIS_TEST_TIMINGS", 0))) - - -def _method_path(meth, cls): - return ".".join([cls.__module__, cls.__name__, meth.__name__]) - - -def _testfunction_timing_decorator(fn, cls): - # Function decorator for making a testcase print its execution time. - @functools.wraps(fn) - def inner(*args, **kwargs): - start_time = datetime.datetime.now() - try: - result = fn(*args, **kwargs) - finally: - end_time = datetime.datetime.now() - elapsed_time = (end_time - start_time).total_seconds() - msg = '\n TEST TIMING -- "{}" took : {:12.6f} sec.' - name = _method_path(fn, cls) - print(msg.format(name, elapsed_time)) - return result - - return inner - - -def iristest_timing_decorator(cls): - # Class decorator to make all "test_.." functions print execution timings. - if _PRINT_TEST_TIMINGS: - # NOTE: 'dir' scans *all* class properties, including inherited ones. 
- attr_names = dir(cls) - for attr_name in attr_names: - attr = getattr(cls, attr_name) - if callable(attr) and attr_name.startswith("test"): - attr = _testfunction_timing_decorator(attr, cls) - setattr(cls, attr_name, attr) - return cls - - -class _TestTimingsMetaclass(type): - # An alternative metaclass for IrisTest subclasses, which makes - # them print execution timings for all the testcases. - # This is equivalent to applying the @iristest_timing_decorator to - # every test class that inherits from IrisTest. - # NOTE: however, it means you *cannot* specify a different metaclass for - # your test class inheriting from IrisTest. - # See below for how to solve that where needed. - def __new__(cls, clsname, base_classes, attrs): - result = type.__new__(cls, clsname, base_classes, attrs) - if _PRINT_TEST_TIMINGS: - result = iristest_timing_decorator(result) - return result - - -class IrisTest(IrisTest_nometa, metaclass=_TestTimingsMetaclass): - # Derive the 'ordinary' IrisTest from IrisTest_nometa, but add the - # metaclass that enables test timings output. - # This means that all subclasses also get the timing behaviour. - # However, if a different metaclass is *wanted* for an IrisTest subclass, - # this would cause a metaclass conflict. - # Instead, you can inherit from IrisTest_nometa and apply the - # @iristest_timing_decorator explicitly to your new testclass. - pass - - +get_data_path = IrisTest.get_data_path get_result_path = IrisTest.get_result_path -class GraphicsTestMixin: - - # nose directive: dispatch tests concurrently. - _multiprocess_can_split_ = True - - def setUp(self): - # Acquire threading non re-entrant blocking lock to ensure - # thread-safe plotting. - _lock.acquire() - # Make sure we have no unclosed plots from previous tests before - # generating this one. - if MPL_AVAILABLE: - plt.close("all") - - def tearDown(self): - # If a plotting test bombs out it can leave the current figure - # in an odd state, so we make sure it's been disposed of. 
- if MPL_AVAILABLE: - plt.close("all") - # Release the non re-entrant blocking lock. - _lock.release() - - -class GraphicsTest(GraphicsTestMixin, IrisTest): - pass - - -class GraphicsTest_nometa(GraphicsTestMixin, IrisTest_nometa): - # Graphicstest without the metaclass providing test timings. +class GraphicsTest(graphics.GraphicsTestMixin, IrisTest): pass @@ -1290,23 +985,7 @@ class MyGeoTiffTests(test.IrisTest): return skip(fn) -def skip_plot(fn): - """ - Decorator to choose whether to run tests, based on the availability of the - matplotlib library. - - Example usage: - @skip_plot - class MyPlotTests(test.GraphicsTest): - ... - - """ - skip = unittest.skipIf( - condition=not MPL_AVAILABLE, - reason="Graphics tests require the matplotlib library.", - ) - - return skip(fn) +skip_plot = graphics.skip_plot skip_sample_data = unittest.skipIf( @@ -1352,3 +1031,30 @@ def wrapped(self, *args, **kwargs): return result return wrapped + + +def env_bin_path(exe_name: AnyStr = None): + """ + Return a Path object for (an executable in) the environment bin directory. + + Parameters + ---------- + exe_name : str + If set, the name of an executable to append to the path. + + Returns + ------- + exe_path : Path + A path to the bin directory, or an executable file within it. + + Notes + ----- + For use in tests which spawn commands which should call executables within + the Python environment, since many IDEs (Eclipse, PyCharm) don't + automatically include this location in $PATH (as opposed to $PYTHONPATH). 
+ """ + exe_path = Path(os.__file__) + exe_path = (exe_path / "../../../bin").resolve() + if exe_name is not None: + exe_path = exe_path / exe_name + return exe_path diff --git a/lib/iris/tests/experimental/test_animate.py b/lib/iris/tests/experimental/test_animate.py deleted file mode 100644 index d8010767b8..0000000000 --- a/lib/iris/tests/experimental/test_animate.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test the animation of cubes within iris. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np - -import iris -from iris.coord_systems import GeogCS - -# Run tests in no graphics mode if matplotlib is not available. -if tests.MPL_AVAILABLE: - import iris.experimental.animate as animate - import iris.plot as iplt - - -@tests.skip_plot -class IntegrationTest(tests.GraphicsTest): - def setUp(self): - super().setUp() - cube = iris.cube.Cube(np.arange(36, dtype=np.int32).reshape((3, 3, 4))) - cs = GeogCS(6371229) - - coord = iris.coords.DimCoord( - points=np.array([1, 2, 3], dtype=np.int32), long_name="time" - ) - cube.add_dim_coord(coord, 0) - - coord = iris.coords.DimCoord( - points=np.array([-1, 0, 1], dtype=np.int32), - standard_name="latitude", - units="degrees", - coord_system=cs, - ) - cube.add_dim_coord(coord, 1) - coord = iris.coords.DimCoord( - points=np.array([-1, 0, 1, 2], dtype=np.int32), - standard_name="longitude", - units="degrees", - coord_system=cs, - ) - cube.add_dim_coord(coord, 2) - self.cube = cube - - def test_cube_animation(self): - # This follows :meth:`~matplotlib.animation.FuncAnimation.save` - # to ensure that each frame corresponds to known accepted frames for - # the animation. 
- cube_iter = self.cube.slices(("latitude", "longitude")) - - ani = animate.animate(cube_iter, iplt.contourf) - - # Disconnect the first draw callback to stop the animation. - ani._fig.canvas.mpl_disconnect(ani._first_draw_id) - # Update flag to indicate drawing happens. Without this, a warning is - # thrown when the ani object is destroyed, and this warning sometimes - # interferes with unrelated tests (#4330). - ani._draw_was_started = True - - ani = [ani] - # Extract frame data - for data in zip(*[a.new_saved_frame_seq() for a in ani]): - # Draw each frame - for anim, d in zip(ani, data): - anim._draw_next_frame(d, blit=False) - self.check_graphic() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/graphics/README.md b/lib/iris/tests/graphics/README.md new file mode 100755 index 0000000000..069fc01f70 --- /dev/null +++ b/lib/iris/tests/graphics/README.md @@ -0,0 +1,51 @@ +# Graphics Tests + +Iris may be used to create various forms of graphical output; to ensure +the output is consistent, there are automated tests to check against +known acceptable graphical output. + +At present graphical tests are used in the following areas of Iris: + +* Module `iris.tests.test_plot` +* Module `iris.tests.test_quickplot` +* Gallery plots contained in `docs/gallery_tests`. + + +## Challenges + +Iris uses many dependencies that provide functionality, an example that +applies here is `matplotlib`. When there are updates to `matplotlib` or a +dependency of it, this may result in a change in the rendered graphical +output. This means that there may be no changes to `Iris`, but due to an +updated dependency any automated tests that compare a graphical output to a +known acceptable output may fail. The failure may also not be visually +perceived as it may be a simple pixel shift. + + +## Testing Strategy + +The `iris.tests.IrisTest.check_graphic` test routine calls out to +`iris.tests.graphics.check_graphic` which tests against the **acceptable** +result. 
It does this using an image **hash** comparison technique which allows
+us to be robust against minor variations based on underlying library updates.
+
+This consists of:
+
+* The `graphics.check_graphic` function uses a perceptual
+  **image hash** of the outputs (see https://github.com/JohannesBuchner/imagehash)
+  as the basis for checking test results.
+
+* The hashes of known **acceptable** results for each test are stored in a
+  lookup dictionary, saved to the repo file
+  `lib/iris/tests/results/imagerepo.json`
+  (relative to the root of the Iris repository).
+
+* An actual baseline image for each hash value is stored in the test data
+  repository ([iris-test-data](https://github.com/SciTools/iris-test-data)).
+
+* The baseline images allow human-eye assessment of whether a new output is
+  judged to be close enough to the older ones, or not.
+
+* The utility script `iris/tests/idiff.py` automates checking, enabling the
+  developer to easily compare the proposed new **acceptable** result image
+  against the existing accepted baseline image, for each failing test.
\ No newline at end of file
diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py
new file mode 100755
index 0000000000..544d989564
--- /dev/null
+++ b/lib/iris/tests/graphics/__init__.py
@@ -0,0 +1,286 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+# !/usr/bin/env python
+"""
+Contains Iris graphic testing utilities.
+
+By default, this module sets the matplotlib backend to "agg". But when
+this module is imported it checks ``sys.argv`` for the flag "-d". If
+found, it is removed from ``sys.argv`` and the matplotlib backend is
+switched to "tkagg" to allow the interactive visual inspection of
+graphical test results.
+""" + +import codecs +import io +import json +import os +from pathlib import Path +import sys +import threading +from typing import Callable, Dict, Union +import unittest + +import filelock + +# Test for availability of matplotlib. +# (And remove matplotlib as an iris.tests dependency.) +try: + import matplotlib + + # Override any user settings e.g. from matplotlibrc file. + matplotlib.rcdefaults() + # Set backend *after* rcdefaults, as we don't want that overridden (#3846). + matplotlib.use("agg") + # Standardise the figure size across matplotlib versions. + # This permits matplotlib png image comparison. + matplotlib.rcParams["figure.figsize"] = [8.0, 6.0] + import matplotlib.pyplot as plt +except ImportError: + MPL_AVAILABLE = False +else: + MPL_AVAILABLE = True + +# Whether to display matplotlib output to the screen. +_DISPLAY_FIGURES = False + +if MPL_AVAILABLE and "-d" in sys.argv: + sys.argv.remove("-d") + plt.switch_backend("tkagg") + _DISPLAY_FIGURES = True + +# Threading non re-entrant blocking lock to ensure thread-safe plotting in the +# GraphicsTestMixin. +_lock = threading.Lock() + +#: Default perceptual hash size. +HASH_SIZE = 16 +#: Default maximum perceptual hash hamming distance. 
+HAMMING_DISTANCE = 2 +# Prefix for image test results (that aren't yet verified as good to add to +# reference images) +RESULT_PREFIX = "result-" +# Name of the imagerepo json and associated file lock +IMAGE_REPO_DIR = Path(__file__).parents[1] / "results" +IMAGE_REPO_PATH = IMAGE_REPO_DIR / "imagerepo.json" +IMAGE_REPO_LOCK_PATH = IMAGE_REPO_DIR / "imagerepo.lock" + + +__all__ = [ + "GraphicsTestMixin", + "MPL_AVAILABLE", + "RESULT_PREFIX", + "check_graphic", + "fully_qualify", + "generate_repo_from_baselines", + "get_phash", + "read_repo_json", + "repos_equal", + "skip_plot", + "write_repo_json", +] + + +def _output_dir() -> Path: + test_output_dir = Path(__file__).parents[1] / Path( + "result_image_comparison" + ) + + if not os.access(test_output_dir, os.W_OK): + if not os.access(Path("."), os.W_OK): + raise IOError( + "Write access to a local disk is required " + "to run image tests. Run the tests from a " + "current working directory you have write " + "access to to avoid this issue." 
+ ) + else: + test_output_dir = Path(".") / "iris_image_test_output" + + return test_output_dir + + +def read_repo_json() -> Dict[str, str]: + with open(IMAGE_REPO_PATH, "rb") as fi: + repo: Dict[str, str] = json.load(codecs.getreader("utf-8")(fi)) + return repo + + +def write_repo_json(data: Dict[str, str]) -> None: + string_data = {} + for key, val in data.items(): + string_data[key] = str(val) + with open(IMAGE_REPO_PATH, "wb") as fo: + json.dump( + string_data, + codecs.getwriter("utf-8")(fo), + indent=4, + sort_keys=True, + ) + + +def repos_equal(repo1: Dict[str, str], repo2: Dict[str, str]) -> bool: + if sorted(repo1.keys()) != sorted(repo2.keys()): + return False + for key, val in repo1.items(): + if str(val) != str(repo2[key]): + return False + return True + + +def get_phash(input: Path) -> str: + from PIL import Image + import imagehash + + return imagehash.phash(Image.open(input), hash_size=HASH_SIZE) + + +def generate_repo_from_baselines(baseline_image_dir: Path) -> Dict[str, str]: + repo = {} + for path in baseline_image_dir.iterdir(): + phash = get_phash(path) + repo[path.stem] = phash + return repo + + +def fully_qualify(test_id: str, repo: str) -> Dict[str, str]: + # If the test_id isn't in the repo as it stands, look for it + if test_id not in repo: + test_id_candidates = [x for x in repo.keys() if x.endswith(test_id)] + if len(test_id_candidates) == 1: + (test_id,) = test_id_candidates + return test_id + + +def check_graphic(test_id: str, results_dir: Union[str, Path]) -> None: + """ + Check the hash of the current matplotlib figure matches the expected + image hash for the current graphic test. + + To create missing image test results, set the IRIS_TEST_CREATE_MISSING + environment variable before running the tests. This will result in new + and appropriately ".png" image files being generated in the image + output directory, and the imagerepo.json file being updated. 
+ + """ + from imagehash import hex_to_hash + + dev_mode = os.environ.get("IRIS_TEST_CREATE_MISSING") + + #: The path where the images generated by the tests should go. + test_output_dir = _output_dir() + test_output_dir.mkdir(exist_ok=True) + + # The path where the image matching this test should be saved if necessary + result_path = test_output_dir / f"{RESULT_PREFIX}{test_id}.png" + + results_dir = Path(results_dir) + repo = read_repo_json() + + # Check if test_id is fully qualified, if it's not then try to work + # out what it should be + test_id = fully_qualify(test_id, repo) + + try: + + def _create_missing(phash: str) -> None: + output_path = test_output_dir / (test_id + ".png") + + print(f"Creating image file: {output_path}") + figure.savefig(output_path) + + msg = "Creating imagerepo entry: {} -> {}" + print(msg.format(test_id, phash)) + # The imagerepo.json file is a critical resource, so ensure + # thread safe read/write behaviour via platform independent + # file locking. + lock = filelock.FileLock(IMAGE_REPO_LOCK_PATH) + with lock.acquire(timeout=600): + # Read the file again in case it changed, then edit before + # releasing lock + repo = read_repo_json() + repo[test_id] = phash + write_repo_json(repo) + + # Calculate the test result perceptual image hash. + buffer = io.BytesIO() + figure = plt.gcf() + figure.savefig(buffer, format="png") + buffer.seek(0) + phash = get_phash(buffer) + + if test_id in repo: + expected = hex_to_hash(repo[test_id]) + + # Calculate hamming distance vector for the result hash. + distance = expected - phash + + if distance > HAMMING_DISTANCE: + if dev_mode: + _create_missing(phash) + else: + figure.savefig(result_path) + msg = ( + "Bad phash {} with hamming distance {} " "for test {}." 
+ ) + msg = msg.format(phash, distance, test_id) + if _DISPLAY_FIGURES: + emsg = "Image comparison would have failed: {}" + print(emsg.format(msg)) + else: + emsg = "Image comparison failed: {}" + raise AssertionError(emsg.format(msg)) + else: + if dev_mode: + _create_missing(phash) + else: + figure.savefig(result_path) + emsg = "Missing image test result: {}." + raise AssertionError(emsg.format(test_id)) + + if _DISPLAY_FIGURES: + plt.show() + + finally: + plt.close() + + +class GraphicsTestMixin: + def setUp(self) -> None: + # Acquire threading non re-entrant blocking lock to ensure + # thread-safe plotting. + _lock.acquire() + # Make sure we have no unclosed plots from previous tests before + # generating this one. + if MPL_AVAILABLE: + plt.close("all") + + def tearDown(self) -> None: + # If a plotting test bombs out it can leave the current figure + # in an odd state, so we make sure it's been disposed of. + if MPL_AVAILABLE: + plt.close("all") + # Release the non re-entrant blocking lock. + _lock.release() + + +def skip_plot(fn: Callable) -> Callable: + """ + Decorator to choose whether to run tests, based on the availability of the + matplotlib library. + + Example usage: + @skip_plot + class MyPlotTests(test.GraphicsTest): + ... + + """ + skip = unittest.skipIf( + condition=not MPL_AVAILABLE, + reason="Graphics tests require the matplotlib library.", + ) + + return skip(fn) diff --git a/lib/iris/tests/graphics/idiff.py b/lib/iris/tests/graphics/idiff.py new file mode 100755 index 0000000000..a355f2cf82 --- /dev/null +++ b/lib/iris/tests/graphics/idiff.py @@ -0,0 +1,208 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +# !/usr/bin/env python +""" +Provides "diff-like" comparison of images. + +Currently relies on matplotlib for image processing so limited to PNG format. 
+ +""" + +import argparse +from pathlib import Path +import re +import sys +import warnings + +# Force iris.tests to use the ```tkagg``` backend by using the '-d' +# command-line argument as idiff is an interactive tool that requires a +# gui interface. +sys.argv.append("-d") +from PIL import Image # noqa +import matplotlib.image as mimg # noqa +import matplotlib.pyplot as plt # noqa +import matplotlib.testing.compare as mcompare # noqa +from matplotlib.testing.exceptions import ImageComparisonFailure # noqa +import matplotlib.widgets as mwidget # noqa + +import iris.tests # noqa +import iris.tests.graphics as graphics # noqa + +# Allows restoration of test id from result image name +_RESULT_NAME_PATTERN = re.compile(graphics.RESULT_PREFIX + r"(.*).png") + + +def extract_test_key(result_image_name): + """ + Extracts the name of the test which a result image refers to + """ + name_match = _RESULT_NAME_PATTERN.match(str(result_image_name)) + if name_match: + test_key = name_match.group(1) + else: + emsg = f"Incorrectly named image in result dir: {result_image_name}" + raise ValueError(emsg) + return test_key + + +_POSTFIX_DIFF = "-failed-diff.png" + + +def diff_viewer( + test_id, + status, + phash, + expected_path, + result_path, + diff_fname, +): + fig = plt.figure(figsize=(14, 12)) + plt.suptitle(expected_path.name) + ax = plt.subplot(221) + ax.imshow(mimg.imread(expected_path)) + ax = plt.subplot(222, sharex=ax, sharey=ax) + ax.imshow(mimg.imread(result_path)) + ax = plt.subplot(223, sharex=ax, sharey=ax) + ax.imshow(mimg.imread(diff_fname)) + + result_dir = result_path.parent + + repo = graphics.read_repo_json() + + def accept(event): + if test_id not in repo: + repo[test_id] = phash + graphics.write_repo_json(repo) + out_file = result_dir / (test_id + ".png") + result_path.rename(out_file) + msg = f"ACCEPTED: {result_path.name} -> {out_file.name}" + print(msg) + else: + msg = f"DUPLICATE: {result_path.name} -> {expected_path.name} (ignored)" + print(msg) + 
result_path.unlink() + diff_fname.unlink() + plt.close() + + def reject(event): + if test_id not in repo: + print(f"REJECTED: {result_path.name}") + else: + msg = f"DUPLICATE: {result_path.name} -> {expected_path.name} (ignored)" + print(msg) + result_path.unlink() + diff_fname.unlink() + plt.close() + + def skip(event): + # Let's keep both the result and the diff files. + print(f"SKIPPED: {result_path.name}") + plt.close() + + ax_accept = plt.axes([0.59, 0.05, 0.1, 0.075]) + ax_reject = plt.axes([0.7, 0.05, 0.1, 0.075]) + ax_skip = plt.axes([0.81, 0.05, 0.1, 0.075]) + baccept = mwidget.Button(ax_accept, "Accept") + baccept.on_clicked(accept) + breject = mwidget.Button(ax_reject, "Reject") + breject.on_clicked(reject) + bskip = mwidget.Button(ax_skip, "Skip") + bskip.on_clicked(skip) + plt.text(0.59, 0.15, status, transform=fig.transFigure) + plt.show() + + +def step_over_diffs(result_dir, display=True): + processed = False + + if display: + msg = "\nComparing the expected image with the test result image." + print(msg) + + # Remove old image diff results. + for fname in result_dir.glob(f"*{_POSTFIX_DIFF}"): + fname.unlink() + + reference_image_dir = Path(iris.tests.get_data_path("images")) + repo = graphics.read_repo_json() + + # Filter out all non-test result image files. + results = [] + for fname in sorted(result_dir.glob(f"{graphics.RESULT_PREFIX}*.png")): + # We only care about PNG images. + try: + im = Image.open(fname) + if im.format != "PNG": + # Ignore - it's not a png image. + continue + except IOError: + # Ignore - it's not an image. + continue + results.append(fname) + + count = len(results) + + for count_index, result_path in enumerate(results): + test_key = extract_test_key(result_path.name) + test_key = graphics.fully_qualify(test_key, repo) + reference_image_path = reference_image_dir / (test_key + ".png") + + try: + # Calculate the test result perceptual image hash. 
+ phash = graphics.get_phash(result_path) + distance = graphics.get_phash(reference_image_path) - phash + except FileNotFoundError: + wmsg = "Ignoring unregistered test result {!r}." + warnings.warn(wmsg.format(test_key)) + continue + + processed = True + + try: + # Creates the diff file when the images aren't identical + mcompare.compare_images(reference_image_path, result_path, tol=0) + except Exception as e: + if isinstance(e, ValueError) or isinstance( + e, ImageComparisonFailure + ): + print(f"Could not compare {result_path}: {e}") + continue + else: + # Propagate the exception, keeping the stack trace + raise + diff_path = result_dir / Path(f"{result_path.stem}{_POSTFIX_DIFF}") + args = phash, reference_image_path, result_path, diff_path + if display: + status = f"Image {count_index + 1} of {count}: hamming distance = {distance}" + prefix = test_key, status + yield prefix + args + else: + yield args + if display and not processed: + print("\nThere are no iris test result images to process.\n") + + +if __name__ == "__main__": + default = Path(iris.tests.__file__).parent / Path( + "result_image_comparison" + ) + description = "Iris graphic test difference tool." 
+ formatter_class = argparse.RawTextHelpFormatter + parser = argparse.ArgumentParser( + description=description, formatter_class=formatter_class + ) + help = "path to iris tests result image directory (default: %(default)s)" + parser.add_argument("--resultdir", "-r", default=default, help=help) + help = 'force "iris.tests" to use the tkagg backend (default: %(default)s)' + parser.add_argument("-d", action="store_true", default=True, help=help) + args = parser.parse_args() + result_dir = Path(args.resultdir) + if not result_dir.is_dir(): + emsg = f"Invalid results directory: {result_dir}" + raise ValueError(emsg) + + for args in step_over_diffs(result_dir): + diff_viewer(*args) diff --git a/lib/iris/tests/graphics/recreate_imagerepo.py b/lib/iris/tests/graphics/recreate_imagerepo.py new file mode 100755 index 0000000000..02ddaad2cb --- /dev/null +++ b/lib/iris/tests/graphics/recreate_imagerepo.py @@ -0,0 +1,74 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+# !/usr/bin/env python +""" +Updates imagerepo.json based on the baseline images + +""" + +import argparse +from pathlib import Path + +from imagehash import hex_to_hash + +import iris.tests +import iris.tests.graphics as graphics + + +def update_json(baseline_image_dir: Path, dry_run: bool = False): + repo = graphics.read_repo_json() + suggested_repo = graphics.generate_repo_from_baselines(baseline_image_dir) + + if graphics.repos_equal(repo, suggested_repo): + msg = ( + f"No change in contents of {graphics.IMAGE_REPO_PATH} based on " + f"{baseline_image_dir}" + ) + print(msg) + else: + for key in sorted(set(repo.keys()) | set(suggested_repo.keys())): + old_val = repo.get(key) + new_val = suggested_repo.get(key) + if old_val is None: + repo[key] = suggested_repo[key] + print(key) + print(f"\t{old_val} -> {new_val}") + elif new_val is None: + del repo[key] + print(key) + print(f"\t{old_val} -> {new_val}") + else: + difference = hex_to_hash(str(old_val)) - hex_to_hash( + str(new_val) + ) + if difference > 0: + print(key) + print(f"\t{old_val} -> {new_val} ({difference})") + repo[key] = suggested_repo[key] + if not dry_run: + graphics.write_repo_json(repo) + + +if __name__ == "__main__": + default_baseline_image_dir = Path( + iris.tests.IrisTest.get_data_path("images") + ) + description = "Update imagerepo.json based on contents of the baseline image directory" + formatter_class = argparse.RawTextHelpFormatter + parser = argparse.ArgumentParser( + description=description, formatter_class=formatter_class + ) + help = "path to iris tests result image directory (default: %(default)s)" + parser.add_argument( + "--image-dir", default=default_baseline_image_dir, help=help + ) + help = "dry run (don't actually update imagerepo.json)" + parser.add_argument("--dry-run", action="store_true", help=help) + args = parser.parse_args() + update_json( + args.image_dir, + args.dry_run, + ) diff --git a/lib/iris/tests/idiff.py b/lib/iris/tests/idiff.py deleted file mode 100755 index 
9770ca116f..0000000000 --- a/lib/iris/tests/idiff.py +++ /dev/null @@ -1,314 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -# !/usr/bin/env python -""" -Provides "diff-like" comparison of images. - -Currently relies on matplotlib for image processing so limited to PNG format. - -""" - -import argparse -import codecs -import contextlib -from glob import glob -import json -import os.path -import shutil -import sys -import warnings - -# Force iris.tests to use the ```tkagg``` backend by using the '-d' -# command-line argument as idiff is an interactive tool that requires a -# gui interface. -sys.argv.append("-d") -from PIL import Image # noqa -import filelock # noqa -import imagehash # noqa -import matplotlib.image as mimg # noqa -import matplotlib.pyplot as plt # noqa -import matplotlib.testing.compare as mcompare # noqa -from matplotlib.testing.exceptions import ImageComparisonFailure # noqa -import matplotlib.widgets as mwidget # noqa -import numpy as np # noqa -import requests # noqa - -import iris.tests # noqa -import iris.util as iutil # noqa - -_POSTFIX_DIFF = "-failed-diff.png" -_POSTFIX_JSON = os.path.join("results", "imagerepo.json") -_POSTFIX_LOCK = os.path.join("results", "imagerepo.lock") - - -@contextlib.contextmanager -def temp_png(suffix=""): - if suffix: - suffix = "-{}".format(suffix) - fname = iutil.create_temp_filename(suffix + ".png") - try: - yield fname - finally: - os.remove(fname) - - -def diff_viewer( - repo, - key, - repo_fname, - phash, - status, - expected_fname, - result_fname, - diff_fname, -): - fig = plt.figure(figsize=(14, 12)) - plt.suptitle(os.path.basename(expected_fname)) - ax = plt.subplot(221) - ax.imshow(mimg.imread(expected_fname)) - ax = plt.subplot(222, sharex=ax, sharey=ax) - ax.imshow(mimg.imread(result_fname)) - ax = plt.subplot(223, sharex=ax, sharey=ax) - 
ax.imshow(mimg.imread(diff_fname)) - - result_dir = os.path.dirname(result_fname) - fname = "{}.png".format(phash) - base_uri = "https://scitools.github.io/test-iris-imagehash/images/v4/{}" - uri = base_uri.format(fname) - phash_fname = os.path.join(result_dir, fname) - - def accept(event): - if uri not in repo[key]: - # Ensure to maintain strict time order where the first uri - # associated with the repo key is the oldest, and the last - # uri is the youngest - repo[key].append(uri) - # Update the image repo. - with open(repo_fname, "wb") as fo: - json.dump( - repo, - codecs.getwriter("utf-8")(fo), - indent=4, - sort_keys=True, - ) - os.rename(result_fname, phash_fname) - msg = "ACCEPTED: {} -> {}" - print( - msg.format( - os.path.basename(result_fname), - os.path.basename(phash_fname), - ) - ) - else: - msg = "DUPLICATE: {} -> {} (ignored)" - print( - msg.format( - os.path.basename(result_fname), - os.path.basename(phash_fname), - ) - ) - os.remove(result_fname) - os.remove(diff_fname) - plt.close() - - def reject(event): - if uri not in repo[key]: - print("REJECTED: {}".format(os.path.basename(result_fname))) - else: - msg = "DUPLICATE: {} -> {} (ignored)" - print( - msg.format( - os.path.basename(result_fname), - os.path.basename(phash_fname), - ) - ) - os.remove(result_fname) - os.remove(diff_fname) - plt.close() - - def skip(event): - # Let's keep both the result and the diff files. 
- print("SKIPPED: {}".format(os.path.basename(result_fname))) - plt.close() - - ax_accept = plt.axes([0.59, 0.05, 0.1, 0.075]) - ax_reject = plt.axes([0.7, 0.05, 0.1, 0.075]) - ax_skip = plt.axes([0.81, 0.05, 0.1, 0.075]) - baccept = mwidget.Button(ax_accept, "Accept") - baccept.on_clicked(accept) - breject = mwidget.Button(ax_reject, "Reject") - breject.on_clicked(reject) - bskip = mwidget.Button(ax_skip, "Skip") - bskip.on_clicked(skip) - plt.text(0.59, 0.15, status, transform=fig.transFigure) - plt.show() - - -def _calculate_hit(uris, phash, action): - # Extract the hex basename strings from the uris. - hexes = [os.path.splitext(os.path.basename(uri))[0] for uri in uris] - # Create the expected perceptual image hashes from the uris. - to_hash = imagehash.hex_to_hash - expected = [to_hash(uri_hex) for uri_hex in hexes] - # Calculate the hamming distance vector for the result hash. - distances = [e - phash for e in expected] - - if action == "first": - index = 0 - elif action == "last": - index = -1 - elif action == "similar": - index = np.argmin(distances) - elif action == "different": - index = np.argmax(distances) - else: - emsg = "Unknown action: {!r}" - raise ValueError(emsg.format(action)) - - return index, distances[index] - - -def step_over_diffs(result_dir, action, display=True): - processed = False - dname = os.path.dirname(iris.tests.__file__) - lock = filelock.FileLock(os.path.join(dname, _POSTFIX_LOCK)) - if action in ["first", "last"]: - kind = action - elif action in ["similar", "different"]: - kind = "most {}".format(action) - else: - emsg = "Unknown action: {!r}" - raise ValueError(emsg.format(action)) - if display: - msg = ( - "\nComparing the {!r} expected image with " - "the test result image." - ) - print(msg.format(kind)) - - # Remove old image diff results. - target = os.path.join(result_dir, "*{}".format(_POSTFIX_DIFF)) - for fname in glob(target): - os.remove(fname) - - with lock.acquire(timeout=30): - # Load the imagerepo. 
- repo_fname = os.path.join(dname, _POSTFIX_JSON) - with open(repo_fname, "rb") as fi: - repo = json.load(codecs.getreader("utf-8")(fi)) - - # Filter out all non-test result image files. - target_glob = os.path.join(result_dir, "result-*.png") - results = [] - for fname in sorted(glob(target_glob)): - # We only care about PNG images. - try: - im = Image.open(fname) - if im.format != "PNG": - # Ignore - it's not a png image. - continue - except IOError: - # Ignore - it's not an image. - continue - results.append(fname) - - count = len(results) - - for count_index, result_fname in enumerate(results): - key = os.path.splitext( - "-".join(result_fname.split("result-")[1:]) - )[0] - try: - # Calculate the test result perceptual image hash. - phash = imagehash.phash( - Image.open(result_fname), hash_size=iris.tests._HASH_SIZE - ) - uris = repo[key] - hash_index, distance = _calculate_hit(uris, phash, action) - uri = uris[hash_index] - except KeyError: - wmsg = "Ignoring unregistered test result {!r}." - warnings.warn(wmsg.format(key)) - continue - with temp_png(key) as expected_fname: - processed = True - resource = requests.get(uri) - if resource.status_code == 200: - with open(expected_fname, "wb") as fo: - fo.write(resource.content) - else: - # Perhaps the uri has not been pushed into the repo yet, - # so check if a local "developer" copy is available ... - local_fname = os.path.join( - result_dir, os.path.basename(uri) - ) - if not os.path.isfile(local_fname): - emsg = "Bad URI {!r} for test {!r}." - raise ValueError(emsg.format(uri, key)) - else: - # The temporary expected filename has the test name - # baked into it, and is used in the diff plot title. - # So copy the local file to the exected file to - # maintain this helpfulness. 
- shutil.copy(local_fname, expected_fname) - try: - mcompare.compare_images( - expected_fname, result_fname, tol=0 - ) - except Exception as e: - if isinstance(e, ValueError) or isinstance( - e, ImageComparisonFailure - ): - print( - "Could not compare {}: {}".format(result_fname, e) - ) - continue - else: - # Propagate the exception, keeping the stack trace - raise - diff_fname = os.path.splitext(result_fname)[0] + _POSTFIX_DIFF - args = expected_fname, result_fname, diff_fname - if display: - msg = "Image {} of {}: hamming distance = {} " "[{!r}]" - status = msg.format(count_index + 1, count, distance, kind) - prefix = repo, key, repo_fname, phash, status - yield prefix + args - else: - yield args - if display and not processed: - print("\nThere are no iris test result images to process.\n") - - -if __name__ == "__main__": - default = os.path.join( - os.path.dirname(iris.tests.__file__), "result_image_comparison" - ) - description = "Iris graphic test difference tool." - formatter_class = argparse.RawTextHelpFormatter - parser = argparse.ArgumentParser( - description=description, formatter_class=formatter_class - ) - help = "path to iris tests result image directory (default: %(default)s)" - parser.add_argument("--resultdir", "-r", default=default, help=help) - help = 'force "iris.tests" to use the tkagg backend (default: %(default)s)' - parser.add_argument("-d", action="store_true", default=True, help=help) - help = """ -first - compare result image with first (oldest) expected image -last - compare result image with last (youngest) expected image -similar - compare result image with most similar expected image (default) -different - compare result image with most unsimilar expected image -""" - choices = ("first", "last", "similar", "different") - parser.add_argument( - "action", nargs="?", choices=choices, default="similar", help=help - ) - args = parser.parse_args() - result_dir = args.resultdir - if not os.path.isdir(result_dir): - emsg = "Invalid results 
directory: {}" - raise ValueError(emsg.format(result_dir)) - for args in step_over_diffs(result_dir, args.action): - diff_viewer(*args) diff --git a/lib/iris/tests/integration/analysis/__init__.py b/lib/iris/tests/integration/analysis/__init__.py deleted file mode 100644 index 20b6250b70..0000000000 --- a/lib/iris/tests/integration/analysis/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for the :mod:`iris.analysis` package.""" diff --git a/lib/iris/tests/integration/analysis/test_area_weighted.py b/lib/iris/tests/integration/analysis/test_area_weighted.py deleted file mode 100644 index d01da79a56..0000000000 --- a/lib/iris/tests/integration/analysis/test_area_weighted.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for area weighted regridding.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import iris -from iris.analysis import AreaWeighted - - -@tests.skip_data -class AreaWeightedTests(tests.IrisTest): - def setUp(self): - # Prepare a cube and a template - - cube_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_xyt.nc"] - ) - self.cube = iris.load_cube(cube_file_path) - - template_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_template_global_latlon.nc"] - ) - self.template_cube = iris.load_cube(template_file_path) - - def test_regrid_area_w_lazy(self): - # Regrid the cube onto the template. 
- out = self.cube.regrid(self.template_cube, AreaWeighted()) - # Check data is still lazy - self.assertTrue(self.cube.has_lazy_data()) - self.assertTrue(out.has_lazy_data()) - # Save the data - with self.temp_filename(suffix=".nc") as fname: - iris.save(out, fname) - - def test_regrid_area_w_lazy_chunked(self): - # Chunked data makes the regridder run repeatedly - self.cube.data = self.cube.lazy_data().rechunk((1, -1, -1)) - # Regrid the cube onto the template. - out = self.cube.regrid(self.template_cube, AreaWeighted()) - # Check data is still lazy - self.assertTrue(self.cube.has_lazy_data()) - self.assertTrue(out.has_lazy_data()) - # Save the data - with self.temp_filename(suffix=".nc") as fname: - iris.save(out, fname) - - def test_regrid_area_w_real_save(self): - real_cube = self.cube.copy() - real_cube.data - # Regrid the cube onto the template. - out = real_cube.regrid(self.template_cube, AreaWeighted()) - # Realise the data - out.data - # Save the data - with self.temp_filename(suffix=".nc") as fname: - iris.save(out, fname) - - def test_regrid_area_w_real_start(self): - real_cube = self.cube.copy() - real_cube.data - # Regrid the cube onto the template. - out = real_cube.regrid(self.template_cube, AreaWeighted()) - # Save the data - with self.temp_filename(suffix=".nc") as fname: - iris.save(out, fname) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/aux_factory/__init__.py b/lib/iris/tests/integration/aux_factory/__init__.py deleted file mode 100644 index 58ba6fb82b..0000000000 --- a/lib/iris/tests/integration/aux_factory/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Integration tests for the :mod:`iris.aux_factory` package.""" diff --git a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py deleted file mode 100644 index 4b2464b272..0000000000 --- a/lib/iris/tests/integration/aux_factory/test_OceanSigmaZFactory.py +++ /dev/null @@ -1,196 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Integratation tests for the -`iris.aux_factory.OceanSigmaZFactory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import itertools - -import numpy as np - -from iris._lazy_data import as_lazy_data -from iris.tests.stock import ocean_sigma_z as stock_sample_osz -import iris.util - - -class Test_sample(tests.IrisTest): - def setUp(self): - self.cube = stock_sample_osz() - # Snapshot result, printed with ... 
- # >>> np.set_printoptions(linewidth=180, - # formatter={'float':lambda x:'{:-09.3f}'.format(x)}) - # >>> print(repr(coord.points)) - self.basic_derived_result = np.array( - [ - [ - [ - [-0000.632, -0000.526, -0000.421, -0000.316], - [-0000.789, -0000.684, -0000.579, -0000.474], - [-0000.947, -0000.842, -0000.737, -0000.632], - ], - [ - [-0014.358, -0014.264, -0014.169, -0014.074], - [-0014.501, -0014.406, -0014.311, -0014.216], - [-0014.643, -0014.548, -0014.453, -0014.358], - ], - [ - [-0082.993, -0082.951, -0082.908, -0082.866], - [-0083.056, -0083.014, -0082.972, -0082.929], - [-0083.119, -0083.077, -0083.035, -0082.993], - ], - [ - [-0368.400, -0368.400, -0368.400, -0368.400], - [-0368.400, -0368.400, -0368.400, -0368.400], - [-0368.400, -0368.400, -0368.400, -0368.400], - ], - [ - [-1495.600, -1495.600, -1495.600, -1495.600], - [-1495.600, -1495.600, -1495.600, -1495.600], - [-1495.600, -1495.600, -1495.600, -1495.600], - ], - ], - [ - [ - [-0000.842, -0000.737, -0000.632, -0000.526], - [-0001.000, -0000.895, -0000.789, -0000.684], - [-0001.158, -0001.053, -0000.947, -0000.842], - ], - [ - [-0014.548, -0014.453, -0014.358, -0014.264], - [-0014.690, -0014.595, -0014.501, -0014.406], - [-0014.832, -0014.737, -0014.643, -0014.548], - ], - [ - [-0083.077, -0083.035, -0082.993, -0082.951], - [-0083.140, -0083.098, -0083.056, -0083.014], - [-0083.203, -0083.161, -0083.119, -0083.077], - ], - [ - [-0368.400, -0368.400, -0368.400, -0368.400], - [-0368.400, -0368.400, -0368.400, -0368.400], - [-0368.400, -0368.400, -0368.400, -0368.400], - ], - [ - [-1495.600, -1495.600, -1495.600, -1495.600], - [-1495.600, -1495.600, -1495.600, -1495.600], - [-1495.600, -1495.600, -1495.600, -1495.600], - ], - ], - ] - ) - - self.derived_coord_name = ( - "sea_surface_height_above_reference_ellipsoid" - ) - - def _check_result(self, cube, expected_result=None, **kwargs): - if expected_result is None: - expected_result = self.basic_derived_result - coord = 
cube.coord(self.derived_coord_name) - result = coord.points - self.assertArrayAllClose(result, expected_result, atol=0.005, **kwargs) - - def test_basic(self): - self._check_result(self.cube) - - def _lazy_testcube(self): - cube = self.cube - for dep_name in ("depth", "layer_depth", "ocean_sigma_z_coordinate"): - coord = cube.coord(dep_name) - coord.points = as_lazy_data(coord.points, coord.shape) - return cube - - def test_nonlazy_cube_has_lazy_derived(self): - # Check same results when key coords are made lazy. - cube = self.cube - self.assertEqual(cube.coord("depth").has_lazy_points(), False) - self.assertEqual( - cube.coord(self.derived_coord_name).has_lazy_points(), True - ) - - def test_lazy_cube_same_result(self): - cube = self._lazy_testcube() - self.assertEqual(cube.coord("depth").has_lazy_points(), True) - self.assertEqual( - cube.coord(self.derived_coord_name).has_lazy_points(), True - ) - self._check_result(cube) - - def test_transpose(self): - # Check it works with all possible dimension orders. - for dims_list in itertools.permutations(range(self.cube.ndim)): - cube = self.cube.copy() - cube.transpose(dims_list) - expected = self.basic_derived_result.transpose(dims_list) - msg = "Unexpected result when cube transposed by {}" - msg = msg.format(dims_list) - self._check_result(cube, expected, err_msg=msg) - - def test_lazy_transpose(self): - # Check lazy calc works with all possible dimension orders. - for dims_list in itertools.permutations(range(self.cube.ndim)): - cube = self._lazy_testcube().copy() - cube.transpose(dims_list) - expected = self.basic_derived_result.transpose(dims_list) - msg = "Unexpected result when cube transposed by {}" - msg = msg.format(dims_list) - self._check_result(cube, expected, err_msg=msg) - - def test_extra_dims(self): - # Insert some extra cube dimensions + check it still works. - cube = self.cube - cube = iris.util.new_axis(cube) - cube = iris.util.new_axis(cube) - cube = iris.util.new_axis(cube) - # N.B. 
shape is now (1, 1, 1, t, z, y, x) - cube.transpose((0, 3, 1, 4, 5, 2, 6)) - # N.B. shape is now (1, t, 1, z, y, 1, x) - # Should get same original result, as derived dims are the same. - self._check_result(cube) - - def test_no_sigma(self): - # Check it still works when 'sigma' is removed. - # NOTE: the unit test for this does not cover all cases because it - # doesn't provide a time dimension. - - # Set all sigma points to zero + snapshot the resulting derived points. - trial_cube = self.cube.copy() - trial_cube.coord("ocean_sigma_z_coordinate").points[:] = 0.0 - expected = trial_cube.coord(self.derived_coord_name).points - - # Remove sigma altogether + check the result is the same. - cube = self.cube - cube.remove_coord("ocean_sigma_z_coordinate") - self._check_result(cube, expected) - - def test_no_eta(self): - # Check it still works when 'eta' is removed. - # NOTE: the unit test for this does not cover all cases because it - # doesn't provide a time dimension. - - # Set all sigma points to zero + snapshot the resulting derived points. - trial_cube = self.cube.copy() - trial_cube.coord("sea_surface_height").points[:] = 0.0 - expected = trial_cube.coord(self.derived_coord_name).points - # Check this has no variation between the two timepoints. - self.assertArrayAllClose(expected[0], expected[1]) - # Take first time, as no sigma --> result *has* no time dimension. - expected = expected[0] - - # Remove eta altogether + check the result is the same. - cube = self.cube - cube.remove_coord("sea_surface_height") - self._check_result(cube, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/concatenate/__init__.py b/lib/iris/tests/integration/concatenate/__init__.py deleted file mode 100644 index fb136098ee..0000000000 --- a/lib/iris/tests/integration/concatenate/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for the :mod:`iris._concatenate` package.""" diff --git a/lib/iris/tests/integration/concatenate/test_concatenate.py b/lib/iris/tests/integration/concatenate/test_concatenate.py deleted file mode 100644 index 4e3f453e0a..0000000000 --- a/lib/iris/tests/integration/concatenate/test_concatenate.py +++ /dev/null @@ -1,327 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Integration tests for concatenating cubes with differing time coord epochs -using :func:`iris.util.unify_time_units`. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -import cf_units -import numpy as np - -from iris._concatenate import concatenate -import iris.coords -import iris.cube -import iris.tests.stock as stock -from iris.util import unify_time_units - - -class Test_concatenate__epoch(tests.IrisTest): - def simple_1d_time_cubes(self, reftimes, coords_points): - cubes = [] - data_points = [273, 275, 278, 277, 274] - for reftime, coord_points in zip(reftimes, coords_points): - cube = iris.cube.Cube( - np.array(data_points, dtype=np.float32), - standard_name="air_temperature", - units="K", - ) - unit = cf_units.Unit(reftime, calendar="gregorian") - coord = iris.coords.DimCoord( - points=np.array(coord_points, dtype=np.float32), - standard_name="time", - units=unit, - ) - cube.add_dim_coord(coord, 0) - cubes.append(cube) - return cubes - - def test_concat_1d_with_differing_time_units(self): - reftimes = [ - "hours since 1970-01-01 00:00:00", - "hours since 1970-01-02 00:00:00", - ] - coords_points = [[1, 2, 3, 4, 5], [1, 2, 3, 4, 5]] - cubes = self.simple_1d_time_cubes(reftimes, coords_points) - unify_time_units(cubes) - 
result = concatenate(cubes) - self.assertEqual(len(result), 1) - self.assertEqual(result[0].shape, (10,)) - - -class Test_cubes_with_aux_coord(tests.IrisTest): - def create_cube(self): - data = np.arange(4).reshape(2, 2) - - lat = iris.coords.DimCoord( - [0, 30], standard_name="latitude", units="degrees" - ) - lon = iris.coords.DimCoord( - [0, 15], standard_name="longitude", units="degrees" - ) - height = iris.coords.AuxCoord([1.5], standard_name="height", units="m") - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" - ) - time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) - - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - cube.add_dim_coord(time, 0) - cube.add_dim_coord(lat, 1) - cube.add_aux_coord(lon, 1) - cube.add_aux_coord(height) - return cube - - def test_diff_aux_coord(self): - cube_a = self.create_cube() - cube_b = cube_a.copy() - cube_b.coord("time").points = [12, 18] - cube_b.coord("longitude").points = [120, 150] - - result = concatenate([cube_a, cube_b]) - self.assertEqual(len(result), 2) - - def test_ignore_diff_aux_coord(self): - cube_a = self.create_cube() - cube_b = cube_a.copy() - cube_b.coord("time").points = [12, 18] - cube_b.coord("longitude").points = [120, 150] - - result = concatenate([cube_a, cube_b], check_aux_coords=False) - self.assertEqual(len(result), 1) - self.assertEqual(result[0].shape, (4, 2)) - - -class Test_cubes_with_cell_measure(tests.IrisTest): - def create_cube(self): - data = np.arange(4).reshape(2, 2) - - lat = iris.coords.DimCoord( - [0, 30], standard_name="latitude", units="degrees" - ) - volume = iris.coords.CellMeasure( - [0, 15], measure="volume", long_name="volume" - ) - area = iris.coords.CellMeasure( - [1.5], standard_name="height", units="m" - ) - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" - ) - time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) - - cube = iris.cube.Cube(data, 
standard_name="air_temperature", units="K") - cube.add_dim_coord(time, 0) - cube.add_dim_coord(lat, 1) - cube.add_cell_measure(volume, 1) - cube.add_cell_measure(area) - return cube - - def test_diff_cell_measure(self): - cube_a = self.create_cube() - cube_b = cube_a.copy() - cube_b.coord("time").points = [12, 18] - cube_b.cell_measure("volume").data = [120, 150] - - result = concatenate([cube_a, cube_b]) - self.assertEqual(len(result), 2) - - def test_ignore_diff_cell_measure(self): - cube_a = self.create_cube() - cube_b = cube_a.copy() - cube_b.coord("time").points = [12, 18] - cube_b.cell_measure("volume").data = [120, 150] - - result = concatenate([cube_a, cube_b], check_cell_measures=False) - self.assertEqual(len(result), 1) - self.assertEqual(result[0].shape, (4, 2)) - - -class Test_cubes_with_ancillary_variables(tests.IrisTest): - def create_cube(self): - data = np.arange(4).reshape(2, 2) - - lat = iris.coords.DimCoord( - [0, 30], standard_name="latitude", units="degrees" - ) - quality = iris.coords.AncillaryVariable([0, 15], long_name="quality") - height = iris.coords.AncillaryVariable( - [1.5], standard_name="height", units="m" - ) - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" - ) - time = iris.coords.DimCoord([0, 6], standard_name="time", units=t_unit) - - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - cube.add_dim_coord(time, 0) - cube.add_dim_coord(lat, 1) - cube.add_ancillary_variable(quality, 1) - cube.add_ancillary_variable(height) - return cube - - def test_diff_ancillary_variables(self): - cube_a = self.create_cube() - cube_b = cube_a.copy() - cube_b.coord("time").points = [12, 18] - cube_b.ancillary_variable("quality").data = [120, 150] - - result = concatenate([cube_a, cube_b]) - self.assertEqual(len(result), 2) - - def test_ignore_diff_ancillary_variables(self): - cube_a = self.create_cube() - cube_b = cube_a.copy() - cube_b.coord("time").points = [12, 18] - 
cube_b.ancillary_variable("quality").data = [120, 150] - - result = concatenate([cube_a, cube_b], check_ancils=False) - self.assertEqual(len(result), 1) - self.assertEqual(result[0].shape, (4, 2)) - - -class Test_anonymous_dims(tests.IrisTest): - def setUp(self): - data = np.arange(12).reshape(2, 3, 2) - self.cube = iris.cube.Cube( - data, standard_name="air_temperature", units="K" - ) - - # Time coord - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" - ) - t_coord = iris.coords.DimCoord( - [0, 6], standard_name="time", units=t_unit - ) - self.cube.add_dim_coord(t_coord, 0) - - # Lats and lons - self.x_coord = iris.coords.DimCoord( - [15, 30], standard_name="longitude", units="degrees" - ) - self.y_coord = iris.coords.DimCoord( - [0, 30, 60], standard_name="latitude", units="degrees" - ) - self.x_coord_2D = iris.coords.AuxCoord( - [[0, 15], [30, 45], [60, 75]], - standard_name="longitude", - units="degrees", - ) - self.y_coord_non_monotonic = iris.coords.AuxCoord( - [0, 30, 15], standard_name="latitude", units="degrees" - ) - - def test_matching_2d_longitudes(self): - cube1 = self.cube - cube1.add_dim_coord(self.y_coord, 1) - cube1.add_aux_coord(self.x_coord_2D, (1, 2)) - - cube2 = cube1.copy() - cube2.coord("time").points = [12, 18] - result = concatenate([cube1, cube2]) - self.assertEqual(len(result), 1) - - def test_differing_2d_longitudes(self): - cube1 = self.cube - cube1.add_aux_coord(self.y_coord, 1) - cube1.add_aux_coord(self.x_coord_2D, (1, 2)) - - cube2 = cube1.copy() - cube2.coord("time").points = [12, 18] - cube2.coord("longitude").points = [[-30, -15], [0, 15], [30, 45]] - - result = concatenate([cube1, cube2]) - self.assertEqual(len(result), 2) - - def test_matching_non_monotonic_latitudes(self): - cube1 = self.cube - cube1.add_aux_coord(self.y_coord_non_monotonic, 1) - cube1.add_aux_coord(self.x_coord, 2) - - cube2 = cube1.copy() - cube2.coord("time").points = [12, 18] - - result = concatenate([cube1, cube2]) - 
self.assertEqual(len(result), 1) - - def test_differing_non_monotonic_latitudes(self): - cube1 = self.cube - cube1.add_aux_coord(self.y_coord_non_monotonic, 1) - cube1.add_aux_coord(self.x_coord, 2) - - cube2 = cube1.copy() - cube2.coord("time").points = [12, 18] - cube2.coord("latitude").points = [30, 0, 15] - - result = concatenate([cube1, cube2]) - self.assertEqual(len(result), 2) - - def test_concatenate_along_anon_dim(self): - cube1 = self.cube - cube1.add_aux_coord(self.y_coord_non_monotonic, 1) - cube1.add_aux_coord(self.x_coord, 2) - - cube2 = cube1.copy() - cube2.coord("latitude").points = [30, 0, 15] - - result = concatenate([cube1, cube2]) - self.assertEqual(len(result), 2) - - -class Test_anonymous_dims_alternate_mapping(tests.IrisTest): - # Ensure that anonymous concatenation is not sensitive to dimension mapping - # of the anonymous dimension. - def setUp(self): - self.cube = stock.simple_3d() - coord = self.cube.coord("wibble") - self.cube.remove_coord(coord) - self.cube.add_aux_coord(coord, 0) - - def test_concatenate_anom_1st_dim(self): - # Check that concatenation along a non anonymous dimension is - # insensitive to the dimension which is anonymous. - # Concatenate along longitude. - # DIM: cube(--, lat, lon) & cube(--, lat, lon') - # AUX: cube(wibble, --, --) & cube(wibble, --, --) - cube1 = self.cube[..., :2] - cube2 = self.cube[..., 2:] - result = concatenate([cube1, cube2]) - self.assertEqual(len(result), 1) - - def test_concatenate_anom_2nd_dim(self): - # Check that concatenation along a non anonymous dimension is - # insensitive to the dimension which is anonymous. - # Concatenate along longitude. 
- # DIM: cube(lon, --, lat) & cube(lon', ---, lat) - # AUX: cube(--, wibble, --) & cube(--, wibble, --) - cube1 = self.cube[..., :2] - cube2 = self.cube[..., 2:] - cube1.transpose((2, 0, 1)) - cube2.transpose((2, 0, 1)) - result = concatenate([cube1, cube2]) - self.assertEqual(len(result), 1) - - def test_concatenate_anom_3rd_dim(self): - # Check that concatenation along a non anonymous dimension is - # insensitive to the dimension which is anonymous. - # Concatenate along longitude. - # DIM: cube(lat, lon, --) & cube(lat, lon', --) - # AUX: cube(--, --, wibble) & cube(--, --, wibble) - cube1 = self.cube[..., :2] - cube2 = self.cube[..., 2:] - cube1.transpose((1, 2, 0)) - cube2.transpose((1, 2, 0)) - result = concatenate([cube1, cube2]) - self.assertEqual(len(result), 1) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/experimental/__init__.py b/lib/iris/tests/integration/experimental/__init__.py deleted file mode 100644 index 269cf3dd9a..0000000000 --- a/lib/iris/tests/integration/experimental/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for the :mod:`iris.experimental` package.""" diff --git a/lib/iris/tests/integration/experimental/test_CubeRepresentation.py b/lib/iris/tests/integration/experimental/test_CubeRepresentation.py deleted file mode 100644 index 48a3e51b52..0000000000 --- a/lib/iris/tests/integration/experimental/test_CubeRepresentation.py +++ /dev/null @@ -1,164 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Integration tests for cube html representation.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from html import escape - -import numpy as np - -from iris.cube import Cube -from iris.experimental.representation import CubeRepresentation -import iris.tests.stock as stock - - -@tests.skip_data -class TestNoMetadata(tests.IrisTest): - # Test the situation where we have a cube with no metadata at all. - def setUp(self): - self.shape = (2, 3, 4) - self.cube = Cube(np.arange(24).reshape(self.shape)) - self.representer = CubeRepresentation(self.cube) - self.representer.repr_html() - - def test_cube_name(self): - expected = "Unknown" # This cube has no metadata. - result = self.representer.name - self.assertEqual(expected, result) - - def test_cube_units(self): - expected = "unknown" # This cube has no metadata. - result = self.representer.units - self.assertEqual(expected, result) - - def test_dim_names(self): - expected = ["--"] * len(self.shape) - result = self.representer.names - self.assertEqual(expected, result) - - def test_shape(self): - result = self.representer.shapes - self.assertEqual(result, self.shape) - - -@tests.skip_data -class TestMissingMetadata(tests.IrisTest): - def setUp(self): - self.cube = stock.realistic_3d() - - def test_no_coords(self): - all_coords = [coord.name() for coord in self.cube.coords()] - for coord in all_coords: - self.cube.remove_coord(coord) - representer = CubeRepresentation(self.cube) - result = representer.repr_html().lower() - self.assertNotIn("dimension coordinates", result) - self.assertNotIn("auxiliary coordinates", result) - self.assertNotIn("scalar coordinates", result) - self.assertIn("attributes", result) - - def test_no_dim_coords(self): - dim_coords = [c.name() for c in self.cube.coords(dim_coords=True)] - for coord in dim_coords: - self.cube.remove_coord(coord) - representer = CubeRepresentation(self.cube) - result 
= representer.repr_html().lower() - self.assertNotIn("dimension coordinates", result) - self.assertIn("auxiliary coordinates", result) - self.assertIn("scalar coordinates", result) - self.assertIn("attributes", result) - - def test_no_aux_coords(self): - aux_coords = ["forecast_period"] - for coord in aux_coords: - self.cube.remove_coord(coord) - representer = CubeRepresentation(self.cube) - result = representer.repr_html().lower() - self.assertIn("dimension coordinates", result) - self.assertNotIn("auxiliary coordinates", result) - self.assertIn("scalar coordinates", result) - self.assertIn("attributes", result) - - def test_no_scalar_coords(self): - aux_coords = ["air_pressure"] - for coord in aux_coords: - self.cube.remove_coord(coord) - representer = CubeRepresentation(self.cube) - result = representer.repr_html().lower() - self.assertIn("dimension coordinates", result) - self.assertIn("auxiliary coordinates", result) - self.assertNotIn("scalar coordinates", result) - self.assertIn("attributes", result) - - def test_no_attrs(self): - self.cube.attributes = {} - representer = CubeRepresentation(self.cube) - result = representer.repr_html().lower() - self.assertIn("dimension coordinates", result) - self.assertIn("auxiliary coordinates", result) - self.assertIn("scalar coordinates", result) - self.assertNotIn("attributes", result) - - def test_no_cell_methods(self): - representer = CubeRepresentation(self.cube) - result = representer.repr_html().lower() - self.assertNotIn("cell methods", result) - - -@tests.skip_data -class TestScalarCube(tests.IrisTest): - def setUp(self): - self.cube = stock.realistic_3d()[0, 0, 0] - self.representer = CubeRepresentation(self.cube) - self.representer.repr_html() - - def test_identfication(self): - # Is this scalar cube accurately identified? 
- self.assertTrue(self.representer.scalar_cube) - - def test_header__name(self): - header = self.representer._make_header() - expected_name = escape(self.cube.name().title().replace("_", " ")) - self.assertIn(expected_name, header) - - def test_header__units(self): - header = self.representer._make_header() - expected_units = escape(self.cube.units.symbol) - self.assertIn(expected_units, header) - - def test_header__scalar_str(self): - # Check that 'scalar cube' is placed in the header. - header = self.representer._make_header() - expected_str = "(scalar cube)" - self.assertIn(expected_str, header) - - def test_content__scalars(self): - # Check an element "Scalar coordinates" is present in the main content. - content = self.representer._make_content() - expected_str = "Scalar coordinates" - self.assertIn(expected_str, content) - - def test_content__specific_scalar_coord(self): - # Check a specific scalar coord is present in the main content. - content = self.representer._make_content() - expected_coord = self.cube.coords()[0] - expected_coord_name = escape(expected_coord.name()) - self.assertIn(expected_coord_name, content) - expected_coord_val = escape(str(expected_coord.points[0])) - self.assertIn(expected_coord_val, content) - - def test_content__attributes(self): - # Check an element "attributes" is present in the main content. - content = self.representer._make_content() - expected_str = "Attributes" - self.assertIn(expected_str, content) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py b/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py deleted file mode 100644 index 1ace02ea8a..0000000000 --- a/lib/iris/tests/integration/experimental/test_regrid_ProjectedUnstructured.py +++ /dev/null @@ -1,158 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for experimental regridding.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs -from cf_units import Unit -import numpy as np - -import iris -import iris.aux_factory -from iris.coord_systems import GeogCS -from iris.experimental.regrid import ( - ProjectedUnstructuredLinear, - ProjectedUnstructuredNearest, -) -from iris.tests.stock import global_pp - - -@tests.skip_data -class TestProjectedUnstructured(tests.IrisTest): - def setUp(self): - path = tests.get_data_path( - ("NetCDF", "unstructured_grid", "theta_nodal_xios.nc") - ) - self.src = iris.load_cube(path, "Potential Temperature") - - src_lat = self.src.coord("latitude") - src_lon = self.src.coord("longitude") - src_lat.coord_system = src_lon.coord_system = GeogCS(6370000) - src_lat.convert_units(Unit("degrees")) - src_lon.convert_units(Unit("degrees")) - - self.global_grid = global_pp() - - def test_nearest(self): - res = self.src.regrid(self.global_grid, ProjectedUnstructuredNearest()) - self.assertArrayShapeStats( - res, (1, 6, 73, 96), 315.8913582, 11.00063922733, rtol=1e-8 - ) - self.assertArrayShapeStats( - res[:, 0], (1, 73, 96), 299.99993826, 3.9226378869e-5 - ) - - def test_nearest_sinusoidal(self): - crs = ccrs.Sinusoidal() - res = self.src.regrid( - self.global_grid, ProjectedUnstructuredNearest(crs) - ) - self.assertArrayShapeStats( - res, (1, 6, 73, 96), 315.891358296, 11.000639227, rtol=1e-8 - ) - self.assertArrayShapeStats( - res[:, 0], (1, 73, 96), 299.99993826, 3.9223839688e-5 - ) - - def test_nearest_gnomonic_uk_domain(self): - crs = ccrs.Gnomonic(central_latitude=60.0) - uk_grid = self.global_grid.intersection( - longitude=(-20, 20), latitude=(40, 80) - ) - res = self.src.regrid(uk_grid, ProjectedUnstructuredNearest(crs)) - - self.assertArrayShapeStats( 
- res, - (1, 6, 17, 11), - 315.8854720963427, - 11.000539210625737, - rtol=1e-8, - ) - self.assertArrayShapeStats( - res[:, 0], - (1, 17, 11), - 299.9999985207442, - 3.53574517015874e-05, - ) - expected_subset = np.array( - [ - [318.92881733, 318.92881733, 318.92881733], - [318.92881733, 318.92881733, 318.92881733], - [318.92881733, 318.92881733, 318.92881733], - ] - ) - self.assertArrayAlmostEqual( - expected_subset, res.data[0, 3, 5:8, 4:7].data - ) - - def test_nearest_aux_factories(self): - src = self.src - - (xy_dim_len,) = src.coord(axis="X").shape - (z_dim_len,) = src.coord("levels").shape - - src.add_aux_coord( - iris.coords.AuxCoord( - np.arange(z_dim_len) + 40, long_name="level_height", units="m" - ), - 1, - ) - src.add_aux_coord( - iris.coords.AuxCoord( - np.arange(z_dim_len) + 50, long_name="sigma", units="1" - ), - 1, - ) - src.add_aux_coord( - iris.coords.AuxCoord( - np.arange(xy_dim_len) + 100, - long_name="surface_altitude", - units="m", - ), - 2, - ) - src.add_aux_factory( - iris.aux_factory.HybridHeightFactory( - delta=src.coord("level_height"), - sigma=src.coord("sigma"), - orography=src.coord("surface_altitude"), - ) - ) - res = src.regrid(self.global_grid, ProjectedUnstructuredNearest()) - - self.assertArrayShapeStats( - res, (1, 6, 73, 96), 315.8913582, 11.000639227334, rtol=1e-8 - ) - self.assertArrayShapeStats( - res[:, 0], (1, 73, 96), 299.99993826, 3.9226378869e-5 - ) - self.assertEqual(res.coord("altitude").shape, (6, 73, 96)) - - def test_linear_sinusoidal(self): - res = self.src.regrid(self.global_grid, ProjectedUnstructuredLinear()) - self.assertArrayShapeStats( - res, (1, 6, 73, 96), 315.8914839, 11.0006338412, rtol=1e-8 - ) - self.assertArrayShapeStats( - res[:, 0], (1, 73, 96), 299.99993826, 3.775024069e-5 - ) - expected_subset = np.array( - [ - [299.999987, 299.999996, 299.999999], - [299.999984, 299.999986, 299.999988], - [299.999973, 299.999977, 299.999982], - ] - ) - self.assertArrayAlmostEqual( - expected_subset, res.data[0, 0, 
20:23, 40:43].data - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/experimental/test_ugrid_load.py b/lib/iris/tests/integration/experimental/test_ugrid_load.py deleted file mode 100644 index af97458ded..0000000000 --- a/lib/iris/tests/integration/experimental/test_ugrid_load.py +++ /dev/null @@ -1,247 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Integration tests for NetCDF-UGRID file loading. - -todo: fold these tests into netcdf tests when experimental.ugrid is folded into - standard behaviour. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from collections.abc import Iterable - -from iris import Constraint, load -from iris.experimental.ugrid import logger -from iris.experimental.ugrid.load import ( - PARSE_UGRID_ON_LOAD, - load_mesh, - load_meshes, -) -from iris.experimental.ugrid.mesh import Mesh -from iris.tests.stock.netcdf import ( - _file_from_cdl_template as create_file_from_cdl_template, -) -from iris.tests.unit.tests.stock.test_netcdf import XIOSFileMixin - - -def ugrid_load(uris, constraints=None, callback=None): - # TODO: remove constraint once files no longer have orphan connectivities. 
- orphan_connectivities = ( - "Mesh2d_half_levels_edge_face_links", - "Mesh2d_half_levels_face_links", - "Mesh2d_half_levels_face_edges", - "Mesh2d_full_levels_edge_face_links", - "Mesh2d_full_levels_face_links", - "Mesh2d_full_levels_face_edges", - ) - filter_orphan_connectivities = Constraint( - cube_func=lambda cube: cube.var_name not in orphan_connectivities - ) - if constraints is None: - constraints = filter_orphan_connectivities - else: - if not isinstance(constraints, Iterable): - constraints = [constraints] - constraints.append(filter_orphan_connectivities) - - with PARSE_UGRID_ON_LOAD.context(): - return load(uris, constraints, callback) - - -@tests.skip_data -class TestBasic(tests.IrisTest): - def common_test(self, load_filename, assert_filename): - cube_list = ugrid_load( - tests.get_data_path( - ["NetCDF", "unstructured_grid", load_filename] - ), - ) - self.assertEqual(1, len(cube_list)) - cube = cube_list[0] - self.assertCML(cube, ["experimental", "ugrid", assert_filename]) - - def test_2D_1t_face_half_levels(self): - self.common_test( - "lfric_ngvat_2D_1t_face_half_levels_main_conv_rain.nc", - "2D_1t_face_half_levels.cml", - ) - - def test_3D_1t_face_half_levels(self): - self.common_test( - "lfric_ngvat_3D_1t_half_level_face_grid_derived_theta_in_w3.nc", - "3D_1t_face_half_levels.cml", - ) - - def test_3D_1t_face_full_levels(self): - self.common_test( - "lfric_ngvat_3D_1t_full_level_face_grid_main_area_fraction_unit1.nc", - "3D_1t_face_full_levels.cml", - ) - - def test_2D_72t_face_half_levels(self): - self.common_test( - "lfric_ngvat_2D_72t_face_half_levels_main_conv_rain.nc", - "2D_72t_face_half_levels.cml", - ) - - def test_3D_snow_pseudo_levels(self): - self.common_test( - "lfric_ngvat_3D_snow_pseudo_levels_1t_face_half_levels_main_snow_layer_temp.nc", - "3D_snow_pseudo_levels.cml", - ) - - def test_3D_soil_pseudo_levels(self): - self.common_test( - "lfric_ngvat_3D_soil_pseudo_levels_1t_face_half_levels_main_soil_temperature.nc", - 
"3D_soil_pseudo_levels.cml", - ) - - def test_3D_tile_pseudo_levels(self): - self.common_test( - "lfric_ngvat_3D_tile_pseudo_levels_1t_face_half_levels_main_sw_up_tile.nc", - "3D_tile_pseudo_levels.cml", - ) - - def test_3D_veg_pseudo_levels(self): - self.common_test( - "lfric_ngvat_3D_veg_pseudo_levels_1t_face_half_levels_main_snowpack_density.nc", - "3D_veg_pseudo_levels.cml", - ) - - def test_no_mesh(self): - with PARSE_UGRID_ON_LOAD.context(): - cube_list = load( - tests.get_data_path( - ["NetCDF", "unstructured_grid", "theta_nodal_not_ugrid.nc"] - ) - ) - self.assertTrue(all([cube.mesh is None for cube in cube_list])) - - -@tests.skip_data -class TestMultiplePhenomena(tests.IrisTest): - def test_multiple_phenomena(self): - cube_list = ugrid_load( - tests.get_data_path( - ["NetCDF", "unstructured_grid", "lfric_surface_mean.nc"] - ), - ) - self.assertCML( - cube_list, ("experimental", "ugrid", "surface_mean.cml") - ) - - -class TestTolerantLoading(XIOSFileMixin): - # N.B. using parts of the XIOS-like file integration testing, to make - # temporary netcdf files from stored CDL templates. - @classmethod - def setUpClass(cls): - super().setUpClass() # create cls.temp_dir = dir for test files - - @classmethod - def tearDownClass(cls): - super().setUpClass() # destroy temp dir - - # Create a testfile according to testcase-specific arguments. - # NOTE: with this, parent "create_synthetic_test_cube" can load a cube. - def create_synthetic_file(self, **create_kwargs): - template_name = create_kwargs["template"] # required kwarg - testfile_name = "tmp_netcdf" - template_subs = dict( - NUM_NODES=7, NUM_FACES=3, DATASET_NAME=testfile_name - ) - kwarg_subs = create_kwargs.get("subs", {}) # optional kwarg - template_subs.update(kwarg_subs) - filepath = create_file_from_cdl_template( - temp_file_dir=self.temp_dir, - dataset_name=testfile_name, - dataset_type=template_name, - template_subs=template_subs, - ) - return str(filepath) # N.B. 
Path object not usable in iris.load - - def test_mesh_bad_topology_dimension(self): - # Check that the load generates a suitable warning. - log_regex = r"topology_dimension.* ignoring" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): - template = "minimal_bad_topology_dim" - dim_line = "mesh_var:topology_dimension = 1 ;" # which is wrong ! - cube = self.create_synthetic_test_cube( - template=template, subs=dict(TOPOLOGY_DIM_DEFINITION=dim_line) - ) - - # Check that the result has topology-dimension of 2 (not 1). - self.assertEqual(cube.mesh.topology_dimension, 2) - - def test_mesh_no_topology_dimension(self): - # Check that the load generates a suitable warning. - log_regex = r"Mesh variable.* has no 'topology_dimension'" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): - template = "minimal_bad_topology_dim" - dim_line = "" # don't create ANY topology_dimension property - cube = self.create_synthetic_test_cube( - template=template, subs=dict(TOPOLOGY_DIM_DEFINITION=dim_line) - ) - - # Check that the result has the correct topology-dimension value. - self.assertEqual(cube.mesh.topology_dimension, 2) - - def test_mesh_bad_cf_role(self): - # Check that the load generates a suitable warning. - log_regex = r"inappropriate cf_role" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): - template = "minimal_bad_mesh_cf_role" - dim_line = 'mesh_var:cf_role = "foo" ;' - _ = self.create_synthetic_test_cube( - template=template, subs=dict(CF_ROLE_DEFINITION=dim_line) - ) - - def test_mesh_no_cf_role(self): - # Check that the load generates a suitable warning. 
- log_regex = r"no cf_role attribute" - with self.assertLogs(logger, level="WARNING", msg_regex=log_regex): - template = "minimal_bad_mesh_cf_role" - dim_line = "" - _ = self.create_synthetic_test_cube( - template=template, subs=dict(CF_ROLE_DEFINITION=dim_line) - ) - - -@tests.skip_data -class Test_load_mesh(tests.IrisTest): - def common_test(self, file_name, mesh_var_name): - with PARSE_UGRID_ON_LOAD.context(): - mesh = load_mesh( - tests.get_data_path(["NetCDF", "unstructured_grid", file_name]) - ) - # NOTE: cannot use CML tests as this isn't supported for non-Cubes. - self.assertIsInstance(mesh, Mesh) - self.assertEqual(mesh.var_name, mesh_var_name) - - def test_full_file(self): - self.common_test( - "lfric_ngvat_2D_1t_face_half_levels_main_conv_rain.nc", - "Mesh2d_half_levels", - ) - - def test_mesh_file(self): - self.common_test("mesh_C12.nc", "dynamics") - - def test_no_mesh(self): - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes( - tests.get_data_path( - ["NetCDF", "unstructured_grid", "theta_nodal_not_ugrid.nc"] - ) - ) - self.assertDictEqual({}, meshes) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/experimental/test_ugrid_save.py b/lib/iris/tests/integration/experimental/test_ugrid_save.py deleted file mode 100644 index eb2cb04f79..0000000000 --- a/lib/iris/tests/integration/experimental/test_ugrid_save.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Integration tests for NetCDF-UGRID file saving. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import glob -from pathlib import Path -import shutil -from subprocess import check_call -import tempfile - -import iris -from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD -import iris.fileformats.netcdf -from iris.tests import IrisTest -from iris.tests.stock.netcdf import _add_standard_data - - -class TestBasicSave(IrisTest): - @classmethod - def setUpClass(cls): - cls.temp_dir = Path(tempfile.mkdtemp()) - cls.examples_dir = ( - Path(__file__).absolute().parent / "ugrid_conventions_examples" - ) - example_paths = glob.glob(str(cls.examples_dir / "*ex*.cdl")) - example_names = [ - str(Path(filepath).name).split("_")[1] # = "ex" - for filepath in example_paths - ] - cls.example_names_paths = { - name: path for name, path in zip(example_names, example_paths) - } - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temp_dir) - - def test_example_result_cdls(self): - # Snapshot the result of saving the example cases. - for ex_name, filepath in self.example_names_paths.items(): - target_ncfile_path = str(self.temp_dir / f"{ex_name}.nc") - # Create a netcdf file from the test CDL. - check_call( - f"ncgen {filepath} -k4 -o {target_ncfile_path}", shell=True - ) - # Fill in blank data-variables. - _add_standard_data(target_ncfile_path) - # Load as Iris data - with PARSE_UGRID_ON_LOAD.context(): - cubes = iris.load(target_ncfile_path) - # Re-save, to check the save behaviour. - resave_ncfile_path = str(self.temp_dir / f"{ex_name}_resaved.nc") - iris.save(cubes, resave_ncfile_path) - # Check the output against a CDL snapshot. 
- refdir_relpath = ( - "integration/experimental/ugrid_save/TestBasicSave/" - ) - reffile_name = str(Path(filepath).name).replace(".nc", ".cdl") - reffile_path = refdir_relpath + reffile_name - self.assertCDL(resave_ncfile_path, reference_filename=reffile_path) - - def test_example_roundtrips(self): - # Check that save-and-loadback leaves Iris data unchanged, - # for data derived from each UGRID example CDL. - for ex_name, filepath in self.example_names_paths.items(): - target_ncfile_path = str(self.temp_dir / f"{ex_name}.nc") - # Create a netcdf file from the test CDL. - check_call( - f"ncgen {filepath} -k4 -o {target_ncfile_path}", shell=True - ) - # Fill in blank data-variables. - _add_standard_data(target_ncfile_path) - # Load the original as Iris data - with PARSE_UGRID_ON_LOAD.context(): - orig_cubes = iris.load(target_ncfile_path) - - if "ex4" in ex_name: - # Discard the extra formula terms component cubes - # Saving these does not do what you expect - orig_cubes = orig_cubes.extract("datavar") - - # Save-and-load-back to compare the Iris saved result. - resave_ncfile_path = str(self.temp_dir / f"{ex_name}_resaved.nc") - iris.save(orig_cubes, resave_ncfile_path) - with PARSE_UGRID_ON_LOAD.context(): - savedloaded_cubes = iris.load(resave_ncfile_path) - - # This should match the original exactly - # ..EXCEPT for our inability to compare meshes. - for orig, reloaded in zip(orig_cubes, savedloaded_cubes): - for cube in (orig, reloaded): - # Remove conventions attributes, which may differ. - cube.attributes.pop("Conventions", None) - # Remove var-names, which may differ. 
- cube.var_name = None - - # Compare the mesh contents (as we can't compare actual meshes) - self.assertEqual(orig.location, reloaded.location) - orig_mesh = orig.mesh - reloaded_mesh = reloaded.mesh - self.assertEqual( - orig_mesh.all_coords, reloaded_mesh.all_coords - ) - self.assertEqual( - orig_mesh.all_connectivities, - reloaded_mesh.all_connectivities, - ) - # Index the cubes to replace meshes with meshcoord-derived aux coords. - # This needs [:0] on the mesh dim, so do that on all dims. - keys = tuple([slice(0, None)] * orig.ndim) - orig = orig[keys] - reloaded = reloaded[keys] - # Resulting cubes, with collapsed mesh, should be IDENTICAL. - self.assertEqual(orig, reloaded) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/README.txt b/lib/iris/tests/integration/experimental/ugrid_conventions_examples/README.txt deleted file mode 100644 index 2a9b5bde35..0000000000 --- a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/README.txt +++ /dev/null @@ -1,16 +0,0 @@ -Examples generated from CDL example sections in UGRID conventions v1.0 - ( see webpage: https://ugrid-conventions.github.io/ugrid-conventions/ ) - -CHANGES: - * added a data-var to all examples, for ease of iris-roundtripping - * EX4 : - - had a couple of missing ";"s at lineends - - the formula terms (depth+surface) should map to 'Mesh2_layers', and not to the mesh at all. - - use Mesh2d_layers dim, and have no 'mesh' or 'location' - * "EX4a" -- possibly (future) closer mix of hybrid-vertical and mesh dimensions - - *don't* think we can have a hybrid coord ON the mesh dimension - - mesh being a vertical location (only) seems to make no sense - - .. 
and implies that the mesh is 1d and ordered, which is not really unstructured at all - - *could* have hybrid-height with the _orography_ mapping to the mesh - - doesn't match the UGRID examples, but see : iris.tests.unit.fileformats.netcdf.test_Saver__ugrid.TestSaveUgrid__cube.test_nonmesh_hybrid_dim - diff --git a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex1_1d_mesh.cdl b/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex1_1d_mesh.cdl deleted file mode 100644 index d022fedc61..0000000000 --- a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex1_1d_mesh.cdl +++ /dev/null @@ -1,55 +0,0 @@ -netcdf ex1_1d_mesh { -dimensions: -nMesh1_node = 5 ; // nNodes -nMesh1_edge = 4 ; // nEdges - -Two = 2; - -variables: -// Mesh topology -integer Mesh1 ; -Mesh1:cf_role = "mesh_topology" ; -Mesh1:long_name = "Topology data of 1D network" ; -Mesh1:topology_dimension = 1 ; -Mesh1:node_coordinates = "Mesh1_node_x Mesh1_node_y" ; -Mesh1:edge_node_connectivity = "Mesh1_edge_nodes" ; -Mesh1:edge_coordinates = "Mesh1_edge_x Mesh1_edge_y" ; // optional attribute -integer Mesh1_edge_nodes(nMesh1_edge, Two) ; -Mesh1_edge_nodes:cf_role = "edge_node_connectivity" ; -Mesh1_edge_nodes:long_name = "Maps every edge/link to the two nodes that it connects." ; -Mesh1_edge_nodes:start_index = 1 ; - -// Mesh node coordinates -double Mesh1_node_x(nMesh1_node) ; -Mesh1_node_x:standard_name = "longitude" ; -Mesh1_node_x:long_name = "Longitude of 1D network nodes." ; -Mesh1_node_x:units = "degrees_east" ; -double Mesh1_node_y(nMesh1_node) ; -Mesh1_node_y:standard_name = "latitude" ; -Mesh1_node_y:long_name = "Latitude of 1D network nodes." ; -Mesh1_node_y:units = "degrees_north" ; - -// Optional mesh edge coordinate variables -double Mesh1_edge_x(nMesh1_edge) ; -Mesh1_edge_x:standard_name = "longitude" ; -Mesh1_edge_x:long_name = "Characteristic longitude of 1D network edge (e.g. midpoint of the edge)." 
; -Mesh1_edge_x:units = "degrees_east" ; -Mesh1_edge_x:bounds = "Mesh1_edge_xbnds" ; -double Mesh1_edge_y(nMesh1_edge) ; -Mesh1_edge_y:standard_name = "latitude" ; -Mesh1_edge_y:long_name = "Characteristic latitude of 1D network edge (e.g. midpoint of the edge)." ; -Mesh1_edge_y:units = "degrees_north" ; -Mesh1_edge_y:bounds = "Mesh1_edge_ybnds" ; -double Mesh1_edge_xbnds(nMesh1_edge,Two) ; -Mesh1_edge_xbnds:standard_name = "longitude" ; -Mesh1_edge_xbnds:long_name = "Longitude bounds of 1D network edge (i.e. begin and end longitude)." ; -Mesh1_edge_xbnds:units = "degrees_east" ; -double Mesh1_edge_ybnds(nMesh1_edge,Two) ; -Mesh1_edge_ybnds:standard_name = "latitude" ; -Mesh1_edge_ybnds:long_name = "Latitude bounds of 1D network edge (i.e. begin and end latitude)." ; -Mesh1_edge_ybnds:units = "degrees_north" ; - -float datavar(nMesh1_edge) ; - datavar:mesh = "Mesh1" ; - datavar:location = "edge" ; -} diff --git a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex2_2d_triangular.cdl b/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex2_2d_triangular.cdl deleted file mode 100644 index 1e4e483826..0000000000 --- a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex2_2d_triangular.cdl +++ /dev/null @@ -1,84 +0,0 @@ -netcdf ex2_2d_triangular { -dimensions: -nMesh2_node = 4 ; // nNodes -nMesh2_edge = 5 ; // nEdges -nMesh2_face = 2 ; // nFaces - -Two = 2 ; -Three = 3 ; - -variables: -// Mesh topology -integer Mesh2 ; -Mesh2:cf_role = "mesh_topology" ; -Mesh2:long_name = "Topology data of 2D unstructured mesh" ; -Mesh2:topology_dimension = 2 ; -Mesh2:node_coordinates = "Mesh2_node_x Mesh2_node_y" ; -Mesh2:face_node_connectivity = "Mesh2_face_nodes" ; -Mesh2:face_dimension = "nMesh2_face" ; -Mesh2:edge_node_connectivity = "Mesh2_edge_nodes" ; // attribute required if variables will be defined on edges -Mesh2:edge_dimension = "nMesh2_edge" ; -Mesh2:edge_coordinates = "Mesh2_edge_x Mesh2_edge_y" ; // 
optional attribute (requires edge_node_connectivity) -Mesh2:face_coordinates = "Mesh2_face_x Mesh2_face_y" ; // optional attribute -Mesh2:face_edge_connectivity = "Mesh2_face_edges" ; // optional attribute (requires edge_node_connectivity) -Mesh2:face_face_connectivity = "Mesh2_face_links" ; // optional attribute -Mesh2:edge_face_connectivity = "Mesh2_edge_face_links" ; // optional attribute (requires edge_node_connectivity) -integer Mesh2_face_nodes(nMesh2_face, Three) ; -Mesh2_face_nodes:cf_role = "face_node_connectivity" ; -Mesh2_face_nodes:long_name = "Maps every triangular face to its three corner nodes." ; -Mesh2_face_nodes:start_index = 1 ; -integer Mesh2_edge_nodes(nMesh2_edge, Two) ; -Mesh2_edge_nodes:cf_role = "edge_node_connectivity" ; -Mesh2_edge_nodes:long_name = "Maps every edge to the two nodes that it connects." ; -Mesh2_edge_nodes:start_index = 1 ; - -// Optional mesh topology variables -integer Mesh2_face_edges(nMesh2_face, Three) ; -Mesh2_face_edges:cf_role = "face_edge_connectivity" ; -Mesh2_face_edges:long_name = "Maps every triangular face to its three edges." ; -Mesh2_face_edges:start_index = 1 ; -integer Mesh2_face_links(nMesh2_face, Three) ; -Mesh2_face_links:cf_role = "face_face_connectivity" ; -Mesh2_face_links:long_name = "neighbor faces for faces" ; -Mesh2_face_links:start_index = 1 ; -Mesh2_face_links:_FillValue = -999 ; -Mesh2_face_links:comment = "missing neighbor faces are indicated using _FillValue" ; -integer Mesh2_edge_face_links(nMesh2_edge, Two) ; -Mesh2_edge_face_links:cf_role = "edge_face_connectivity" ; -Mesh2_edge_face_links:long_name = "neighbor faces for edges" ; -Mesh2_edge_face_links:start_index = 1 ; -Mesh2_edge_face_links:_FillValue = -999 ; -Mesh2_edge_face_links:comment = "missing neighbor faces are indicated using _FillValue" ; - -// Mesh node coordinates -double Mesh2_node_x(nMesh2_node) ; -Mesh2_node_x:standard_name = "longitude" ; -Mesh2_node_x:long_name = "Longitude of 2D mesh nodes." 
; -Mesh2_node_x:units = "degrees_east" ; -double Mesh2_node_y(nMesh2_node) ; -Mesh2_node_y:standard_name = "latitude" ; -Mesh2_node_y:long_name = "Latitude of 2D mesh nodes." ; -Mesh2_node_y:units = "degrees_north" ; - -// Optional mesh face and edge coordinate variables -double Mesh2_face_x(nMesh2_face) ; -Mesh2_face_x:standard_name = "longitude" ; -Mesh2_face_x:long_name = "Characteristics longitude of 2D mesh triangle (e.g. circumcenter coordinate)." ; -Mesh2_face_x:units = "degrees_east" ; -double Mesh2_face_y(nMesh2_face) ; -Mesh2_face_y:standard_name = "latitude" ; -Mesh2_face_y:long_name = "Characteristics latitude of 2D mesh triangle (e.g. circumcenter coordinate)." ; -Mesh2_face_y:units = "degrees_north" ; -double Mesh2_edge_x(nMesh2_edge) ; -Mesh2_edge_x:standard_name = "longitude" ; -Mesh2_edge_x:long_name = "Characteristic longitude of 2D mesh edge (e.g. midpoint of the edge)." ; -Mesh2_edge_x:units = "degrees_east" ; -double Mesh2_edge_y(nMesh2_edge) ; -Mesh2_edge_y:standard_name = "latitude" ; -Mesh2_edge_y:long_name = "Characteristic latitude of 2D mesh edge (e.g. midpoint of the edge)." 
; -Mesh2_edge_y:units = "degrees_north" ; - -float datavar(nMesh2_face) ; - datavar:mesh = "Mesh2" ; - datavar:location = "face" ; -} diff --git a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex3_2d_flexible.cdl b/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex3_2d_flexible.cdl deleted file mode 100644 index 2fa077d152..0000000000 --- a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex3_2d_flexible.cdl +++ /dev/null @@ -1,99 +0,0 @@ -netcdf ex3_2d_flexible { -dimensions: -nMesh2_node = 5 ; // nNodes -nMesh2_edge = 6 ; // nEdges -nMesh2_face = 2 ; // nFaces -nMaxMesh2_face_nodes = 4 ; // MaxNumNodesPerFace - -Two = 2 ; - -variables: -// Mesh topology -integer Mesh2 ; -Mesh2:cf_role = "mesh_topology" ; -Mesh2:long_name = "Topology data of 2D unstructured mesh" ; -Mesh2:topology_dimension = 2 ; -Mesh2:node_coordinates = "Mesh2_node_x Mesh2_node_y" ; -Mesh2:face_node_connectivity = "Mesh2_face_nodes" ; -Mesh2:face_dimension = "nMesh2_face" ; -Mesh2:edge_node_connectivity = "Mesh2_edge_nodes" ; // attribute required if variables will be defined on edges -Mesh2:edge_dimension = "nMesh2_edge" ; -Mesh2:edge_coordinates = "Mesh2_edge_x Mesh2_edge_y" ; // optional attribute (requires edge_node_connectivity) -Mesh2:face_coordinates = "Mesh2_face_x Mesh2_face_y" ; // optional attribute -Mesh2:face_edge_connectivity = "Mesh2_face_edges" ; // optional attribute (requires edge_node_connectivity) -Mesh2:face_face_connectivity = "Mesh2_face_links" ; // optional attribute -Mesh2:edge_face_connectivity = "Mesh2_edge_face_links" ; // optional attribute (requires edge_node_connectivity) -integer Mesh2_face_nodes(nMesh2_face, nMaxMesh2_face_nodes) ; -Mesh2_face_nodes:cf_role = "face_node_connectivity" ; -Mesh2_face_nodes:long_name = "Maps every face to its corner nodes." 
; -Mesh2_face_nodes:_FillValue = 999999 ; -Mesh2_face_nodes:start_index = 1 ; -integer Mesh2_edge_nodes(nMesh2_edge, Two) ; -Mesh2_edge_nodes:cf_role = "edge_node_connectivity" ; -Mesh2_edge_nodes:long_name = "Maps every edge to the two nodes that it connects." ; -Mesh2_edge_nodes:start_index = 1 ; - -// Optional mesh topology variables -integer Mesh2_face_edges(nMesh2_face, nMaxMesh2_face_nodes) ; -Mesh2_face_edges:cf_role = "face_edge_connectivity" ; -Mesh2_face_edges:long_name = "Maps every face to its edges." ; -Mesh2_face_edges:_FillValue = 999999 ; -Mesh2_face_edges:start_index = 1 ; -integer Mesh2_face_links(nMesh2_face, nMaxMesh2_face_nodes) ; -Mesh2_face_links:cf_role = "face_face_connectivity" ; -Mesh2_face_links:long_name = "neighbor faces for faces" ; -Mesh2_face_links:start_index = 1 ; -Mesh2_face_links:_FillValue = -999 ; -Mesh2_face_links:comment = "missing edges as well as missing neighbor faces are indicated using _FillValue" ; -integer Mesh2_edge_face_links(nMesh2_edge, Two) ; -Mesh2_edge_face_links:cf_role = "edge_face_connectivity" ; -Mesh2_edge_face_links:long_name = "neighbor faces for edges" ; -Mesh2_edge_face_links:start_index = 1 ; -Mesh2_edge_face_links:_FillValue = -999 ; -Mesh2_edge_face_links:comment = "missing neighbor faces are indicated using _FillValue" ; - -// Mesh node coordinates -double Mesh2_node_x(nMesh2_node) ; -Mesh2_node_x:standard_name = "longitude" ; -Mesh2_node_x:long_name = "Longitude of 2D mesh nodes." ; -Mesh2_node_x:units = "degrees_east" ; -double Mesh2_node_y(nMesh2_node) ; -Mesh2_node_y:standard_name = "latitude" ; -Mesh2_node_y:long_name = "Latitude of 2D mesh nodes." ; -Mesh2_node_y:units = "degrees_north" ; - -// Optional mesh face and edge coordinate variables -double Mesh2_face_x(nMesh2_face) ; -Mesh2_face_x:standard_name = "longitude" ; -Mesh2_face_x:long_name = "Characteristics longitude of 2D mesh face." 
; -Mesh2_face_x:units = "degrees_east" ; -Mesh2_face_x:bounds = "Mesh2_face_xbnds" ; -double Mesh2_face_y(nMesh2_face) ; -Mesh2_face_y:standard_name = "latitude" ; -Mesh2_face_y:long_name = "Characteristics latitude of 2D mesh face." ; -Mesh2_face_y:units = "degrees_north" ; -Mesh2_face_y:bounds = "Mesh2_face_ybnds" ; -double Mesh2_face_xbnds(nMesh2_face,nMaxMesh2_face_nodes) ; -Mesh2_face_xbnds:standard_name = "longitude" ; -Mesh2_face_xbnds:long_name = "Longitude bounds of 2D mesh face (i.e. corner coordinates)." ; -Mesh2_face_xbnds:units = "degrees_east" ; -Mesh2_face_xbnds:_FillValue = 9.9692099683868690E36; -double Mesh2_face_ybnds(nMesh2_face,nMaxMesh2_face_nodes) ; -Mesh2_face_ybnds:standard_name = "latitude" ; -Mesh2_face_ybnds:long_name = "Latitude bounds of 2D mesh face (i.e. corner coordinates)." ; -Mesh2_face_ybnds:units = "degrees_north" ; -Mesh2_face_ybnds:_FillValue = 9.9692099683868690E36; -double Mesh2_edge_x(nMesh2_edge) ; -Mesh2_edge_x:standard_name = "longitude" ; -Mesh2_edge_x:long_name = "Characteristic longitude of 2D mesh edge (e.g. midpoint of the edge)." ; -Mesh2_edge_x:units = "degrees_east" ; -double Mesh2_edge_y(nMesh2_edge) ; -Mesh2_edge_y:standard_name = "latitude" ; -Mesh2_edge_y:long_name = "Characteristic latitude of 2D mesh edge (e.g. midpoint of the edge)." 
; -Mesh2_edge_y:units = "degrees_north" ; -// bounds variables for edges skipped - -float datavar(nMesh2_face) ; - datavar:mesh = "Mesh2" ; - datavar:location = "face" ; -} diff --git a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex4_3d_layered.cdl b/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex4_3d_layered.cdl deleted file mode 100644 index d154502018..0000000000 --- a/lib/iris/tests/integration/experimental/ugrid_conventions_examples/ugrid_ex4_3d_layered.cdl +++ /dev/null @@ -1,120 +0,0 @@ -netcdf ex4_3d_layered { -dimensions: -nMesh2_node = 6 ; // nNodes -nMesh2_edge = 7 ; // nEdges -nMesh2_face = 2 ; // nFaces -nMaxMesh2_face_nodes = 4 ; // MaxNumNodesPerFace -Mesh2_layers = 10 ; - -Two = 2 ; - -variables: -// Mesh topology -integer Mesh2 ; -Mesh2:cf_role = "mesh_topology" ; -Mesh2:long_name = "Topology data of 2D unstructured mesh" ; -Mesh2:topology_dimension = 2 ; -Mesh2:node_coordinates = "Mesh2_node_x Mesh2_node_y" ; -Mesh2:face_node_connectivity = "Mesh2_face_nodes" ; -Mesh2:face_dimension = "nMesh2_face" ; -Mesh2:edge_node_connectivity = "Mesh2_edge_nodes" ; // attribute required if variables will be defined on edges -Mesh2:edge_dimension = "nMesh2_edge" ; -Mesh2:edge_coordinates = "Mesh2_edge_x Mesh2_edge_y" ; // optional attribute (requires edge_node_connectivity) -Mesh2:face_coordinates = "Mesh2_face_x Mesh2_face_y" ; // optional attribute -Mesh2:face_edge_connectivity = "Mesh2_face_edges" ; // optional attribute (requires edge_node_connectivity) -Mesh2:face_face_connectivity = "Mesh2_face_links" ; // optional attribute -Mesh2:edge_face_connectivity = "Mesh2_edge_face_links" ; // optional attribute (requires edge_node_connectivity) -integer Mesh2_face_nodes(nMesh2_face, nMaxMesh2_face_nodes) ; -Mesh2_face_nodes:cf_role = "face_node_connectivity" ; -Mesh2_face_nodes:long_name = "Maps every face to its corner nodes." 
; -Mesh2_face_nodes:_FillValue = 999999 ; -Mesh2_face_nodes:start_index = 1 ; -integer Mesh2_edge_nodes(nMesh2_edge, Two) ; -Mesh2_edge_nodes:cf_role = "edge_node_connectivity" ; -Mesh2_edge_nodes:long_name = "Maps every edge to the two nodes that it connects." ; -Mesh2_edge_nodes:start_index = 1 ; - -// Optional mesh topology variables -integer Mesh2_face_edges(nMesh2_face, nMaxMesh2_face_nodes) ; -Mesh2_face_edges:cf_role = "face_edge_connectivity" ; -Mesh2_face_edges:long_name = "Maps every face to its edges." ; -Mesh2_face_edges:_FillValue = 999999 ; -Mesh2_face_edges:start_index = 1 ; -integer Mesh2_face_links(nMesh2_face, nMaxMesh2_face_nodes) ; -Mesh2_face_links:cf_role = "face_face_connectivity" ; -Mesh2_face_links:long_name = "neighbor faces for faces" ; -Mesh2_face_links:start_index = 1 ; -Mesh2_face_links:_FillValue = -999 ; -Mesh2_face_links:comment = "missing edges as well as missing neighbor faces are indicated using _FillValue" ; -integer Mesh2_edge_face_links(nMesh2_edge, Two) ; -Mesh2_edge_face_links:cf_role = "edge_face_connectivity" ; -Mesh2_edge_face_links:long_name = "neighbor faces for edges" ; -Mesh2_edge_face_links:start_index = 1 ; -Mesh2_edge_face_links:_FillValue = -999 ; -Mesh2_edge_face_links:comment = "missing neighbor faces are indicated using _FillValue" ; - -// Mesh node coordinates -double Mesh2_node_x(nMesh2_node) ; -Mesh2_node_x:standard_name = "longitude" ; -Mesh2_node_x:long_name = "Longitude of 2D mesh nodes." ; -Mesh2_node_x:units = "degrees_east" ; -double Mesh2_node_y(nMesh2_node) ; -Mesh2_node_y:standard_name = "latitude" ; -Mesh2_node_y:long_name = "Latitude of 2D mesh nodes." ; -Mesh2_node_y:units = "degrees_north" ; - -// Optional mesh face and edge coordinate variables -double Mesh2_face_x(nMesh2_face) ; -Mesh2_face_x:standard_name = "longitude" ; -Mesh2_face_x:long_name = "Characteristics longitude of 2D mesh face." 
; -Mesh2_face_x:units = "degrees_east" ; -Mesh2_face_x:bounds = "Mesh2_face_xbnds" ; -double Mesh2_face_y(nMesh2_face) ; -Mesh2_face_y:standard_name = "latitude" ; -Mesh2_face_y:long_name = "Characteristics latitude of 2D mesh face." ; -Mesh2_face_y:units = "degrees_north" ; -Mesh2_face_y:bounds = "Mesh2_face_ybnds" ; -double Mesh2_face_xbnds(nMesh2_face,nMaxMesh2_face_nodes) ; -Mesh2_face_xbnds:standard_name = "longitude" ; -Mesh2_face_xbnds:long_name = "Longitude bounds of 2D mesh face (i.e. corner coordinates)." ; -Mesh2_face_xbnds:units = "degrees_east" ; -Mesh2_face_xbnds:_FillValue = 9.9692099683868690E36; -double Mesh2_face_ybnds(nMesh2_face,nMaxMesh2_face_nodes) ; -Mesh2_face_ybnds:standard_name = "latitude" ; -Mesh2_face_ybnds:long_name = "Latitude bounds of 2D mesh face (i.e. corner coordinates)." ; -Mesh2_face_ybnds:units = "degrees_north" ; -Mesh2_face_ybnds:_FillValue = 9.9692099683868690E36; -double Mesh2_edge_x(nMesh2_edge) ; -Mesh2_edge_x:standard_name = "longitude" ; -Mesh2_edge_x:long_name = "Characteristic longitude of 2D mesh edge (e.g. midpoint of the edge)." ; -Mesh2_edge_x:units = "degrees_east" ; -double Mesh2_edge_y(nMesh2_edge) ; -Mesh2_edge_y:standard_name = "latitude" ; -Mesh2_edge_y:long_name = "Characteristic latitude of 2D mesh edge (e.g. midpoint of the edge)." 
; -Mesh2_edge_y:units = "degrees_north" ; -// bounds variables for edges skipped - -// Vertical coordinate -double Mesh2_layers(Mesh2_layers) ; -Mesh2_layers:standard_name = "ocean_sigma_coordinate" ; -Mesh2_layers:long_name = "sigma at layer midpoints" ; -Mesh2_layers:positive = "up" ; -Mesh2_layers:formula_terms = "sigma: Mesh2_layers eta: Mesh2_surface depth: Mesh2_depth" ; -double Mesh2_depth(Mesh2_layers) ; -Mesh2_depth:standard_name = "sea_floor_depth_below_geoid" ; -Mesh2_depth:units = "m" ; -Mesh2_depth:positive = "down" ; -Mesh2_depth:coordinates = "Mesh2_node_x Mesh2_node_y" ; -double Mesh2_surface(Mesh2_layers) ; -Mesh2_surface:standard_name = "sea_surface_height_above_geoid" ; -Mesh2_surface:units = "m" ; -Mesh2_surface:coordinates = "Mesh2_face_x Mesh2_face_y" ; - -float datavar(Mesh2_layers, nMesh2_face) ; - datavar:mesh = "Mesh2" ; - datavar:location = "face" ; - -data: -Mesh2_layers = 0., 1., 2., 3., 4., 5., 6., 7., 8., 9. ; - -} diff --git a/lib/iris/tests/integration/fast_load/__init__.py b/lib/iris/tests/integration/fast_load/__init__.py deleted file mode 100644 index a94785ca58..0000000000 --- a/lib/iris/tests/integration/fast_load/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for :mod:`iris.fileformats.um` fast load functions.""" diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py deleted file mode 100644 index a510ef7257..0000000000 --- a/lib/iris/tests/integration/fast_load/test_fast_load.py +++ /dev/null @@ -1,690 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Integration tests for fast-loading FF and PP files.""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from collections.abc import Iterable -import shutil -import tempfile - -import numpy as np - -import iris -from iris.coord_systems import GeogCS -import iris.coords -from iris.coords import AuxCoord, CellMethod, DimCoord -from iris.cube import Cube, CubeList -from iris.exceptions import IgnoreCubeException -from iris.fileformats.pp import EARTH_RADIUS, STASH -from iris.fileformats.um._fast_load import STRUCTURED_LOAD_CONTROLS - - -class Mixin_FieldTest: - # A mixin providing common facilities for fast-load testing : - # * create 'raw' cubes to produce the desired PP fields in a test file. - # * save 'raw' cubes to temporary PP files that get deleted afterwards. - # * control whether tests run with 'normal' or 'fast' loading. - - def setUp(self): - # Create a private temporary directory. - self.temp_dir_path = tempfile.mkdtemp() - # Initialise temporary filename generation. - self.tempfile_count = 0 - self.tempfile_path_fmt = ( - "{dir_path}/tempfile_{prefix}_{file_number:06d}{suffix}" - ) - # Enable fast loading, if the inheritor enables it. - # N.B. *requires* the user to define "self.do_fast_loads" (no default). - if self.do_fast_loads: - # Enter a 'structured load' context. - self.load_context = STRUCTURED_LOAD_CONTROLS.context( - loads_use_structured=True - ) - # N.B. we can't use a 'with', so issue separate 'enter' and 'exit' - # calls instead. - self.load_context.__enter__() - - def tearDown(self): - # Delete temporary directory. - shutil.rmtree(self.temp_dir_path) - if self.do_fast_loads: - # End the 'fast loading' context. - self.load_context.__exit__(None, None, None) - - def _temp_filepath(self, user_name="", suffix=".pp"): - # Return the filepath for a new temporary file. 
- self.tempfile_count += 1 - file_path = self.tempfile_path_fmt.format( - dir_path=self.temp_dir_path, - prefix=user_name, - file_number=self.tempfile_count, - suffix=suffix, - ) - return file_path - - def save_fieldcubes(self, cubes, basename=""): - # Save cubes to a temporary file, and return its filepath. - file_path = self._temp_filepath(user_name=basename, suffix=".pp") - iris.save(cubes, file_path) - return file_path - - def fields( - self, - c_t=None, - cft=None, - ctp=None, - c_h=None, - c_p=None, - phn=0, - mmm=None, - pse=None, - ): - # Return a list of 2d cubes representing raw PPFields, from args - # specifying sequences of (scalar) coordinate values. - # TODO? : add bounds somehow ? - # - # Arguments 'c' are either a single int value, making a scalar - # coord, or a string of characters : '0'-'9' (index) or '-' (missing). - # The indexes select point values from fixed list of possibles. - # - # Argument 'c_h' and 'c_p' represent height or pressure values, so - # ought to be mutually exclusive -- these control LBVC. - # - # Argument 'phn' indexes phenomenon types. - # - # Argument 'mmm' denotes existence (or not) of a cell method of type - # 'average' or 'min' or 'max' (values '012' respectively), applying to - # the time values -- ultimately, this controls LBTIM. - # - # Argument 'pse' denotes pseudo-level numbers. - # These translate into 'LBUSER5' values. - - # Get the number of result cubes, defined by the 'longest' arg. - def arglen(arg): - # Get the 'length' of a control argument. - if arg is None: - result = 0 - elif isinstance(arg, str): - result = len(arg) - else: - result = 1 - return result - - n_flds = max(arglen(x) for x in (c_t, cft, ctp, c_h, c_p, mmm)) - - # Make basic anonymous test cubes. - ny, nx = 3, 5 - data = np.arange(n_flds * ny * nx, dtype=np.float32) - data = data.reshape((n_flds, ny, nx)) - cubes = [Cube(data[i]) for i in range(n_flds)] - - # Define test point values for making coordinates. 
- time_unit = "hours since 1970-01-01" - period_unit = "hours" - height_unit = "m" - pressure_unit = "hPa" - time_values = 24.0 * np.arange(10) - height_values = 100.0 * np.arange(1, 11) - pressure_values = [ - 100.0, - 150.0, - 200.0, - 250.0, - 300.0, - 500.0, - 850.0, - 1000.0, - ] - pseudolevel_values = range(1, 11) # A valid value is >= 1. - - # Test phenomenon details. - # NOTE: in order to write/readback as identical, these also contain a - # canonical unit and matching STASH attribute. - # Those could in principle be looked up, but it's a bit awkward. - phenomenon_values = [ - ("air_temperature", "K", "m01s01i004"), - ("x_wind", "m s-1", "m01s00i002"), - ("y_wind", "m s-1", "m01s00i003"), - ("specific_humidity", "kg kg-1", "m01s00i010"), - ] - - # Test cell-methods. - # NOTE: if you add an *interval* to any of these cell-methods, it is - # not saved into the PP file (?? or maybe not loaded back again ??). - # This could be a PP save/load bug, or maybe just because no bounds ? - cell_method_values = [ - CellMethod("mean", "time"), - CellMethod("maximum", "time"), - CellMethod("minimum", "time"), - ] - - # Define helper to decode an argument as a list of test values. - def arg_vals(arg, vals): - # Decode an argument to a list of 'n_flds' coordinate point values. - # (or 'None' where missing) - - # First get a list of value indices from the argument. - # Can be: a single index value; a list of indices; or a string. - if isinstance(arg, Iterable) and not isinstance(arg, str): - # Can also just pass a simple iterable of values. - inds = [int(val) for val in arg] - else: - n_vals = arglen(arg) - if n_vals == 0: - inds = [None] * n_flds - elif n_vals == 1: - inds = [int(arg)] * n_flds - else: - assert isinstance(arg, str) - inds = [None if char == "-" else int(char) for char in arg] - - # Convert indices to selected point values. - values = [None if ind is None else vals[int(ind)] for ind in inds] - - return values - - # Apply phenomenon_values definitions. 
- phenomena = arg_vals(phn, phenomenon_values) - for cube, (name, units, stash) in zip(cubes, phenomena): - cube.rename(name) - # NOTE: in order to get a cube that will write+readback the same, - # the units must be the canonical one. - cube.units = units - # NOTE: in order to get a cube that will write+readback the same, - # we must include a STASH attribute. - cube.attributes["STASH"] = STASH.from_msi(stash) - cube.fill_value = np.float32(-1e30) - - # Add x and y coords. - cs = GeogCS(EARTH_RADIUS) - xvals = np.linspace(0.0, 180.0, nx) - co_x = DimCoord( - np.array(xvals, dtype=np.float32), - standard_name="longitude", - units="degrees", - coord_system=cs, - ) - yvals = np.linspace(-45.0, 45.0, ny) - co_y = DimCoord( - np.array(yvals, dtype=np.float32), - standard_name="latitude", - units="degrees", - coord_system=cs, - ) - for cube in cubes: - cube.add_dim_coord(co_y, 0) - cube.add_dim_coord(co_x, 1) - - # Add multiple scalar coordinates as defined by the arguments. - def arg_coords(arg, name, unit, vals=None): - # Decode an argument to a list of scalar coordinates. - if vals is None: - vals = np.arange(n_flds + 2) # Note allowance - vals = arg_vals(arg, vals) - coords = [ - None if val is None else DimCoord([val], units=unit) - for val in vals - ] - # Apply names separately, as 'pressure' is not a standard name. - for coord in coords: - if coord: - coord.rename(name) - # Also fix heights to match what comes from a PP file. - if name == "height": - coord.attributes["positive"] = "up" - return coords - - def add_arg_coords(arg, name, unit, vals=None): - # Add scalar coordinates to each cube, for one argument. 
- coords = arg_coords(arg, name, unit, vals) - for cube, coord in zip(cubes, coords): - if coord: - cube.add_aux_coord(coord) - - add_arg_coords(c_t, "time", time_unit, time_values) - add_arg_coords(cft, "forecast_reference_time", time_unit) - add_arg_coords(ctp, "forecast_period", period_unit, time_values) - add_arg_coords(c_h, "height", height_unit, height_values) - add_arg_coords(c_p, "pressure", pressure_unit, pressure_values) - add_arg_coords(pse, "pseudo_level", "1", pseudolevel_values) - - # Add cell methods as required. - methods = arg_vals(mmm, cell_method_values) - for cube, method in zip(cubes, methods): - if method: - cube.add_cell_method(method) - - return cubes - - -class MixinBasic: - # A mixin of tests that can be applied to *either* standard or fast load. - # The "real" test classes must inherit this, and Mixin_FieldTest, - # and define 'self.do_fast_loads' as True or False. - # - # Basic functional tests. - - def test_basic(self): - # Show that basic load merging works. - flds = self.fields(c_t="123", cft="000", ctp="123", c_p=0) - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - self.assertEqual(results, expected) - - def test_phenomena(self): - # Show that different phenomena are merged into distinct cubes. - flds = self.fields(c_t="1122", phn="0101") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - self.assertEqual(results, expected) - - def test_cross_file_concatenate(self): - # Combine vector dimensions (i.e. concatenate) across multiple files. - fldset_1 = self.fields(c_t="12") - fldset_2 = self.fields(c_t="34") - file_1 = self.save_fieldcubes(fldset_1) - file_2 = self.save_fieldcubes(fldset_2) - results = iris.load((file_1, file_2)) - expected = CubeList(fldset_1 + fldset_2).merge() - self.assertEqual(results, expected) - - def test_cell_method(self): - # Check that cell methods (i.e. LBPROC values) produce distinct - # phenomena. 
- flds = self.fields(c_t="000111222", mmm="-01-01-01") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList( - CubeList(flds[i_start::3]).merge_cube() for i_start in range(3) - ) - self.assertEqual(results, expected) - - -class MixinCallDetails: - # A mixin of tests that can be applied to *either* standard or fast load. - # The "real" test classes must inherit this, and Mixin_FieldTest, - # and define 'self.do_fast_loads' as True or False. - # - # Tests for different load calls and load-call arguments. - - def test_stash_constraint(self): - # Check that an attribute constraint functions correctly. - # Note: this is a special case in "fileformats.pp". - flds = self.fields(c_t="1122", phn="0101") - file = self.save_fieldcubes(flds) - airtemp_flds = [fld for fld in flds if fld.name() == "air_temperature"] - stash_attribute = airtemp_flds[0].attributes["STASH"] - results = iris.load( - file, iris.AttributeConstraint(STASH=stash_attribute) - ) - expected = CubeList(airtemp_flds).merge() - self.assertEqual(results, expected) - - def test_ordinary_constraint(self): - # Check that a 'normal' constraint functions correctly. - # Note: *should* be independent of structured loading. - flds = self.fields(c_h="0123") - file = self.save_fieldcubes(flds) - height_constraint = iris.Constraint(height=lambda h: 150.0 < h < 350.0) - results = iris.load(file, height_constraint) - expected = CubeList(flds[1:3]).merge() - self.assertEqual(results, expected) - - def test_callback(self): - # Use 2 timesteps each of (air-temp on height) and (rh on pressure). - flds = self.fields(c_t="0011", phn="0303", c_h="0-1-", c_p="-2-3") - file = self.save_fieldcubes(flds) - - if not self.do_fast_loads: - - def callback(cube, field, filename): - self.assertEqual(filename, file) - lbvc = field.lbvc - if lbvc == 1: - # reject the height level data (accept only pressure). - raise IgnoreCubeException() - else: - # Record the LBVC value. 
- cube.attributes["LBVC"] = lbvc - - else: - - def callback(cube, collation, filename): - self.assertEqual(filename, file) - lbvcs = [fld.lbvc for fld in collation.fields] - lbvc0 = lbvcs[0] - if not np.all(lbvcs == lbvc0): - msg = "Fields have different LBVCs : {}" - raise ValueError(msg.format(set(lbvcs))) - if lbvc0 == 1: - # reject the height level data (accept only pressure). - raise IgnoreCubeException() - else: - # Record the LBVC values. - cube.attributes["A_LBVC"] = lbvcs - - results = iris.load(file, callback=callback) - - # Make an 'expected' from selected fields, with the expected attribute. - expected = CubeList([flds[1], flds[3]]).merge() - if not self.do_fast_loads: - # This is actually a NumPy int32, so honour that here. - expected[0].attributes["LBVC"] = np.int32(8) - else: - expected[0].attributes["A_LBVC"] = [8, 8] - - self.assertEqual(results, expected) - - def test_load_cube(self): - flds = self.fields(c_t="123", cft="000", ctp="123", c_p=0) - file = self.save_fieldcubes(flds) - results = iris.load_cube(file) - expected = CubeList(flds).merge_cube() - self.assertEqual(results, expected) - - def test_load_cubes(self): - flds = self.fields(c_h="0123") - file = self.save_fieldcubes(flds) - height_constraints = [ - iris.Constraint(height=300.0), - iris.Constraint(height=lambda h: 150.0 < h < 350.0), - iris.Constraint("air_temperature"), - ] - results = iris.load_cubes(file, height_constraints) - expected = CubeList( - [ - flds[2], - CubeList(flds[1:3]).merge_cube(), - CubeList(flds).merge_cube(), - ] - ) - self.assertEqual(results, expected) - - def test_load_raw(self): - fldset_1 = self.fields(c_t="015", phn="001") - fldset_2 = self.fields(c_t="234") - file_1 = self.save_fieldcubes(fldset_1) - file_2 = self.save_fieldcubes(fldset_2) - results = iris.load_raw((file_1, file_2)) - if not self.do_fast_loads: - # Each 'raw' cube is just one field. 
- expected = CubeList(fldset_1 + fldset_2) - else: - # 'Raw' cubes have combined (vector) times within each file. - # The 'other' phenomenon appears seperately. - expected = CubeList( - [ - CubeList(fldset_1[:2]).merge_cube(), - CubeList(fldset_2).merge_cube(), - fldset_1[2], - ] - ) - - # Again here, the order of these results is not stable : - # It varies with random characters in the temporary filepath. - # - # ***************************************************************** - # *** Here, this is clearly ALSO the case for "standard" loads. *** - # ***************************************************************** - # - # E.G. run "test_fast_load.py -v TestCallDetails__Iris.test_load_raw" : - # If you remove the sort operations, this fails "sometimes". - # - # To fix this, sort both expected and results by (first) timepoint - # - for which purpose we made all the time values different. - - def timeorder(cube): - return cube.coord("time").points[0] - - expected = sorted(expected, key=timeorder) - results = sorted(results, key=timeorder) - - self.assertEqual(results, expected) - - -class MixinDimsAndOrdering: - # A mixin of tests that can be applied to *either* standard or fast load. - # The "real" test classes must inherit this, and Mixin_FieldTest, - # and define 'self.do_fast_loads' as True or False. - # - # Tests for multidimensional results and dimension orderings. - - def test_multidim(self): - # Check that a full 2-phenom * 2d structure all works properly. - flds = self.fields(c_t="00001111", c_h="00110011", phn="01010101") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - self.assertEqual(results, expected) - - def test_odd_order(self): - # Show that an erratic interleaving of phenomena fields still works. - # N.B. field sequences *within* each phenomenon are properly ordered. 
- flds = self.fields(c_t="00010111", c_h="00101101", phn="01001011") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - self.assertEqual(results, expected) - - def test_v_t_order(self): - # With height varying faster than time, first dimension is time, - # which matches the 'normal' load behaviour. - flds = self.fields(c_t="000111", c_h="012012") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - # Order is (t, h, y, x), which is "standard". - self.assertEqual(expected[0].coord_dims("time"), (0,)) - self.assertEqual(expected[0].coord_dims("height"), (1,)) - self.assertEqual(results, expected) - - def test_t_v_order(self): - # With time varying faster than height, first dimension is height, - # which does not match the 'normal' load. - flds = self.fields(c_t="010101", c_h="001122") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - if not self.do_fast_loads: - # Order is (t, h, y, x), which is "standard". - self.assertEqual(results[0].coord_dims("time"), (0,)) - self.assertEqual(results[0].coord_dims("height"), (1,)) - else: - # Order is (h, t, y, x), which is *not* "standard". - self.assertEqual(results[0].coord_dims("time"), (1,)) - self.assertEqual(results[0].coord_dims("height"), (0,)) - expected[0].transpose((1, 0, 2, 3)) - self.assertEqual(results, expected) - - def test_missing_combination(self): - # A case where one field is 'missing' to make a 2d result. - flds = self.fields(c_t="00011", c_h="01202") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - self.assertEqual(expected[0].coord_dims("time"), (0,)) - self.assertEqual(expected[0].coord_dims("height"), (0,)) - if self.do_fast_loads: - # Something a bit weird happens to the 'height' coordinate in this - # case (and not for standard load). 
- for cube in expected: - cube.coord("height").points = np.array( - cube.coord("height").points, dtype=np.float32 - ) - cube.coord("height").attributes = {} - self.assertEqual(results, expected) - - -class MixinProblemCases: - def test_FAIL_scalar_vector_concatenate(self): - # Structured load can produce a scalar coordinate from one file, and a - # matching vector one from another file, but these won't "combine". - # We'd really like to fix this one... - (single_timepoint_fld,) = self.fields(c_t="1") - multi_timepoint_flds = self.fields(c_t="23") - file_single = self.save_fieldcubes( - [single_timepoint_fld], basename="single" - ) - file_multi = self.save_fieldcubes( - multi_timepoint_flds, basename="multi" - ) - - results = iris.load((file_single, file_multi)) - if not self.do_fast_loads: - # This is what we'd LIKE to get (what iris.load gives). - expected = CubeList( - multi_timepoint_flds + [single_timepoint_fld] - ).merge() - else: - # This is what we ACTUALLY get at present. - # It can't combine the scalar and vector time coords. - expected = CubeList( - [ - CubeList(multi_timepoint_flds).merge_cube(), - single_timepoint_fld, - ] - ) - # NOTE: in this case, we need to sort the results to ensure a - # repeatable ordering, because ??somehow?? the random temporary - # directory name affects the ordering of the cubes in the result ! - results = CubeList(sorted(results, key=lambda cube: cube.shape)) - self.assertEqual(results, expected) - - def test_FAIL_phenomena_nostash(self): - # If we remove the 'STASH' attributes, certain phenomena can still be - # successfully encoded+decoded by standard load using LBFC values. - # Structured loading gets this wrong, because it does not use LBFC in - # characterising phenomena. - flds = self.fields(c_t="1122", phn="0101") - for fld in flds: - del fld.attributes["STASH"] - file = self.save_fieldcubes(flds) - results = iris.load(file) - if not self.do_fast_loads: - # This is what we'd LIKE to get (what iris.load gives). 
- expected = CubeList(flds).merge() - else: - # At present, we get a cube incorrectly combined together over all - # 4 timepoints, with the same phenomenon for all (!wrong!). - # It's a bit tricky to arrange the existing data like that. - # Do it by hacking the time values to allow merge, and then fixing - # up the time - old_t1, old_t2 = ( - fld.coord("time").points[0] for fld in (flds[0], flds[2]) - ) - for i_fld, fld in enumerate(flds): - # Hack the phenomena to all look like the first one. - fld.rename("air_temperature") - fld.units = "K" - # Hack the time points so the 4 cube can merge into one. - fld.coord("time").points = [old_t1 + i_fld] - one_cube = CubeList(flds).merge_cube() - # Replace time dim with an anonymous dim. - co_t_fake = one_cube.coord("time") - one_cube.remove_coord(co_t_fake) - # Reconstruct + add back the expected auxiliary time coord. - co_t_new = AuxCoord( - [old_t1, old_t1, old_t2, old_t2], - standard_name="time", - units=co_t_fake.units, - ) - one_cube.add_aux_coord(co_t_new, 0) - expected = [one_cube] - self.assertEqual(results, expected) - - def test_FAIL_pseudo_levels(self): - # Show how pseudo levels are handled. - flds = self.fields(c_t="000111222", pse="123123123") - file = self.save_fieldcubes(flds) - results = iris.load(file) - expected = CubeList(flds).merge() - - # NOTE: this problem is now fixed : Structured load gives the same answer. - # - # if not self.do_fast_loads: - # expected = CubeList(flds).merge() - # else: - # # Structured loading doesn't understand pseudo-level. - # # The result is rather horrible... - # - # # First get a cube over 9 timepoints. - # flds = self.fields(c_t='012345678', - # pse=1) # result gets level==2, not clear why. - # - # # Replace the time coord with an AUX coord. 
- # nine_timepoints_cube = CubeList(flds).merge_cube() - # co_time = nine_timepoints_cube.coord('time') - # nine_timepoints_cube.remove_coord(co_time) - # nine_timepoints_cube.add_aux_coord(AuxCoord.from_coord(co_time), - # 0) - # # Set the expected timepoints equivalent to '000111222'. - # nine_timepoints_cube.coord('time').points = \ - # np.array([0.0, 0.0, 0.0, 24.0, 24.0, 24.0, 48.0, 48.0, 48.0]) - # # Make a cubelist with this single cube. - # expected = CubeList([nine_timepoints_cube]) - - self.assertEqual(results, expected) - - -class TestBasic__Iris(Mixin_FieldTest, MixinBasic, tests.IrisTest): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'basic' tests with *normal* loading. - do_fast_loads = False - - -class TestBasic__Fast(Mixin_FieldTest, MixinBasic, tests.IrisTest): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'basic' tests with *FAST* loading. - do_fast_loads = True - - -class TestCallDetails__Iris(Mixin_FieldTest, MixinCallDetails, tests.IrisTest): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'call details' tests with *normal* loading. - do_fast_loads = False - - -class TestCallDetails__Fast(Mixin_FieldTest, MixinCallDetails, tests.IrisTest): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'call details' tests with *FAST* loading. - do_fast_loads = True - - -class TestDimsAndOrdering__Iris( - Mixin_FieldTest, MixinDimsAndOrdering, tests.IrisTest -): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'dimensions and ordering' tests with *normal* loading. - do_fast_loads = False - - -class TestDimsAndOrdering__Fast( - Mixin_FieldTest, MixinDimsAndOrdering, tests.IrisTest -): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'dimensions and ordering' tests with *FAST* loading. 
- do_fast_loads = True - - -class TestProblems__Iris(Mixin_FieldTest, MixinProblemCases, tests.IrisTest): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'failure cases' tests with *normal* loading. - do_fast_loads = False - - -class TestProblems__Fast(Mixin_FieldTest, MixinProblemCases, tests.IrisTest): - # Finally, an actual test-class (unittest.TestCase) : - # run the 'failure cases' tests with *FAST* loading. - do_fast_loads = True - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/plot/__init__.py b/lib/iris/tests/integration/plot/__init__.py deleted file mode 100644 index aafa488e2d..0000000000 --- a/lib/iris/tests/integration/plot/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for the :mod:`iris.plot` package.""" diff --git a/lib/iris/tests/integration/plot/test_colorbar.py b/lib/iris/tests/integration/plot/test_colorbar.py deleted file mode 100644 index a306e6c82f..0000000000 --- a/lib/iris/tests/integration/plot/test_colorbar.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test interaction between :mod:`iris.plot` and -:func:`matplotlib.pyplot.colorbar` - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.coords import AuxCoord -import iris.tests.stock - -# Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: - import matplotlib.pyplot as plt - - from iris.plot import ( - contour, - contourf, - pcolor, - pcolormesh, - points, - scatter, - ) - - -@tests.skip_plot -class TestColorBarCreation(tests.GraphicsTest): - def setUp(self): - super().setUp() - self.draw_functions = (contour, contourf, pcolormesh, pcolor) - self.cube = iris.tests.stock.lat_lon_cube() - self.cube.coord("longitude").guess_bounds() - self.cube.coord("latitude").guess_bounds() - self.traj_lon = AuxCoord( - np.linspace(-180, 180, 50), - standard_name="longitude", - units="degrees", - ) - self.traj_lat = AuxCoord( - np.sin(np.deg2rad(self.traj_lon.points)) * 30.0, - standard_name="latitude", - units="degrees", - ) - - def test_common_draw_functions(self): - for draw_function in self.draw_functions: - mappable = draw_function(self.cube) - cbar = plt.colorbar() - self.assertIs( - cbar.mappable, - mappable, - msg="Problem with draw function iris.plot.{}".format( - draw_function.__name__ - ), - ) - - def test_common_draw_functions_specified_mappable(self): - for draw_function in self.draw_functions: - mappable_initial = draw_function(self.cube, cmap="cool") - _ = draw_function(self.cube) - cbar = plt.colorbar(mappable_initial) - self.assertIs( - cbar.mappable, - mappable_initial, - msg="Problem with draw function iris.plot.{}".format( - draw_function.__name__ - ), - ) - - def test_points_with_c_kwarg(self): - mappable = points(self.cube, c=self.cube.data) - cbar = plt.colorbar() - self.assertIs(cbar.mappable, mappable) - - def test_points_with_c_kwarg_specified_mappable(self): - mappable_initial = points(self.cube, c=self.cube.data, cmap="cool") - _ = points(self.cube, c=self.cube.data) - cbar = plt.colorbar(mappable_initial) - self.assertIs(cbar.mappable, mappable_initial) - - def test_scatter_with_c_kwarg(self): - mappable = scatter( - self.traj_lon, self.traj_lat, c=self.traj_lon.points - ) - cbar = plt.colorbar() - self.assertIs(cbar.mappable, mappable) - - def 
test_scatter_with_c_kwarg_specified_mappable(self): - mappable_initial = scatter( - self.traj_lon, self.traj_lat, c=self.traj_lon.points - ) - _ = scatter( - self.traj_lon, self.traj_lat, c=self.traj_lon.points, cmap="cool" - ) - cbar = plt.colorbar(mappable_initial) - self.assertIs(cbar.mappable, mappable_initial) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/plot/test_netcdftime.py b/lib/iris/tests/integration/plot/test_netcdftime.py deleted file mode 100644 index 340f37dda7..0000000000 --- a/lib/iris/tests/integration/plot/test_netcdftime.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test plot of time coord with non-gregorian calendar. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from cf_units import Unit -import cftime -import numpy as np - -from iris.coords import AuxCoord - -if tests.NC_TIME_AXIS_AVAILABLE: - from nc_time_axis import CalendarDateTime - - -# Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_nc_time_axis -@tests.skip_plot -class Test(tests.GraphicsTest): - def test_360_day_calendar(self): - n = 360 - calendar = "360_day" - time_unit = Unit("days since 1970-01-01 00:00", calendar=calendar) - time_coord = AuxCoord(np.arange(n), "time", units=time_unit) - times = [time_unit.num2date(point) for point in time_coord.points] - times = [ - cftime.datetime( - atime.year, - atime.month, - atime.day, - atime.hour, - atime.minute, - atime.second, - calendar=calendar, - ) - for atime in times - ] - expected_ydata = np.array( - [CalendarDateTime(time, calendar) for time in times] - ) - (line1,) = iplt.plot(time_coord) - result_ydata = line1.get_ydata() - self.assertArrayEqual(expected_ydata, result_ydata) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/plot/test_nzdateline.py b/lib/iris/tests/integration/plot/test_nzdateline.py deleted file mode 100644 index 0051549794..0000000000 --- a/lib/iris/tests/integration/plot/test_nzdateline.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test set up of limited area map extents which bridge the date line. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import iris - -# Run tests in no graphics mode if matplotlib is not available. -if tests.MPL_AVAILABLE: - import matplotlib.pyplot as plt - - from iris.plot import pcolormesh - - -@tests.skip_plot -@tests.skip_data -class TestExtent(tests.IrisTest): - def test_dateline(self): - dpath = tests.get_data_path(["PP", "nzgust.pp"]) - cube = iris.load_cube(dpath) - pcolormesh(cube) - # Ensure that the limited area expected for NZ is set. 
- # This is set in longitudes with the datum set to the - # International Date Line. - self.assertTrue( - -10 < plt.gca().get_xlim()[0] < -5 - and 5 < plt.gca().get_xlim()[1] < 10 - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/plot/test_plot_2d_coords.py b/lib/iris/tests/integration/plot/test_plot_2d_coords.py deleted file mode 100644 index b8fbc5e31a..0000000000 --- a/lib/iris/tests/integration/plot/test_plot_2d_coords.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test plots with two dimensional coordinates. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs -import matplotlib.pyplot as plt -import numpy as np - -import iris -from iris.analysis.cartography import unrotate_pole -from iris.coords import AuxCoord -from iris.cube import Cube - -# Run tests in no graphics mode if matplotlib is not available. 
-if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_data -def simple_cube_w_2d_coords(): - path = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) - cube = iris.load_cube(path) - return cube - - -@tests.skip_plot -@tests.skip_data -class Test(tests.GraphicsTest): - def test_2d_coord_bounds_platecarree(self): - # To avoid a problem with Cartopy smearing the data where the - # longitude wraps, we set the central_longitude - cube = simple_cube_w_2d_coords()[0, 0] - ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=180)) - qplt.pcolormesh(cube) - ax.coastlines(resolution="110m", color="red") - self.check_graphic() - - def test_2d_coord_bounds_northpolarstereo(self): - cube = simple_cube_w_2d_coords()[0, 0] - ax = plt.axes(projection=ccrs.NorthPolarStereo()) - qplt.pcolormesh(cube) - ax.coastlines(resolution="110m", color="red") - self.check_graphic() - - -@tests.skip_plot -class Test2dContour(tests.GraphicsTest): - def test_2d_coords_contour(self): - ny, nx = 4, 6 - x1 = np.linspace(-20, 70, nx) - y1 = np.linspace(10, 60, ny) - data = np.zeros((ny, nx)) - data.flat[:] = np.arange(nx * ny) % 7 - cube = Cube(data, long_name="Odd data") - x2, y2 = np.meshgrid(x1, y1) - true_lons, true_lats = unrotate_pole(x2, y2, -130.0, 77.0) - co_x = AuxCoord(true_lons, standard_name="longitude", units="degrees") - co_y = AuxCoord(true_lats, standard_name="latitude", units="degrees") - cube.add_aux_coord(co_y, (0, 1)) - cube.add_aux_coord(co_x, (0, 1)) - ax = plt.axes(projection=ccrs.PlateCarree()) - qplt.contourf(cube) - ax.coastlines(resolution="110m", color="red") - ax.gridlines(draw_labels=True) - ax.set_extent((0, 180, 0, 90)) - self.check_graphic() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/plot/test_vector_plots.py b/lib/iris/tests/integration/plot/test_vector_plots.py deleted file mode 100644 index 37f506bd17..0000000000 --- a/lib/iris/tests/integration/plot/test_vector_plots.py +++ /dev/null @@ 
-1,233 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test some key usages of :func:`iris.plot.quiver`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs -import numpy as np - -from iris.coord_systems import Mercator -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube -from iris.tests.stock import sample_2d_latlons - -# Run tests in no graphics mode if matplotlib is not available. -if tests.MPL_AVAILABLE: - import matplotlib.pyplot as plt - - from iris.plot import barbs, quiver - - -@tests.skip_plot -class MixinVectorPlotCases: - """ - Test examples mixin, used by separate barb, quiver + streamplot classes. - - NOTE: at present for barb and quiver only, as streamplot does not support - arbitrary coordinates. - - """ - - def plot(self, plotname, *args, **kwargs): - plot_function = self.plot_function_to_test() - plot_function(*args, **kwargs) - plt.suptitle(plotname) - - @staticmethod - def _nonlatlon_xyuv(): - # Create common x, y, u, v arrays for quiver/streamplot testing. - x = np.array([0.0, 2, 3, 5]) - y = np.array([0.0, 2.5, 4]) - uv = np.array( - [ - [(0.0, 0), (0, 1), (0, -1), (2, 1)], - [(-1, 0), (-1, -1), (-1, 1), (-2, 1)], - [(1.0, 0), (1, -1), (1, 1), (-2, 2)], - ] - ) - uv = np.array(uv) - u, v = uv[..., 0], uv[..., 1] - return x, y, u, v - - @staticmethod - def _nonlatlon_uv_cubes(x, y, u, v): - # Create u and v test cubes from x, y, u, v arrays. 
- coord_cls = DimCoord if x.ndim == 1 else AuxCoord - x_coord = coord_cls(x, long_name="x") - y_coord = coord_cls(y, long_name="y") - u_cube = Cube(u, long_name="u", units="ms-1") - if x.ndim == 1: - u_cube.add_dim_coord(y_coord, 0) - u_cube.add_dim_coord(x_coord, 1) - else: - u_cube.add_aux_coord(y_coord, (0, 1)) - u_cube.add_aux_coord(x_coord, (0, 1)) - v_cube = u_cube.copy() - v_cube.rename("v") - v_cube.data = v - return u_cube, v_cube - - def test_non_latlon_1d_coords(self): - # Plot against simple 1D x and y coords. - x, y, u, v = self._nonlatlon_xyuv() - u_cube, v_cube = self._nonlatlon_uv_cubes(x, y, u, v) - self.plot("nonlatlon, 1-d coords", u_cube, v_cube) - plt.xlim(x.min() - 1, x.max() + 2) - plt.ylim(y.min() - 1, y.max() + 2) - self.check_graphic() - - def test_non_latlon_2d_coords(self): - # Plot against expanded 2D x and y coords. - x, y, u, v = self._nonlatlon_xyuv() - x, y = np.meshgrid(x, y) - u_cube, v_cube = self._nonlatlon_uv_cubes(x, y, u, v) - # Call plot : N.B. default gives wrong coords order. - self.plot("nonlatlon_2d", u_cube, v_cube, coords=("x", "y")) - plt.xlim(x.min() - 1, x.max() + 2) - plt.ylim(y.min() - 1, y.max() + 2) - self.check_graphic() - - @staticmethod - def _latlon_uv_cubes(grid_cube): - # Make a sample grid into u and v data for quiver/streamplot testing. - u_cube = grid_cube.copy() - u_cube.rename("dx") - u_cube.units = "ms-1" - v_cube = u_cube.copy() - v_cube.rename("dy") - ny, nx = u_cube.shape - nn = nx * ny - angles = np.arange(nn).reshape((ny, nx)) - angles = (angles * 360.0 / 5.5) % 360.0 - scale = np.arange(nn) % 5 - scale = (scale + 4) / 4 - scale = scale.reshape((ny, nx)) - u_cube.data = scale * np.cos(np.deg2rad(angles)) - v_cube.data = scale * np.sin(np.deg2rad(angles)) - return u_cube, v_cube - - def test_2d_plain_latlon(self): - # Test 2d vector plotting with implicit (PlateCarree) coord system. 
- u_cube, v_cube = self._latlon_uv_cubes(sample_2d_latlons()) - ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=180)) - self.plot( - "latlon_2d", u_cube, v_cube, coords=("longitude", "latitude") - ) - ax.coastlines(resolution="110m", color="red") - ax.set_global() - self.check_graphic() - - def test_2d_plain_latlon_on_polar_map(self): - # Test 2d vector plotting onto a different projection. - u_cube, v_cube = self._latlon_uv_cubes(sample_2d_latlons()) - ax = plt.axes(projection=ccrs.NorthPolarStereo()) - self.plot( - "latlon_2d_polar", u_cube, v_cube, coords=("longitude", "latitude") - ) - ax.coastlines(resolution="110m", color="red") - self.check_graphic() - - def test_2d_rotated_latlon(self): - # Test plotting vectors in a rotated latlon coord system. - u_cube, v_cube = self._latlon_uv_cubes(sample_2d_latlons(rotated=True)) - ax = plt.axes(projection=ccrs.PlateCarree(central_longitude=180)) - self.plot( - "2d_rotated", u_cube, v_cube, coords=("longitude", "latitude") - ) - ax.coastlines(resolution="110m", color="red") - ax.set_global() - self.check_graphic() - - def test_fail_unsupported_coord_system(self): - # Test plotting vectors in a rotated latlon coord system. - u_cube, v_cube = self._latlon_uv_cubes(sample_2d_latlons()) - patch_coord_system = Mercator() - for cube in u_cube, v_cube: - for coord in cube.coords(): - coord.coord_system = patch_coord_system - re_msg = ( - r"Can only plot .* lat-lon projection, .* " - r"This .* translates as Cartopy \+proj=merc .*" - ) - with self.assertRaisesRegex(ValueError, re_msg): - self.plot( - "2d_rotated", u_cube, v_cube, coords=("longitude", "latitude") - ) - - def test_circular_longitude(self): - # Test circular longitude does not cause a crash. 
- res = 5 - lat = DimCoord( - np.arange(-90, 91, res), "latitude", units="degrees_north" - ) - lon = DimCoord( - np.arange(0, 360, res), - "longitude", - units="degrees_east", - circular=True, - ) - nlat = len(lat.points) - nlon = len(lon.points) - u_arr = np.ones((nlat, nlon)) - v_arr = np.ones((nlat, nlon)) - u_cube = Cube( - u_arr, - dim_coords_and_dims=[(lat, 0), (lon, 1)], - standard_name="eastward_wind", - ) - v_cube = Cube( - v_arr, - dim_coords_and_dims=[(lat, 0), (lon, 1)], - standard_name="northward_wind", - ) - - self.plot("circular", u_cube, v_cube, coords=("longitude", "latitude")) - - -class TestBarbs(MixinVectorPlotCases, tests.GraphicsTest): - def setUp(self): - super().setUp() - - @staticmethod - def _nonlatlon_xyuv(): - # Increase the range of wind speeds used in the barbs test to test more - # barbs shapes than just circles - x, y, u, v = MixinVectorPlotCases._nonlatlon_xyuv() - scale_factor = 50 - u *= scale_factor - v *= scale_factor - return x, y, u, v - - @staticmethod - def _latlon_uv_cubes(grid_cube): - # Increase the range of wind speeds used in the barbs test to test all - # barbs shapes - u_cube, v_cube = MixinVectorPlotCases._latlon_uv_cubes(grid_cube) - scale_factor = 30 - u_cube.data *= scale_factor - v_cube.data *= scale_factor - return u_cube, v_cube - - def plot_function_to_test(self): - return barbs - - -class TestQuiver(MixinVectorPlotCases, tests.GraphicsTest): - def setUp(self): - super().setUp() - - def plot_function_to_test(self): - return quiver - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_PartialDateTime.py b/lib/iris/tests/integration/test_PartialDateTime.py deleted file mode 100644 index 563af1035c..0000000000 --- a/lib/iris/tests/integration/test_PartialDateTime.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for :class:`iris.time.PartialDateTime`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import iris -from iris.time import PartialDateTime - - -class Test(tests.IrisTest): - @tests.skip_data - def test_cftime_interface(self): - # The `netcdf4` Python module introduced new calendar classes by v1.2.7 - # This test is primarily of this interface, so the - # final test assertion is simple. - filename = tests.get_data_path(("PP", "structured", "small.pp")) - cube = iris.load_cube(filename) - pdt = PartialDateTime(year=1992, month=10, day=1, hour=2) - time_constraint = iris.Constraint(time=lambda cell: cell < pdt) - sub_cube = cube.extract(time_constraint) - self.assertEqual(sub_cube.coord("time").points.shape, (1,)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_climatology.py b/lib/iris/tests/integration/test_climatology.py deleted file mode 100644 index ba1ccaf888..0000000000 --- a/lib/iris/tests/integration/test_climatology.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for loading and saving netcdf files.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from os.path import dirname -from os.path import join as path_join -from os.path import sep as os_sep -import shutil -from subprocess import check_call -import tempfile - -import iris -from iris.tests import stock - - -class TestClimatology(iris.tests.IrisTest): - reference_cdl_path = os_sep.join( - [ - dirname(tests.__file__), - ( - "results/integration/climatology/TestClimatology/" - "reference_simpledata.cdl" - ), - ] - ) - - @classmethod - def _simple_cdl_string(cls): - with open(cls.reference_cdl_path, "r") as f: - cdl_content = f.read() - # Add the expected CDL first line since this is removed from the - # stored results file. - cdl_content = "netcdf {\n" + cdl_content - - return cdl_content - - @staticmethod - def _load_sanitised_cube(filepath): - cube = iris.load_cube(filepath) - # Remove attributes convention, if any. - cube.attributes.pop("Conventions", None) - # Remove any var-names. - for coord in cube.coords(): - coord.var_name = None - cube.var_name = None - return cube - - @classmethod - def setUpClass(cls): - # Create a temp directory for temp files. - cls.temp_dir = tempfile.mkdtemp() - cls.path_ref_cdl = path_join(cls.temp_dir, "standard.cdl") - cls.path_ref_nc = path_join(cls.temp_dir, "standard.nc") - # Create reference CDL file. - with open(cls.path_ref_cdl, "w") as f_out: - f_out.write(cls._simple_cdl_string()) - # Create reference netCDF file from reference CDL. - command = "ncgen -o {} {}".format(cls.path_ref_nc, cls.path_ref_cdl) - check_call(command, shell=True) - cls.path_temp_nc = path_join(cls.temp_dir, "tmp.nc") - - # Create reference cube. - cls.cube_ref = stock.climatology_3d() - - @classmethod - def tearDownClass(cls): - # Destroy a temp directory for temp files. 
- shutil.rmtree(cls.temp_dir) - - ############################################################################### - # Round-trip tests - - def test_cube_to_cube(self): - # Save reference cube to file, load cube from same file, test against - # reference cube. - iris.save(self.cube_ref, self.path_temp_nc) - cube = self._load_sanitised_cube(self.path_temp_nc) - self.assertEqual(cube, self.cube_ref) - - def test_file_to_file(self): - # Load cube from reference file, save same cube to file, test against - # reference CDL. - cube = iris.load_cube(self.path_ref_nc) - iris.save(cube, self.path_temp_nc) - self.assertCDL( - self.path_temp_nc, - reference_filename=self.reference_cdl_path, - flags="", - ) - - # NOTE: - # The saving half of the round-trip tests is tested in the - # appropriate dedicated test class: - # unit.fileformats.netcdf.test_Saver.Test_write.test_with_climatology . - # The loading half has no equivalent dedicated location, so is tested - # here as test_load_from_file. - - def test_load_from_file(self): - # Create cube from file, test against reference cube. - cube = self._load_sanitised_cube(self.path_ref_nc) - self.assertEqual(cube, self.cube_ref) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_cube.py b/lib/iris/tests/integration/test_cube.py deleted file mode 100644 index 996362f594..0000000000 --- a/lib/iris/tests/integration/test_cube.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for :class:`iris.cube.Cube`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -import iris -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.analysis import MEAN -from iris.cube import Cube - - -class Test_aggregated_by(tests.IrisTest): - @tests.skip_data - def test_agg_by_aux_coord(self): - problem_test_file = tests.get_data_path( - ("NetCDF", "testing", "small_theta_colpex.nc") - ) - cube = iris.load_cube(problem_test_file, "air_potential_temperature") - - # Test aggregating by aux coord, notably the `forecast_period` aux - # coord on `cube`, whose `_points` attribute is a lazy array. - # This test then ensures that aggregating using `points` instead is - # successful. - - # First confirm we've got a lazy array. - # NB. This checks the merge process in `load_cube()` hasn't - # triggered the load of the coordinate's data. - forecast_period_coord = cube.coord("forecast_period") - - self.assertTrue(is_lazy_data(forecast_period_coord.core_points())) - - # Now confirm we can aggregate along this coord. 
- res_cube = cube.aggregated_by("forecast_period", MEAN) - res_cell_methods = res_cube.cell_methods[0] - self.assertEqual(res_cell_methods.coord_names, ("forecast_period",)) - self.assertEqual(res_cell_methods.method, "mean") - - -class TestDataFillValue(tests.IrisTest): - def test_real(self): - data = np.ma.masked_array([1, 2, 3], [0, 1, 0], fill_value=10) - cube = Cube(data) - cube.data.fill_value = 20 - self.assertEqual(cube.data.fill_value, 20) - - def test_lazy(self): - data = np.ma.masked_array([1, 2, 3], [0, 1, 0], fill_value=10) - data = as_lazy_data(data) - cube = Cube(data) - cube.data.fill_value = 20 - self.assertEqual(cube.data.fill_value, 20) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_ff.py b/lib/iris/tests/integration/test_ff.py deleted file mode 100644 index 0b0ccf4c5c..0000000000 --- a/lib/iris/tests/integration/test_ff.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for loading LBC fieldsfiles.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -import iris - - -@tests.skip_data -class TestLBC(tests.IrisTest): - def setUp(self): - # Load multiple cubes from a test file. - file_path = tests.get_data_path(("FF", "lbc", "small_lbc")) - self.all_cubes = iris.load(file_path) - # Select the second cube for detailed checks (the first is orography). - self.test_cube = self.all_cubes[1] - - def test_various_cubes_shapes(self): - # Check a few aspects of the loaded cubes. 
- cubes = self.all_cubes - self.assertEqual(len(cubes), 10) - self.assertEqual(cubes[0].shape, (16, 16)) - self.assertEqual(cubes[1].shape, (2, 4, 16, 16)) - self.assertEqual(cubes[3].shape, (2, 5, 16, 16)) - - def test_cube_coords(self): - # Check coordinates of one cube. - cube = self.test_cube - self.assertEqual(len(cube.coords()), 8) - for name, shape in [ - ("forecast_reference_time", (1,)), - ("time", (2,)), - ("forecast_period", (2,)), - ("model_level_number", (4,)), - ("level_height", (1,)), - ("sigma", (1,)), - ("grid_latitude", (16,)), - ("grid_longitude", (16,)), - ]: - coords = cube.coords(name) - self.assertEqual( - len(coords), - 1, - "expected one {!r} coord, found {}".format(name, len(coords)), - ) - (coord,) = coords - self.assertEqual( - coord.shape, - shape, - "coord {!r} shape is {} instead of {!r}.".format( - name, coord.shape, shape - ), - ) - - def test_cube_data(self): - # Check just a few points of the data. - cube = self.test_cube - self.assertArrayAllClose( - cube.data[:, ::2, 6, 13], - np.array([[4.218922, 10.074577], [4.626897, 6.520156]]), - atol=1.0e-6, - ) - - def test_cube_mask(self): - # Check the data mask : should be just the centre 6x2 section. 
- cube = self.test_cube - mask = np.zeros((2, 4, 16, 16), dtype=bool) - mask[:, :, 7:9, 5:11] = True - self.assertArrayEqual(cube.data.mask, mask) - - -@tests.skip_data -class TestSkipField(tests.IrisTest): - def test_missing_lbrel(self): - infile = tests.get_data_path(("FF", "lbrel_missing")) - with mock.patch("warnings.warn") as warn_fn: - fields = iris.load(infile) - self.assertIn( - "Input field skipped as PPField creation failed : " - "error = 'Unsupported header release number: -32768'", - warn_fn.call_args[0][0], - ) - self.assertEqual(len(fields), 2) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_netcdf.py b/lib/iris/tests/integration/test_netcdf.py deleted file mode 100644 index f7aaa1d05c..0000000000 --- a/lib/iris/tests/integration/test_netcdf.py +++ /dev/null @@ -1,734 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for loading and saving netcdf files.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from contextlib import contextmanager -from itertools import repeat -import os.path -from os.path import join as path_join -import shutil -from subprocess import check_call -import tempfile -from unittest import mock -import warnings - -import netCDF4 as nc -import numpy as np -import numpy.ma as ma - -import iris -from iris.coords import CellMethod -from iris.cube import Cube, CubeList -from iris.fileformats.netcdf import ( - CF_CONVENTIONS_VERSION, - Saver, - UnknownCellMethodWarning, -) -import iris.tests.stock as stock - - -@tests.skip_data -class TestAtmosphereSigma(tests.IrisTest): - def setUp(self): - # Modify stock cube so it is suitable to have a atmosphere sigma - # factory added to it. 
- cube = stock.realistic_4d_no_derived() - cube.coord("surface_altitude").rename("surface_air_pressure") - cube.coord("surface_air_pressure").units = "Pa" - cube.coord("sigma").units = "1" - ptop_coord = iris.coords.AuxCoord(1000.0, var_name="ptop", units="Pa") - cube.add_aux_coord(ptop_coord, ()) - cube.remove_coord("level_height") - # Construct and add atmosphere sigma factory. - factory = iris.aux_factory.AtmosphereSigmaFactory( - cube.coord("ptop"), - cube.coord("sigma"), - cube.coord("surface_air_pressure"), - ) - cube.add_aux_factory(factory) - self.cube = cube - - def test_save(self): - with self.temp_filename(suffix=".nc") as filename: - iris.save(self.cube, filename) - self.assertCDL(filename) - - def test_save_load_loop(self): - # Ensure that the AtmosphereSigmaFactory is automatically loaded - # when loading the file. - with self.temp_filename(suffix=".nc") as filename: - iris.save(self.cube, filename) - cube = iris.load_cube(filename, "air_potential_temperature") - assert cube.coords("air_pressure") - - -@tests.skip_data -class TestHybridPressure(tests.IrisTest): - def setUp(self): - # Modify stock cube so it is suitable to have a - # hybrid pressure factory added to it. - cube = stock.realistic_4d_no_derived() - cube.coord("surface_altitude").rename("surface_air_pressure") - cube.coord("surface_air_pressure").units = "Pa" - cube.coord("level_height").rename("level_pressure") - cube.coord("level_pressure").units = "Pa" - # Construct and add hybrid pressure factory. 
- factory = iris.aux_factory.HybridPressureFactory( - cube.coord("level_pressure"), - cube.coord("sigma"), - cube.coord("surface_air_pressure"), - ) - cube.add_aux_factory(factory) - self.cube = cube - - def test_save(self): - with self.temp_filename(suffix=".nc") as filename: - iris.save(self.cube, filename) - self.assertCDL(filename) - - def test_save_load_loop(self): - # Tests an issue where the variable names in the formula - # terms changed to the standard_names instead of the variable names - # when loading a previously saved cube. - with self.temp_filename(suffix=".nc") as filename, self.temp_filename( - suffix=".nc" - ) as other_filename: - iris.save(self.cube, filename) - cube = iris.load_cube(filename, "air_potential_temperature") - iris.save(cube, other_filename) - other_cube = iris.load_cube( - other_filename, "air_potential_temperature" - ) - self.assertEqual(cube, other_cube) - - -@tests.skip_data -class TestSaveMultipleAuxFactories(tests.IrisTest): - def test_hybrid_height_and_pressure(self): - cube = stock.realistic_4d() - cube.add_aux_coord( - iris.coords.DimCoord( - 1200.0, long_name="level_pressure", units="hPa" - ) - ) - cube.add_aux_coord( - iris.coords.DimCoord(0.5, long_name="other sigma", units="1") - ) - cube.add_aux_coord( - iris.coords.DimCoord( - 1000.0, long_name="surface_air_pressure", units="hPa" - ) - ) - factory = iris.aux_factory.HybridPressureFactory( - cube.coord("level_pressure"), - cube.coord("other sigma"), - cube.coord("surface_air_pressure"), - ) - cube.add_aux_factory(factory) - with self.temp_filename(suffix=".nc") as filename: - iris.save(cube, filename) - self.assertCDL(filename) - - def test_shared_primary(self): - cube = stock.realistic_4d() - factory = iris.aux_factory.HybridHeightFactory( - cube.coord("level_height"), - cube.coord("sigma"), - cube.coord("surface_altitude"), - ) - factory.rename("another altitude") - cube.add_aux_factory(factory) - with self.temp_filename( - suffix=".nc" - ) as filename, 
self.assertRaisesRegex( - ValueError, "multiple aux factories" - ): - iris.save(cube, filename) - - def test_hybrid_height_cubes(self): - hh1 = stock.simple_4d_with_hybrid_height() - hh1.attributes["cube"] = "hh1" - hh2 = stock.simple_4d_with_hybrid_height() - hh2.attributes["cube"] = "hh2" - sa = hh2.coord("surface_altitude") - sa.points = sa.points * 10 - with self.temp_filename(".nc") as fname: - iris.save([hh1, hh2], fname) - cubes = iris.load(fname, "air_temperature") - cubes = sorted(cubes, key=lambda cube: cube.attributes["cube"]) - self.assertCML(cubes) - - def test_hybrid_height_cubes_on_dimension_coordinate(self): - hh1 = stock.hybrid_height() - hh2 = stock.hybrid_height() - sa = hh2.coord("surface_altitude") - sa.points = sa.points * 10 - emsg = "Unable to create dimensonless vertical coordinate." - with self.temp_filename(".nc") as fname, self.assertRaisesRegex( - ValueError, emsg - ): - iris.save([hh1, hh2], fname) - - -class TestUmVersionAttribute(tests.IrisTest): - def test_single_saves_as_global(self): - cube = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"um_version": "4.3"}, - ) - with self.temp_filename(".nc") as nc_path: - iris.save(cube, nc_path) - self.assertCDL(nc_path) - - def test_multiple_same_saves_as_global(self): - cube_a = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"um_version": "4.3"}, - ) - cube_b = Cube( - [1.0], - standard_name="air_pressure", - units="hPa", - attributes={"um_version": "4.3"}, - ) - with self.temp_filename(".nc") as nc_path: - iris.save(CubeList([cube_a, cube_b]), nc_path) - self.assertCDL(nc_path) - - def test_multiple_different_saves_on_variables(self): - cube_a = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"um_version": "4.3"}, - ) - cube_b = Cube( - [1.0], - standard_name="air_pressure", - units="hPa", - attributes={"um_version": "4.4"}, - ) - with self.temp_filename(".nc") as nc_path: - 
iris.save(CubeList([cube_a, cube_b]), nc_path) - self.assertCDL(nc_path) - - -@contextmanager -def _patch_site_configuration(): - def cf_patch_conventions(conventions): - return ", ".join([conventions, "convention1, convention2"]) - - def update(config): - config["cf_profile"] = mock.Mock(name="cf_profile") - config["cf_patch"] = mock.Mock(name="cf_patch") - config["cf_patch_conventions"] = cf_patch_conventions - - orig_site_config = iris.site_configuration.copy() - update(iris.site_configuration) - yield - iris.site_configuration = orig_site_config - - -class TestConventionsAttributes(tests.IrisTest): - def test_patching_conventions_attribute(self): - # Ensure that user defined conventions are wiped and those which are - # saved patched through site_config can be loaded without an exception - # being raised. - cube = Cube( - [1.0], - standard_name="air_temperature", - units="K", - attributes={"Conventions": "some user defined conventions"}, - ) - - # Patch the site configuration dictionary. - with _patch_site_configuration(), self.temp_filename(".nc") as nc_path: - iris.save(cube, nc_path) - res = iris.load_cube(nc_path) - - self.assertEqual( - res.attributes["Conventions"], - "{}, {}, {}".format( - CF_CONVENTIONS_VERSION, "convention1", "convention2" - ), - ) - - -class TestLazySave(tests.IrisTest): - @tests.skip_data - def test_lazy_preserved_save(self): - fpath = tests.get_data_path( - ("NetCDF", "label_and_climate", "small_FC_167_mon_19601101.nc") - ) - acube = iris.load_cube(fpath, "air_temperature") - self.assertTrue(acube.has_lazy_data()) - # Also check a coord with lazy points + bounds. - self.assertTrue(acube.coord("forecast_period").has_lazy_points()) - self.assertTrue(acube.coord("forecast_period").has_lazy_bounds()) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(acube) - # Check that cube data is not realised, also coord points + bounds. 
- self.assertTrue(acube.has_lazy_data()) - self.assertTrue(acube.coord("forecast_period").has_lazy_points()) - self.assertTrue(acube.coord("forecast_period").has_lazy_bounds()) - - -@tests.skip_data -class TestCellMeasures(tests.IrisTest): - def setUp(self): - self.fname = tests.get_data_path(("NetCDF", "ORCA2", "votemper.nc")) - - def test_load_raw(self): - (cube,) = iris.load_raw(self.fname) - self.assertEqual(len(cube.cell_measures()), 1) - self.assertEqual(cube.cell_measures()[0].measure, "area") - - def test_load(self): - cube = iris.load_cube(self.fname) - self.assertEqual(len(cube.cell_measures()), 1) - self.assertEqual(cube.cell_measures()[0].measure, "area") - - def test_merge_cell_measure_aware(self): - (cube1,) = iris.load_raw(self.fname) - (cube2,) = iris.load_raw(self.fname) - cube2._cell_measures_and_dims[0][0].var_name = "not_areat" - cubes = CubeList([cube1, cube2]).merge() - self.assertEqual(len(cubes), 2) - - def test_concatenate_cell_measure_aware(self): - (cube1,) = iris.load_raw(self.fname) - cube1 = cube1[:, :, 0, 0] - cm_and_dims = cube1._cell_measures_and_dims - (cube2,) = iris.load_raw(self.fname) - cube2 = cube2[:, :, 0, 0] - cube2._cell_measures_and_dims[0][0].var_name = "not_areat" - cube2.coord("time").points = cube2.coord("time").points + 1 - cubes = CubeList([cube1, cube2]).concatenate() - self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims) - self.assertEqual(len(cubes), 2) - - def test_concatenate_cell_measure_match(self): - (cube1,) = iris.load_raw(self.fname) - cube1 = cube1[:, :, 0, 0] - cm_and_dims = cube1._cell_measures_and_dims - (cube2,) = iris.load_raw(self.fname) - cube2 = cube2[:, :, 0, 0] - cube2.coord("time").points = cube2.coord("time").points + 1 - cubes = CubeList([cube1, cube2]).concatenate() - self.assertEqual(cubes[0]._cell_measures_and_dims, cm_and_dims) - self.assertEqual(len(cubes), 1) - - def test_round_trip(self): - (cube,) = iris.load(self.fname) - with self.temp_filename(suffix=".nc") as filename: 
- iris.save(cube, filename, unlimited_dimensions=[]) - (round_cube,) = iris.load_raw(filename) - self.assertEqual(len(round_cube.cell_measures()), 1) - self.assertEqual(round_cube.cell_measures()[0].measure, "area") - - def test_print(self): - cube = iris.load_cube(self.fname) - printed = cube.__str__() - self.assertIn( - ( - "Cell measures:\n" - " cell_area - - " - " x x" - ), - printed, - ) - - -@tests.skip_data -class TestCMIP6VolcelloLoad(tests.IrisTest): - def setUp(self): - self.fname = tests.get_data_path( - ( - "NetCDF", - "volcello", - "volcello_Ofx_CESM2_deforest-globe_r1i1p1f1_gn.nc", - ) - ) - - def test_cmip6_volcello_load_issue_3367(self): - # Ensure that reading a file which references itself in - # `cell_measures` can be read. At the same time, ensure that we - # still receive a warning about other variables mentioned in - # `cell_measures` i.e. a warning should be raised about missing - # areacello. - areacello_str = "areacello" - volcello_str = "volcello" - expected_msg = ( - "Missing CF-netCDF measure variable %r, " - "referenced by netCDF variable %r" % (areacello_str, volcello_str) - ) - - with mock.patch("warnings.warn") as warn: - # ensure file loads without failure - cube = iris.load_cube(self.fname) - warn.assert_has_calls([mock.call(expected_msg)]) - - # extra check to ensure correct variable was found - assert cube.standard_name == "ocean_volume" - - -class TestSelfReferencingVarLoad(tests.IrisTest): - def setUp(self): - self.temp_dir_path = os.path.join( - tempfile.mkdtemp(), "issue_3367_volcello_test_file.nc" - ) - dataset = nc.Dataset(self.temp_dir_path, "w") - - dataset.createDimension("lat", 4) - dataset.createDimension("lon", 5) - dataset.createDimension("lev", 3) - - latitudes = dataset.createVariable("lat", np.float64, ("lat",)) - longitudes = dataset.createVariable("lon", np.float64, ("lon",)) - levels = dataset.createVariable("lev", np.float64, ("lev",)) - volcello = dataset.createVariable( - "volcello", np.float32, ("lat", 
"lon", "lev") - ) - - latitudes.standard_name = "latitude" - latitudes.units = "degrees_north" - latitudes.axis = "Y" - latitudes[:] = np.linspace(-90, 90, 4) - - longitudes.standard_name = "longitude" - longitudes.units = "degrees_east" - longitudes.axis = "X" - longitudes[:] = np.linspace(0, 360, 5) - - levels.standard_name = "olevel" - levels.units = "centimeters" - levels.positive = "down" - levels.axis = "Z" - levels[:] = np.linspace(0, 10 ** 5, 3) - - volcello.id = "volcello" - volcello.out_name = "volcello" - volcello.standard_name = "ocean_volume" - volcello.units = "m3" - volcello.realm = "ocean" - volcello.frequency = "fx" - volcello.cell_measures = "area: areacello volume: volcello" - volcello = np.arange(4 * 5 * 3).reshape((4, 5, 3)) - - dataset.close() - - def test_self_referencing_load_issue_3367(self): - # Ensure that reading a file which references itself in - # `cell_measures` can be read. At the same time, ensure that we - # still receive a warning about other variables mentioned in - # `cell_measures` i.e. a warning should be raised about missing - # areacello. 
- areacello_str = "areacello" - volcello_str = "volcello" - expected_msg = ( - "Missing CF-netCDF measure variable %r, " - "referenced by netCDF variable %r" % (areacello_str, volcello_str) - ) - - with mock.patch("warnings.warn") as warn: - # ensure file loads without failure - cube = iris.load_cube(self.temp_dir_path) - warn.assert_called_with(expected_msg) - - # extra check to ensure correct variable was found - assert cube.standard_name == "ocean_volume" - - def tearDown(self): - os.remove(self.temp_dir_path) - - -class TestCellMethod_unknown(tests.IrisTest): - def test_unknown_method(self): - cube = Cube([1, 2], long_name="odd_phenomenon") - cube.add_cell_method(CellMethod(method="oddity", coords=("x",))) - temp_dirpath = tempfile.mkdtemp() - try: - temp_filepath = os.path.join(temp_dirpath, "tmp.nc") - iris.save(cube, temp_filepath) - with warnings.catch_warnings(record=True) as warning_records: - iris.load(temp_filepath) - # Filter to get the warning we are interested in. - warning_messages = [record.message for record in warning_records] - warning_messages = [ - warn - for warn in warning_messages - if isinstance(warn, UnknownCellMethodWarning) - ] - self.assertEqual(len(warning_messages), 1) - message = warning_messages[0].args[0] - msg = ( - "NetCDF variable 'odd_phenomenon' contains unknown cell " - "method 'oddity'" - ) - self.assertIn(msg, message) - finally: - shutil.rmtree(temp_dirpath) - - -@tests.skip_data -class TestCoordSystem(tests.IrisTest): - def test_load_laea_grid(self): - cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "lambert_azimuthal_equal_area", "euro_air_temp.nc") - ) - ) - self.assertCML(cube, ("netcdf", "netcdf_laea.cml")) - - -def _get_scale_factor_add_offset(cube, datatype): - """Utility function used by netCDF data packing tests.""" - if isinstance(datatype, dict): - dt = np.dtype(datatype["dtype"]) - else: - dt = np.dtype(datatype) - cmax = cube.data.max() - cmin = cube.data.min() - n = dt.itemsize * 8 - if 
ma.isMaskedArray(cube.data): - masked = True - else: - masked = False - if masked: - scale_factor = (cmax - cmin) / (2 ** n - 2) - else: - scale_factor = (cmax - cmin) / (2 ** n - 1) - if dt.kind == "u": - add_offset = cmin - elif dt.kind == "i": - if masked: - add_offset = (cmax + cmin) / 2 - else: - add_offset = cmin + 2 ** (n - 1) * scale_factor - return (scale_factor, add_offset) - - -@tests.skip_data -class TestPackedData(tests.IrisTest): - def _single_test(self, datatype, CDLfilename, manual=False): - # Read PP input file. - file_in = tests.get_data_path( - ( - "PP", - "cf_processing", - "000003000000.03.236.000128.1990.12.01.00.00.b.pp", - ) - ) - cube = iris.load_cube(file_in) - scale_factor, offset = _get_scale_factor_add_offset(cube, datatype) - if manual: - packspec = dict( - dtype=datatype, scale_factor=scale_factor, add_offset=offset - ) - else: - packspec = datatype - # Write Cube to netCDF file. - with self.temp_filename(suffix=".nc") as file_out: - iris.save(cube, file_out, packing=packspec) - decimal = int(-np.log10(scale_factor)) - packedcube = iris.load_cube(file_out) - # Check that packed cube is accurate to expected precision - self.assertArrayAlmostEqual( - cube.data, packedcube.data, decimal=decimal - ) - # Check the netCDF file against CDL expected output. 
- self.assertCDL( - file_out, - ("integration", "netcdf", "TestPackedData", CDLfilename), - ) - - def test_single_packed_signed(self): - """Test saving a single CF-netCDF file with packing.""" - self._single_test("i2", "single_packed_signed.cdl") - - def test_single_packed_unsigned(self): - """Test saving a single CF-netCDF file with packing into unsigned.""" - self._single_test("u1", "single_packed_unsigned.cdl") - - def test_single_packed_manual_scale(self): - """Test saving a single CF-netCDF file with packing with scale - factor and add_offset set manually.""" - self._single_test("i2", "single_packed_manual.cdl", manual=True) - - def _multi_test(self, CDLfilename, multi_dtype=False): - """Test saving multiple packed cubes with pack_dtype list.""" - # Read PP input file. - file_in = tests.get_data_path( - ("PP", "cf_processing", "abcza_pa19591997_daily_29.b.pp") - ) - cubes = iris.load(file_in) - # ensure cube order is the same: - cubes.sort(key=lambda cube: cube.cell_methods[0].method) - datatype = "i2" - scale_factor, offset = _get_scale_factor_add_offset(cubes[0], datatype) - if multi_dtype: - packdict = dict( - dtype=datatype, scale_factor=scale_factor, add_offset=offset - ) - packspec = [packdict, None, "u2"] - dtypes = packspec - else: - packspec = datatype - dtypes = repeat(packspec) - - # Write Cube to netCDF file. - with self.temp_filename(suffix=".nc") as file_out: - iris.save(cubes, file_out, packing=packspec) - # Check the netCDF file against CDL expected output. 
- self.assertCDL( - file_out, - ("integration", "netcdf", "TestPackedData", CDLfilename), - ) - packedcubes = iris.load(file_out) - packedcubes.sort(key=lambda cube: cube.cell_methods[0].method) - for cube, packedcube, dtype in zip(cubes, packedcubes, dtypes): - if dtype: - sf, ao = _get_scale_factor_add_offset(cube, dtype) - decimal = int(-np.log10(sf)) - # Check that packed cube is accurate to expected precision - self.assertArrayAlmostEqual( - cube.data, packedcube.data, decimal=decimal - ) - else: - self.assertArrayEqual(cube.data, packedcube.data) - - def test_multi_packed_single_dtype(self): - """Test saving multiple packed cubes with the same pack_dtype.""" - # Read PP input file. - self._multi_test("multi_packed_single_dtype.cdl") - - def test_multi_packed_multi_dtype(self): - """Test saving multiple packed cubes with pack_dtype list.""" - # Read PP input file. - self._multi_test("multi_packed_multi_dtype.cdl", multi_dtype=True) - - -class TestScalarCube(tests.IrisTest): - def test_scalar_cube_save_load(self): - cube = iris.cube.Cube(1, long_name="scalar_cube") - with self.temp_filename(suffix=".nc") as fout: - iris.save(cube, fout) - scalar_cube = iris.load_cube(fout) - self.assertEqual(scalar_cube.name(), "scalar_cube") - - -class TestStandardName(tests.IrisTest): - def test_standard_name_roundtrip(self): - standard_name = "air_temperature detection_minimum" - cube = iris.cube.Cube(1, standard_name=standard_name) - with self.temp_filename(suffix=".nc") as fout: - iris.save(cube, fout) - detection_limit_cube = iris.load_cube(fout) - self.assertEqual(detection_limit_cube.standard_name, standard_name) - - -class TestLoadMinimalGeostationary(tests.IrisTest): - """ - Check we can load data with a geostationary grid-mapping, even when the - 'false-easting' and 'false_northing' properties are missing. 
- - """ - - _geostationary_problem_cdl = """ -netcdf geostationary_problem_case { -dimensions: - y = 2 ; - x = 3 ; -variables: - short radiance(y, x) ; - radiance:standard_name = "toa_outgoing_radiance_per_unit_wavelength" ; - radiance:units = "W m-2 sr-1 um-1" ; - radiance:coordinates = "y x" ; - radiance:grid_mapping = "imager_grid_mapping" ; - short y(y) ; - y:units = "rad" ; - y:axis = "Y" ; - y:long_name = "fixed grid projection y-coordinate" ; - y:standard_name = "projection_y_coordinate" ; - short x(x) ; - x:units = "rad" ; - x:axis = "X" ; - x:long_name = "fixed grid projection x-coordinate" ; - x:standard_name = "projection_x_coordinate" ; - int imager_grid_mapping ; - imager_grid_mapping:grid_mapping_name = "geostationary" ; - imager_grid_mapping:perspective_point_height = 35786023. ; - imager_grid_mapping:semi_major_axis = 6378137. ; - imager_grid_mapping:semi_minor_axis = 6356752.31414 ; - imager_grid_mapping:latitude_of_projection_origin = 0. ; - imager_grid_mapping:longitude_of_projection_origin = -75. ; - imager_grid_mapping:sweep_angle_axis = "x" ; - -data: - - // coord values, just so these can be dim-coords - y = 0, 1 ; - x = 0, 1, 2 ; - -} -""" - - @classmethod - def setUpClass(cls): - # Create a temp directory for transient test files. - cls.temp_dir = tempfile.mkdtemp() - cls.path_test_cdl = path_join(cls.temp_dir, "geos_problem.cdl") - cls.path_test_nc = path_join(cls.temp_dir, "geos_problem.nc") - # Create a reference file from the CDL text. - with open(cls.path_test_cdl, "w") as f_out: - f_out.write(cls._geostationary_problem_cdl) - # Call 'ncgen' to make an actual netCDF file from the CDL. - command = "ncgen -o {} {}".format(cls.path_test_nc, cls.path_test_cdl) - check_call(command, shell=True) - - @classmethod - def tearDownClass(cls): - # Destroy the temp directory. - shutil.rmtree(cls.temp_dir) - - def test_geostationary_no_false_offsets(self): - # Check we can load the test data and coordinate system properties are correct. 
- cube = iris.load_cube(self.path_test_nc) - # Check the coordinate system properties has the correct default properties. - cs = cube.coord_system() - self.assertIsInstance(cs, iris.coord_systems.Geostationary) - self.assertEqual(cs.false_easting, 0.0) - self.assertEqual(cs.false_northing, 0.0) - - -@tests.skip_data -class TestConstrainedLoad(tests.IrisTest): - filename = tests.get_data_path( - ("NetCDF", "label_and_climate", "A1B-99999a-river-sep-2070-2099.nc") - ) - - def test_netcdf_with_NameConstraint(self): - constr = iris.NameConstraint(var_name="cdf_temp_dmax_tmean_abs") - cubes = iris.load(self.filename, constr) - self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].var_name, "cdf_temp_dmax_tmean_abs") - - def test_netcdf_with_no_constraint(self): - cubes = iris.load(self.filename) - self.assertEqual(len(cubes), 3) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_new_axis.py b/lib/iris/tests/integration/test_new_axis.py deleted file mode 100644 index 876eccbb63..0000000000 --- a/lib/iris/tests/integration/test_new_axis.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for :func:`iris.util.new_axis`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import iris -from iris.util import new_axis - - -class Test(tests.IrisTest): - @tests.skip_data - def test_lazy_data(self): - filename = tests.get_data_path(("PP", "globClim1", "theta.pp")) - cube = iris.load_cube(filename) - new_cube = new_axis(cube, "time") - self.assertTrue(cube.has_lazy_data()) - self.assertTrue(new_cube.has_lazy_data()) - self.assertEqual(new_cube.shape, (1,) + cube.shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_pickle.py b/lib/iris/tests/integration/test_pickle.py deleted file mode 100644 index fa5ddbd73e..0000000000 --- a/lib/iris/tests/integration/test_pickle.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for pickling things.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import pickle - -import iris - - -class Common: - def pickle_cube(self, protocol): - # Ensure that data proxies are pickleable. 
- cube = iris.load(self.path)[0] - with self.temp_filename(".pkl") as filename: - with open(filename, "wb") as f: - pickle.dump(cube, f, protocol) - with open(filename, "rb") as f: - ncube = pickle.load(f) - self.assertEqual(ncube, cube) - - def test_protocol_0(self): - self.pickle_cube(0) - - def test_protocol_1(self): - self.pickle_cube(1) - - def test_protocol_2(self): - self.pickle_cube(2) - - -@tests.skip_data -class test_netcdf(Common, tests.IrisTest): - def setUp(self): - self.path = tests.get_data_path( - ("NetCDF", "global", "xyt", "SMALL_hires_wind_u_for_ipcc4.nc") - ) - - -@tests.skip_data -class test_pp(Common, tests.IrisTest): - def setUp(self): - self.path = tests.get_data_path(("PP", "aPPglob1", "global.pp")) - - -@tests.skip_data -class test_ff(Common, tests.IrisTest): - def setUp(self): - self.path = tests.get_data_path(("FF", "n48_multi_field")) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py deleted file mode 100644 index db2113025d..0000000000 --- a/lib/iris/tests/integration/test_pp.py +++ /dev/null @@ -1,808 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for loading and saving PP files.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import os -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.coords import AuxCoord, CellMethod, DimCoord -from iris.cube import Cube -from iris.exceptions import IgnoreCubeException -import iris.fileformats.pp -from iris.fileformats.pp import load_pairs_from_fields -import iris.fileformats.pp_load_rules -from iris.fileformats.pp_save_rules import verify -import iris.util - - -class TestVertical(tests.IrisTest): - def _test_coord(self, cube, point, bounds=None, **kwargs): - coords = cube.coords(**kwargs) - self.assertEqual( - len(coords), - 1, - "failed to find exactly one coord" " using: {}".format(kwargs), - ) - self.assertEqual(coords[0].points, point) - if bounds is not None: - self.assertArrayEqual(coords[0].bounds, [bounds]) - - @staticmethod - def _mock_field(**kwargs): - mock_data = np.zeros(1) - mock_core_data = mock.MagicMock(return_value=mock_data) - field = mock.MagicMock( - lbuser=[0] * 7, - lbrsvd=[0] * 4, - brsvd=[0] * 4, - brlev=0, - t1=mock.MagicMock(year=1990, month=1, day=3), - t2=mock.MagicMock(year=1990, month=1, day=3), - core_data=mock_core_data, - realised_dtype=mock_data.dtype, - ) - field.configure_mock(**kwargs) - return field - - def test_soil_level_round_trip(self): - # Use pp.load_cubes() to convert a fake PPField into a Cube. - # NB. Use MagicMock so that SplittableInt header items, such as - # LBCODE, support len(). 
- soil_level = 1234 - field = self._mock_field( - lbvc=6, lblev=soil_level, stash=iris.fileformats.pp.STASH(1, 0, 9) - ) - load = mock.Mock(return_value=iter([field])) - with mock.patch("iris.fileformats.pp.load", new=load) as load: - cube = next(iris.fileformats.pp.load_cubes("DUMMY")) - - self.assertIn("soil", cube.standard_name) - self._test_coord(cube, soil_level, long_name="soil_model_level_number") - - # Now use the save rules to convert the Cube back into a PPField. - field = iris.fileformats.pp.PPField3() - field.lbfc = 0 - field.lbvc = 0 - field.brsvd = [None] * 4 - field.brlev = None - field = verify(cube, field) - - # Check the vertical coordinate is as originally specified. - self.assertEqual(field.lbvc, 6) - self.assertEqual(field.lblev, soil_level) - self.assertEqual(field.blev, soil_level) - self.assertEqual(field.brsvd[0], 0) - self.assertEqual(field.brlev, 0) - - def test_soil_depth_round_trip(self): - # Use pp.load_cubes() to convert a fake PPField into a Cube. - # NB. Use MagicMock so that SplittableInt header items, such as - # LBCODE, support len(). - lower, point, upper = 1.2, 3.4, 5.6 - brsvd = [lower, 0, 0, 0] - field = self._mock_field( - lbvc=6, - blev=point, - brsvd=brsvd, - brlev=upper, - stash=iris.fileformats.pp.STASH(1, 0, 9), - ) - load = mock.Mock(return_value=iter([field])) - with mock.patch("iris.fileformats.pp.load", new=load) as load: - cube = next(iris.fileformats.pp.load_cubes("DUMMY")) - - self.assertIn("soil", cube.standard_name) - self._test_coord( - cube, point, bounds=[lower, upper], standard_name="depth" - ) - - # Now use the save rules to convert the Cube back into a PPField. - field = iris.fileformats.pp.PPField3() - field.lbfc = 0 - field.lbvc = 0 - field.brlev = None - field.brsvd = [None] * 4 - field = verify(cube, field) - - # Check the vertical coordinate is as originally specified. 
- self.assertEqual(field.lbvc, 6) - self.assertEqual(field.blev, point) - self.assertEqual(field.brsvd[0], lower) - self.assertEqual(field.brlev, upper) - - def test_potential_temperature_level_round_trip(self): - # Check save+load for data on 'potential temperature' levels. - - # Use pp.load_cubes() to convert a fake PPField into a Cube. - # NB. Use MagicMock so that SplittableInt header items, such as - # LBCODE, support len(). - potm_value = 22.5 - field = self._mock_field(lbvc=19, blev=potm_value) - load = mock.Mock(return_value=iter([field])) - with mock.patch("iris.fileformats.pp.load", new=load): - cube = next(iris.fileformats.pp.load_cubes("DUMMY")) - - self._test_coord( - cube, potm_value, standard_name="air_potential_temperature" - ) - - # Now use the save rules to convert the Cube back into a PPField. - field = iris.fileformats.pp.PPField3() - field.lbfc = 0 - field.lbvc = 0 - field = verify(cube, field) - - # Check the vertical coordinate is as originally specified. - self.assertEqual(field.lbvc, 19) - self.assertEqual(field.blev, potm_value) - - @staticmethod - def _field_with_data(scale=1, **kwargs): - x, y = 40, 30 - mock_data = np.arange(1200).reshape(y, x) * scale - mock_core_data = mock.MagicMock(return_value=mock_data) - field = mock.MagicMock( - core_data=mock_core_data, - realised_dtype=mock_data.dtype, - lbcode=[1], - lbnpt=x, - lbrow=y, - bzx=350, - bdx=1.5, - bzy=40, - bdy=1.5, - lbuser=[0] * 7, - lbrsvd=[0] * 4, - t1=mock.MagicMock(year=1990, month=1, day=3), - t2=mock.MagicMock(year=1990, month=1, day=3), - ) - - field._x_coord_name = lambda: "longitude" - field._y_coord_name = lambda: "latitude" - field.coord_system = lambda: None - field.configure_mock(**kwargs) - return field - - def test_hybrid_pressure_round_trip(self): - # Use pp.load_cubes() to convert fake PPFields into Cubes. - # NB. Use MagicMock so that SplittableInt header items, such as - # LBCODE, support len(). - - # Make a fake reference surface field. 
- pressure_field = self._field_with_data( - 10, - stash=iris.fileformats.pp.STASH(1, 0, 409), - lbuser=[0, 0, 0, 409, 0, 0, 0], - ) - - # Make a fake data field which needs the reference surface. - model_level = 5678 - sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95 - delta_lower, delta, delta_upper = 0.05, 0.1, 0.15 - data_field = self._field_with_data( - lbvc=9, - lblev=model_level, - bhlev=delta, - bhrlev=delta_lower, - blev=sigma, - brlev=sigma_lower, - brsvd=[sigma_upper, delta_upper], - ) - - # Convert both fields to cubes. - load = mock.Mock(return_value=iter([pressure_field, data_field])) - with mock.patch("iris.fileformats.pp.load", new=load) as load: - pressure_cube, data_cube = iris.fileformats.pp.load_cubes("DUMMY") - - # Check the reference surface cube looks OK. - self.assertEqual(pressure_cube.standard_name, "surface_air_pressure") - self.assertEqual(pressure_cube.units, "Pa") - - # Check the data cube is set up to use hybrid-pressure. - self._test_coord( - data_cube, model_level, standard_name="model_level_number" - ) - self._test_coord( - data_cube, - delta, - [delta_lower, delta_upper], - long_name="level_pressure", - ) - self._test_coord( - data_cube, sigma, [sigma_lower, sigma_upper], long_name="sigma" - ) - aux_factories = data_cube.aux_factories - self.assertEqual(len(aux_factories), 1) - surface_coord = aux_factories[0].dependencies["surface_air_pressure"] - self.assertArrayEqual( - surface_coord.points, np.arange(12000, step=10).reshape(30, 40) - ) - - # Now use the save rules to convert the Cubes back into PPFields. 
- pressure_field = iris.fileformats.pp.PPField3() - pressure_field.lbfc = 0 - pressure_field.lbvc = 0 - pressure_field.brsvd = [None, None] - pressure_field.lbuser = [None] * 7 - pressure_field = verify(pressure_cube, pressure_field) - - data_field = iris.fileformats.pp.PPField3() - data_field.lbfc = 0 - data_field.lbvc = 0 - data_field.brsvd = [None, None] - data_field.lbuser = [None] * 7 - data_field = verify(data_cube, data_field) - - # The reference surface field should have STASH=409 - self.assertArrayEqual( - pressure_field.lbuser, [None, None, None, 409, None, None, 1] - ) - - # Check the data field has the vertical coordinate as originally - # specified. - self.assertEqual(data_field.lbvc, 9) - self.assertEqual(data_field.lblev, model_level) - self.assertEqual(data_field.bhlev, delta) - self.assertEqual(data_field.bhrlev, delta_lower) - self.assertEqual(data_field.blev, sigma) - self.assertEqual(data_field.brlev, sigma_lower) - self.assertEqual(data_field.brsvd, [sigma_upper, delta_upper]) - - def test_hybrid_pressure_with_duplicate_references(self): - # Make a fake reference surface field. - pressure_field = self._field_with_data( - 10, - stash=iris.fileformats.pp.STASH(1, 0, 409), - lbuser=[0, 0, 0, 409, 0, 0, 0], - ) - - # Make a fake data field which needs the reference surface. - model_level = 5678 - sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95 - delta_lower, delta, delta_upper = 0.05, 0.1, 0.15 - data_field = self._field_with_data( - lbvc=9, - lblev=model_level, - bhlev=delta, - bhrlev=delta_lower, - blev=sigma, - brlev=sigma_lower, - brsvd=[sigma_upper, delta_upper], - ) - - # Convert both fields to cubes. 
- load = mock.Mock( - return_value=iter([data_field, pressure_field, pressure_field]) - ) - msg = "Multiple reference cubes for surface_air_pressure" - with mock.patch( - "iris.fileformats.pp.load", new=load - ) as load, mock.patch("warnings.warn") as warn: - _, _, _ = iris.fileformats.pp.load_cubes("DUMMY") - warn.assert_called_with(msg) - - def test_hybrid_height_with_non_standard_coords(self): - # Check the save rules are using the AuxFactory to find the - # hybrid height coordinates and not relying on their names. - ny, nx = 30, 40 - sigma_lower, sigma, sigma_upper = 0.75, 0.8, 0.75 - delta_lower, delta, delta_upper = 150, 200, 250 - - cube = Cube(np.zeros((ny, nx)), "air_temperature") - level_coord = AuxCoord(0, "model_level_number", units="1") - cube.add_aux_coord(level_coord) - delta_coord = AuxCoord( - delta, - bounds=[[delta_lower, delta_upper]], - long_name="moog", - units="m", - ) - sigma_coord = AuxCoord( - sigma, - bounds=[[sigma_lower, sigma_upper]], - long_name="mavis", - units="1", - ) - surface_altitude_coord = AuxCoord( - np.zeros((ny, nx)), "surface_altitude", units="m" - ) - cube.add_aux_coord(delta_coord) - cube.add_aux_coord(sigma_coord) - cube.add_aux_coord(surface_altitude_coord, (0, 1)) - cube.add_aux_factory( - HybridHeightFactory( - delta_coord, sigma_coord, surface_altitude_coord - ) - ) - - field = iris.fileformats.pp.PPField3() - field.lbfc = 0 - field.lbvc = 0 - field.brsvd = [None, None] - field.lbuser = [None] * 7 - field = verify(cube, field) - - self.assertEqual(field.blev, delta) - self.assertEqual(field.brlev, delta_lower) - self.assertEqual(field.bhlev, sigma) - self.assertEqual(field.bhrlev, sigma_lower) - self.assertEqual(field.brsvd, [delta_upper, sigma_upper]) - - def test_hybrid_pressure_with_non_standard_coords(self): - # Check the save rules are using the AuxFactory to find the - # hybrid pressure coordinates and not relying on their names. 
- ny, nx = 30, 40 - sigma_lower, sigma, sigma_upper = 0.75, 0.8, 0.75 - delta_lower, delta, delta_upper = 0.15, 0.2, 0.25 - - cube = Cube(np.zeros((ny, nx)), "air_temperature") - level_coord = AuxCoord(0, "model_level_number", units="1") - cube.add_aux_coord(level_coord) - delta_coord = AuxCoord( - delta, - bounds=[[delta_lower, delta_upper]], - long_name="moog", - units="Pa", - ) - sigma_coord = AuxCoord( - sigma, - bounds=[[sigma_lower, sigma_upper]], - long_name="mavis", - units="1", - ) - surface_air_pressure_coord = AuxCoord( - np.zeros((ny, nx)), "surface_air_pressure", units="Pa" - ) - cube.add_aux_coord(delta_coord) - cube.add_aux_coord(sigma_coord) - cube.add_aux_coord(surface_air_pressure_coord, (0, 1)) - cube.add_aux_factory( - HybridPressureFactory( - delta_coord, sigma_coord, surface_air_pressure_coord - ) - ) - - field = iris.fileformats.pp.PPField3() - field.lbfc = 0 - field.lbvc = 0 - field.brsvd = [None, None] - field.lbuser = [None] * 7 - field = verify(cube, field) - - self.assertEqual(field.bhlev, delta) - self.assertEqual(field.bhrlev, delta_lower) - self.assertEqual(field.blev, sigma) - self.assertEqual(field.brlev, sigma_lower) - self.assertEqual(field.brsvd, [sigma_upper, delta_upper]) - - def test_hybrid_height_round_trip_no_reference(self): - # Use pp.load_cubes() to convert fake PPFields into Cubes. - # NB. Use MagicMock so that SplittableInt header items, such as - # LBCODE, support len(). - # Make a fake data field which needs the reference surface. - model_level = 5678 - sigma_lower, sigma, sigma_upper = 0.85, 0.9, 0.95 - delta_lower, delta, delta_upper = 0.05, 0.1, 0.15 - data_field = self._field_with_data( - lbvc=65, - lblev=model_level, - bhlev=sigma, - bhrlev=sigma_lower, - blev=delta, - brlev=delta_lower, - brsvd=[delta_upper, sigma_upper], - ) - - # Convert field to a cube. 
- load = mock.Mock(return_value=iter([data_field])) - with mock.patch( - "iris.fileformats.pp.load", new=load - ) as load, mock.patch("warnings.warn") as warn: - (data_cube,) = iris.fileformats.pp.load_cubes("DUMMY") - - msg = ( - "Unable to create instance of HybridHeightFactory. " - "The source data contains no field(s) for 'orography'." - ) - warn.assert_called_with(msg) - - # Check the data cube is set up to use hybrid height. - self._test_coord( - data_cube, model_level, standard_name="model_level_number" - ) - self._test_coord( - data_cube, - delta, - [delta_lower, delta_upper], - long_name="level_height", - ) - self._test_coord( - data_cube, sigma, [sigma_lower, sigma_upper], long_name="sigma" - ) - # Check that no aux factory is created (due to missing - # reference surface). - aux_factories = data_cube.aux_factories - self.assertEqual(len(aux_factories), 0) - - # Now use the save rules to convert the Cube back into a PPField. - data_field = iris.fileformats.pp.PPField3() - data_field.lbfc = 0 - data_field.lbvc = 0 - data_field.brsvd = [None, None] - data_field.lbuser = [None] * 7 - data_field = verify(data_cube, data_field) - - # Check the data field has the vertical coordinate as originally - # specified. 
- self.assertEqual(data_field.lbvc, 65) - self.assertEqual(data_field.lblev, model_level) - self.assertEqual(data_field.bhlev, sigma) - self.assertEqual(data_field.bhrlev, sigma_lower) - self.assertEqual(data_field.blev, delta) - self.assertEqual(data_field.brlev, delta_lower) - self.assertEqual(data_field.brsvd, [delta_upper, sigma_upper]) - - -class TestSaveLBFT(tests.IrisTest): - def setUp(self): - delta_start = 24 - delta_mid = 36 - self.delta_end = 369 * 24 - ref_offset = 10 * 24 - self.args = (delta_start, delta_mid, self.delta_end, ref_offset) - - def create_cube(self, fp_min, fp_mid, fp_max, ref_offset, season=None): - cube = Cube(np.zeros((3, 4))) - cube.add_aux_coord( - AuxCoord( - standard_name="forecast_period", - units="hours", - points=fp_mid, - bounds=[fp_min, fp_max], - ) - ) - cube.add_aux_coord( - AuxCoord( - standard_name="time", - units="hours since epoch", - points=ref_offset + fp_mid, - bounds=[ref_offset + fp_min, ref_offset + fp_max], - ) - ) - if season: - cube.add_aux_coord( - AuxCoord(long_name="clim_season", points=season) - ) - cube.add_cell_method(CellMethod("DUMMY", "clim_season")) - return cube - - def convert_cube_to_field(self, cube): - # Use the save rules to convert the Cube back into a PPField. 
- field = iris.fileformats.pp.PPField3() - field.lbfc = 0 - field.lbvc = 0 - field.lbtim = 0 - field = verify(cube, field) - return field - - def test_time_mean_from_forecast_period(self): - cube = self.create_cube(24, 36, 48, 72) - field = self.convert_cube_to_field(cube) - self.assertEqual(field.lbft, 48) - - def test_time_mean_from_forecast_reference_time(self): - cube = Cube(np.zeros((3, 4))) - cube.add_aux_coord( - AuxCoord( - standard_name="forecast_reference_time", - units="hours since epoch", - points=72, - ) - ) - cube.add_aux_coord( - AuxCoord( - standard_name="time", - units="hours since epoch", - points=72 + 36, - bounds=[72 + 24, 72 + 48], - ) - ) - field = self.convert_cube_to_field(cube) - self.assertEqual(field.lbft, 48) - - def test_climatological_mean_single_year(self): - cube = Cube(np.zeros((3, 4))) - cube.add_aux_coord( - AuxCoord( - standard_name="forecast_period", - units="hours", - points=36, - bounds=[24, 4 * 24], - ) - ) - cube.add_aux_coord( - AuxCoord( - standard_name="time", - units="hours since epoch", - points=240 + 36, - bounds=[240 + 24, 240 + 4 * 24], - ) - ) - cube.add_aux_coord(AuxCoord(long_name="clim_season", points="DUMMY")) - cube.add_cell_method(CellMethod("DUMMY", "clim_season")) - field = self.convert_cube_to_field(cube) - self.assertEqual(field.lbft, 4 * 24) - - def test_climatological_mean_multi_year_djf(self): - cube = self.create_cube(*self.args, season="djf") - field = self.convert_cube_to_field(cube) - self.assertEqual(field.lbft, self.delta_end) - - def test_climatological_mean_multi_year_mam(self): - cube = self.create_cube(*self.args, season="mam") - field = self.convert_cube_to_field(cube) - self.assertEqual(field.lbft, self.delta_end) - - def test_climatological_mean_multi_year_jja(self): - cube = self.create_cube(*self.args, season="jja") - field = self.convert_cube_to_field(cube) - self.assertEqual(field.lbft, self.delta_end) - - def test_climatological_mean_multi_year_son(self): - cube = 
self.create_cube(*self.args, season="son") - field = self.convert_cube_to_field(cube) - self.assertEqual(field.lbft, self.delta_end) - - -class TestCoordinateForms(tests.IrisTest): - def _common(self, x_coord): - nx = len(x_coord.points) - ny = 2 - data = np.zeros((ny, nx), dtype=np.float32) - test_cube = iris.cube.Cube(data) - y0 = np.float32(20.5) - dy = np.float32(3.72) - y_coord = iris.coords.DimCoord.from_regular( - zeroth=y0, - step=dy, - count=ny, - standard_name="latitude", - units="degrees_north", - ) - test_cube.add_dim_coord(x_coord, 1) - test_cube.add_dim_coord(y_coord, 0) - # Write to a temporary PP file and read it back as a PPField - with self.temp_filename(".pp") as pp_filepath: - iris.save(test_cube, pp_filepath) - pp_loader = iris.fileformats.pp.load(pp_filepath) - pp_field = next(pp_loader) - return pp_field - - def test_save_awkward_case_is_regular(self): - # Check that specific "awkward" values still save in a regular form. - nx = 3 - x0 = np.float32(355.626) - dx = np.float32(0.0135) - x_coord = iris.coords.DimCoord.from_regular( - zeroth=x0, - step=dx, - count=nx, - standard_name="longitude", - units="degrees_east", - ) - pp_field = self._common(x_coord) - # Check that the result has the regular coordinates as expected. - self.assertEqual(pp_field.bzx, x0) - self.assertEqual(pp_field.bdx, dx) - self.assertEqual(pp_field.lbnpt, nx) - - def test_save_irregular(self): - # Check that a non-regular coordinate saves as expected. - nx = 3 - x_values = [0.0, 1.1, 2.0] - x_coord = iris.coords.DimCoord( - x_values, standard_name="longitude", units="degrees_east" - ) - pp_field = self._common(x_coord) - # Check that the result has the regular/irregular Y and X as expected. 
- self.assertEqual(pp_field.bdx, 0.0) - self.assertArrayAllClose(pp_field.x, x_values) - self.assertEqual(pp_field.lbnpt, nx) - - -@tests.skip_data -class TestLoadLittleendian(tests.IrisTest): - def test_load_sample(self): - file_path = tests.get_data_path( - ("PP", "little_endian", "qrparm.orog.pp") - ) - # Ensure it just loads. - cube = iris.load_cube(file_path, "surface_altitude") - self.assertEqual(cube.shape, (110, 160)) - - # Check for sensible floating point numbers. - def check_minmax(array, expect_min, expect_max): - found = np.array([np.min(array), np.max(array)]) - expected = np.array([expect_min, expect_max]) - self.assertArrayAlmostEqual(found, expected, decimal=2) - - lons = cube.coord("grid_longitude").points - lats = cube.coord("grid_latitude").points - data = cube.data - check_minmax(lons, 342.0, 376.98) - check_minmax(lats, -10.48, 13.5) - check_minmax(data, -30.48, 6029.1) - - -@tests.skip_data -class TestAsCubes(tests.IrisTest): - def setUp(self): - dpath = tests.get_data_path( - ["PP", "meanMaxMin", "200806081200__qwpb.T24.pp"] - ) - self.ppfs = iris.fileformats.pp.load(dpath) - - def test_pseudo_level_filter(self): - chosen_ppfs = [] - for ppf in self.ppfs: - if ppf.lbuser[4] == 3: - chosen_ppfs.append(ppf) - cubes_fields = list(load_pairs_from_fields(chosen_ppfs)) - self.assertEqual(len(cubes_fields), 8) - - def test_pseudo_level_filter_none(self): - chosen_ppfs = [] - for ppf in self.ppfs: - if ppf.lbuser[4] == 30: - chosen_ppfs.append(ppf) - cubes = list(load_pairs_from_fields(chosen_ppfs)) - self.assertEqual(len(cubes), 0) - - def test_as_pairs(self): - cube_ppf_pairs = load_pairs_from_fields(self.ppfs) - cubes = [] - for cube, ppf in cube_ppf_pairs: - if ppf.lbuser[4] == 3: - cube.attributes["pseudo level"] = ppf.lbuser[4] - cubes.append(cube) - for cube in cubes: - self.assertEqual(cube.attributes["pseudo level"], 3) - - -class TestSaveLBPROC(tests.IrisTest): - def create_cube(self, longitude_coord="longitude"): - cube = 
Cube(np.zeros((2, 3, 4))) - tunit = Unit("days since epoch", calendar="gregorian") - tcoord = DimCoord(np.arange(2), standard_name="time", units=tunit) - xcoord = DimCoord( - np.arange(3), standard_name=longitude_coord, units="degrees" - ) - ycoord = DimCoord(points=np.arange(4)) - cube.add_dim_coord(tcoord, 0) - cube.add_dim_coord(xcoord, 1) - cube.add_dim_coord(ycoord, 2) - return cube - - def convert_cube_to_field(self, cube): - field = iris.fileformats.pp.PPField3() - field.lbvc = 0 - return verify(cube, field) - - def test_time_mean_only(self): - cube = self.create_cube() - cube.add_cell_method(CellMethod(method="mean", coords="time")) - field = self.convert_cube_to_field(cube) - self.assertEqual(int(field.lbproc), 128) - - def test_longitudinal_mean_only(self): - cube = self.create_cube() - cube.add_cell_method(CellMethod(method="mean", coords="longitude")) - field = self.convert_cube_to_field(cube) - self.assertEqual(int(field.lbproc), 64) - - def test_grid_longitudinal_mean_only(self): - cube = self.create_cube(longitude_coord="grid_longitude") - cube.add_cell_method( - CellMethod(method="mean", coords="grid_longitude") - ) - field = self.convert_cube_to_field(cube) - self.assertEqual(int(field.lbproc), 64) - - def test_time_mean_and_zonal_mean(self): - cube = self.create_cube() - cube.add_cell_method(CellMethod(method="mean", coords="time")) - cube.add_cell_method(CellMethod(method="mean", coords="longitude")) - field = self.convert_cube_to_field(cube) - self.assertEqual(int(field.lbproc), 192) - - -@tests.skip_data -class TestCallbackLoad(tests.IrisTest): - def setUp(self): - self.pass_name = "air_potential_temperature" - - def callback_wrapper(self): - # Wrap the `iris.exceptions.IgnoreCubeException`-calling callback. 
- def callback_ignore_cube_exception(cube, field, filename): - if cube.name() != self.pass_name: - raise IgnoreCubeException - - return callback_ignore_cube_exception - - def test_ignore_cube_callback(self): - test_dataset = tests.get_data_path( - ["PP", "globClim1", "dec_subset.pp"] - ) - exception_callback = self.callback_wrapper() - result_cubes = iris.load(test_dataset, callback=exception_callback) - n_result_cubes = len(result_cubes) - # We ignore all but one cube (the `air_potential_temperature` cube). - self.assertEqual(n_result_cubes, 1) - self.assertEqual(result_cubes[0].name(), self.pass_name) - - -@tests.skip_data -class TestZonalMeanBounds(tests.IrisTest): - def test_mulitple_longitude(self): - # test that bounds are set for a zonal mean file with many longitude - # values - orig_file = tests.get_data_path(("PP", "aPPglob1", "global.pp")) - - f = next(iris.fileformats.pp.load(orig_file)) - f.lbproc = 192 # time and zonal mean - - # Write out pp file - temp_filename = iris.util.create_temp_filename(".pp") - with open(temp_filename, "wb") as temp_fh: - f.save(temp_fh) - - # Load pp file - cube = iris.load_cube(temp_filename) - - self.assertTrue(cube.coord("longitude").has_bounds()) - - os.remove(temp_filename) - - def test_singular_longitude(self): - # test that bounds are set for a zonal mean file with a single - # longitude value - - pp_file = tests.get_data_path(("PP", "zonal_mean", "zonal_mean.pp")) - - # Load pp file - cube = iris.load_cube(pp_file) - - self.assertTrue(cube.coord("longitude").has_bounds()) - - -@tests.skip_data -class TestLoadPartialMask(tests.IrisTest): - def test_data(self): - # Ensure that fields merge correctly where one has a mask and one - # doesn't. 
- filename = tests.get_data_path(["PP", "simple_pp", "partial_mask.pp"]) - - expected_data = np.ma.masked_array( - [[[0, 1], [11, 12]], [[99, 100], [-1, -1]]], - [[[0, 0], [0, 0]], [[0, 0], [1, 1]]], - dtype=np.int32, - ) - cube = iris.load_cube(filename) - - self.assertEqual(expected_data.dtype, cube.data.dtype) - self.assertMaskedArrayEqual(expected_data, cube.data, strict=False) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_pp_constrained_load_cubes.py b/lib/iris/tests/integration/test_pp_constrained_load_cubes.py deleted file mode 100644 index 7ddf39b2ff..0000000000 --- a/lib/iris/tests/integration/test_pp_constrained_load_cubes.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for :func:`iris.fileformats.rules.load_cubes`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import iris -from iris.fileformats import pp -from iris.fileformats.pp_load_rules import convert -from iris.fileformats.rules import load_cubes - - -class Test(tests.IrisTest): - @tests.skip_data - def test_pp_with_stash_constraint(self): - filenames = [tests.get_data_path(("PP", "globClim1", "dec_subset.pp"))] - stcon = iris.AttributeConstraint(STASH="m01s00i004") - pp_constraints = pp._convert_constraints(stcon) - pp_loader = iris.fileformats.rules.Loader(pp.load, {}, convert) - cubes = list(load_cubes(filenames, None, pp_loader, pp_constraints)) - self.assertEqual(len(cubes), 38) - - @tests.skip_data - def test_pp_with_stash_constraints(self): - filenames = [tests.get_data_path(("PP", "globClim1", "dec_subset.pp"))] - stcon1 = iris.AttributeConstraint(STASH="m01s00i004") - stcon2 = iris.AttributeConstraint(STASH="m01s00i010") - pp_constraints = pp._convert_constraints([stcon1, stcon2]) - pp_loader = iris.fileformats.rules.Loader(pp.load, {}, convert) - cubes = list(load_cubes(filenames, None, pp_loader, pp_constraints)) - self.assertEqual(len(cubes), 76) - - @tests.skip_data - def test_pp_no_constraint(self): - filenames = [tests.get_data_path(("PP", "globClim1", "dec_subset.pp"))] - pp_constraints = pp._convert_constraints(None) - pp_loader = iris.fileformats.rules.Loader(pp.load, {}, convert) - cubes = list(load_cubes(filenames, None, pp_loader, pp_constraints)) - self.assertEqual(len(cubes), 152) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_regrid_equivalence.py b/lib/iris/tests/integration/test_regrid_equivalence.py deleted file mode 100644 index 09b47072e0..0000000000 --- a/lib/iris/tests/integration/test_regrid_equivalence.py +++ /dev/null @@ -1,228 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Tests to check the validity of replacing -"iris.analysis._interpolate.regrid`('nearest')" with -"iris.cube.Cube.regrid(scheme=iris.analysis.Nearest())". - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.analysis import Nearest -from iris.coords import DimCoord -from iris.cube import Cube - - -def grid_cube(xx, yy, data=None): - nx, ny = len(xx), len(yy) - if data is not None: - data = np.array(data).reshape((ny, nx)) - else: - data = np.zeros((ny, nx)) - cube = Cube(data) - y_coord = DimCoord(yy, standard_name="latitude", units="degrees") - x_coord = DimCoord(xx, standard_name="longitude", units="degrees") - cube.add_dim_coord(y_coord, 0) - cube.add_dim_coord(x_coord, 1) - return cube - - -ENABLE_DEBUG_OUTPUT = False - - -def _debug_data(cube, test_id): - if ENABLE_DEBUG_OUTPUT: - print - data = cube.data - print("CUBE: {}".format(test_id)) - print(" x={!r}".format(cube.coord("longitude").points)) - print(" y={!r}".format(cube.coord("latitude").points)) - print("data[{}]:".format(type(data))) - print(repr(data)) - - -class MixinCheckingCode: - def test_basic(self): - src_x = [30.0, 40.0, 50.0] - dst_x = [32.0, 42.0] - src_y = [-10.0, 0.0, 10.0] - dst_y = [-8.0, 2.0] - data = [[3.0, 4.0, 5.0], [23.0, 24.0, 25.0], [43.0, 44.0, 45.0]] - expected_result = [[3.0, 4.0], [23.0, 24.0]] - src_cube = grid_cube(src_x, src_y, data) - _debug_data(src_cube, "basic SOURCE") - dst_cube = grid_cube(dst_x, dst_y) - result_cube = self.regrid(src_cube, dst_cube) - _debug_data(result_cube, "basic RESULT") - self.assertArrayAllClose(result_cube.data, expected_result) - - def test_src_extrapolation(self): - src_x = [30.0, 40.0, 50.0] - dst_x = [0.0, 29.0, 39.0] - src_y = [-10.0, 0.0, 10.0] - dst_y = [-50.0, -9.0, -1.0] - data = [[3.0, 4.0, 5.0], [23.0, 24.0, 25.0], [43.0, 44.0, 45.0]] - expected_result = [ - [3.0, 3.0, 4.0], - [3.0, 3.0, 4.0], - 
[23.0, 23.0, 24.0], - ] - src_cube = grid_cube(src_x, src_y, data) - _debug_data(src_cube, "extrapolate SOURCE") - dst_cube = grid_cube(dst_x, dst_y) - result_cube = self.regrid(src_cube, dst_cube) - _debug_data(result_cube, "extrapolate RESULT") - self.assertArrayAllClose(result_cube.data, expected_result) - - def test_exact_matching_points(self): - src_x = [10.0, 20.0, 30.0] - src_y = [10.0, 20.0, 30.0] - dst_x = [14.9, 15.1, 20.0, 24.9, 25.1] - dst_y = [14.9, 15.1, 20.0, 24.9, 25.1] - data = [[3.0, 4.0, 5.0], [23.0, 24.0, 25.0], [43.0, 44.0, 45.0]] - expected_result = [ - [3.0, 4.0, 4.0, 4.0, 5.0], - [23.0, 24.0, 24.0, 24.0, 25.0], - [23.0, 24.0, 24.0, 24.0, 25.0], - [23.0, 24.0, 24.0, 24.0, 25.0], - [43.0, 44.0, 44.0, 44.0, 45.0], - ] - src_cube = grid_cube(src_x, src_y, data) - _debug_data(src_cube, "matching SOURCE") - dst_cube = grid_cube(dst_x, dst_y) - result_cube = self.regrid(src_cube, dst_cube) - _debug_data(result_cube, "matching RESULt") - self.assertArrayAllClose(result_cube.data, expected_result) - - def test_source_mask(self): - src_x = [40.0, 50.0, 60.0] - src_y = [40.0, 50.0, 60.0] - dst_x = [44.99, 45.01, 48.0, 50.0, 52.0, 54.99, 55.01] - dst_y = [44.99, 45.01, 48.0, 50.0, 52.0, 54.99, 55.01] - data = np.ma.masked_equal( - [[3.0, 4.0, 5.0], [23.0, 999, 25.0], [43.0, 44.0, 45.0]], 999 - ) - expected_result = np.ma.masked_equal( - [ - [3.0, 4.0, 4.0, 4.0, 4.0, 4.0, 5.0], - [23.0, 999, 999, 999, 999, 999, 25.0], - [23.0, 999, 999, 999, 999, 999, 25.0], - [23.0, 999, 999, 999, 999, 999, 25.0], - [23.0, 999, 999, 999, 999, 999, 25.0], - [23.0, 999, 999, 999, 999, 999, 25.0], - [43.0, 44.0, 44.0, 44.0, 44.0, 44.0, 45.0], - ], - 999, - ) - src_cube = grid_cube(src_x, src_y, data) - src_cube.data = np.ma.masked_array(src_cube.data) - src_cube.data[1, 1] = np.ma.masked - _debug_data(src_cube, "masked SOURCE") - dst_cube = grid_cube(dst_x, dst_y) - result_cube = self.regrid( - src_cube, dst_cube, translate_nans_to_mask=True - ) - _debug_data(result_cube, 
"masked RESULT") - self.assertMaskedArrayEqual(result_cube.data, expected_result) - - def test_wrapping_non_circular(self): - src_x = [-10.0, 0.0, 10.0] - dst_x = [-360.0, -170.0, -1.0, 1.0, 50.0, 170.0, 352.0, 720.0] - src_y = [0.0, 10.0] - dst_y = [0.0, 10.0] - data = [[3.0, 4.0, 5.0], [3.0, 4.0, 5.0]] - src_cube = grid_cube(src_x, src_y, data) - dst_cube = grid_cube(dst_x, dst_y) - expected_result = [ - [4.0, 3.0, 4.0, 4.0, 5.0, 5.0, 3.0, 4.0], - [4.0, 3.0, 4.0, 4.0, 5.0, 5.0, 3.0, 4.0], - ] - _debug_data(src_cube, "noncircular SOURCE") - result_cube = self.regrid(src_cube, dst_cube) - _debug_data(result_cube, "noncircular RESULT") - self.assertArrayAllClose(result_cube.data, expected_result) - - def test_wrapping_circular(self): - # When x-coord is "circular", the above distinction does not apply : - # results are the same for both calculations. - src_x = [-10.0, 0.0, 10.0] - dst_x = [-360.0, -170.0, -1.0, 1.0, 50.0, 170.0, 352.0, 720.0] - src_y = [0.0, 10.0] - dst_y = [0.0, 10.0] - data = [[3.0, 4.0, 5.0], [3.0, 4.0, 5.0]] - src_cube = grid_cube(src_x, src_y, data) - dst_cube = grid_cube(dst_x, dst_y) - src_cube.coord("longitude").circular = True - expected_result = [ - [4.0, 3.0, 4.0, 4.0, 5.0, 5.0, 3.0, 4.0], - [4.0, 3.0, 4.0, 4.0, 5.0, 5.0, 3.0, 4.0], - ] - _debug_data(src_cube, "circular SOURCE") - result_cube = self.regrid(src_cube, dst_cube) - _debug_data(result_cube, "circular RESULT") - self.assertArrayAllClose(result_cube.data, expected_result) - - def test_wrapping_non_angular(self): - src_x = [-10.0, 0.0, 10.0] - dst_x = [-360.0, -170.0, -1.0, 1.0, 50.0, 170.0, 352.0, 720.0] - src_y = [0.0, 10.0] - dst_y = [0.0, 10.0] - data = [[3.0, 4.0, 5.0], [3.0, 4.0, 5.0]] - src_cube = grid_cube(src_x, src_y, data) - dst_cube = grid_cube(dst_x, dst_y) - for co_name in ("longitude", "latitude"): - for cube in (src_cube, dst_cube): - coord = cube.coord(co_name) - coord.coord_system = None - coord.convert_units("1") - # interpolate.regrid --> Wrapping-free results 
(non-circular). - expected_result = [ - [3.0, 3.0, 4.0, 4.0, 5.0, 5.0, 5.0, 5.0], - [3.0, 3.0, 4.0, 4.0, 5.0, 5.0, 5.0, 5.0], - ] - _debug_data(src_cube, "non-angle-lons SOURCE") - result_cube = self.regrid(src_cube, dst_cube) - _debug_data(result_cube, "non-angle-lons RESULT") - self.assertArrayAllClose(result_cube.data, expected_result) - - def test_source_nan(self): - src_x = [40.0, 50.0, 60.0] - src_y = [40.0, 50.0, 60.0] - dst_x = [44.99, 45.01, 48.0, 50.0, 52.0, 54.99, 55.01] - dst_y = [44.99, 45.01, 48.0, 50.0, 52.0, 54.99, 55.01] - nan = np.nan - data = [[3.0, 4.0, 5.0], [23.0, nan, 25.0], [43.0, 44.0, 45.0]] - expected_result = [ - [3.0, 4.0, 4.0, 4.0, 4.0, 4.0, 5.0], - [23.0, nan, nan, nan, nan, nan, 25.0], - [23.0, nan, nan, nan, nan, nan, 25.0], - [23.0, nan, nan, nan, nan, nan, 25.0], - [23.0, nan, nan, nan, nan, nan, 25.0], - [23.0, nan, nan, nan, nan, nan, 25.0], - [43.0, 44.0, 44.0, 44.0, 44.0, 44.0, 45.0], - ] - src_cube = grid_cube(src_x, src_y, data) - _debug_data(src_cube, "nan SOURCE") - dst_cube = grid_cube(dst_x, dst_y) - result_cube = self.regrid(src_cube, dst_cube) - _debug_data(result_cube, "nan RESULT") - self.assertArrayEqual(result_cube.data, expected_result) - - -class TestCubeRegridNearest(MixinCheckingCode, tests.IrisTest): - scheme = Nearest(extrapolation_mode="extrapolate") - - def regrid(self, src_cube, dst_cube, **kwargs): - return src_cube.regrid(dst_cube, scheme=self.scheme) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_regridding.py b/lib/iris/tests/integration/test_regridding.py deleted file mode 100644 index 4ceac6ab1e..0000000000 --- a/lib/iris/tests/integration/test_regridding.py +++ /dev/null @@ -1,248 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Integration tests for regridding.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -import iris -from iris.analysis import UnstructuredNearest -from iris.analysis._regrid import RectilinearRegridder as Regridder -from iris.coord_systems import GeogCS -from iris.coords import DimCoord -from iris.cube import Cube -from iris.tests.stock import global_pp, simple_3d - - -@tests.skip_data -class TestOSGBToLatLon(tests.IrisTest): - def setUp(self): - path = tests.get_data_path( - ( - "NIMROD", - "uk2km", - "WO0000000003452", - "201007020900_u1096_ng_ey00_visibility0180_screen_2km", - ) - ) - self.src = iris.load_cube(path)[0] - # Cast up to float64, to work around numpy<=1.8 bug with means of - # arrays of 32bit floats. - self.src.data = self.src.data.astype(np.float64) - self.grid = Cube(np.empty((73, 96))) - cs = GeogCS(6370000) - lat = DimCoord( - np.linspace(46, 65, 73), - "latitude", - units="degrees", - coord_system=cs, - ) - lon = DimCoord( - np.linspace(-14, 8, 96), - "longitude", - units="degrees", - coord_system=cs, - ) - self.grid.add_dim_coord(lat, 0) - self.grid.add_dim_coord(lon, 1) - - def _regrid(self, method): - regridder = Regridder(self.src, self.grid, method, "mask") - result = regridder(self.src) - return result - - def test_linear(self): - res = self._regrid("linear") - self.assertArrayShapeStats(res, (73, 96), 17799.296120, 11207.701323) - - def test_nearest(self): - res = self._regrid("nearest") - self.assertArrayShapeStats(res, (73, 96), 17808.068828, 11225.314310) - - -@tests.skip_data -class TestGlobalSubsample(tests.IrisTest): - def setUp(self): - self.src = global_pp() - _ = self.src.data - # Cast up to float64, to work around numpy<=1.8 bug with means of - # arrays of 32bit floats. 
- self.src.data = self.src.data.astype(np.float64) - # Subsample and shift the target grid so that we can see a visual - # difference between regridding scheme methods. - grid = self.src[1::2, 1::3] - grid.coord("latitude").points = grid.coord("latitude").points + 1 - grid.coord("longitude").points = grid.coord("longitude").points + 1 - self.grid = grid - - def _regrid(self, method): - regridder = Regridder(self.src, self.grid, method, "mask") - result = regridder(self.src) - return result - - def test_linear(self): - res = self._regrid("linear") - self.assertArrayShapeStats(res, (36, 32), 280.35907, 15.997223) - - def test_nearest(self): - res = self._regrid("nearest") - self.assertArrayShapeStats(res, (36, 32), 280.33726, 16.064001) - - -@tests.skip_data -class TestUnstructured(tests.IrisTest): - def setUp(self): - path = tests.get_data_path( - ("NetCDF", "unstructured_grid", "theta_nodal_xios.nc") - ) - self.src = iris.load_cube(path, "Potential Temperature") - self.grid = simple_3d()[0, :, :] - - def test_nearest(self): - res = self.src.regrid(self.grid, UnstructuredNearest()) - self.assertArrayShapeStats(res, (1, 6, 3, 4), 315.890808, 11.000724) - - -class TestZonalMean_global(tests.IrisTest): - def setUp(self): - np.random.seed(0) - self.src = iris.cube.Cube(np.random.random_integers(0, 10, (140, 1))) - s_crs = iris.coord_systems.GeogCS(6371229.0) - sy_coord = iris.coords.DimCoord( - np.linspace(-90, 90, 140), - standard_name="latitude", - units="degrees", - coord_system=s_crs, - ) - sx_coord = iris.coords.DimCoord( - -180, - bounds=[-180, 180], - standard_name="longitude", - units="degrees", - circular=True, - coord_system=s_crs, - ) - self.src.add_dim_coord(sy_coord, 0) - self.src.add_dim_coord(sx_coord, 1) - - def test_linear_same_crs_global(self): - # Regrid the zonal mean onto an identical coordinate system target, but - # on a different set of longitudes - which should result in no change. 
- points = [-150, -90, -30, 30, 90, 150] - bounds = [ - [-180, -120], - [-120, -60], - [-60, 0], - [0, 60], - [60, 120], - [120, 180], - ] - sx_coord = self.src.coord(axis="x") - sy_coord = self.src.coord(axis="y") - x_coord = sx_coord.copy(points, bounds=bounds) - grid = iris.cube.Cube( - np.zeros([sy_coord.points.size, x_coord.points.size]) - ) - grid.add_dim_coord(sy_coord, 0) - grid.add_dim_coord(x_coord, 1) - - res = self.src.regrid(grid, iris.analysis.Linear()) - - # Ensure data remains unchanged. - # (the same along each column) - self.assertTrue( - np.array( - [ - (res.data[:, 0] - res.data[:, i]).max() - for i in range(1, res.shape[1]) - ] - ).max() - < 1e-10 - ) - self.assertArrayAlmostEqual(res.data[:, 0], self.src.data.reshape(-1)) - - -class TestZonalMean_regional(TestZonalMean_global, tests.IrisTest): - def setUp(self): - super().setUp() - - # Define a target grid and a target result (what we expect the - # regridder to return). - sx_coord = self.src.coord(axis="x") - sy_coord = self.src.coord(axis="y") - grid_crs = iris.coord_systems.RotatedGeogCS( - 37.5, 177.5, ellipsoid=iris.coord_systems.GeogCS(6371229.0) - ) - grid_x = sx_coord.copy(np.linspace(350, 370, 100)) - grid_x.circular = False - grid_x.coord_system = grid_crs - grid_y = sy_coord.copy(np.linspace(-10, 10, 100)) - grid_y.coord_system = grid_crs - grid = iris.cube.Cube( - np.zeros([grid_y.points.size, grid_x.points.size]) - ) - grid.add_dim_coord(grid_y, 0) - grid.add_dim_coord(grid_x, 1) - - # The target result is derived by regridding a multi-column version of - # the source to the target (i.e. turning a zonal mean regrid into a - # conventional regrid). 
- self.tar = self.zonal_mean_as_multi_column(self.src).regrid( - grid, iris.analysis.Linear() - ) - self.grid = grid - - def zonal_mean_as_multi_column(self, src_cube): - # Munge the source (duplicate source latitudes) so that we can - # utilise linear regridding as a conventional problem (that is, to - # duplicate columns so that it is no longer a zonal mean problem). - src_cube2 = src_cube.copy() - src_cube2.coord(axis="x").points = -90 - src_cube2.coord(axis="x").bounds = [-180, 0] - src_cube.coord(axis="x").points = 90 - src_cube.coord(axis="x").bounds = [0, 180] - src_cubes = iris.cube.CubeList([src_cube, src_cube2]) - return src_cubes.concatenate_cube() - - def test_linear_rotated_regional(self): - # Ensure that zonal mean source data is linearly interpolated onto a - # high resolution target. - regridder = iris.analysis.Linear() - res = self.src.regrid(self.grid, regridder) - self.assertArrayAlmostEqual(res.data, self.tar.data) - - def test_linear_rotated_regional_no_extrapolation(self): - # Capture the case where our source remains circular but we don't use - # extrapolation. - regridder = iris.analysis.Linear(extrapolation_mode="nan") - res = self.src.regrid(self.grid, regridder) - self.assertArrayAlmostEqual(res.data, self.tar.data) - - def test_linear_rotated_regional_not_circular(self): - # Capture the case where our source is not circular but we utilise - # extrapolation. - regridder = iris.analysis.Linear() - self.src.coord(axis="x").circular = False - res = self.src.regrid(self.grid, regridder) - self.assertArrayAlmostEqual(res.data, self.tar.data) - - def test_linear_rotated_regional_no_extrapolation_not_circular(self): - # Confirm how zonal mean actually works in so far as, that - # extrapolation and circular source handling is the means by which - # these usecases are supported. - # In the case where the source is neither using extrapolation and is - # not circular, then 'nan' values will result (as we would expect). 
- regridder = iris.analysis.Linear(extrapolation_mode="nan") - self.src.coord(axis="x").circular = False - res = self.src.regrid(self.grid, regridder) - self.assertTrue(np.isnan(res.data).all()) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_subset.py b/lib/iris/tests/integration/test_subset.py deleted file mode 100644 index bc2029afba..0000000000 --- a/lib/iris/tests/integration/test_subset.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for subset.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.coords import DimCoord -from iris.cube import Cube - - -def _make_test_cube(): - data = np.zeros((4, 4, 1)) - lats, longs = [0, 10, 20, 30], [5, 15, 25, 35] - lat_coord = DimCoord(lats, standard_name="latitude", units="degrees") - lon_coord = DimCoord(longs, standard_name="longitude", units="degrees") - vrt_coord = DimCoord([850], long_name="pressure", units="hPa") - return Cube( - data, - long_name="test_cube", - units="1", - attributes=None, - dim_coords_and_dims=[(lat_coord, 0), (lon_coord, 1)], - aux_coords_and_dims=[(vrt_coord, None)], - ) - - -class TestSubset(tests.IrisTest): - def setUp(self): - self.cube = _make_test_cube() - - def test_coordinate_subset(self): - coord = self.cube.coord("pressure") - subsetted = self.cube.subset(coord) - self.assertEqual(self.cube, subsetted) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/test_trajectory.py b/lib/iris/tests/integration/test_trajectory.py deleted file mode 100644 index a8e3acaa41..0000000000 --- a/lib/iris/tests/integration/test_trajectory.py +++ /dev/null @@ -1,353 +0,0 @@ -# Copyright Iris 
contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for :mod:`iris.analysis.trajectory`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np - -import iris -from iris._lazy_data import as_lazy_data -from iris.analysis.trajectory import Trajectory -from iris.analysis.trajectory import interpolate as traj_interpolate -import iris.tests.stock as istk - - -@tests.skip_data -class TestColpex(tests.IrisTest): - def setUp(self): - # Load the COLPEX data => TZYX - path = tests.get_data_path( - ["PP", "COLPEX", "theta_and_orog_subset.pp"] - ) - cube = iris.load_cube(path, "air_potential_temperature") - cube.coord("grid_latitude").bounds = None - cube.coord("grid_longitude").bounds = None - # TODO: Workaround until regrid can handle factories - cube.remove_aux_factory(cube.aux_factories[0]) - cube.remove_coord("surface_altitude") - self.cube = cube - - def test_trajectory_extraction(self): - # Pull out a single point - no interpolation required - single_point = traj_interpolate( - self.cube, - [("grid_latitude", [-0.1188]), ("grid_longitude", [359.57958984])], - ) - expected = self.cube[..., 10, 0].data - self.assertArrayAllClose( - single_point[..., 0].data, expected, rtol=2.0e-7 - ) - self.assertCML( - single_point, ("trajectory", "single_point.cml"), checksum=False - ) - - def test_trajectory_extraction_calc(self): - # Pull out another point and test against a manually calculated result. 
- single_point = [ - ["grid_latitude", [-0.1188]], - ["grid_longitude", [359.584090412]], - ] - scube = self.cube[0, 0, 10:11, 4:6] - x0 = scube.coord("grid_longitude")[0].points - x1 = scube.coord("grid_longitude")[1].points - y0 = scube.data[0, 0] - y1 = scube.data[0, 1] - expected = y0 + ((y1 - y0) * ((359.584090412 - x0) / (x1 - x0))) - trajectory_cube = traj_interpolate(scube, single_point) - self.assertArrayAllClose(trajectory_cube.data, expected, rtol=2.0e-7) - - def _traj_to_sample_points(self, trajectory): - sample_points = [] - src_points = trajectory.sampled_points - for name in src_points[0].keys(): - values = [point[name] for point in src_points] - sample_points.append((name, values)) - return sample_points - - def test_trajectory_extraction_axis_aligned(self): - # Extract a simple, axis-aligned trajectory that is similar to an - # indexing operation. - # (It's not exactly the same because the source cube doesn't have - # regular spacing.) - waypoints = [ - {"grid_latitude": -0.1188, "grid_longitude": 359.57958984}, - {"grid_latitude": -0.1188, "grid_longitude": 359.66870117}, - ] - trajectory = Trajectory(waypoints, sample_count=100) - sample_points = self._traj_to_sample_points(trajectory) - trajectory_cube = traj_interpolate(self.cube, sample_points) - self.assertCML( - trajectory_cube, ("trajectory", "constant_latitude.cml") - ) - - def test_trajectory_extraction_zigzag(self): - # Extract a zig-zag trajectory - waypoints = [ - {"grid_latitude": -0.1188, "grid_longitude": 359.5886}, - {"grid_latitude": -0.0828, "grid_longitude": 359.6606}, - {"grid_latitude": -0.0468, "grid_longitude": 359.6246}, - ] - trajectory = Trajectory(waypoints, sample_count=20) - sample_points = self._traj_to_sample_points(trajectory) - trajectory_cube = traj_interpolate(self.cube[0, 0], sample_points) - expected = np.array( - [ - 287.95953369, - 287.9190979, - 287.95550537, - 287.93240356, - 287.83850098, - 287.87869263, - 287.90942383, - 287.9463501, - 287.74365234, - 
287.68856812, - 287.75588989, - 287.54611206, - 287.48522949, - 287.53356934, - 287.60217285, - 287.43795776, - 287.59701538, - 287.52468872, - 287.45025635, - 287.52716064, - ], - dtype=np.float32, - ) - - self.assertCML( - trajectory_cube, ("trajectory", "zigzag.cml"), checksum=False - ) - self.assertArrayAllClose(trajectory_cube.data, expected, rtol=2.0e-7) - - def test_colpex__nearest(self): - # Check a smallish nearest-neighbour interpolation against a result - # snapshot. - test_cube = self.cube[0][0] - # Test points on a regular grid, a bit larger than the source region. - xmin, xmax = [ - fn(test_cube.coord(axis="x").points) for fn in (np.min, np.max) - ] - ymin, ymax = [ - fn(test_cube.coord(axis="x").points) for fn in (np.min, np.max) - ] - fractions = [-0.23, -0.01, 0.27, 0.624, 0.983, 1.052, 1.43] - x_points = [xmin + frac * (xmax - xmin) for frac in fractions] - y_points = [ymin + frac * (ymax - ymin) for frac in fractions] - x_points, y_points = np.meshgrid(x_points, y_points) - sample_points = [ - ("grid_longitude", x_points.flatten()), - ("grid_latitude", y_points.flatten()), - ] - result = traj_interpolate(test_cube, sample_points, method="nearest") - expected = [ - 288.07168579, - 288.07168579, - 287.9367981, - 287.82736206, - 287.78564453, - 287.8374939, - 287.8374939, - 288.07168579, - 288.07168579, - 287.9367981, - 287.82736206, - 287.78564453, - 287.8374939, - 287.8374939, - 288.07168579, - 288.07168579, - 287.9367981, - 287.82736206, - 287.78564453, - 287.8374939, - 287.8374939, - 288.07168579, - 288.07168579, - 287.9367981, - 287.82736206, - 287.78564453, - 287.8374939, - 287.8374939, - 288.07168579, - 288.07168579, - 287.9367981, - 287.82736206, - 287.78564453, - 287.8374939, - 287.8374939, - 288.07168579, - 288.07168579, - 287.9367981, - 287.82736206, - 287.78564453, - 287.8374939, - 287.8374939, - 288.07168579, - 288.07168579, - 287.9367981, - 287.82736206, - 287.78564453, - 287.8374939, - 287.8374939, - ] - 
self.assertArrayAllClose(result.data, expected) - - -@tests.skip_data -class TestTriPolar(tests.IrisTest): - def setUp(self): - # load data - cubes = iris.load( - tests.get_data_path(["NetCDF", "ORCA2", "votemper.nc"]) - ) - cube = cubes[0] - # The netCDF file has different data types for the points and - # bounds of 'depth'. This wasn't previously supported, so we - # emulate that old behaviour. - b32 = cube.coord("depth").bounds.astype(np.float32) - cube.coord("depth").bounds = b32 - self.cube = cube - # define a latitude trajectory (put coords in a different order - # to the cube, just to be awkward) although avoid south pole - # singularity as a sample point and the issue of snapping to - # multi-equidistant closest points from within orca antarctic hole - latitudes = list(range(-80, 90, 2)) - longitudes = [-90] * len(latitudes) - self.sample_points = [ - ("longitude", longitudes), - ("latitude", latitudes), - ] - - def test_tri_polar(self): - # extract - sampled_cube = traj_interpolate( - self.cube, self.sample_points, method="nearest" - ) - self.assertCML( - sampled_cube, ("trajectory", "tri_polar_latitude_slice.cml") - ) - - def test_tri_polar_method_linear_fails(self): - # Try to request linear interpolation. - # Not allowed, as we have multi-dimensional coords. - self.assertRaises( - iris.exceptions.CoordinateMultiDimError, - traj_interpolate, - self.cube, - self.sample_points, - method="linear", - ) - - def test_tri_polar_method_unknown_fails(self): - # Try to request unknown interpolation. - self.assertRaises( - ValueError, - traj_interpolate, - self.cube, - self.sample_points, - method="linekar", - ) - - def test_tri_polar__nearest(self): - # Check a smallish nearest-neighbour interpolation against a result - # snapshot. - test_cube = self.cube - # Use just one 2d layer, just to be faster. - test_cube = test_cube[0][0] - # Fix the fill value of the data to zero, just so that we get the same - # result under numpy < 1.11 as with 1.11. 
- # NOTE: numpy<1.11 *used* to assign missing data points into an - # unmasked array as =0.0, now =fill-value. - # TODO: arguably, we should support masked data properly in the - # interpolation routine. In the legacy code, that is unfortunately - # just not the case. - test_cube.data.fill_value = 0 - - # Test points on a regular global grid, with unrelated steps + offsets - # and an extended range of longitude values. - x_points = np.arange(-185.23, +360.0, 73.123) - y_points = np.arange(-89.12, +90.0, 42.847) - x_points, y_points = np.meshgrid(x_points, y_points) - sample_points = [ - ("longitude", x_points.flatten()), - ("latitude", y_points.flatten()), - ] - result = traj_interpolate(test_cube, sample_points, method="nearest") - expected = [ - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 0.0, - 12.13186264, - 10.69991493, - 9.86881161, - 7.08723927, - 9.04308414, - 12.56258678, - 10.63761806, - 9.19426727, - 28.93525505, - 23.85289955, - 26.94649506, - 0.0, - 27.88831711, - 28.65439224, - 23.39414215, - 26.78363228, - 13.53453922, - 0.0, - 17.41485596, - 0.0, - 0.0, - 13.0413475, - 0.0, - 17.10849571, - -1.67040622, - -1.64783156, - 0.0, - -1.97898054, - -1.67642927, - -1.65173221, - -1.623945, - 0.0, - ] - - self.assertArrayAllClose(result.data, expected) - - -class TestLazyData(tests.IrisTest): - def test_hybrid_height(self): - cube = istk.simple_4d_with_hybrid_height() - # Put a lazy array into the cube so we can test deferred loading. - cube.data = as_lazy_data(cube.data) - - # Use opionated grid-latitudes to avoid the issue of platform - # specific behaviour within SciPy cKDTree choosing a different - # equi-distant nearest neighbour point when there are multiple - # valid candidates. - traj = ( - ("grid_latitude", [20.4, 21.6, 22.6, 23.6]), - ("grid_longitude", [31, 32, 33, 34]), - ) - xsec = traj_interpolate(cube, traj, method="nearest") - - # Check that creating the trajectory hasn't led to the original - # data being loaded. 
- self.assertTrue(cube.has_lazy_data()) - self.assertCML([cube, xsec], ("trajectory", "hybrid_height.cml")) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/integration/um/__init__.py b/lib/iris/tests/integration/um/__init__.py deleted file mode 100644 index a94785ca58..0000000000 --- a/lib/iris/tests/integration/um/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Integration tests for :mod:`iris.fileformats.um` fast load functions.""" diff --git a/lib/iris/tests/integration/um/test_fieldsfile.py b/lib/iris/tests/integration/um/test_fieldsfile.py deleted file mode 100644 index 56b88c2b6d..0000000000 --- a/lib/iris/tests/integration/um/test_fieldsfile.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test the fast loading of structured Fieldsfiles. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip -from iris.cube import CubeList -from iris.fileformats.um import load_cubes as load - - -@tests.skip_data -class TestStructuredLoadFF(tests.IrisTest): - def setUp(self): - self.fname = tests.get_data_path(("FF", "structured", "small")) - - def _merge_cubes(self, cubes): - # Merge the 2D cubes returned by `iris.fileformats.um.load_cubes`. 
- return CubeList(cubes).merge_cube() - - def test_simple(self): - list_of_cubes = list(load(self.fname, None)) - cube = self._merge_cubes(list_of_cubes) - self.assertCML(cube) - - def test_simple_callback(self): - def callback(cube, field, filename): - cube.attributes["processing"] = "fast-ff" - - list_of_cubes = list(load(self.fname, callback=callback)) - cube = self._merge_cubes(list_of_cubes) - self.assertCML(cube) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml index 5bba278059..da315c36af 100644 --- a/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml +++ b/lib/iris/tests/results/COLPEX/small_colpex_theta_p_alt.cml @@ -400,7 +400,7 @@ 0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/> - + + 347926.666667, 347926.833333, 347927.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -923,7 +923,7 @@ 0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64"/> - + + 347926.666667, 347926.833333, 347927.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -1057,7 +1057,7 @@ - + + diff --git a/lib/iris/tests/results/FF/air_temperature_1.cml b/lib/iris/tests/results/FF/air_temperature_1.cml index 267aa88d23..043b9acc16 100644 --- a/lib/iris/tests/results/FF/air_temperature_1.cml +++ b/lib/iris/tests/results/FF/air_temperature_1.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/FF/air_temperature_2.cml b/lib/iris/tests/results/FF/air_temperature_2.cml index 307c58fe72..200a80b54a 100644 --- a/lib/iris/tests/results/FF/air_temperature_2.cml +++ b/lib/iris/tests/results/FF/air_temperature_2.cml @@ 
-11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/FF/soil_temperature_1.cml b/lib/iris/tests/results/FF/soil_temperature_1.cml index e555a3f5b9..57303636c1 100644 --- a/lib/iris/tests/results/FF/soil_temperature_1.cml +++ b/lib/iris/tests/results/FF/soil_temperature_1.cml @@ -11,7 +11,7 @@ - + + diff --git a/lib/iris/tests/results/FF/surface_altitude_1.cml b/lib/iris/tests/results/FF/surface_altitude_1.cml index 27cfad3d09..2669624d37 100644 --- a/lib/iris/tests/results/FF/surface_altitude_1.cml +++ b/lib/iris/tests/results/FF/surface_altitude_1.cml @@ -11,7 +11,7 @@ - + + diff --git a/lib/iris/tests/results/abf/load.cml b/lib/iris/tests/results/abf/load.cml index e470cbebf3..e7954ab229 100644 --- a/lib/iris/tests/results/abf/load.cml +++ b/lib/iris/tests/results/abf/load.cml @@ -30,7 +30,7 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/easy.cml b/lib/iris/tests/results/analysis/aggregated_by/easy.cml index c4edb9484f..d02c3f12d1 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/easy.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/easy.cml @@ -18,6 +18,6 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml index 2f8f1e73d7..dc9bdd0df8 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/multi_missing.cml @@ -36,6 +36,6 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml index e6b95e3cbc..51e1ae4ff1 100644 --- a/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml +++ b/lib/iris/tests/results/analysis/aggregated_by/single_missing.cml @@ -31,6 +31,6 @@ - + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml new file mode 100644 index 
0000000000..8c434479c9 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_easy.cml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml new file mode 100644 index 0000000000..cca744ff87 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi.cml @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml new file mode 100644 index 0000000000..8c11bdb505 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_missing.cml @@ -0,0 +1,41 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml new file mode 100644 index 0000000000..ab7a7195fd --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_multi_shared.cml @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml new file mode 100644 index 0000000000..d5bb9775fe --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single.cml @@ -0,0 +1,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml new file mode 100644 index 0000000000..f7d57a9828 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_missing.cml @@ -0,0 +1,36 @@ + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml new file mode 100644 index 0000000000..50a2c44a98 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared.cml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml new file mode 100644 index 0000000000..657fb43414 --- /dev/null +++ b/lib/iris/tests/results/analysis/aggregated_by/weighted_single_shared_circular.cml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/areaweights_original.cml b/lib/iris/tests/results/analysis/areaweights_original.cml index 3c33ef500a..651bb648dd 100644 --- a/lib/iris/tests/results/analysis/areaweights_original.cml +++ b/lib/iris/tests/results/analysis/areaweights_original.cml @@ -10,7 +10,7 @@ - + @@ -26,7 +26,7 @@ - + diff --git a/lib/iris/tests/results/analysis/gmean_latitude.cml b/lib/iris/tests/results/analysis/gmean_latitude.cml index ebe22c54f3..26b7fdc8af 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml b/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml index 3cd6a93948..94ed36ac88 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 
319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml index cc7b3133e0..1db977312b 100644 --- a/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/gmean_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/hmean_latitude.cml b/lib/iris/tests/results/analysis/hmean_latitude.cml index d953f0e4d9..70e3fcb540 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml b/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml index 43700b083c..f762fd643b 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml index e17383ff64..369dca3203 100644 --- a/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/hmean_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 
319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_latitude.cml b/lib/iris/tests/results/analysis/max_latitude.cml index faa54fff8a..89542d27d3 100644 --- a/lib/iris/tests/results/analysis/max_latitude.cml +++ b/lib/iris/tests/results/analysis/max_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_latitude_longitude.cml b/lib/iris/tests/results/analysis/max_latitude_longitude.cml index 8437e8f4a1..7d24ca7f14 100644 --- a/lib/iris/tests/results/analysis/max_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/max_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml index 5b6504dfb1..b4d1e0349c 100644 --- a/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/max_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/max_run_bar_2d.cml b/lib/iris/tests/results/analysis/max_run_bar_2d.cml new file mode 100644 index 0000000000..32a8a377be --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_bar_2d.cml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml 
b/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml new file mode 100644 index 0000000000..32a8a377be --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_bar_2d_masked.cml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/max_run_foo_1d.cml b/lib/iris/tests/results/analysis/max_run_foo_1d.cml new file mode 100644 index 0000000000..b2a3bcef56 --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_foo_1d.cml @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/max_run_foo_2d.cml b/lib/iris/tests/results/analysis/max_run_foo_2d.cml new file mode 100644 index 0000000000..fb8448136f --- /dev/null +++ b/lib/iris/tests/results/analysis/max_run_foo_2d.cml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/analysis/mean_latitude.cml b/lib/iris/tests/results/analysis/mean_latitude.cml index fcf2ef55be..80921e762d 100644 --- a/lib/iris/tests/results/analysis/mean_latitude.cml +++ b/lib/iris/tests/results/analysis/mean_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/mean_latitude_longitude.cml b/lib/iris/tests/results/analysis/mean_latitude_longitude.cml index 5cb139be1a..6ac9400a3a 100644 --- a/lib/iris/tests/results/analysis/mean_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/mean_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml index 573fa1c694..affcf07c07 100644 --- a/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml 
+++ b/lib/iris/tests/results/analysis/mean_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/median_latitude.cml b/lib/iris/tests/results/analysis/median_latitude.cml index 49006c9592..bbf3875688 100644 --- a/lib/iris/tests/results/analysis/median_latitude.cml +++ b/lib/iris/tests/results/analysis/median_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/median_latitude_longitude.cml b/lib/iris/tests/results/analysis/median_latitude_longitude.cml index 49ec42b391..5663f6d65f 100644 --- a/lib/iris/tests/results/analysis/median_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/median_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml index 036c6bb2f9..c0c0d7c46b 100644 --- a/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/median_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/min_latitude.cml b/lib/iris/tests/results/analysis/min_latitude.cml index 34a2dc5548..bf20be30a9 100644 --- a/lib/iris/tests/results/analysis/min_latitude.cml +++ 
b/lib/iris/tests/results/analysis/min_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/min_latitude_longitude.cml b/lib/iris/tests/results/analysis/min_latitude_longitude.cml index 76c7e96bce..3792645582 100644 --- a/lib/iris/tests/results/analysis/min_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/min_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml index 6b484eb591..b43231b7e6 100644 --- a/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/min_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/original.cml b/lib/iris/tests/results/analysis/original.cml index 23129095b6..414de1b6b5 100644 --- a/lib/iris/tests/results/analysis/original.cml +++ b/lib/iris/tests/results/analysis/original.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/original_common.cml b/lib/iris/tests/results/analysis/original_common.cml index c1759c12bd..bbfa48d7d8 100644 --- a/lib/iris/tests/results/analysis/original_common.cml +++ b/lib/iris/tests/results/analysis/original_common.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 
319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/original_hmean.cml b/lib/iris/tests/results/analysis/original_hmean.cml index 952cede1c2..bdc145022c 100644 --- a/lib/iris/tests/results/analysis/original_hmean.cml +++ b/lib/iris/tests/results/analysis/original_hmean.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml b/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml index dc1fee2f2b..1ac69490b4 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_masked_altitude.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml b/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml index 6fdbe7df00..eb9adb4aef 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_partial_overlap.cml @@ -99,7 +99,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset.cml b/lib/iris/tests/results/analysis/regrid/linear_subset.cml index d9b80dd86b..9bd62287fe 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" 
units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml index d9b80dd86b..9bd62287fe 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_1.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml index d9b80dd86b..9bd62287fe 100644 --- a/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml +++ b/lib/iris/tests/results/analysis/regrid/linear_subset_masked_2.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml b/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml index b2aec5e891..a1cff2363e 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_masked_altitude.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml b/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml index f6647aa426..98a0b6b805 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_partial_overlap.cml @@ -99,7 +99,7 @@ + 347921.666667, 347921.833333]" 
shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset.cml index 7e12c9be60..a704cbecbb 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml index 7e12c9be60..a704cbecbb 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_1.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml index 7e12c9be60..a704cbecbb 100644 --- a/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml +++ b/lib/iris/tests/results/analysis/regrid/nearest_subset_masked_2.cml @@ -107,7 +107,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/regrid/no_overlap.cml b/lib/iris/tests/results/analysis/regrid/no_overlap.cml index 6aa4d218f8..da2f03f1ee 100644 --- a/lib/iris/tests/results/analysis/regrid/no_overlap.cml +++ b/lib/iris/tests/results/analysis/regrid/no_overlap.cml @@ -99,7 +99,7 @@ + 347921.666667, 347921.833333]" shape="(5,)" standard_name="time" 
units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/rms_latitude.cml b/lib/iris/tests/results/analysis/rms_latitude.cml index e409daed2d..d4b1428fb2 100644 --- a/lib/iris/tests/results/analysis/rms_latitude.cml +++ b/lib/iris/tests/results/analysis/rms_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/rms_latitude_longitude.cml b/lib/iris/tests/results/analysis/rms_latitude_longitude.cml index 9bdc53fbad..4293087847 100644 --- a/lib/iris/tests/results/analysis/rms_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/rms_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml index 89a593d122..9ca1d23b42 100644 --- a/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/rms_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/std_dev_latitude.cml b/lib/iris/tests/results/analysis/std_dev_latitude.cml index 154d5ef587..a45aefeff4 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" 
value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml b/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml index 770ef9a35a..95e8e3694d 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml index a5ce049ca5..f91f6005b7 100644 --- a/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/std_dev_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/sum_latitude.cml b/lib/iris/tests/results/analysis/sum_latitude.cml index 943aa9312f..fbb8460fd8 100644 --- a/lib/iris/tests/results/analysis/sum_latitude.cml +++ b/lib/iris/tests/results/analysis/sum_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/sum_latitude_longitude.cml b/lib/iris/tests/results/analysis/sum_latitude_longitude.cml index 2eff41339b..cb992f3b9d 100644 --- a/lib/iris/tests/results/analysis/sum_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/sum_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff 
--git a/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml index a2a46d2ba8..6171dc516b 100644 --- a/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/sum_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/variance_latitude.cml b/lib/iris/tests/results/analysis/variance_latitude.cml index 437587b00d..5b55731396 100644 --- a/lib/iris/tests/results/analysis/variance_latitude.cml +++ b/lib/iris/tests/results/analysis/variance_latitude.cml @@ -11,7 +11,7 @@ - + @@ -27,7 +27,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/variance_latitude_longitude.cml b/lib/iris/tests/results/analysis/variance_latitude_longitude.cml index 391ab8834e..359e40ef8a 100644 --- a/lib/iris/tests/results/analysis/variance_latitude_longitude.cml +++ b/lib/iris/tests/results/analysis/variance_latitude_longitude.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml b/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml index 535468acfc..0345eac77b 100644 --- a/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml +++ b/lib/iris/tests/results/analysis/variance_latitude_longitude_1call.cml @@ -11,7 +11,7 @@ - + @@ -26,7 +26,7 @@ + 319544.0, 319545.0]" shape="(10,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" 
value_type="float64"/> diff --git a/lib/iris/tests/results/analysis/weighted_mean_original.cml b/lib/iris/tests/results/analysis/weighted_mean_original.cml index 2df84a8606..a69e633e26 100644 --- a/lib/iris/tests/results/analysis/weighted_mean_original.cml +++ b/lib/iris/tests/results/analysis/weighted_mean_original.cml @@ -10,7 +10,7 @@ - + - + diff --git a/lib/iris/tests/results/categorisation/customcheck.cml b/lib/iris/tests/results/categorisation/customcheck.cml index d6dcc7179d..476a1c56ef 100644 --- a/lib/iris/tests/results/categorisation/customcheck.cml +++ b/lib/iris/tests/results/categorisation/customcheck.cml @@ -19,7 +19,7 @@ + 513, 540, 567, 594]" shape="(23,)" standard_name="time" units="Unit('days since 1970-01-01 00:00:00', calendar='standard')" value_type="int32"/> diff --git a/lib/iris/tests/results/categorisation/quickcheck.cml b/lib/iris/tests/results/categorisation/quickcheck.cml index f64c70350f..b8f3904ad1 100644 --- a/lib/iris/tests/results/categorisation/quickcheck.cml +++ b/lib/iris/tests/results/categorisation/quickcheck.cml @@ -68,7 +68,7 @@ + 513, 540, 567, 594]" shape="(23,)" standard_name="time" units="Unit('days since 1970-01-01 00:00:00', calendar='standard')" value_type="int32"/> diff --git a/lib/iris/tests/results/cdm/extract/lat_eq_10.cml b/lib/iris/tests/results/cdm/extract/lat_eq_10.cml index f10c0be37c..e7213fc7bd 100644 --- a/lib/iris/tests/results/cdm/extract/lat_eq_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_eq_10.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cdm/extract/lat_gt_10.cml b/lib/iris/tests/results/cdm/extract/lat_gt_10.cml index e0d138f327..3ffbbf89e5 100644 --- a/lib/iris/tests/results/cdm/extract/lat_gt_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_gt_10.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml 
b/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml index 3b435e9ceb..7091aee748 100644 --- a/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml +++ b/lib/iris/tests/results/cdm/extract/lat_gt_10_and_lon_ge_10.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/cdm/masked_cube.cml b/lib/iris/tests/results/cdm/masked_cube.cml index a38340913e..dcfa8c062f 100644 --- a/lib/iris/tests/results/cdm/masked_cube.cml +++ b/lib/iris/tests/results/cdm/masked_cube.cml @@ -10,7 +10,7 @@ - + + 1000.0, 1006.0]" shape="(8,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/constrained_load/all_10_load_match.cml b/lib/iris/tests/results/constrained_load/all_10_load_match.cml index 6a582f9d67..0712af20fa 100644 --- a/lib/iris/tests/results/constrained_load/all_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/all_10_load_match.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -62,7 +62,7 @@ - + @@ -92,7 +92,7 @@ - + @@ -113,7 +113,7 @@ - + @@ -144,7 +144,7 @@ - + @@ -165,7 +165,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml b/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml index 458474f98a..20971021ac 100644 --- a/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml +++ b/lib/iris/tests/results/constrained_load/all_ml_10_22_load_match.cml @@ -11,7 +11,7 @@ - + @@ -43,7 +43,7 @@ [0.222443, 0.177555]]" id="a5c170db" long_name="sigma" points="[0.784571, 0.199878]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + @@ -64,7 +64,7 @@ - + @@ -96,7 +96,7 @@ [0.222443, 0.177555]]" id="a5c170db" long_name="sigma" points="[0.784571, 0.199878]" shape="(2,)" units="Unit('1')" value_type="float32"/> - + @@ -117,7 +117,7 @@ - + @@ -150,7 +150,7 @@ [0.246215, 0.199878]]" id="a5c170db" long_name="sigma" points="[0.803914, 0.222443]" shape="(2,)" 
units="Unit('1')" value_type="float32"/> - + @@ -171,7 +171,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/attribute_constraint.cml b/lib/iris/tests/results/constrained_load/attribute_constraint.cml index 31714035fa..664dc943bc 100644 --- a/lib/iris/tests/results/constrained_load/attribute_constraint.cml +++ b/lib/iris/tests/results/constrained_load/attribute_constraint.cml @@ -12,7 +12,7 @@ - + @@ -131,7 +131,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml index bbafc31987..44e7d077df 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_match.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -62,7 +62,7 @@ - + @@ -115,7 +115,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml index bbafc31987..44e7d077df 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_and_theta_level_gt_30_le_3_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -62,7 +62,7 @@ - + @@ -115,7 +115,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_load_match.cml b/lib/iris/tests/results/constrained_load/theta_10_load_match.cml index 2e5005d042..e2852d0151 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_load_match.cml +++ 
b/lib/iris/tests/results/constrained_load/theta_10_load_match.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + diff --git a/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml index 2e5005d042..e2852d0151 100644 --- a/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_10_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml b/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml index 40bb37f3ab..772929b0da 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_all_10_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -181,7 +181,7 @@ - + @@ -202,7 +202,7 @@ - + @@ -232,7 +232,7 @@ - + @@ -253,7 +253,7 @@ - + @@ -284,7 +284,7 @@ - + @@ -305,7 +305,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml index 03fed4e61b..0e23de090c 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_10_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -181,7 +181,7 @@ - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml index eadbe8f365..a175652c30 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml +++ 
b/lib/iris/tests/results/constrained_load/theta_and_theta_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -270,7 +270,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml index eadbe8f365..a175652c30 100644 --- a/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_and_theta_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -270,7 +270,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml index 77534b9b55..0048a742a6 100644 --- a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_match.cml @@ -11,7 +11,7 @@ - + @@ -64,7 +64,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml index 77534b9b55..0048a742a6 100644 --- a/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_gt_30_le_3_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -64,7 +64,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(11,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml 
b/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml index f6727427a1..e24937854d 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_30_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml index f6727427a1..e24937854d 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_30_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml index daef7ba9dc..218bdd6b1c 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_match.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml index daef7ba9dc..218bdd6b1c 100644 --- a/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_lat_gt_30_load_strict.cml @@ -11,7 +11,7 @@ - + - + diff --git a/lib/iris/tests/results/constrained_load/theta_load_match.cml b/lib/iris/tests/results/constrained_load/theta_load_match.cml index 293e40cc3a..0e5b02be51 100644 --- a/lib/iris/tests/results/constrained_load/theta_load_match.cml +++ b/lib/iris/tests/results/constrained_load/theta_load_match.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" 
shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/constrained_load/theta_load_strict.cml b/lib/iris/tests/results/constrained_load/theta_load_strict.cml index 293e40cc3a..0e5b02be51 100644 --- a/lib/iris/tests/results/constrained_load/theta_load_strict.cml +++ b/lib/iris/tests/results/constrained_load/theta_load_strict.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/coord_api/intersection_reversed.xml b/lib/iris/tests/results/coord_api/intersection_reversed.xml index b966a09b54..b489f95451 100644 --- a/lib/iris/tests/results/coord_api/intersection_reversed.xml +++ b/lib/iris/tests/results/coord_api/intersection_reversed.xml @@ -1,9 +1,9 @@ - + diff --git a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt index 6b95b57215..410da3613a 100644 --- a/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt +++ b/lib/iris/tests/results/coord_api/str_repr/dim_time_str.txt @@ -1,4 +1,4 @@ -DimCoord : time / (hours since 1970-01-01 00:00:00, gregorian calendar) +DimCoord : time / (hours since 1970-01-01 00:00:00, standard calendar) points: [ 2009-09-09 17:10:00, 2009-09-09 17:20:00, 2009-09-09 17:30:00, 2009-09-09 17:40:00, 2009-09-09 17:50:00, 2009-09-09 18:00:00] diff --git a/lib/iris/tests/results/coord_systems/Mercator.xml b/lib/iris/tests/results/coord_systems/Mercator.xml index e8036ef824..4ea768b41e 100644 --- a/lib/iris/tests/results/coord_systems/Mercator.xml +++ b/lib/iris/tests/results/coord_systems/Mercator.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/coord_systems/PolarStereographic.xml b/lib/iris/tests/results/coord_systems/PolarStereographic.xml new file mode 100644 index 0000000000..85abfc892f --- /dev/null +++ b/lib/iris/tests/results/coord_systems/PolarStereographic.xml @@ -0,0 +1,2 @@ + + diff --git 
a/lib/iris/tests/results/coord_systems/PolarStereographicScaleFactor.xml b/lib/iris/tests/results/coord_systems/PolarStereographicScaleFactor.xml new file mode 100644 index 0000000000..2fc1554cd7 --- /dev/null +++ b/lib/iris/tests/results/coord_systems/PolarStereographicScaleFactor.xml @@ -0,0 +1,2 @@ + + diff --git a/lib/iris/tests/results/coord_systems/PolarStereographicStandardParallel.xml b/lib/iris/tests/results/coord_systems/PolarStereographicStandardParallel.xml new file mode 100644 index 0000000000..de7b5f902c --- /dev/null +++ b/lib/iris/tests/results/coord_systems/PolarStereographicStandardParallel.xml @@ -0,0 +1,2 @@ + + diff --git a/lib/iris/tests/results/coord_systems/Stereographic.xml b/lib/iris/tests/results/coord_systems/Stereographic.xml index bb12cd94cc..fb338a8e4d 100644 --- a/lib/iris/tests/results/coord_systems/Stereographic.xml +++ b/lib/iris/tests/results/coord_systems/Stereographic.xml @@ -1,2 +1,2 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml index 458b9bf908..463339e5bc 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_longitude_dual_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml index a2f12b0b27..a91ea4ce5c 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_longitude_single_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', 
calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml index 60539d5960..f963658910 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml index 466d0dd8cd..195757a417 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_model_level_number_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml index 12bf9270d1..c63c260d25 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_time_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml index 9d1070140b..d6cc708aa1 100644 --- a/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/latitude_time_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" 
shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml index 4cd9da34f0..23739a1ac5 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_latitude_dual_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml index dd87dc175b..817b855512 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_latitude_single_stage.cml @@ -82,7 +82,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml index 16ea40c33e..29d59ce111 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml index b01ede7936..e99d57b816 100644 --- 
a/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_model_level_number_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml index 8d38bb748c..8e57ec7258 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_time_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml index f4589831a8..67b706e0ae 100644 --- a/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/longitude_time_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml index 138e0207c7..d9c1b2a35c 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml index 
0e2cf8ef23..ceafb3fc67 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_latitude_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml index bbc8272c65..e5090a3572 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_dual_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml index ba5cd7a171..9e8bdebd4a 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_longitude_single_stage.cml @@ -44,7 +44,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml index b835be4057..a4e0cc1445 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_time_dual_stage.cml @@ -50,7 +50,7 @@ - + diff --git 
a/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml b/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml index 93196268e7..d442637062 100644 --- a/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/model_level_number_time_single_stage.cml @@ -50,7 +50,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/original.cml b/lib/iris/tests/results/cube_collapsed/original.cml index 10a81f21d2..4bc6553dba 100644 --- a/lib/iris/tests/results/cube_collapsed/original.cml +++ b/lib/iris/tests/results/cube_collapsed/original.cml @@ -96,7 +96,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml index a4f2cc6084..788d0d8029 100644 --- a/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_latitude_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml index 885328a856..b9b74c6b6d 100644 --- a/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_latitude_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml index 273ad909d9..84b4fea150 100644 --- a/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml +++ 
b/lib/iris/tests/results/cube_collapsed/time_longitude_dual_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml index c2e2993874..128d29a281 100644 --- a/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_longitude_single_stage.cml @@ -88,7 +88,7 @@ 0.0, 0.0]" shape="(70,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml b/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml index 4d6e85f8a8..8c206fe840 100644 --- a/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_model_level_number_dual_stage.cml @@ -50,7 +50,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml b/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml index 8f7ccf9b8a..08dc52fca2 100644 --- a/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml +++ b/lib/iris/tests/results/cube_collapsed/time_model_level_number_single_stage.cml @@ -50,7 +50,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml b/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml index 33b35b7eaa..5fae922867 100644 --- a/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml +++ b/lib/iris/tests/results/cube_collapsed/triple_collapse_lat_ml_pt.cml @@ -43,7 +43,7 @@ - + diff --git a/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml b/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml index c3db78bd9e..454bd29a18 100644 --- 
a/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml +++ b/lib/iris/tests/results/cube_collapsed/triple_collapse_ml_pt_lon.cml @@ -43,7 +43,7 @@ - + diff --git a/lib/iris/tests/results/cube_io/pickling/cubelist.cml b/lib/iris/tests/results/cube_io/pickling/cubelist.cml index 6cebe384aa..eb839e36e4 100644 --- a/lib/iris/tests/results/cube_io/pickling/cubelist.cml +++ b/lib/iris/tests/results/cube_io/pickling/cubelist.cml @@ -400,7 +400,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -528,7 +528,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_io/pickling/single_cube.cml b/lib/iris/tests/results/cube_io/pickling/single_cube.cml index 2cd3dbb3cb..a025713766 100644 --- a/lib/iris/tests/results/cube_io/pickling/single_cube.cml +++ b/lib/iris/tests/results/cube_io/pickling/single_cube.cml @@ -400,7 +400,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/cube_io/pickling/theta.cml b/lib/iris/tests/results/cube_io/pickling/theta.cml index 
39ee6aecfd..6c69f6ed54 100644 --- a/lib/iris/tests/results/cube_io/pickling/theta.cml +++ b/lib/iris/tests/results/cube_io/pickling/theta.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/cube_io/pp/load/global.cml b/lib/iris/tests/results/cube_io/pp/load/global.cml index 2df84a8606..a69e633e26 100644 --- a/lib/iris/tests/results/cube_io/pp/load/global.cml +++ b/lib/iris/tests/results/cube_io/pp/load/global.cml @@ -10,7 +10,7 @@ - + - + diff --git a/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml b/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml index 3f9e5fef9e..f272cebeb1 100644 --- a/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml +++ b/lib/iris/tests/results/cube_slice/2d_intersect_and_reverse.cml @@ -9,15 +9,15 @@ - + - + - + @@ -24,7 +24,7 @@ - + diff --git a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml index 9e5b5a57db..b1bf424a93 100644 --- a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml +++ b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing2.cml @@ -11,7 +11,7 @@ - + @@ -24,7 +24,7 @@ - + diff --git a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml index 061255bbe4..50fd683cb3 100644 --- a/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml +++ b/lib/iris/tests/results/cube_slice/real_data_dual_tuple_indexing3.cml @@ -11,7 +11,7 @@ - + @@ -24,7 +24,7 @@ - + diff --git a/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml b/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml index 2f899b333e..1563dce74d 100644 --- a/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml +++ 
b/lib/iris/tests/results/cube_slice/real_empty_data_indexing.cml @@ -11,7 +11,7 @@ - + @@ -25,7 +25,7 @@ - + diff --git a/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml b/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml index 1c1e58c02b..5b7d800716 100644 --- a/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml +++ b/lib/iris/tests/results/cube_to_pp/no_forecast_period.cml @@ -3,7 +3,7 @@ - + @@ -16,7 +16,7 @@ - + diff --git a/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml b/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml index 02d380a097..edf4392d30 100644 --- a/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml +++ b/lib/iris/tests/results/cube_to_pp/no_forecast_time.cml @@ -13,7 +13,7 @@ - + diff --git a/lib/iris/tests/results/derived/column.cml b/lib/iris/tests/results/derived/column.cml index e4402b4e4d..827214dafa 100644 --- a/lib/iris/tests/results/derived/column.cml +++ b/lib/iris/tests/results/derived/column.cml @@ -111,7 +111,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/derived/no_orog.cml b/lib/iris/tests/results/derived/no_orog.cml index ec0ffdd5ff..844373675e 100644 --- a/lib/iris/tests/results/derived/no_orog.cml +++ b/lib/iris/tests/results/derived/no_orog.cml @@ -136,7 +136,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/derived/removed_derived_coord.cml b/lib/iris/tests/results/derived/removed_derived_coord.cml index 12feb2b643..5175d88875 100644 --- a/lib/iris/tests/results/derived/removed_derived_coord.cml +++ b/lib/iris/tests/results/derived/removed_derived_coord.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours 
since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/derived/removed_orog.cml b/lib/iris/tests/results/derived/removed_orog.cml index 4c30ec69bc..982e38fd1e 100644 --- a/lib/iris/tests/results/derived/removed_orog.cml +++ b/lib/iris/tests/results/derived/removed_orog.cml @@ -122,7 +122,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/derived/removed_sigma.cml b/lib/iris/tests/results/derived/removed_sigma.cml index ea34680b7d..3908c22188 100644 --- a/lib/iris/tests/results/derived/removed_sigma.cml +++ b/lib/iris/tests/results/derived/removed_sigma.cml @@ -462,7 +462,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/derived/transposed.cml b/lib/iris/tests/results/derived/transposed.cml index eef077d774..c44857bd61 100644 --- a/lib/iris/tests/results/derived/transposed.cml +++ b/lib/iris/tests/results/derived/transposed.cml @@ -498,7 +498,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml index 1bb899c558..c7200d6106 100644 --- a/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml +++ b/lib/iris/tests/results/experimental/analysis/interpolate/LinearInterpolator/orthogonal_cube_with_factory.cml @@ -31,7 +31,7 @@ - + diff --git 
a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml index 585657b642..cc9deb4260 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lat_cross_section.cml @@ -5,6 +5,60 @@ + + + + + + + @@ -65,8 +119,14 @@ [0.993097, 0.989272], [0.989272, 0.984692]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482, 0.991375, 0.987171]" shape="(5,)" units="Unit('1')" value_type="float32"/> + + + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml index 4e928851fe..fb3d2cdbcf 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml +++ b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/const_lon_cross_section.cml @@ -5,6 +5,65 @@ + + + + + + + @@ -59,8 +118,13 @@ [0.993097, 0.989272], [0.989272, 0.984692]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482, 0.991375, 0.987171]" shape="(5,)" units="Unit('1')" value_type="float32"/> + + + - + diff --git a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml index 31a753c059..70df0e198d 100644 --- a/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml +++ 
b/lib/iris/tests/results/experimental/regrid/regrid_area_weighted_rectilinear_src_and_grid/hybridheight.cml @@ -429,7 +429,7 @@ 218.732, 216.367]]" shape="(16, 21)" standard_name="surface_altitude" units="Unit('m')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml index be79f3ff57..7422bfe044 100644 --- a/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/2D_1t_face_half_levels.cml @@ -20,8 +20,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml index 568c835e97..f9e0511ccb 100644 --- a/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/2D_72t_face_half_levels.cml @@ -20,8 +20,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> + 21300.0, 21600.0]" shape="(72,)" standard_name="time" units="Unit('seconds since 2016-01-01 15:00:00', calendar='standard')" value_type="float64" var_name="time_instant"> diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml index 6d7873daaa..2fb8b6e1f0 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_full_levels.cml @@ -31,8 +31,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_full_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_full_levels_face_x"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml index b664e3cf6f..9a819eee9e 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_1t_face_half_levels.cml @@ -31,8 +31,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml index b30d443495..9133d98e73 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_snow_pseudo_levels.cml @@ -20,8 +20,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml index 157755298d..05aeab9ccb 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_soil_pseudo_levels.cml @@ -20,8 +20,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml index a9eba1a80d..9dc3e08ee6 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_tile_pseudo_levels.cml @@ -20,8 +20,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml index e90c048803..7bb47c5296 100644 --- a/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml +++ b/lib/iris/tests/results/experimental/ugrid/3D_veg_pseudo_levels.cml @@ -20,8 +20,8 @@ ..., [-42.7342, -40.8934, -46.161, -48.912], [-40.8934, -38.4268, -42.6612, -46.161], - [-38.4268, -35.2644, -38.4268, -42.6612]]" id="21594c35" long_name="Latitude of mesh nodes." points="[33.4328, 36.1226, 38.2012, ..., -44.791, - -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-38.4268, -35.2644, -38.4268, -42.6612]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[33.4328, 36.1226, 38.2012, ..., -44.791, + -42.1583, -38.815]" shape="(864,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [-127.5, -135.0, -142.5, -135.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." 
points="[-41.3152, -33.8068, -26.296, ..., -119.377, + -127.321, -135.0]" shape="(864,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> - + diff --git a/lib/iris/tests/results/experimental/ugrid/surface_mean.cml b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml index 368b3508e3..8ccd602c11 100644 --- a/lib/iris/tests/results/experimental/ugrid/surface_mean.cml +++ b/lib/iris/tests/results/experimental/ugrid/surface_mean.cml @@ -20,8 +20,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -71,8 +71,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -122,8 +122,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -173,8 +173,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -224,8 +224,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -275,8 +275,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -326,8 +326,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -377,8 +377,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -428,8 +428,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -479,8 +479,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -530,8 +530,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -581,8 +581,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -632,8 +632,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -683,8 +683,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -734,8 +734,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -785,8 +785,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -836,8 +836,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> @@ -887,8 +887,8 @@ ..., [-37.7044, -36.9373, -37.9318, -38.7655], [-36.9373, -36.1244, -37.0517, -37.9318], - [-36.1244, -35.2644, -36.1244, -37.0517]]" id="21594c35" long_name="Latitude of mesh nodes." points="[34.8187, 35.6462, 36.4283, ..., -37.8421, - -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32"/> + [-36.1244, -35.2644, -36.1244, -37.0517]]" id="72da1058" long_name="Characteristic latitude of mesh faces." 
points="[34.8187, 35.6462, 36.4283, ..., -37.8421, + -37.0187, -36.1485]" shape="(13824,)" standard_name="latitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_y"/> + [226.875, 225.0, 223.125, 225.0]]" id="b5c6bdeb" long_name="Characteristic longitude of mesh faces." points="[315.933, 317.808, 319.683, ..., 228.759, + 226.878, 225.0]" shape="(13824,)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="Mesh2d_half_levels_face_x"/> diff --git a/lib/iris/tests/results/file_load/theta_levels.cml b/lib/iris/tests/results/file_load/theta_levels.cml index b4ae2a4b35..fc708b7949 100644 --- a/lib/iris/tests/results/file_load/theta_levels.cml +++ b/lib/iris/tests/results/file_load/theta_levels.cml @@ -11,7 +11,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -62,7 +62,7 @@ - + @@ -92,7 +92,7 @@ - + @@ -113,7 +113,7 @@ - + @@ -143,7 +143,7 @@ - + @@ -164,7 +164,7 @@ - + @@ -194,7 +194,7 @@ - + @@ -215,7 +215,7 @@ - + @@ -245,7 +245,7 @@ - + @@ -266,7 +266,7 @@ - + @@ -296,7 +296,7 @@ - + @@ -317,7 +317,7 @@ - + @@ -347,7 +347,7 @@ - + @@ -368,7 +368,7 @@ - + @@ -398,7 +398,7 @@ - + @@ -419,7 +419,7 @@ - + @@ -449,7 +449,7 @@ - + @@ -470,7 +470,7 @@ - + @@ -500,7 +500,7 @@ - + @@ -521,7 +521,7 @@ - + @@ -551,7 +551,7 @@ - + @@ -572,7 +572,7 @@ - + @@ -602,7 +602,7 @@ - + @@ -623,7 +623,7 @@ - + @@ -653,7 +653,7 @@ - + @@ -674,7 +674,7 @@ - + @@ -704,7 +704,7 @@ - + @@ -725,7 +725,7 @@ - + @@ -755,7 +755,7 @@ - + @@ -776,7 +776,7 @@ - + @@ -806,7 +806,7 @@ - + @@ -827,7 +827,7 @@ - + @@ -857,7 +857,7 @@ - + @@ -878,7 +878,7 @@ - + @@ -908,7 +908,7 @@ - + @@ -929,7 +929,7 @@ - + @@ -959,7 +959,7 @@ - + @@ -980,7 +980,7 @@ - + @@ -1010,7 +1010,7 @@ - + @@ -1031,7 +1031,7 @@ - + @@ -1061,7 +1061,7 @@ - + @@ -1082,7 +1082,7 @@ - + @@ -1112,7 +1112,7 @@ - + @@ -1133,7 +1133,7 @@ - + @@ -1163,7 +1163,7 @@ - + @@ -1184,7 +1184,7 @@ - + @@ -1214,7 +1214,7 @@ - + @@ -1235,7 +1235,7 @@ - + @@ -1265,7 +1265,7 @@ - + @@ -1286,7 +1286,7 @@ - 
+ @@ -1316,7 +1316,7 @@ - + @@ -1337,7 +1337,7 @@ - + @@ -1367,7 +1367,7 @@ - + @@ -1388,7 +1388,7 @@ - + @@ -1418,7 +1418,7 @@ - + @@ -1439,7 +1439,7 @@ - + @@ -1469,7 +1469,7 @@ - + @@ -1490,7 +1490,7 @@ - + @@ -1520,7 +1520,7 @@ - + @@ -1541,7 +1541,7 @@ - + @@ -1571,7 +1571,7 @@ - + @@ -1592,7 +1592,7 @@ - + @@ -1622,7 +1622,7 @@ - + @@ -1643,7 +1643,7 @@ - + @@ -1673,7 +1673,7 @@ - + @@ -1694,7 +1694,7 @@ - + @@ -1724,7 +1724,7 @@ - + @@ -1745,7 +1745,7 @@ - + @@ -1775,7 +1775,7 @@ - + @@ -1796,7 +1796,7 @@ - + @@ -1826,7 +1826,7 @@ - + @@ -1847,7 +1847,7 @@ - + @@ -1877,7 +1877,7 @@ - + @@ -1898,7 +1898,7 @@ - + @@ -1928,7 +1928,7 @@ - + diff --git a/lib/iris/tests/results/file_load/u_wind_levels.cml b/lib/iris/tests/results/file_load/u_wind_levels.cml index 68a3b45f07..5d1af58f6c 100644 --- a/lib/iris/tests/results/file_load/u_wind_levels.cml +++ b/lib/iris/tests/results/file_load/u_wind_levels.cml @@ -11,7 +11,7 @@ - + @@ -42,7 +42,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -94,7 +94,7 @@ - + @@ -115,7 +115,7 @@ - + @@ -146,7 +146,7 @@ - + @@ -167,7 +167,7 @@ - + @@ -198,7 +198,7 @@ - + @@ -219,7 +219,7 @@ - + @@ -250,7 +250,7 @@ - + @@ -271,7 +271,7 @@ - + @@ -302,7 +302,7 @@ - + @@ -323,7 +323,7 @@ - + @@ -354,7 +354,7 @@ - + @@ -375,7 +375,7 @@ - + @@ -406,7 +406,7 @@ - + @@ -427,7 +427,7 @@ - + @@ -458,7 +458,7 @@ - + @@ -479,7 +479,7 @@ - + @@ -510,7 +510,7 @@ - + @@ -531,7 +531,7 @@ - + @@ -562,7 +562,7 @@ - + @@ -583,7 +583,7 @@ - + @@ -614,7 +614,7 @@ - + @@ -635,7 +635,7 @@ - + @@ -666,7 +666,7 @@ - + @@ -687,7 +687,7 @@ - + @@ -718,7 +718,7 @@ - + @@ -739,7 +739,7 @@ - + @@ -770,7 +770,7 @@ - + @@ -791,7 +791,7 @@ - + @@ -822,7 +822,7 @@ - + @@ -843,7 +843,7 @@ - + @@ -874,7 +874,7 @@ - + @@ -895,7 +895,7 @@ - + @@ -926,7 +926,7 @@ - + @@ -947,7 +947,7 @@ - + @@ -978,7 +978,7 @@ - + @@ -999,7 +999,7 @@ - + @@ -1030,7 +1030,7 @@ - + @@ -1051,7 +1051,7 @@ - + @@ -1082,7 +1082,7 @@ - + @@ -1103,7 +1103,7 @@ - + @@ -1134,7 +1134,7 @@ - + @@ -1155,7 +1155,7 @@ 
- + @@ -1186,7 +1186,7 @@ - + @@ -1207,7 +1207,7 @@ - + @@ -1238,7 +1238,7 @@ - + @@ -1259,7 +1259,7 @@ - + @@ -1290,7 +1290,7 @@ - + @@ -1311,7 +1311,7 @@ - + @@ -1342,7 +1342,7 @@ - + @@ -1363,7 +1363,7 @@ - + @@ -1394,7 +1394,7 @@ - + @@ -1415,7 +1415,7 @@ - + @@ -1446,7 +1446,7 @@ - + @@ -1467,7 +1467,7 @@ - + @@ -1498,7 +1498,7 @@ - + @@ -1519,7 +1519,7 @@ - + @@ -1550,7 +1550,7 @@ - + @@ -1571,7 +1571,7 @@ - + @@ -1602,7 +1602,7 @@ - + @@ -1623,7 +1623,7 @@ - + @@ -1654,7 +1654,7 @@ - + @@ -1675,7 +1675,7 @@ - + @@ -1706,7 +1706,7 @@ - + @@ -1727,7 +1727,7 @@ - + @@ -1758,7 +1758,7 @@ - + @@ -1779,7 +1779,7 @@ - + @@ -1810,7 +1810,7 @@ - + @@ -1831,7 +1831,7 @@ - + @@ -1862,7 +1862,7 @@ - + @@ -1883,7 +1883,7 @@ - + @@ -1914,7 +1914,7 @@ - + @@ -1935,7 +1935,7 @@ - + @@ -1966,7 +1966,7 @@ - + diff --git a/lib/iris/tests/results/file_load/v_wind_levels.cml b/lib/iris/tests/results/file_load/v_wind_levels.cml index 9ccdade1bd..c7145a7e9e 100644 --- a/lib/iris/tests/results/file_load/v_wind_levels.cml +++ b/lib/iris/tests/results/file_load/v_wind_levels.cml @@ -11,7 +11,7 @@ - + - + @@ -63,7 +63,7 @@ - + - + @@ -115,7 +115,7 @@ - + - + @@ -167,7 +167,7 @@ - + - + @@ -219,7 +219,7 @@ - + - + @@ -271,7 +271,7 @@ - + - + @@ -323,7 +323,7 @@ - + - + @@ -375,7 +375,7 @@ - + - + @@ -427,7 +427,7 @@ - + - + @@ -479,7 +479,7 @@ - + - + @@ -531,7 +531,7 @@ - + - + @@ -583,7 +583,7 @@ - + - + @@ -635,7 +635,7 @@ - + - + @@ -687,7 +687,7 @@ - + - + @@ -739,7 +739,7 @@ - + - + @@ -791,7 +791,7 @@ - + - + @@ -843,7 +843,7 @@ - + - + @@ -895,7 +895,7 @@ - + - + @@ -947,7 +947,7 @@ - + - + @@ -999,7 +999,7 @@ - + - + @@ -1051,7 +1051,7 @@ - + - + @@ -1103,7 +1103,7 @@ - + - + @@ -1155,7 +1155,7 @@ - + - + @@ -1207,7 +1207,7 @@ - + - + @@ -1259,7 +1259,7 @@ - + - + @@ -1311,7 +1311,7 @@ - + - + @@ -1363,7 +1363,7 @@ - + - + @@ -1415,7 +1415,7 @@ - + - + @@ -1467,7 +1467,7 @@ - + - + @@ -1519,7 +1519,7 @@ - + - + @@ -1571,7 +1571,7 @@ - + - + @@ -1623,7 +1623,7 @@ - + - + @@ 
-1675,7 +1675,7 @@ - + - + @@ -1727,7 +1727,7 @@ - + - + @@ -1779,7 +1779,7 @@ - + - + @@ -1831,7 +1831,7 @@ - + - + @@ -1883,7 +1883,7 @@ - + - + @@ -1935,7 +1935,7 @@ - + - + diff --git a/lib/iris/tests/results/file_load/wind_levels.cml b/lib/iris/tests/results/file_load/wind_levels.cml index 96d821fc1c..33584deec6 100644 --- a/lib/iris/tests/results/file_load/wind_levels.cml +++ b/lib/iris/tests/results/file_load/wind_levels.cml @@ -11,7 +11,7 @@ - + @@ -42,7 +42,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -94,7 +94,7 @@ - + @@ -115,7 +115,7 @@ - + @@ -146,7 +146,7 @@ - + @@ -167,7 +167,7 @@ - + @@ -198,7 +198,7 @@ - + @@ -219,7 +219,7 @@ - + @@ -250,7 +250,7 @@ - + @@ -271,7 +271,7 @@ - + @@ -302,7 +302,7 @@ - + @@ -323,7 +323,7 @@ - + @@ -354,7 +354,7 @@ - + @@ -375,7 +375,7 @@ - + @@ -406,7 +406,7 @@ - + @@ -427,7 +427,7 @@ - + @@ -458,7 +458,7 @@ - + @@ -479,7 +479,7 @@ - + @@ -510,7 +510,7 @@ - + @@ -531,7 +531,7 @@ - + @@ -562,7 +562,7 @@ - + @@ -583,7 +583,7 @@ - + @@ -614,7 +614,7 @@ - + @@ -635,7 +635,7 @@ - + @@ -666,7 +666,7 @@ - + @@ -687,7 +687,7 @@ - + @@ -718,7 +718,7 @@ - + @@ -739,7 +739,7 @@ - + @@ -770,7 +770,7 @@ - + @@ -791,7 +791,7 @@ - + @@ -822,7 +822,7 @@ - + @@ -843,7 +843,7 @@ - + @@ -874,7 +874,7 @@ - + @@ -895,7 +895,7 @@ - + @@ -926,7 +926,7 @@ - + @@ -947,7 +947,7 @@ - + @@ -978,7 +978,7 @@ - + @@ -999,7 +999,7 @@ - + @@ -1030,7 +1030,7 @@ - + @@ -1051,7 +1051,7 @@ - + @@ -1082,7 +1082,7 @@ - + @@ -1103,7 +1103,7 @@ - + @@ -1134,7 +1134,7 @@ - + @@ -1155,7 +1155,7 @@ - + @@ -1186,7 +1186,7 @@ - + @@ -1207,7 +1207,7 @@ - + @@ -1238,7 +1238,7 @@ - + @@ -1259,7 +1259,7 @@ - + @@ -1290,7 +1290,7 @@ - + @@ -1311,7 +1311,7 @@ - + @@ -1342,7 +1342,7 @@ - + @@ -1363,7 +1363,7 @@ - + @@ -1394,7 +1394,7 @@ - + @@ -1415,7 +1415,7 @@ - + @@ -1446,7 +1446,7 @@ - + @@ -1467,7 +1467,7 @@ - + @@ -1498,7 +1498,7 @@ - + @@ -1519,7 +1519,7 @@ - + @@ -1550,7 +1550,7 @@ - + @@ -1571,7 +1571,7 @@ - + @@ -1602,7 +1602,7 @@ - + @@ -1623,7 +1623,7 @@ - + @@ -1654,7 
+1654,7 @@ - + @@ -1675,7 +1675,7 @@ - + @@ -1706,7 +1706,7 @@ - + @@ -1727,7 +1727,7 @@ - + @@ -1758,7 +1758,7 @@ - + @@ -1779,7 +1779,7 @@ - + @@ -1810,7 +1810,7 @@ - + @@ -1831,7 +1831,7 @@ - + @@ -1862,7 +1862,7 @@ - + @@ -1883,7 +1883,7 @@ - + @@ -1914,7 +1914,7 @@ - + @@ -1935,7 +1935,7 @@ - + @@ -1966,7 +1966,7 @@ - + @@ -1987,7 +1987,7 @@ - + - + @@ -2039,7 +2039,7 @@ - + - + @@ -2091,7 +2091,7 @@ - + - + @@ -2143,7 +2143,7 @@ - + - + @@ -2195,7 +2195,7 @@ - + - + @@ -2247,7 +2247,7 @@ - + - + @@ -2299,7 +2299,7 @@ - + - + @@ -2351,7 +2351,7 @@ - + - + @@ -2403,7 +2403,7 @@ - + - + @@ -2455,7 +2455,7 @@ - + - + @@ -2507,7 +2507,7 @@ - + - + @@ -2559,7 +2559,7 @@ - + - + @@ -2611,7 +2611,7 @@ - + - + @@ -2663,7 +2663,7 @@ - + - + @@ -2715,7 +2715,7 @@ - + - + @@ -2767,7 +2767,7 @@ - + - + @@ -2819,7 +2819,7 @@ - + - + @@ -2871,7 +2871,7 @@ - + - + @@ -2923,7 +2923,7 @@ - + - + @@ -2975,7 +2975,7 @@ - + - + @@ -3027,7 +3027,7 @@ - + - + @@ -3079,7 +3079,7 @@ - + - + @@ -3131,7 +3131,7 @@ - + - + @@ -3183,7 +3183,7 @@ - + - + @@ -3235,7 +3235,7 @@ - + - + @@ -3287,7 +3287,7 @@ - + - + @@ -3339,7 +3339,7 @@ - + - + @@ -3391,7 +3391,7 @@ - + - + @@ -3443,7 +3443,7 @@ - + - + @@ -3495,7 +3495,7 @@ - + - + @@ -3547,7 +3547,7 @@ - + - + @@ -3599,7 +3599,7 @@ - + - + @@ -3651,7 +3651,7 @@ - + - + @@ -3703,7 +3703,7 @@ - + - + @@ -3755,7 +3755,7 @@ - + - + @@ -3807,7 +3807,7 @@ - + - + @@ -3859,7 +3859,7 @@ - + - + @@ -3911,7 +3911,7 @@ - + - + diff --git a/lib/iris/tests/results/imagerepo.json b/lib/iris/tests/results/imagerepo.json index 79560a5365..92f0d8fc20 100644 --- a/lib/iris/tests/results/imagerepo.json +++ b/lib/iris/tests/results/imagerepo.json @@ -1,1080 +1,242 @@ { - "gallery_tests.test_plot_COP_1d.TestCOP1DPlot.test_plot_COP_1d.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/baff589936602d8ec977334ae4dac9b61a6dc4d99532c86cc2913e36c4cc0f61.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/aefec91c3601249cc9b3336dc4c8cdb31a64c6d997b3c0eccb5932d285e42f33.png" - ], - "gallery_tests.test_plot_COP_maps.TestCOPMaps.test_plot_cop_maps.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9138db95668524913e6ac168997e85957e917e876396b96a81b5ce3c496935.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913c6ac178995b0d956e917ec76396b96a853dcf94696935.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94796931.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9130db95668524913d6ac168991f0d956e917ec76396b96a853dcf94696935.png" - ], - "gallery_tests.test_plot_SOI_filtering.TestSOIFiltering.test_plot_soi_filtering.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fac460b9c17b78723e05a5a9954edaf062332799954e9ca5c63b9a52d24e5a95.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8460b9c17b78723e05a5a9954edaf062333799954e9ca5c63b9a52d24e4a9d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa167295c5e0696a3c17a58c9568da536233da19994cdab487739b4b9b444eb5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4.png" - ], - "gallery_tests.test_plot_TEC.TestTEC.test_plot_TEC.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e1a561b69b1a9a42846e9a49c7596e3cce6c907b3a83c17e1b8239b3e4f33bc4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e1a561b69b1a9e43846e9a49c7596e2cce6c907b3a83c16e1b9231b3e4f33b8c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a761b69a589a4bc46f9e48c65c6631ce61d1ce3982c13739b33193c0ee3f8c.png" - ], - "gallery_tests.test_plot_anomaly_log_colouring.TestAnomalyLogColouring.test_plot_anomaly_log_colouring.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ec4464e185a39f93931e9b1e91696d2949dde6e63e26a47a5ad391938d9a5a0c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ecc164e78e979b19b3789b0885a564a56cc2c65e3ec69469db1bdb9a853c1e24.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ece164e68e979b19b3781b0885a564a56ccac65e3ec69469db1bdb9a853c1e24.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ec4464e384a39b13931a9b1c85696da968d5e6e63e26847bdbd399938d3c5a4c.png" - ], - "gallery_tests.test_plot_atlantic_profiles.TestAtlanticProfiles.test_plot_atlantic_profiles.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc5d08fcd00fdb1c93fcb21c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/9f8260536bd28e1320739437b5f437b0a51d66f4cc7c09f4d00fdb1c93fcb21c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/9f8a60536bd28e1320739437b5f437b0a53d66f4cc5c08f4d00fdb1c93fcb21c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/9fc060f462a08f07203ebc77a1f36707e61f4e38d8f7d08a910197fc877cec58.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/97c160f462a88f07203ebc77a1e36707e61f4e38d8f3d08a910597fc877cec58.png" - ], - "gallery_tests.test_plot_atlantic_profiles.TestAtlanticProfiles.test_plot_atlantic_profiles.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a6eaa57e6e81ddf999311ba3b3775e20845d5889c199673b4e22a4675e8ca11c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991f1322b3761e06845718d89995b3131f32a4765ec2a1cd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeea64dd6ea8cd99991d1322b3741e2684571cd89995b3131f32a4765ee2a1cc.png" - ], - "gallery_tests.test_plot_coriolis.TestCoriolisPlot.test_plot_coriolis.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e78665de9a699659e55e9965886979966986c5e63e98c19e3a256679e1981a24.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e68665de9a699659c1fe99a5896965966996c46e3e19c1da3a652669c51e1a26.png" - ], - "gallery_tests.test_plot_cross_section.TestCrossSection.test_plot_cross_section.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95317b9562e4d1649f5a05856e4ca4da52947e4ea5f13f1b499d42f13b1b41.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea91b17b9562e4d1609f5a05856e4ca45a52957e5ea5f13b1bca9dc0b17b1ac1.png" - ], - "gallery_tests.test_plot_cross_section.TestCrossSection.test_plot_cross_section.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9521fb956a394069921e93f07f4aad856cc47e4e95857a1ea5da3591ba1b81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9521fb956a394068931e9be07e4aa5856cc47e4a91957a1ba55bb5b17a3b81.png" - ], - "gallery_tests.test_plot_custom_aggregation.TestCustomAggregation.test_plot_custom_aggregation.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee816f81917e907eb03ec73f856f7ac198d070186e90811f1be33ee1a57a6e18.png" - ], - "gallery_tests.test_plot_custom_file_loading.TestCustomFileLoading.test_plot_custom_file_loading.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa0cbf1845e34be913787416edcc8bc3bc81f9b63332662a4ed30cdc1b2cd21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fba0cbf1845e34be912787416edcc8bc3b881f9b62332762a5ad32cdc1b2cd21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/faa1cb47845e34bc912797436cccc8343f11359b73523746c48c72d9d9b34da5.png" - ], - "gallery_tests.test_plot_deriving_phenomena.TestDerivingPhenomena.test_plot_deriving_phenomena.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/b9993986866952e6c9464639c4766bd9c669916e7b99c1663f99768990763e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b99139de866952e6c946c639c47e6bd18769d16e7a9981662e813699d0763e89.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ec97681793689768943c97e8926669d186e8c33f6c99c32e6b936c83d33e2c98.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ec97681793689768943c96e890666bc586e1c33f2c99c33e6f956c93d23e2c98.png" - ], - "gallery_tests.test_plot_global_map.TestGlobalMap.test_plot_global_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa9979468566857ef07e3e8978566b91cb0179883c89946686a96b9d83766f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa997b958466846ed13e87467a997a898d66d17e2cc9906684696f99d3162f81.png" - ], - "gallery_tests.test_plot_hovmoller.TestGlobalMap.test_plot_hovmoller.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bab430b4ce4bce43c5becf89c54b1a63c543c56e1e64907e3bb469b490de1ac1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeb46cb4934b934bc07e974bc14b38949943c0fe3e94c17f6ea46cb4c07b3f00.png" - ], - "gallery_tests.test_plot_inset.TestInsetPlot.test_plot_inset.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ebff6992f50096a5b245dac4f6559496b49248dbc95dcb699529912dcf244a54.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9ff6992b50096a5b245dac4f64594b6b49248dbc95dcb699529952dcf244a56.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebff6992b50096ad9267dac4d64094b294924cdbc95d4b699d29952dcda46e94.png" - ], - "gallery_tests.test_plot_lagged_ensemble.TestLaggedEnsemble.test_plot_lagged_ensemble.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bbbb31e1c44e64e4b0459b5bb1716ecac464f496ce34618eb1079b39b193ce25.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbbb31b1c44e64e4b1579b5b917133cecc61f146c414668eb1119b1bb197ce34.png" - ], - "gallery_tests.test_plot_lagged_ensemble.TestLaggedEnsemble.test_plot_lagged_ensemble.1": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/abfef958fd462c993a07d87960464b81d1009687c139d3b594e9cf87c6b89687.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aafec5e9e5e03e099a07e0f86542db879438261ec3b13ce78d8dc65a92d83d89.png" - ], - "gallery_tests.test_plot_lineplot_with_legend.TestLineplotWithLegend.test_plot_lineplot_with_legend.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eae942526540b869961f8da694589da69543cc9af1014afbc3fd596b84fe19a7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eae942146540b869961f8de694589da69543cc9af1014afbc3fd596b84fe19a7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafd9e12a5a061e9925ec716de489e9685078ec981b229e70ddb79219cc3768d.png" - ], - "gallery_tests.test_plot_load_nemo.TestLoadNemo.test_plot_load_nemo.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ff34e87f0049496d17c4d9c04fc225d256971392d39f1696df0f16cec00f36.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fb11731a94cea4ee64b35e91d1d2304e9e5ac7397b20e1fe12852487e666ce46.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bb11721a87cce5e4cce79e81d19b3b5e1e1cd3783168e07835853485e65e2e1e.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a665a69a599659e5db1865c2653b869996cce63e99e19a1a912639e7181e65.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e58661969e799659c1f719a6c867359a1996c0773649c09c3e612679c07b3f66.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f2c464ce9e399332e1b74ce1cc79338c6586e5b33b31b37a66c9664cc06e1a64.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/a58660ce9e739b31c93d1cc9c8df33863383e33b3f11c03f2664366cc8ee3cc1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a58660ce9e739b31c93d1c89c8df33863783e23b3f11c07f2664366cc8ee3cc1.png" - ], - "gallery_tests.test_plot_orca_projection.TestOrcaProjection.test_plot_orca_projection.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a83846ea46ce539c93391de32cc86cf87a33fa168721cdb3e896e374b04.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be817a87845ea56cec79817a919e338436a5c1e73fa16c736c4a3e816a1e6b1c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be817a8784dea56cec79817a919e338437a5c1e73fa16c726c4a3e816a1c6b1c.png" - ], - "gallery_tests.test_plot_polar_stereo.TestPolarStereo.test_plot_polar_stereo.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e615ec7e097ad961f9cb190f038e091c2c1e73f07c11f6f386b3cc1793e01.png" - ], - "gallery_tests.test_plot_polynomial_fit.TestPolynomialFit.test_plot_polynomial_fit.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abff4a9df26435886520c97f12414695c4b69d23934bc86adc969237d68ccc6f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aaff4a9df26435886520c97f12414695c4b69d23934bc86adc969a17d69ccc6f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aeffcb34d244348be5a2c96c3a4fc6d0c4b69f2d87294ccb9f1a125684cd7c11.png" - ], - "gallery_tests.test_plot_projections_and_annotations.TestProjectionsAndAnnotations.test_plot_projections_and_annotations.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa854f19851a30e4cc76cd0bb179325ca7c665b0c938cb4b4e719e9cb727b5c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fac54f19851a30e4cc76cd0bb179325cb78665b0c938cb4b4e719e9c9727b5c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa854e19851a30e4cc76cd0bb179325cb7c664b0c938cb4bce739e9c37a3b5c0.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fa854e19851a30e4cc76cd0bb179325cb78665b1c938c94bce739e9c3727b5c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa854f19851a30e4cc76cd0bb0f932dca7c665b1c92ccb4b4ed19e9c3721b5c8.png" - ], - "gallery_tests.test_plot_projections_and_annotations.TestProjectionsAndAnnotations.test_plot_projections_and_annotations.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e3856d999c389662734331afcd2d5a7184dba492b9b69b64d26dc29974b185b2.png" - ], - "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa15615e97a193adc15e1e81c4fa3eb49d30817e3e05c17e7ba59927817e1e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ee46607e97a19781c0df1f81d0bb3e241f20c16f3fc0c1fe39263d33d06f3e80.png" - ], - "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba056717c3e099e9b90f8e81c4da589499b696763e45e56b3b893929c17b7e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57685f95a886a1c0de9da090be3e2697e1c0ff3f00c17e6b266c17c07f3f00.png" - ], - "gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e605ec7a191a1b85e9e81c4da58909996b37e3a65e16f7b817939e57a1e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ba1e605ec7a193a1b85e9e81c4da58909996b3763a65e16f7b816939ed7a1e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a697e97a18681c6da9f8190bf3e263624c1ef3b48c17a2b223c47c0ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57685f95a886a1c0de9da090be3e2497e1c0ef3f01c17e6b366c17c07b3f01.png" - ], - 
"gallery_tests.test_plot_rotated_pole_mapping.TestRotatedPoleMapping.test_plot_rotated_pole_mapping.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8172d0847ecd2bc913939c36846c714933799cc3cc8727e67639f939996a58.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8172c6857ecd38cb3392ce36c564311931d85ec64e9787719a39993c316e66.png" - ], - "gallery_tests.test_plot_wind_barbs.TestWindBarbs.test_wind_barbs.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169316c1fe9e96c29e36739e13c07c3d61c07f39a13921c07f3e21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e161e996169316c1fe9e96c29e36739e13c07c3d61c07f39813929c07f3f01.png" - ], - "gallery_tests.test_plot_wind_speed.TestWindSpeed.test_plot_wind_speed.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bcf924fb9306930ce12ccf97c73236b28ecec4cd3e29847b18e639e6c14f1a09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169306c1fe9e96c29e36739e13c06c3d61c07f39a139e1c07f3f01.png" - ], - "gallery_tests.test_plot_wind_speed.TestWindSpeed.test_plot_wind_speed.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bcf924fb9306930ce12ccf97c73236b28ecec4cc3e29847b38e639e6c14f1a09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e960e996169306c1ee9e96c29e36739653c06c3d61c07f3da139e1c07f3f01.png" - ], - "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fe81957ac17e6a85817e6a85857e942a3e81957a7e81917a7a81d95ec17e2ca1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe81c17e817e3e81817e7e81857e7e817e81c07e7e81c17e7a81817e817e8c2a.png" - ], - "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be81c17ec17e7e81c17e3e81c57ea55a3e80c17e3e81c1fe7a81c285c95f2c03.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fe81857e817e6a85817e7a81857e7e817e81957a7e81817e7a81817e817e843e.png" - ], - "iris.tests.experimental.test_animate.IntegrationTest.test_cube_animation.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea857a81957a857e957ec17e817e6a816a853e817a853e816e818d3a862ad3fe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be81857ec17e7a81c17e7e81857e3e803e81817a3e81c17e7a81c17ec97e2c2b.png" - ], - "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_northpolarstereo.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e59661969e699659c0f719a6c967339a1992c07f3649c09c3f612669c07b3f66.png" - ], - "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_platecarree.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee816299954a1da699b6915ec25b6e419729c42c3f84bd9fe6d262d1d1dac076.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ee856299954a1da699b6915ec25b6e419729c42c3f84bd8fa7d262d1d1dac076.png" - ], - "iris.tests.integration.plot.test_plot_2d_coords.Test2dContour.test_2d_coords_contour.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/b4b2643ecb05cb43b0f23d80c53c4e1d3e5990eb1f81c19f2f983cb1c4ff3e42.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eb036726c47c9273918e6e2c6f216336787590eb969a165890ee6c676925b3b3.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon_on_polar_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e66d673c999031cd6667663398dc332c676364e798959336636660d933998666.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_rotated_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eba037a4c479c273b2963f2c6f6126966865d86f969e33c9b1706c26692793b0.png" - ], - 
"iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_1d_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ac334934d2e65c72596325b343338cb41c92d9c5b36f65330d379692ca6d6c.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_2d_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7acb36134d2e676627963259343330cb43e92d9c5336e67330d379292ca6d6c.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fb8d4f21c472b27e919d2e216f216b3178e69c7e961ab39a84696c616d245b94.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon_on_polar_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e66c6619999933666666c6d99999336663646d9999c1332667b60cf964d8672c.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_rotated_latlon.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eba925a5c476d25a95a56b876f3826246a449c6b96a3731ab13f6c656a5cb48a.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_1d_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ac24947259f3493697632df45926b6e126c4f392593b4937266f26ccf032d8.png" - ], - "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_2d_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/afac26367251d3493617632df45c26a6e126c6f392593b4937266f26ccf232d0.png" - ], - "iris.tests.test_analysis.TestProject.test_cartopy_projection.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/9e1952c9c165b4fc668a9d47c1461d7a60fb2e853eb426bd62fd229c9f04c16d.png" - ], - "iris.tests.test_mapping.TestBasic.test_contourf.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a69cc96ad92e193c9963385929e1cc3819acde6d965ce6e666b30386e65b1.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c.png" - ], - "iris.tests.test_mapping.TestBasic.test_pcolor.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95a69c896a592e59bc99e3384929636c32d98cde6d964ce7e666332386465b1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a347c96858b8d9685c9c39696c393966c634969ce3c64697a3864697b3c9c.png" - ], - "iris.tests.test_mapping.TestBasic.test_unmappable.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa5684eb54a947ad09eb731c521978dc2fb1cc0e4966ce26e2c6b2d3a6e691a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853e48957ac1df957ac8be852bc1b1944e7a9878e03f4c6a253e6c7a912dc2.png" - ], - "iris.tests.test_mapping.TestBoundedCube.test_grid.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81917e857e6e81857e7a857a81917a7a81857e857e7e81857e7a817a81852e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a817a81817e7a81857e857e7a81857e7a817a81857e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a817a81857a7a81857e857e7a85857e7a817a81857a.png" - ], - "iris.tests.test_mapping.TestBoundedCube.test_pcolormesh.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81e535857e92ca8ec23d21b13ce15e7a811ea5c47e1a5ac17b652d3b05e4f2.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81c17a857e1ea5857e634a7a81cd257e8584da857e3b29817e68f47a81c791.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81c17a857e1ea5857e734a7a81cd257e8484da857e3b29817a68f47a81c799.png" - ], - "iris.tests.test_mapping.TestLimitedAreaCube.test_grid.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf80e2b1c17f1d0ac4f7c8d739a637202749699b6bb3ce3666e4b048944d9d89.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/bf80e2f1c17f1d0ac457c8d619a637213749699b6bb34e3666e4b04e944d9d89.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05392995bac6d691ce3f21666569d86a96c6360ee195cb91e8ce54953b313b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05392995bac6d691ea3f21666569d86a97c6320ee195cb91e8ce559539391b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1585e885ea7a1785fa7a157a177a017a1585e817a885ea85e86a1785fa7a17.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a817a817a817a81817e7a81857e857e857e857e7a81.png" - ], - "iris.tests.test_mapping.TestLimitedAreaCube.test_outline.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e3e80857e7a817a817a817a81817f7a81857e857e857e857e7a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e21857e7a817a817a857a81857a7a81857a857e857a857e7a84.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1585e885e87a1785fa7a177a177e807a1585e85fa0857a85e86817857f6a16.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1585e885e86a1785fa7a177a177e807a1585e817a885ea85e86817857f7a17.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e3e81857e7a857a817e817a81857a7a81817e857e857a857e7a81.png" - ], - "iris.tests.test_mapping.TestLimitedAreaCube.test_pcolormesh.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf81e6b1c17e1d4884bfc8df39a43720374969db69b34e26c4e4b0ca904f9d89.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57396995a8c6d691ea3f25664569d86b16c63686ed958991ea4a549531393b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea57396995a8c6d691ea3e25664569d96b16c63684e9958b91ea4a559431793b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea813b49957ec4b7917e3f60266978d97a9562366e81954a914ec6cc957a0f98.png" - ], - 
"iris.tests.test_mapping.TestLimitedAreaCube.test_scatter.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea053d2e916ac2d9c4d894346b24f3477acf68ad39329ed8c696e136c1ab9a71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea053d2e916ac2d9c4d895346b2473477acf68ad39329ed8c69ee126c1ab9a71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05bd2e916ac2d984983d346b2473477acf69ad3d3296d8c696e126c1ab1e71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea05bd3a91eac2d984983d346b2473477acf69ad1d3296d8c696e126c1ab1e71.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_keywords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a31871f7e856470c1fa9b8c7b81647384665b9ed1b998c1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_keywords.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea811831957fe3cea68c6ce0d9f29b9b6a816463953e61cc917f1ae36ac09d38.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa819097857e6560957e7bcc7a819c316e81951e857e62c281fe79a17aa19637.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_params.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee819cb7913b63c8846e64737bb1999c6ec52633953a69c8916f6c636e92911c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8190be857e6739917a7bc47a8594337bb1911c857e6ec3913279007e819637.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_params.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a31871f7e856470c1fa9b8c7b81647384665b9ed1b998c1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_params.2": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ea811831957ae3cea68c6ce0c9f39b9b6a816473953e63cc917f1ae36ac09d38.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81909f857e6520957e5bcc7a8194716e31851e857e6ac281fe3f817a81963f.png" - ], - "iris.tests.test_mapping.TestLowLevel.test_simple.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eae0943295154bcc844e6c314fb093ce7bc7c4b3a4307bc4916f3f316ed2b4ce.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e55c855fdce7857a1ab16a85a50c3ea1e55e856658a5c11837096e8fe17a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e55c855fdce7857a1ab16a85a50c36a1e55e854658b5c13837096e8fe17a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/faa0e558855fd9e7857a1ab16a85a51d36a1e55a854e58a5c13837096e8fe17a.png" - ], - "iris.tests.test_mapping.TestMappingSubRegion.test_simple.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bd913e01d07ee07e926e87876f8196c1e0d36967393c1f181e2c3cb8b0f960d7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195b6cf5b2f00392cb3496695621d34db6c92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195b6cf5a2f003924b3496695e21db4db6c92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b9913d90c66eca6ec66ec2f3689195aecf5b2f00392cb3496495e21da4db6c92.png" - ], - "iris.tests.test_mapping.TestUnmappable.test_simple.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fe818d6ac17e5a958d7ab12b9d677615986e666dc4f20dea7281d98833889b22.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81b54a817eca35817ec701857e3e64943e7bb41b846f996e817e006ee1b19b.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2ff7c00a56de9023b52e4143da5d16d7ecad1b76f2094c963929c6471c8.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8bfec2d77e01a5a5ed013b4ac4521c94817d4e6d91ff63349c6d61991e3278cc.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfec2577e01b5a5ed013b4ac4521c94817d4e4d91ff63369c6d61991e3278cc.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ff95776a01e1f67801cc36f4075b81c5437668c1167c88d2676d39d6867b68.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fff941e7e01e1c2f801c878a41e5b0d85cf36e1837e2d9992c62f21769e6a4d.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fbe0623dc9879d91b41e4b449b6579e78798a49b7872d2644b8c919b39306e6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c21ccd179dc3b05e4b689b0771b48698961b7962da446e8ca5bb36716c6e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff897066b41f076f81dce1fb007da79c50633e9c40626b8d1066df9d6067969.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff897066a01f0f2f818ee1eb007ca41853e3b81c57e36a991fe2ca9725e29ed.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7e0098757103a71ce4506dc3d11e7b20d2477ec094857db895217f6a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" - ], - "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_cube.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c2d73a09b4a76c099d26f14b0e5ad0d643b0d42763e9d51378f895867c39.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe8c0173a19b4066d599946f35f0ed5d0b74729d40369d8953678e897877879.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c0567a01b096e4019daff10b464bd4da6391943678e5879f7e3103e67f1c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff8c0567a01b296e4019d2ff10b464bd4da6391943678e5879f7e3903e63f1c.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2777e04256f68023352f6d61da5c109dec8d19bcf089cc9d99a9c85d999.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2777e06256f68023352f6d61da5c009decad19bcf089cc9d99a9c85d989.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec2777e002427e801bb4ae65a1c94813dcec999db4bbc9ccd79991f3238cc.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fe9dd77f00e1d73000cc1df707db8184427ef8d1367c88d2667d39d0866b68.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fe9d977f41e1d73000cc1df707d98184427ef8d1367c88d2667d39d0866b68.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ff9d9f7e01e1c2b001c8f8f63e1b1d81cf36e1837e259982ce2f215c9a626c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ff9d9f7e01e1c2b001c8f8f63e1b1d81cf36e1837e258982c66f215c9a6a6c.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fbe0623dc9879d91b41e4b449b6579e78798a49b7872d2644b8c919b39306e6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c21ccd179dc3b05e4b689b0771b48698961b7962da446e8ca5bb36716c6e.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ffb5867f0060d4301f6d9fb007d899c50699e9c8668e78d8678d69de069969.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ffb79e7f0060d8303fcd1eb007d801c52699e18d769e2199e60ce1da5629ed.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7f90987720029f1ef458cd43811cdb60d647de609485ddb899215f62.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83fec1ff7f94987720009f1ef458cd43810cdb60d647de609485ddb89921df62.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd97c93734a778ce07c9f99b02731.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" - ], - "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffc8967e0098a6241f9d26e34b8e42f4d20bb4942759e9941f78f8d7867a39.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83f9c8967e009da6245f9946e25f9ed6f0940f29f40749d8853678e8d7857879.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffc9d67e00909624079daef160cf4bd45a439184367ae5979f7e3119e6261c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffc9d67e00909624059daef160cf4bd45a4b9184367ae5979f7e3909e6261c.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/a3fac1947c99184e62669ca7f65bc96ab81d97b7e248199cc7913662d94ac5a1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fac1947c99184e62669ca7f65bc96ab81d97b7c248399cc7917662d84ac5a1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fac1b47c99184e62669ca7f65bc96ab81d97b7e248199cc7913662d84acda0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b2ecc1a8b9994a16e666b5e3ce151969a5fb4ed49909653990d46b9bfc097684.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bea07c99c15eb16e9891ce50c742394a3ced6cb13390f1cc73c29f1b2d0ecd66.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ae7f1f07f3e0e0f0211b9e066e074d83926ed8f8cd3792dad1964db0d80e9b09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae7f1f07f3e0e0f0311b9e066e074d839266d8e8cd379adad1964db0d80e9b09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be852fc1e078c83eb30e3607672149c098d95c5b9e4636f2c1fc299d999f7e03.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a5f896d99a67b94c621deda3f69392cccd246db39018989ec4836de9ed249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a5f896d99a66b94c621deda3f69392cccd646db3901898dec4836de9cd249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a5f896d99a67b94c621ceda3f6d392cccd246db3901898dec4836de9cd249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/edfa96cb9a256b4f65466d9892d9c865693a1a9c94b39ed8484b35ad9a864c32.png" - ], - "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_cube.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/a4fb19b3db04c6cd6307b98678601c738c39d71cf3866186d8616e69bd191b9e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e8b33c129649c78de3a773e578650c728e92279be12de1edc4f246b2939c3b01.png" - ], - "iris.tests.test_plot.Test1dScatter.test_coord_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bbfac39d9899384a6f6694a7b613cb489c95b7b7c24a399cc5913262d84acda0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b2ecc12999994e16e666b5e3ce171969a5fb4ed49909e53990c44b9b7c09f684.png" - ], - "iris.tests.test_plot.Test1dScatter.test_coord_coord_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bea07c99c15eb16e9891ce50c742394a3ced6cb13390f1cc73c29f1b2d0ecd66.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6.png" - ], - "iris.tests.test_plot.Test1dScatter.test_coord_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/af7e1f0ff1e1e0f0d918960e6c076d8bd266d868c537365a90966db0de0e1b09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae7e1f0ff1e1e0f0d918960e6c076d83d266d868c537365ad0966db0de4e1b09.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be812fc1c078c03e930e3627672369c1d8d85c5b96463662e1fc699d9b9f7e03.png" - ], - "iris.tests.test_plot.Test1dScatter.test_cube_coord.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/edf896d79a67b94c651ced23d29392cccd646d33901912fcc4836d69ed249292.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/edda96cb9a256b4765c26d9892dbc665693a1a9494b796c86c4b37ad92864c32.png" - ], - "iris.tests.test_plot.Test1dScatter.test_cube_cube.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/acf939339a16c64de306318638673c738c19d71cf3866186d8636e69bd191b9e.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/edb23c529649c78de38773e538650c729e92279be12de1edc4f246b2139c3b01.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_1d_positive_down.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87fef8117980c7c160078f1ffc049e7e90159a7a95419a7e910dcf1ece19ce3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7fe781b708487c360079e3bb4789869816bdb64c76b4a3cce7b4e749a6130c5.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_1d_positive_up.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ff85d47800bd9f660779d0863f49c9947f4e1e9141de38d700da28ce1d9a2b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ff85d47a00bc9f660779d8863f49c9907f4e1e9141de38d708da28ce1d9a0b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ff958b7a00b09c661761c9907fcb0d9163ce7895289a618f381bffccf97200.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_2d_positive_down.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fb946ba684e194fb901b3a0587641ad03b1ae7674e64c15a5b99c767c47e3a98.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fb946ba484e194dbc01f3665c0e4399a3f0fc2653f90c99e3f613e64c81e3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fb966ba6846194dbd01f3665c0e4399a3f1bc2653f90c99e2f613e64c01e3f81.png" - ], - "iris.tests.test_plot.TestAttributePositive.test_2d_positive_up.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee176c7f93e093a0c50f9383815e6e156859e17e6e15e17a9be08e2d851a9b83.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebc06be1941e941ec07f941f907f6fa0950fc07e6f80c07f6b806be1c07f3f80.png" - ], - "iris.tests.test_plot.TestContour.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/cff8a55f7a15b55a7817854ad007a5e8c04f3ce8c04f3e2ac4706ab295b37a96.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/eaece0173d17951fbd03974a914964e8c04a72e8c1531ee1cc746bb293973ecd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eeece0173c07951fbd038748914964e8c14e72e9c1531ee1cc746bb293973ecd.png" - ], - "iris.tests.test_plot.TestContour.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfc815e78018597fc019b65b425d121955e7eda854b7d6a80db7eb481b72b61.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa8553fc01b15ab4044a269546caa5956b7e9bc0b97f2cc2d62d360b363b49.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa8553fc01b15af4055a069546caa5954b7e9bc0f97d2cc2d62d360b362b49.png" - ], - "iris.tests.test_plot.TestContour.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe81ff780185fff800955ad4027e00d517d400855f7e0085ff7e8085ff6aed.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe81ff780085fff800855fd4027e00d517d400855f7e0085ff7e8085ff6aed.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe817ffc00855ef0007e81d4027e80815fd56a03ff7a8085ff3aa883ff6aa5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bff817ffc00857ef0007a81d4027e80815fd56a03ff7a8085ff3aa881ff6aa5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe805ffc00857ef0007a01d4027e80815fd56a83ff7a8085ff3aaa03ff6af5.png" - ], - "iris.tests.test_plot.TestContour.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa56c3cc34e891b1c9a91c36c5a170e3c71b3e5993a784e492c49b4ecec76393.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95b199999765cd3694b06478c7396329958434c2cecb6c6d69ce1b92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95a19999876d4d3694b06c78c7396329958434c2cecb6c6d69ce3b92.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e36cb95b199999e654d3694b26c78c7396329958434c2cacb6c6d69ce9392.png" - ], - 
"iris.tests.test_plot.TestContour.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe857f7a01a56afa05854ad015bd00d015d50a90577e80857f7ea0857f7abf.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affe815ffc008554f8007e01d0027e808557d5ea815f7ea0817f2fea817d2aff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affe805ffc008554f8007e01d0027e808557d5ea815f7ea0817f2eea817f2bff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affe8057fc00855cf8007e01d0027e808557d5ea815f7ea0817f2fea815f2bff.png" - ], - "iris.tests.test_plot.TestContour.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bff81ff7a0195fcf8019578d4027e00d550d402857c7e0185fe7a8385fe6aaf.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abff857ff8018578f8017a80d4027e00855ec42a81fe7a8185fe6a8f85fe6ab7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abff817ff8018578fc017a80d4027e00855ec42a81fe7a8185fe7a8f85fe6ab5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abff817ff801857afc017a80d4027e00855ec42a81fe7a8185fe6a8f05fe2abf.png" - ], - "iris.tests.test_plot.TestContourf.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa562ed68569d52857abd12953a8f12951f64e0d30f3ac96a4d6a696ee06a32.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea857a81957ac57e957a857a957a958ac5723b0d6ac56b833e856e606a923e90.png" - ], - "iris.tests.test_plot.TestContourf.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa5e03f957a4f80954a9e41e16e9c60970fb5b24ada634e6e93692d4ba562d8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea851f00957ac0f7957ac07f957a628d815e7b126ab13e816a953ae46a859ed3.png" - ], - "iris.tests.test_plot.TestContourf.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e954a7a81857e957e857efc00857e7e007a85c02a7e859f287a85c1fe.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7a81857e7a81857e7a81857e7a806a85857a7a85857e7a85817e.png" - ], - "iris.tests.test_plot.TestContourf.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95a6938b6b5969193901a4fc1e594a7c69999cbce33639879526e72330e65e4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a3c7e968597b19685c9c696a7c79491c16e59691a387f6978396e68683184.png" - ], - "iris.tests.test_plot.TestContourf.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa85857ec45a7a81857e854a857ee56a917ec56a3a85c56a3a85c4ea7a8112fe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81817e857e7a81857a7a81957a6e81917a6caa3a85c57a3a8585fa6a8591fe.png" - ], - "iris.tests.test_plot.TestContourf.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81817ec40a7a81857e957e857ef40a857ef60b7a81c40a7b81e60f7a814aff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81817e857e7a81857e7a81817a7e81817a668f7a91857e7a81857e7a85817e.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_bounds.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eab5313f954a7b9260f39789c5ec4cd084d0c4e45aa1c5fe3a04797bb13b3b06.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_bounds.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be853f80854ac17ec0bdc2f5c17a0d09cc1fc07f5ab5e1fe3f409d7a38743e00.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf813e85c07ec57ec17e9073c07e3f81856ec17a3f80c0fe3e813f84c2733e80.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_bounds.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eab5313f954a7b9260f39789c5ec4cd084d0c4e45aa1c5fe3a04797bb13b3b06.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_orography.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa17291f95e895e8645e7a95c17a6eece4b4e1333b01c07e1bb13909914b9ec1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa17291f95e895e8645e7a91c17a6ee464f4e1333b01c17e1bb1390d914b9ec1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a91957a857ac4fe268cc07f6e846e05d9373b81d17b1b6a1b41c4fa2cc4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a91917a957ac4ff248cc07f6ea466a5c03f3b81c17f1b321b01935b3fc0.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_orography.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bb07314fc4e0c6b4c31e9ee1847939a1c116c15e7b94e57e1ea9391de16e1ac3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bb07314fc6e1c6b4c31e9ee1846939a1c116c15e7b14e17e1ea9393de16e1ac3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/af0b690f96f0d2d4c25e94a194ad3da19a52c25e3f02c07f3fa52d03c16a3fcb.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea07695f95e0d2b4c09d95e0956a3da99294c2be3e85c07f3fa92b05c15e3f42.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953bfb956ac4f4649f1a05c56e6ca45a53945e6ea5c13f1b498542c13f1b41.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe857b91917a847ec0bd3f01c47e6ca43b11915a3ea4db3b1b4a84c4c03f3fc1.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/be813fc0c15ac13dc1bfc27dc17e1d93c51fc43f1ea1c17a3ec138e4b1721a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be813a81c17ec57ec17e952ac07f3f808556c17e3f80c07f3e813f80c27e3f81.png" - ], - 
"iris.tests.test_plot.TestHybridHeight.test_points.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea9561ef956a7b92609b922dc16e6ec6845ac47e5aa5c57e5ec04861957b1b81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe856a85957a955ac03f956ac17f3f809552c07f3e81c07e3e806e85c07e3f84.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953bfb956ac4f4649f1a05c56e6ca45a53945e6ea5c13f1b498542c13f1b41.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe857b91917a847ec0bd3f01c47e6ca43b11915a3ea4db3b1b4a84c4c03f3fc1.png" - ], - "iris.tests.test_plot.TestHybridHeight.test_points.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/baf5347ecf0ac3f1c1f68f83850b1f83cc11c0fc7ad0c17a1be138e4b07e1a0d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b878387e978ec2f0c0f09f83878f3f81c070c0fe78d0c1763fa13856d03e3f0f.png" - ], - "iris.tests.test_plot.TestMissingCS.test_missing_cs.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fac16ee0953b911bc15e9648e56ec4e691be7bcc7a8184733ea16a90c17e930d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816ac1857e853cc17f957ac15f3e849486c8f43e81c13b3f813e91c07e3f46.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_u.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a954ac17f954a807e3f48951ac07e3e81c0ff7ea16a81c0ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a954ac17f954ac07e3f44951ac07e3e81c07f7ea16aa1c0ff3e81.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_u.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea956ab5954a954ac17e954a817f2f60950ac07f3e80c07f7a856aa5c2ff3f80.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_v.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fa8562b6c0773d09956a955a857a1d88845ec57e3f81c07e4ae56b21d0ff5a85.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85957a857ac17e954ac17e1fa2950bc07e3e81c07f3e807a85c17f3f81.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_no_v.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa9562d4c7c43d0bb57b97e0857a3f1995d284763a05c17a7b856a2dc0f45a84.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a857ac17e954ac17e9d02954ac07e3e81c07f3e857a85c2fd3f80.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_none.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8562b6c0763d09b54a955a857a3f88845ec57a3e85c07e6a616b25d0ff7a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85957a857ac17e954ac17e3fa29506c07e3e81c07f3e807a84c1ff3f81.png" - ], - "iris.tests.test_plot.TestMissingCoord.test_none.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8562f6c0773d09b54a955a857a3f81955ac47e3e85c17e7aa16a25c0765aa1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17a1f06954ac07e3e81c07f3e817a85c0ff3f80.png" - ], - "iris.tests.test_plot.TestPcolor.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e67c9c7e1391e97a596b03a3696a13c4f63066318695ec5c9695e6c49c6a5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa.png" - ], - "iris.tests.test_plot.TestPcolor.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b3878958b38f8c7236a557a542c7868d54b877875978abc789722.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b387895ab38f8c7236a557a542c7868d54b05787197eab478972a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953f83954ac2bc956ac07e956a3509c0de61796ab57a816a856ad16ab590fb.png" - ], - 
"iris.tests.test_plot.TestPcolor.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e874978b978b6875978b6875978b7854950b78506855787468747ea2687597aa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7a84857a7a85857e7a813a2f7a817a85857a7a85857a7a85857a.png" - ], - "iris.tests.test_plot.TestPcolor.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95e696994b196b793b19a1ec3c191c5c6e596191e4e693269336c36391a6e3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a387e968596319697c3c19284a62c93a560c36933393a6c7e793b6c6b31cd.png" - ], - "iris.tests.test_plot.TestPcolor.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e87a197a1695a97a1695a97a17d5a97a17906785a7816685a7e86685ad687.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7e01857e7a81857e7a81e8177a816a8585fa7a85857e7a81857e.png" - ], - "iris.tests.test_plot.TestPcolor.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/af42c0bdd0ad2f52d0bd3f42d0bd7f02d0bd7f003d527f002f427ea82f42d6a8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/af42c0bdd0ad2f52d0ad2b52d0bd7f02d0bd7f002d527f002f527e0d2f52d4ad.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e80857e7a81857e7a812d557a817a85857e7a81857e7a80857e.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1594f3858a670c94e37b1cccb13e736a1d8cf17a1f94e2c119938e9463678c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea858782957a703f957a3878957a7a65957a6bc06ae76f806ad50fd06a859c50.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ad5e94a5c3b0c3f096a5695a96a53c0f711b3c0f7d1b97b46943c3e0cc416b5a.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ea85857a857e7e81957a7a81957a6a85857acac6c1fb6aa67a81956e6a81b506.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a95e3c1f96a096a5d6a5eb40c3f0ebe0c1c0c3f07c0b3e3e96a13c1e6d5b694a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a95e381f96a096a5d6a5eb40c3f0ebf0c1e0c3f07c0a3e3e96a13c1e6d5b694a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817e81857e857a857e7a81857e6a85817b81e63a913e857e81c17e7a81956e.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bc7a1c32d3c366cdc585c39986cdc79ec792e3a6960d584939793c3438743873.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e96ac78796953c4c9685383996c538e69692637263696b49693ac796693ac71b.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1f781f95e085e885e0954295e195ea95a085e87a153e7f95e06a1778557a1f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a81857e857e857e7a81857e6a81c17f95786aa77a807e81c17c7e819558.png" - ], - "iris.tests.test_plot.TestPcolorNoBounds.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba173a1795e895e8c5e8f400c1f8c1f895a8c5e87a077a5ec5e83e173e177e02.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a80857e857e857e7a81817e3e81817e857f6aa07a857e80c17f7e80c15f.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e67c9c7e1391e97a596b03a3696a13c4fe3026318695ec5c9695e6c49c6a5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_ty.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b3878958b38f8c7236a557a542c7868d54b877875978abc789722.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea74c707958b387895ab38f8c7236a557a542c7868d54b05787197eabc789722.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953f83954ac2fc956ac07e956a3509c0de61796ab57a816a854ad16ab590fb.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea953f83954ac2bc956ac07e956a3509c0de61796ab57a916a856a916ab590fb.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e874978b978b6875978b6875978b7854950b78506855787468747ea2687597aa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7a84857a7a85857e7a813a2f7a817a85857a7a85857a7a85857a.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95e696994b196b593b19a1ec3c591c5c6e596191e4e693269336c36391a6e3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9693878969696139296c38f9bcc3474692169cb6c7339393c6cc387c78796cc.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a387e968596319697c3c19284a62c93ad60c36933393a6c7e793a6c6b31cd.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e87a197a1695a97a16d5a97a17d5a97a17806785a7816685a7e86685ad687.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e6e05857e7a81857e7a81a0577a816a8585fa7a85857e7a81857e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857a7e01857e7a81857e7a81a0577a816a8585fa7a85857e7a85857e.png" - ], - "iris.tests.test_plot.TestPcolormesh.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/af42c0bdd0ad2f52d0bd3f42d0bd7f02d0bd7f002d527f002f427fa82f42d6a8.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/af4280bdd0ad2f52d0ad2b52d0bd7f02d0bd7f002d527f002f527f0d2f52d4ad.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e80857e7a81857e7a8129577a817a85857e7a81857e7a80857e.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1594f3858a670c94e37b1cccb13e736a1d84f17a1d94e2c11d938e9463678e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa1594f3858a670c94e37b1cccb13e736a1d8cf17a1d94e2c11993ae9463678c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea858782957a703f957a3878957a7a65957e6bc06ae56f806ad50fd06a859c50.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea858782957a703f957a3878957a7a65957a6b806ae56f846ad50fd46a859c50.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_ty.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ad5e94a5c3b0c3f096a1695a96a53c1f711b3c0f791b97b46943c3e06c436b5a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea85857a857e7e81957a7a81957a6a85857acae6c1fb6aa47a81956e6a81b506.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_tz.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a95e3c1f96a096a5d6a56b40c3f06be2c1c0c3f07c0b3ebe96a13c1e6d5b694a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa813e81857e857a857e7a81857e6a85817b00e63eb93e857e81c17e7a81956e.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa813e81857e857a857e7a81857e6a85817b0aa63e993e857e81c17e7a81956e.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bc7a1c32d3c366cdc785c39986cdc78ec792e7a6960d584939793c3438703873.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e129c7169ed638ec9ed6387196c761c665396724612dcf0d693896929ed698c9.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e96ac79796953c4c9685383996c538e69692637261696b49693ac796693ac71b.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1f781f95e085e895e0fd4295e095ea95a085e87a153e7e95e06a1778157a17.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a85857a857e857e7a81857e7a81817f95506aaf7a807e81c17c7a81957a.png" - ], - "iris.tests.test_plot.TestPcolormeshNoBounds.test_zy.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ba176a1795e895e8c5e87c00c1f8c1f894a8c5e87a077adec5e83e173e177a06.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa817a80857a857e857e7a81857e3e81817e2fc56aa07a857e80c17f7e80c17f.png" - ], - "iris.tests.test_plot.TestPlot.test_t.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83fe955f6a05e5137305d9c4f443127195187e9cd5467fa3d4917b68fc007a1a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffe95027e05e7007305d9c4a447127f853f069f814f2fa7d4d12b6cfc007e5a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffe9c1a7e05e718f305d9d2e46312718138049e824e2fa783db2bed76b4fe00.png" - ], - "iris.tests.test_plot.TestPlot.test_t_dates.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abffd5ae2a15cdb6b10178d7d4082e57d7290906f685814277b1dc88724cfd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abffd5ae2a15c9b6a10178d7d4082c57d7290906f6c58942f7b1dc88724cfd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abffd4a02a01cc84f10078d7d4082c77d73909ded6ef816273bd9c98725cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87fc9d8a7e054d83f5067bc1c1423471927ba73c8d9f864e09a1a7b358c8276f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87fc9d8b7e044d81f5037bd4c14324749279a73e8d9d864f09e4a7b348dc2769.png" - ], - "iris.tests.test_plot.TestPlot.test_x.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8ffe95297e87c74a6a059158f89c3d6ed0536597c0387836d0f87866d0697097.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bfe956b7c01c2f26300929dfc1e3c6690736f91817e3b0c84be6be5d1603ed1.png" - ], - "iris.tests.test_plot.TestPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ff99c067e01e7166101c9c6b04396b5cd4e2f0993163de9c4fe7b79207e36a1.png" - ], - "iris.tests.test_plot.TestPlot.test_z.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21.png" - ], - "iris.tests.test_plot.TestPlotCitation.test.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895067a1d9506f811783585437abd85426ab995067af9f00687f96afe87c8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8.png" - ], - "iris.tests.test_plot.TestPlotCitation.test_axes.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895067a1d9506f811783585437abd85426ab995067af9f00687f96afe87c8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8.png" - ], - "iris.tests.test_plot.TestPlotCitation.test_figure.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895067a1d9506f811783585437abd85426ab995067af9f00687f96afe87c8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_non_cube_coordinate.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e7e81857e7a81857e7a81857e7a818576c02a7e95856a7e81c17a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81857e857e3e85857e7a81857e7a81857e7a817e81780b7a81c56a7a81857e.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": 
[ - "https://scitools.github.io/test-iris-imagehash/images/v4/fe8142f5c17ebd2cc16eb548954a9542916a347a915e60bd4afd68793f916296.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853f10956ac5e1957a854e957a203e955e6aa76ae17aa16a856aaf6ab19e12.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853f10957a85e1957a854e957a203e955e6aa76ae17aa16a856a8f6ab1de12.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8542b7b503b548857abd08857abd09945eed6b91d968c161b972d76aa462b5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa8542b7b503b548857abd08857abd09945eed6a91d96ac163b972d36aa462b5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853a85857a857a957a857a957ad05a857b3e946a606b917a816f647a853af4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853a85957a857a957a857a957ac05ac56b3ac46ae16b817a816f647a853af4.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8bf88f457a03b5307e16b561f007b53ed067217ac1786afec0f570bf8178681a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bf98f057a03b5307e16b561f007b53ad067217ac1786afec0f570bf8178685a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafdcec9bc219530b696a56694c2852a95656b7b81986acdc0e516adad186eda.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafdcec9f4219530b696a56694c3852a95656b7b85986acdc06516adad186e9a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eafd86c9f8219430fe96a56684c3852e95656b7b85b86acdc0e5162da5186eda.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffe8f367e05952afe05a50b980ded4bd05d69c2c1fb71c1c06272f4d0a06af4.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/aff24ab7bd05952fbd0f950f914fcd48c47860f3e1b9329094266e345a850f6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aff24ab7fd05952dbd0f950f914fcd40c47868f3e1b9329094266e345a850f6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aff21eb6bd04952cbc0f950f914fedc1c0f961f3e1f9329084266e345a850f6c.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/aa953d0f85fab50fd0f2956a7a1785fafa176877d00f68f1d02c60f2f008d0f0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebeaa5419e94b5019e97950d685395bee05361fad05560fad01570fef001dabe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebeaa5419e95b5419e97950d6853953ee053617ad05560fad01570fef001dabe.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfaa56f96a1856cd681a56ee8162d52e8467e12c50c7e8095ad7e0095ad03ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b5699556854e9456854ed05625f9c0a92bfdc0a90afd81f97e00857e6af6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b5699556854e9456854ed05625f9d0a92bfdc0a90afd81f97e00855e7ab6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b5299d56854e9156856ed05625fdc0292bfdc0a90afd85b97e00857e6ad6.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfaaf439e87b5019687b5019687b56ac05561fae07103fe6079687a607178f8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfa2d4b968795059e87970f6854697ae055697ac08561fad041d7aef001d6ae.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eb7a3e0c978187a4950190bc6856687a607e687bc0fcc1e394acfc0197fc2bfb.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaf73e0d9503852c950395ac9528c1fad16cc0f2d1ec6af2c0ec6a536a1797f3.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/eaf73e0c9402952c950195acd528c1fac1ecc1f3c1ec63f3c0ec6a536a179ff2.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_x.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/aeb8b5095a87cd60386592d9ec97ad6dd23ca4f6d0797827f0096216c1f878e6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/affa950ddb13c03634359ad8a4c80f26911f26f3c06e0ff3f4007b4285fd6e72.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/afea950ddb13c03e34359ad8a4c86f24913f2693807e3ff1f4087b4285fd28f2.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8fea97194f07c9c830d79169ce16269f91097af6c47861f6d0796076d0797a16.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fee970b4f07c9c930d79129ce16269f91097af6c4f861f4d0786076d0797a16.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/afea97094f07c9c870d79129ce16269f91096af6c4f861f6c07960f6d0797a16.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/afee9632de05c9d9f180d168c454a53e931b3e84956a3b8c85d94ce703ff7284.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea85603f956a9741951e9d83c1fa8d2fd0a55af0d25f345ae5f062c72d68612d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea853f00957ac07c957ac0be951a69f3c47c7a5f3a6127816b953e646b813761.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a69cc96ad92e193c9963385929e1cc3819acde6d965ce6e666b30386e65b1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.2": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/8ffcc65767039740bc069d9ad00b8dadd03f52f181dd347a847a62ff81e8626c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8ffcc65777039740bc069d9ad00b8dadd03d52f181dd707a847a62ff81e8626c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebffca44f502b36498309c9b940999add1bb62bba784374acc5a6a246acc6b65.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ebfeca44f102b3649c309c9b940d19add1bb63b3a7843e4acc5a6aa56acc6b64.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5649c434ac92e5d9c9361b95b39c38c3835a5ec6d966ced34c633099ace5a5.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a6b6c96a597a591c9949b94b61b69c7926b5bccce66646b3869b831a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e6b6c86a595a791c9349b94b71b69c7926b5bccca66646b1869b831a52ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a6b6c86a595a791c9349b94b73b69c7926b5bccca66646b3869b031a52ca6.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2dd2d09295c3c0c7d13c1bc6d23d2c696de0e53c3ac393daf6d205c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2fd2d09295c3c0c7d13c1bc6d23d2c696ce0e53c3ac393dbf6d205c2c0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2f92d09295c3d0c7d13c1bc6d23d2c696cf0e53c3ac2b3d9f6d201c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3e2f97a1c19996a1c8f26c1e360f684a3c2c6913dca497b9d38097a903ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3e3f96a1c3e197a169f1785e3b0e68523e1c398bc58687b1d86096e1039f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea153e0395aac1f895eac0f8940e69e56a743e5f7a432787691ef860c3c1938f.png" - ], - 
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e9686d8c9696924797879e3b86929e58696d69cc6869659379626133398d9ccd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e961658f961e92469e1e1c7966f36cd86165618c70e166b39b9698719e1e9ec8.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e1a530e29e5ecf199a5acd8f64f1326161a530e265999cd29e52cf199a5e6669.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e96930749696cb9d9697cdc39692671b696c306969eb3c76697319942a0d8699.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf803f00c05fc4bfc07ec15dc05fd8bbc07cc96c333a32113bd02dd27ced3ec0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be813ea0c17ec55ac17ed23dc07e295ac57e3b653f803f813e816e853e81b542.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95956a95626993941a6a2d956e6ed6845a6e65c4bec7b64a9594686ea19578.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea85856e857e4893957a7aa1956a7b81954b3b817a856fd46a85846e6e85857e.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe82f047c018c83bc01bc5af01fd1bcd15a327c847860fdc57a69beb0be68bd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8fe82f047c018c83bc01bc5af01fd1bcd15a32fd847860fdc57269beb0be689d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/8bedcf25bc03a4929c103a5bf03fdbbc81cb364d86e46da70f86899b3a0f6cc0.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/cbedcd25bc02a4929c103a5bf03fdbbc81cb364d84e46da70f86899b3a0f6ec1.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.3": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/cee8953a7a15856978579696d03d672cc49a6e5a842d3d2cc0b66bd1c2ea39f1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aee1f93a63168569b852d697913d632485ca2e43952d3bbcc2b66bd1426b3c71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aee1793a6b168569b852d697913c622cc5ca2e4b952d3bb4c2b66bd1426b3c71.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ee953f0591ea3f07914a95fa7e07d1fa68156a15d07c6a3dd038c0fef000d0fa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae953f0591ea3f07914a95fa7e07d1fa68156a15d07c6a7dd068c0fef000d0fa.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec11ab5c1be857ac13e7ae53c422d423e017a85b542fc00c1fefe0091fe03ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec13a81c13ec56ac13e5afdd11e256a3e412afd3e4002ff2ee0fe0035fa817a.png" - ], - "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a973d96a56953968769439685a54ae05117eae0511fba60513bba69717aba.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a952d96a56953968769439685a54ae85197eae0511fba60513bba69717aba.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a96ac97a16c5897a1791e95a53b0b913c6953687c4ec3685cc6c36e7c87c3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1595ec95ea681d95ea7b0595ab3b13950d7a536a1cc6f26a0cc4f26e0c85f2.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f9789b388786678686966c9093879ce592c79bc94d19929b6939cf66316c672c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.1": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e9a53a59961ec5a62c691a587b9662e1c0e1e53e9e0e9b873ec15a7161bc642f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f9789b388786678686966c9093879ce592c79bc94d19929b6939cf66316c672c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e9a53a59961ec5a62c691a587b9662e1c0e1e53e9e0e9b873ec15a7161bc642f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_default.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/f9789b388786678686966c9093879ce592c79bc94d19929b6939cf66316c672c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa81948e857e4971907ea72e95fa66b2952e4ead6d429b527ac7a5286e981836.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa85978e837e68f094d3673089626ad792073985659a9b1a7a15b52869f19f56.png" - ], - "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea159694856a6b5096afa53a36941da1e4f5c369cd1ae6d69b6a1c80625af2f6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95969c874a63d39ca3ad2a231cdbc9c4973631cd6336c633182cbc61c3d3f2.png" - ], - 
"iris.tests.test_plot.TestPlotOtherCoordSystems.test_plot_tmerc.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e63399cd99cd64b29999335965369b262649c98c9b3966c6998d3319ccd69333.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e665326d999ecc9b3319b3246666cce69b496cccccc9669923193336666699a6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e665326d999acc9b3319b3246666cce69b496cccccc966996719333666669986.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e665326d999ecc92b399b32466269326b369cccccccd64d96199631364f33333.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_t.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffb5d67fd4e5962211d9c6a443da77d5389c8ed346d923d011d968dc00da48.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82ffb5d67fdde5962211d9c6a441da77d5389c8cd346d927d011d968dc00da48.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fabd867fd5e5822201d9c6a4539a77953d8cbf834f99e7d051996cdc00da48.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffb59a7f00e59a2205d9d6e4619a74d9388c8e884e8da799d30b6dddb47e00.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_t_dates.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd5ae7f51efb6200378d7d4082c17d7280906d6e58962db31d800da6cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd4ae7f55efbe200178d7d4082c17d7280906d6e58962df319800da6cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffd4827f51ef94200078d7c4082c57d739095ed6ed8962db759808da6cdd26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fd958a7e006f9ba0077bc5c9462c759873dd3c8d8f826699a187b358c82f67.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe958b7e046f89a0033bd4d9632c74d8799d3e8d8d826789e487b348dc2f69.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_x.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/83ffb5097e84c54a621799d8601d9966d213cd67c039d876d078d866d869d8f7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/83ffbd097e84c54a621799d8601d9966d253cc27c039d876d078d866d869d8f7.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3f9bc067e01c6166009c9c6b5439ee5cd4e0d2993361de9ccf65b79887636a9.png" - ], - "iris.tests.test_plot.TestQuickplotPlot.test_z.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731.png" - ], - "iris.tests.test_plot.TestSimple.test_bounds.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a85954a957ac17e954ac17a9c3e956ac07e3e80c07f3e857aa5c27d3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a85954a957ac17e954ac17a9d22956ac0fe3e81c07f3e857aa5c27d3f80.png" - ], - "iris.tests.test_plot.TestSimple.test_points.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e1ca2950bc07e3e80c07f3e807a85c1ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e0da2954bc07e3e81c07f3a806a85c1ff3f81.png" - ], - "iris.tests.test_plot.TestSymbols.test_cloud_cover.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e95a330c96a5ccf2695a330c96a5ccf2695a330c96b5ccf3694a330c96b5ccf3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eb52916494ad6e1b6b5291e494ad6e1b6b5291e494ad6e1b6b5291e494ad6e1b.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eb5291e494ad6e136b5291ec94ad6e136b5291ec94ad6e136b5291ec94ad6e13.png" - ], - "iris.tests.test_quickplot.TestLabels.test_alignment.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fa95350f952ad2f0c1f66ac1c55a4af4e550a52b3e05905e1e419e6f937e3b21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa95350f952ad3f0c1f66a81e55a4af4e550a52b3e05905e1e419e6f937e1b21.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be8137f4954ac03fc0ff3e81d03f496a6d00b4af3ea0c07f6fa232c0db7f2d00.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be813fe0954ac07fc0ff3e81c03fc97a6d0094af3f80c17f36a53240d97f2d82.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be8137e0954ac05fc0ff3e81c07fc97a6d0094af3fa0c17f36a53244d97e2da0.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contour.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fd956a7a01a5ee321fc96666919b6ec15fdca593600d2586785a259dfa5a01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fd956a7a01a5ee3217c9e66691996ec15fdca593680d2586785a259dfa5a01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7fd95da7a01654a3217c962e4819a56c96f3c8593624da584da3b658db662db.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7fd955a7a016d1a3217c962e4819a56c96f3c859b624d2584de3a6999b662db.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3fd95ea6a11258c3217c966e4019a56c96f3c859b62492584fe7a699db46adb.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contour.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa12bc1954ef43fc0bf9f02854a4ee48548c17a5ab5c17e7a0d7875a17e3a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802f85c17fc17fc07eb42ac07f3f929130c07e3f80c07f7aa02e85c07f3e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802e85c07fc17fc07eb42ac17f3f829161c06e3f81c07f7ba02e85c07f3e81.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/fe812f88957a955a857a9257c17f7aa5c03dc0bf5a85c07e7f402d40a57a3f01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816a95957a957ac0fe1e8bc07f7f806e01c07f3f80c07f3fa23f00c07f3d00.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816a85957a955ac0ff1e8bc07f7f806e01c07f3f80c07f3fa23f80c07f3d00.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa12bc1954ef43fc0bf9f02854a4ee48548c17a5ab5c17e7a0d7875a17e3a81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802f85c17fc17fc07eb42ac07f3f929130c07e3f80c07f7aa02e85c07f3e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bf802e85c07fc17fc07eb42ac17f3f829161c06e3f81c07f7ba02e85c07f3e81.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa852f81955ac532c0bf9e89c57edae69357e13f4ea0c05a3f8561a4935a3e01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816a95907ae508c17e955ac07f3fa0945bc07f3f80c07f3aa36f01c0ff3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816ab5907ae508c17e955ac07f3fa0945ac07f3f80c07f3aa32f81c0ff3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_contourf_nameless.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/faa52ec1955ac536c0bf9e09c57edae69357e13f4e80c0da2f81618493da3f01.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816af5907ee508c17e955ac03f3f809419c07f3f80c07f3a8b6f81c0ff3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816ab5907ee508c17e975ac07f3fa09459c07f3f80c07f3a812f81c0ff3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_map.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4fcee19a6e9b64cb609925cd25.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6.png" - ], - "iris.tests.test_quickplot.TestLabels.test_map.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4ecef19a6e9b64cb609925cd25.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a793c9349b94b69969c396c95bcce69a64d938c9b039a58ca6.png" - ], - "iris.tests.test_quickplot.TestLabels.test_pcolor.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bb423d4e94a5c6b9c15adaadc1fb6a469c8de43a3e07904e5f016b57984e1ea1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16abfc05ab500956e974ac13f3da0925ac07f3fa1c07e3fa12da1c25e3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_pcolormesh.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bb433d4e94a4c6b9c15adaadc1fb6a469c8de43a3e07904e5f016b57984e1ea1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f3f81c07e3fa12da1c27e3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16abfc05ab500956e974ac13f3da0925ac07f3fa1c07e3fa12da1c25e3f80.png" - ], - "iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/eea16affc05ab500956e974ac53f3d80925ac03f3f80c07e3fa12da1c27f3f80.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eea16abfc05ab500956e974ac13f3da0925ac07f3fa1c07e3fa12da1c25e3f80.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85857a955ae17e957ec57e7a81855fc17e3a81c57e1a813a85c57a1a05.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a956ac17f950a807e3f4c951ac07e3f81c0ff3ea16aa1c0bd3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95955a954ac17f950ac07e3f44951ac07e3f81c0ff3ea16aa1c0ff3e81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e1ca2950bc07e3e80c07f3e807a85c1ff3f81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa856a85957a957ac17e954ac17e0da2954bc07e3e81c07f3a806a85c1ff3f81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eafdeec9f729943032168d66d4cb896e9567497b81304aedc96514ad8d18669a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaf9a6c9f728943032168d66d4cb8d2e9567497b81304aedc9e51e2d9d186ada.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fb4b967f00950eb00f9d0f900fcd62dc7868f2c1bb3a909c266e34daa52f6c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fa1e967f00950eb00f9d0f914fcdc2d560c9f3c1fb3a9084266e34daa52f6c.png" - ], - 
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b549f756854ea0168d6ed556896dd8e909ed88290afdd9e97e008d6e2296.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaa9b529f756850ea0169566d1568d6dd86909ed88290afd9ded7e008d666ad6.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/aad73e0df78085ac840395ac9428d9fad56cd8f2906c48f2d0ec7a536a1737f3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aad73e0cf710952c840195acd528c1e2d1ecc9f3c1ec49f3c1ec6a536a1737f3.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_x.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a6ffb5097e84cde2224598d1649f8d6cd2388c76d0799867d009da76c9f8d866.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6bfb5097f84cde2224599d1649f8d6cd2388c76d0799867d009da76c1f8d866.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fbb50cfbd0c036203598dce4c88d26d32f8cf3886e1df3dc047b4289ec6e72.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fb958dfb50c03e203598dca4c9cd26933f9cb3886e1df1dc047b4289ec2e72.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a6fb958dff50c03e203598dca4c9cd26933f9cf3886e1de1dc047b4289ec2672.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_y.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ff978b7f00c9c830d7992166179e969509d866c478d964d079c876d869da26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a7ff97837f00c9c830d79921661f9e9695099876c478d964c079c876d879da26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a2ffb6127f0dc9993085d960c6748d3e9b121ca49d6a1b048df34ce789ff7205.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.0": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ea856a9ff16eb740954a9e05855a19a3c0fbc13e1ea5c07d5ad0cb58e45e3c35.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea856a95957a957ac07e954ac17e3e87950bc07f3ea4c27d3e833ac1c1e03f80.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a565b69e1a9a42917e1a19c17b3a619e59c47b3a25c53e3b8430e5c57a3e85.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e5a761a79a589e58c07d1e48c07c3f819e41c07f3d84c17e3fa62585c0fe3f83.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/afffe6d67700958636179d92e019992dd039daf5817d987a807a48e499684a6d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aeffe6d67780958636179d92e019892dd139daf5815d987a807a48e699684a6d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/eaff6ad4f74ab16490109c9b942999add1b74bb785a41d4acd526a254acc6365.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aaffead4f7cab16490109c9b946d99add1b34bb385a41c4acd526a254acc6365.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea5e618434ac36e5c1c9369b95b39c38c3a39a4fcee19a6e9b64cb609925cd25.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a593c9b49b94b79969c396c95bccc69a64db30d9b039a52c26.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a636c86a597a791c9349b94b79969c396c95bccc69a64db38c9b039a58ca6.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.4": [ - 
"https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2fd2d09295c2d1c3d33c1bc2d67d2c696ce0653c3ac2b1d976da05c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ad2f6d2fd2d09295c2d1c3d33c1bc2d27d2c696ce0e53c3ad2b1d976da01c2c4.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3e2f97a1c19996a1c8f26d1e3a0f684a3c2c6913dc2497b9db8095e502ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e3c1f97a1c3e197a1c9f37c5e390668521e0c390bdd8685b1d86096e5279f.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea153f0395eac1f895eac9fa941c79e56a741e4f68430f876916f860c9c1938d.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e968658e969692c797879e3b86929e58696d49cd6869c9a37962c923990d9c6d.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e9e1658e961e92569e9e3c7966d36c586165698c70e1ce739b3698619e1e984c.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e1a530e29e5ecf199a5acd8f64f1326161a538e665a198d29e52cb1d9a5e6669.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e96930749696cf9d9697cdc39692670b696c386969eb3866696399a41a0d8e99.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/bf813f80c156c05dc0fec29dc17f1a6dd05fc0ff1aa1c57e3b243b20375a1e81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/be816a81d17ec57ac07e952ac07f3aa0955ec17e3f80c07f3f803f80c0bf3f81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.1": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ea95629d956a996069939e9bc07f7aad856cc47e5e81857a1e254a35c1be1b81.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/fa816a85957a957ac03f957ac07f3ba1954ac07e3e81c07f3ea47a85c07e3e80.png" - ], - 
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.2": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f867f008d8220179852f01fd9bed1789a6c847cc877c46ac972987ec8fd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f067f008d8220179852f01fd9bed1789a6c847cc877c468c9f6987ec8fd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/87ed2f067f008d8220179c52f01fd9bed1789a6c847cc877c560c976987ec8fd.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3eded05fe11a492b000985af07fdbb4d1e3366d8c644da79fa68993180f6ec1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a3eded04ff11a492b000985af07fdbb4d1eb366d8c644da79fa68993180f6e81.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.3": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/a2f9b5ba7600a56962df9e96f01dc926c498cc46847f9d6cd0244bf19a6b19f1.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/a2f9b5ba7600856962df9e96f01dcd26c498cc46847f9d6cd0244bf19a6b1975.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aef9f93a770085e9205fd696d13c4b2485ca1a43952f1934daa66bd1ca6b3c71.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/aef9793a770085e9205fd696d03ccb2485ca1e43952f1934daa66bd1ca6b3c71.png" - ], - "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.4": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/ae953f87d5e82d86801f91ee6e1591fe7e117876c07d6877d068d878d800d07a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ae953f87d5e82d87801b91ee6e1599fe7e117874c07d6877d068d878d800d07a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec1329dc5be85dac01d58d73e419d423e41daa59822dc00c5fefe0091fe03ff.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/bec13e81c5bec55ac03dd896d17e8d6a1e410af7380008ff1de6fe0099ea237b.png" - ], - 
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.5": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a952d96856943969f694696858d4ee0519d6ee07f9b6a78619b2a79711a2a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e87a952d96856943969f694696858d4ae0519d6ee07f996a78719b2a79711a3a.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/e85e96ac97a168d897a5791695a19927913c3953687ecce3687c86e3487cc6c3.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/ea1595ac95e8689d95fb7b0595291963916f3b73487fccf2680484f2486ec6f0.png" - ], - "iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_not_reference_time_units.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/82faa1977fdf89976200ddf6e000d9e7f75f9866d560dae4dc00d966dc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82b8a1977fdf89876200dde6e000d9e7f77f9866d560dfe4dc00d966fc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82f8a1977fdf89876200ddf6e000d9e7f77f9866d560dee4dc00d966dc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82f8a1977fdf89876200dde6e000d9e7f77f9866d560dfe4dc00dd64dc005e20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82faa19e7f51898c6001dd86845fd9a2dd7f996281ee19f389ef03ffdc007e00.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82f8a19e7f51888c6001dda6855fd9a2dd7f986281ee19f389ff03ffdc007e00.png" - ], - "iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_reference_time_units.0": [ - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe81987fd777ffe0002addd4002805dda8de65dde9d4625bfddc209841de20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe81987fdf77ffe0002a9dd4002805ddaade65d9a9d5625bfddc209841de20.png", - "https://scitools.github.io/test-iris-imagehash/images/v4/82fe81987fdf77ffe0002addd4002805dd28df67d9a9d4625bfddc209841de20.png", - 
"https://scitools.github.io/test-iris-imagehash/images/v4/82fa80997f547799a0037a00d52f0956ddaf9f7e98a1816e09f5d8260bfffe00.png" - ] + "gallery_tests.test_plot_COP_1d.0": "aefec91c3601249cc9b3336dc4c8cdb31a64c6d997b3c0eccb5932d285e42f33", + "gallery_tests.test_plot_COP_maps.0": "ea9130db95668524913e6ac168991f0d956e917ec76396b96a853dcf94696935", + "gallery_tests.test_plot_SOI_filtering.0": "fa56f295c5e0694a3c17a58d95e8da536233da99984c5af4c6739b4a9a444eb4", + "gallery_tests.test_plot_TEC.0": "e5a761b69a589a4bc46f9e48c65c6631ce61d1ce3982c13739b33193c0ee3f8c", + "gallery_tests.test_plot_anomaly_log_colouring.0": "ec4464e384a39b13931a9b1c85696da968d5e6e63e26847bdbd399938d3c5a4c", + "gallery_tests.test_plot_atlantic_profiles.0": "97c160f462a88f07203ebc77a1e36707e61f4e38d8f3d08a910597fc877cec58", + "gallery_tests.test_plot_atlantic_profiles.1": "eeea64dd6ea8cd99991d1322b3741e2684571cd89995b3131f32a4765ee2a1cc", + "gallery_tests.test_plot_coriolis.0": "e68665de9a699659c1fe99a5896965966996c46e3e19c1da3a652669c51e1a26", + "gallery_tests.test_plot_cross_section.0": "ea91b17b9562e4d1609f5a05856e4ca45a52957e5ea5f13b1bca9dc0b17b1ac1", + "gallery_tests.test_plot_cross_section.1": "ea9521fb956a394068931e93e07e4aa5856cc47e4a91957b1ba55bb5b17a3b81", + "gallery_tests.test_plot_custom_aggregation.0": "ee816f81917e907eb03ec73f856f7ac198d070186e90811f1be33ee1a57a6e18", + "gallery_tests.test_plot_custom_file_loading.0": "fa81cb47845e34bc932797436cccc8343f11359b73523746c48c72d9d9b34da5", + "gallery_tests.test_plot_deriving_phenomena.0": "ec97681793689768943c97e8926669d186e8c33f6c99c32e6b936c83d33e2c98", + "gallery_tests.test_plot_global_map.0": "fb997b958466846ed13e87467a997a898d66d17e2cc9906684696f99d3162e81", + "gallery_tests.test_plot_hovmoller.0": "eeb46cb4934b934bc07e974bc14b38949943c0fe3e94c17f6ea46cb4c07b3f00", + "gallery_tests.test_plot_inset.0": "ebff6992b50096ad9267dac4d640949294924cdbc95d4b699d29952dcda46ed4", + "gallery_tests.test_plot_lagged_ensemble.0": 
"bbbb31b1c44e64e4b1579b5b917133cecc61f146c414668eb1119b1bb197ce34", + "gallery_tests.test_plot_lagged_ensemble.1": "aafec5e9e5e03e099a07e0f86542db879438261ec3b13ce78d8dc65a92d83d89", + "gallery_tests.test_plot_lineplot_with_legend.0": "eafd9e12a5a061e9925ec716de489e9685078ec981b229e70ddb79219cc3768d", + "gallery_tests.test_plot_load_nemo.0": "a3ff34e87f0049496d17c4d9c04fc225d256971392db9f1696df0f16cec00736", + "gallery_tests.test_plot_orca_projection.0": "bb11721a87cce5e4cce79e81d19b3b5e1e1cd3783168e07835853485e65e2e1e", + "gallery_tests.test_plot_orca_projection.1": "e58661969e799659c1f719a6c867359a1996c0773649c09c3e612679c07b3f66", + "gallery_tests.test_plot_orca_projection.2": "a58660ce9e739b31c93d1c89c8df33863783e23b3f11c07f2664366cc8ee3cc1", + "gallery_tests.test_plot_orca_projection.3": "be817a8784dea56cec79817a919e338437a5c1e73fa16c726c4a3e816a1c6b1c", + "gallery_tests.test_plot_polar_stereo.0": "ba1e615ec7e097ad961f9cb190f038e091c2c1e73f07c11f6f386b3cc1793e01", + "gallery_tests.test_plot_polynomial_fit.0": "aeffcb34d244348be5a2c96c3a4fc6d0c4b69f2d87294ccb9f1a125684cd7c11", + "gallery_tests.test_plot_projections_and_annotations.0": "fa854f19851a30e4cc76cd0bb0f932dca7c665b0c93ccb4b4ed19e9c3721b5c8", + "gallery_tests.test_plot_projections_and_annotations.1": "e3856d999c389662734331afcd2d5a7184dba592b9b69b64d26dc29954b185b2", + "gallery_tests.test_plot_rotated_pole_mapping.0": "ee46607e97a19781c0de1f81d0bb3e241f20c16f3fc0c1fe3d263d33d06f3e80", + "gallery_tests.test_plot_rotated_pole_mapping.1": "ea57685f95a886a1c0de9da090be3e2697e1c0ff3f00c17e6b266c17c07f3f00", + "gallery_tests.test_plot_rotated_pole_mapping.2": "ea57685f95a886a1c0de9da090be3e2497e1c0ff3f01c17e6b366c17c07b3f00", + "gallery_tests.test_plot_rotated_pole_mapping.3": "fa8172c6857ecd38cb3392ce36c564311931d85ec64e9787719a39993c316e66", + "gallery_tests.test_plot_wind_barbs.0": "e9e161e996169316c1fe9e96c29e36739e13c07c3d61c07f39813929c07f3f01", + "gallery_tests.test_plot_wind_speed.0": 
"e9e960e996169306c1fe9e96c29e36739e03c06c3d61c07f3da139e1c07f3f01", + "gallery_tests.test_plot_wind_speed.1": "e9e960e996169306c1ee9f96c29e36739653c06c3d61c07f39a139e1c07f3f01", + "gallery_tests.test_plot_zonal_means.0": "b45b3071c9a4c9a6c69c363cc327cbb3cb9634d8c9e63cf336738c6634d8c384", + "iris.tests.integration.plot.test_animate.IntegrationTest.test_cube_animation.0": "fe81c17e817e3e81817e3e81857e7a817e81c17e7e81c17e7a81817e817e8c2e", + "iris.tests.integration.plot.test_animate.IntegrationTest.test_cube_animation.1": "fe81857e817e7a85817e7a81857e7e817e81917a7e81817e7a81817e817e843e", + "iris.tests.integration.plot.test_animate.IntegrationTest.test_cube_animation.2": "be81817ec17e7a81c17e7e81857e3e803e81817a3e81c17e7a81c17ec97e2c2f", + "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_northpolarstereo.0": "e59661969e699659c0f719a6c967339a1992c07f3649c09c3f612669c07b3f66", + "iris.tests.integration.plot.test_plot_2d_coords.Test.test_2d_coord_bounds_platecarree.0": "ee856299954a1da699b6915ec25b6e419729c42c3f84bd9fa6d262d1d1dac076", + "iris.tests.integration.plot.test_plot_2d_coords.Test2dContour.test_2d_coords_contour.0": "b4b2643ecb05cb43b0f23d80c53c4e1d3e5990eb1f81c19f2f983cb1c4ff3e42", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon.0": "eb036726c47c9273918e6e2c6f216336787590eb969a165890ee6c676925b3b3", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_plain_latlon_on_polar_map.0": "e66d673c999031cd6667663398dc332c676364e798959336636660d933998666", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_2d_rotated_latlon.0": "eba037a4c479c273b2963f2c6f6126966865d86f969e33c9b1706c26692793b0", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_1d_coords.0": "a7ac334934d2e65c72596325b343338cb41c92d9c5b36f65330d379692ca6d6c", + "iris.tests.integration.plot.test_vector_plots.TestBarbs.test_non_latlon_2d_coords.0": 
"a7acb36134d2e676627963259343330cb43e92d9c5336e67330d379292ca6d6c", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon.0": "fb8d4f21c472b27e919d2e216f216b3178e69c7e961ab39a84696c616d245b94", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_plain_latlon_on_polar_map.0": "e66c6619999933666666c6d99999336663646d9999c1332667b60cf964d8672c", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_2d_rotated_latlon.0": "eba925a5c476d25a95a56b876f3826246a449c6b96a3731ab13f6c656a5cb48a", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_1d_coords.0": "a7ac24947259f3493697632df45926b6e126c4f392593b4937266f26ccf032d8", + "iris.tests.integration.plot.test_vector_plots.TestQuiver.test_non_latlon_2d_coords.0": "afac26367251d3493617632df45c26a6e126c6f392593b4937266f26ccf232d0", + "iris.tests.test_analysis.TestProject.test_cartopy_projection.0": "9e1952c9c165b4fc668a9d47c1461d7a60fb2e853eb426bd62fd229c9f04c16d", + "iris.tests.test_mapping.TestBasic.test_contourf.0": "e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c", + "iris.tests.test_mapping.TestBasic.test_pcolor.0": "e97a347c96858b8d9685c9c39696c393966c634969ce3c64697a3864697b3c9c", + "iris.tests.test_mapping.TestBasic.test_unmappable.0": "ea853e48957ac1df957ac8be852bc1b1944e7a9a78e02f4c6a253e6c7a912dc2", + "iris.tests.test_mapping.TestBoundedCube.test_grid.0": "fa81857e857e7a81857e7a817a81817e7a81857e857e7a81857e7a817a81857e", + "iris.tests.test_mapping.TestBoundedCube.test_pcolormesh.0": "fa81c17e857e1ea1857e634a7a81cd257e8484da857e3b29817e68f47a81c799", + "iris.tests.test_mapping.TestLimitedAreaCube.test_grid.0": "fa81857e857e7a81857e7a817a817a817a81817e7a81857e857e857e857e7a81", + "iris.tests.test_mapping.TestLimitedAreaCube.test_outline.0": "fa81857e857e3e81857e7a857a817e817a81857a7a81817e857e857a857e7a81", + "iris.tests.test_mapping.TestLimitedAreaCube.test_pcolormesh.0": 
"ea813949957ec4b7917e3f60266978d97a9562376e81954a914ec6cc957a0f98", + "iris.tests.test_mapping.TestLimitedAreaCube.test_scatter.0": "ea05bd3e91eac2d984983d346b2473477acf69ad1d3296d8c696e126c1ab1a71", + "iris.tests.test_mapping.TestLowLevel.test_keywords.0": "be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3", + "iris.tests.test_mapping.TestLowLevel.test_keywords.1": "fa819897857e6530957e7bcc7a819c316ea1951e857e62c2857e79a17a819633", + "iris.tests.test_mapping.TestLowLevel.test_params.0": "fa8190be857e6739913a7bc47a8594337bb1911c857e6ec3913279807e819637", + "iris.tests.test_mapping.TestLowLevel.test_params.1": "be21a71bc1de58e43a63a71b3e016061c1fe9b8c3e01a473847e5b94d1fb9ac3", + "iris.tests.test_mapping.TestLowLevel.test_params.2": "fa81909f857e6520957e7acc7a8194716e31851e857e6ac281fe3ba17a81963f", + "iris.tests.test_mapping.TestLowLevel.test_simple.0": "faa0e558855f9de7857a1ab16a85a51d36a1e55a854e58a5c13837096e8fe17a", + "iris.tests.test_mapping.TestMappingSubRegion.test_simple.0": "b9913d90c66eca6ec66ec2f3689195aecf5b2f00392cb3496495e21da4db6c92", + "iris.tests.test_mapping.TestUnmappable.test_simple.0": "fa81b54a817eca37817ec701857e3e64943e7bb41b806f996e817e006ee1b19b", + "iris.tests.test_plot.Test1dFillBetween.test_coord_coord.0": "f31432798cebcd87723835b4a5c5c2dbcf139c6c8cf4730bf3c36d801e380378", + "iris.tests.test_plot.Test1dFillBetween.test_coord_cube.0": "ea17352b92f0cbd42d6c8d25e59d36dc3a538d2bb2e42d26c6d2c2c8e4a1ce99", + "iris.tests.test_plot.Test1dFillBetween.test_cube_coord.0": "aff8e44af2019b3d3d03e0d1865e272cc1643de292db4b98c53c7ce5b0c37b2c", + "iris.tests.test_plot.Test1dFillBetween.test_cube_cube.0": "ea1761f695a09c0b70cc938d334b4e4f4c3671f2cd8b7996973c2c68e1c39e26", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord.0": "8bfec2577e01a5a5ed013b4ac4521c94817d4e6d91ff63369c6d61991e3278cc", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord.0": "8fff941e7e01e1c2f801c878a41e5b0d85cf36e1837e2d9992c62f21769e6a4d", + 
"iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_coord_map.0": "bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_coord_cube.0": "8ff897066a01f0f2f818ee1eb007ca41853e3b81c57e36a991fe2ca9725e29ed", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube.0": "8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_coord.0": "8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21", + "iris.tests.test_plot.Test1dPlotMultiArgs.test_cube_cube.0": "8ff8c0567a01b296e4019d2ff10b464bd4da6391943678e5879f7e3903e63f1c", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_coord_coord.0": "f314b2798ce3cd87723835a4a5c5c2dbcf139c6c8cf4730bd3c36d801c3c6378", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_coord_cube.0": "ea17352bd2f0cbd4256c8da5e59c36dc1a538d2b92e41d26ced2c2c8eca1ce99", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_cube_coord.0": "a3ffe44af6009b3d2907c8f1f6588f2cc96619e290fb4b88cd2c3ce590e3770c", + "iris.tests.test_plot.Test1dQuickplotFillBetween.test_cube_cube.0": "ea17e1f695a09c0b60cc938d334b4e4f4c3671f2cd8b7996973c2c69e1c31e26", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord.0": "83fec2777e002427e801bb4ae65a1c94813dcec999db4bbc9ccd79991f3238cc", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord.0": "83ff9d9f7e01e1c2b001c8f8f63e1b1d81cf36e1837e258982ce6f215c9a626c", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_coord_map.0": "bbe0c214cd979dc3b05e4b68db0771b48698961b7962d2446e8ca5bb36716c6e", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_coord_cube.0": "87ffb79e7f0060d8303fcd1eb007d801c52699e18d769e2199e60ce1da5629ed", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube.0": "a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_coord.0": 
"a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", + "iris.tests.test_plot.Test1dQuickplotPlotMultiArgs.test_cube_cube.0": "83ffc9d67e00909624059daef160cf4bd45a4b9184367ae5979f7e3909e6261c", + "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord.0": "b2ecc1a8b9994a16e666b5e3ce151969a5fb4ed49909653990d46b9bfc097684", + "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_coord_map.0": "bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6", + "iris.tests.test_plot.Test1dQuickplotScatter.test_coord_cube.0": "be852fc1e078c83eb30e3607672149c098d95c5b9e4636f2c1fc299d999f7e03", + "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_coord.0": "edfa96cb9a256b4f65466d9892d9c865693a1a9c94b39ed8484b35ad9a864c32", + "iris.tests.test_plot.Test1dQuickplotScatter.test_cube_cube.0": "e8b33c129649c78de3a773e578650c728e92279be12de1edc4f246b2939c3b01", + "iris.tests.test_plot.Test1dScatter.test_coord_coord.0": "b2ecc12999994e16e666b5e3ce171969a5fb4ed49909e53990c44b9b7c09f684", + "iris.tests.test_plot.Test1dScatter.test_coord_coord_map.0": "bea06899c14eb16e9895ce46c74a396a74ed64b13390b3c61b439f1b4d2ccde6", + "iris.tests.test_plot.Test1dScatter.test_coord_cube.0": "be812fc1c078c03e930e3627672369c1d8d85c5b96463662e1fc699d9b9f7e03", + "iris.tests.test_plot.Test1dScatter.test_cube_coord.0": "edda96cb9a256b4765c26d9892dbc665693a1a9494b796c86c4b37ad92864c32", + "iris.tests.test_plot.Test1dScatter.test_cube_cube.0": "edb23c529649c78de38773e538650c729e92279be12de1edc4f246b2139c3b01", + "iris.tests.test_plot.Test2dPoints.test_circular_changes.0": "fa81c57a857e93bd9b193e436ec4ccb03b01c14a857e3e34911f3b816e81c57b", + "iris.tests.test_plot.TestAttributePositive.test_1d_positive_down.0": "a7fe781b708487c360079e3bb4789869816bdb64c76b4a3cce7b4e749a6130c5", + "iris.tests.test_plot.TestAttributePositive.test_1d_positive_up.0": "a7ff958b7a00b09c6617e1c1907fcb0d9163ce7895289a618f381bffccf97200", + 
"iris.tests.test_plot.TestAttributePositive.test_2d_positive_down.0": "fb966ba6846194dbd01f3665c0e4399a3f1bc2653f90c99e2f613e64c01e3f81", + "iris.tests.test_plot.TestAttributePositive.test_2d_positive_up.0": "ebc06be1941e941ec07f941f907f6fa0950fc07e6f80c07f6b806be1c07f3f80", + "iris.tests.test_plot.TestContour.test_tx.0": "eeece0173c07951fbd038748914964e8c14e72e9c1531ee1cc746bb293973ecd", + "iris.tests.test_plot.TestContour.test_ty.0": "ebfa8553fc01b15af4055a069546caa5954b7e9bc0f97d2cc2d62d360b362b49", + "iris.tests.test_plot.TestContour.test_tz.0": "8bfe805ffc00857ef0007e01d4027e80815fd56a81ff7a8085ff3aaa03ff6af5", + "iris.tests.test_plot.TestContour.test_yx.0": "e85e36cb95b199998e6d4d3694b26c78c7396329958434c2cacb6c6d69ce9392", + "iris.tests.test_plot.TestContour.test_zx.0": "affe8057fc00855cf8007e00d0027e808557d5ea815f7ea0817f2fea817f2bff", + "iris.tests.test_plot.TestContour.test_zy.0": "abff817ff801857afc017a80d4027e00855ec42a81fe7a8185fe6a8f05fe2abf", + "iris.tests.test_plot.TestContourf.test_tx.0": "ea857a81957a857e957a857a857a958ac5723b0d7ac56b833e856e606a933e90", + "iris.tests.test_plot.TestContourf.test_ty.0": "ea851f00957ac0f3957ac07f957a628d815e7b926ab13e816a953aac6a859ed3", + "iris.tests.test_plot.TestContourf.test_tz.0": "fa81857e857e7a81857a7a81857e7a81857e7a806a95857a7a85857e7a85817e", + "iris.tests.test_plot.TestContourf.test_yx.0": "e97a386e968597b19685c9c296a7c79493c16e59691a387f6978396e6c6a3184", + "iris.tests.test_plot.TestContourf.test_zx.0": "fa81817e857e7a81857a7a81957a6e81917a6caa3a85c57a3a8585fa6a8591fe", + "iris.tests.test_plot.TestContourf.test_zy.0": "fa81817e857e7a81857e7a81817a7e81817a668f7a91857e7a81857e7a85817e", + "iris.tests.test_plot.TestHybridHeight.test_bounds.0": "ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84", + "iris.tests.test_plot.TestHybridHeight.test_bounds.1": "bf813e85c07ec57ec17e9073c07e3f81856ec17a3f80c0fe3e813f84c2733e80", + "iris.tests.test_plot.TestHybridHeight.test_bounds.2": 
"ee856aa5957a955ac0bf954bc17e3b819548c07f3e81c07e2ec46ea4c07f3e84", + "iris.tests.test_plot.TestHybridHeight.test_orography.0": "fa817a91917a957ac4ff240cc07f6ea466a5c03f3b81c17f1b321b01d35b3fc0", + "iris.tests.test_plot.TestHybridHeight.test_orography.1": "ea07695f95e0d2b4c09d95e0956a3da99294c2be3e85c07f3fa92b05c15e3f42", + "iris.tests.test_plot.TestHybridHeight.test_points.0": "fe857b91917a847ec4bd3f01c47c6ca43b11915a3ea4db3b1b4a84c4c03f3fc1", + "iris.tests.test_plot.TestHybridHeight.test_points.1": "be813a81c17ec57ec17e952ac07f3f808556c17e3f80c07f3e813f80c27e3f81", + "iris.tests.test_plot.TestHybridHeight.test_points.2": "fe856a85957a955ac03f956ac17f3f809552c07f3e81c07e3e807e85c07e3f80", + "iris.tests.test_plot.TestHybridHeight.test_points.3": "fe857b91917a847ec4bd3f01c47c6ca43b11915a3ea4db3b1b4a84c4c03f3fc1", + "iris.tests.test_plot.TestHybridHeight.test_points.4": "b878387e978ec2f0c0f09f83878f3f81c070c0fe78d0c1763fa13856d03e3f0f", + "iris.tests.test_plot.TestMissingCS.test_missing_cs.0": "fa816ac1857e853cc17e957ac15f3e8494c6c8f43e81c13b3f813e91c07e3f46", + "iris.tests.test_plot.TestMissingCoord.test_no_u.0": "ea856a95955a954ac17f954a807e3f48951ac07e3f81c0ff7ea16a81c0bf3f81", + "iris.tests.test_plot.TestMissingCoord.test_no_u.1": "ea956ab5954a954ac17e9542817f2f60950ac07f3e80c0ff7a856aa5c2ff3f80", + "iris.tests.test_plot.TestMissingCoord.test_no_v.0": "fa816a85957a857ac17e954ac17e1fa2950bc07e3e81c07f3e807a85c17f3f81", + "iris.tests.test_plot.TestMissingCoord.test_no_v.1": "fa856a85957a857ac17e954ac17e9d02954ac07e3e81c07f3e857a85c2fd3f80", + "iris.tests.test_plot.TestMissingCoord.test_none.0": "fa816a85957a857ac17e954ac17e3fa2950ac07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_plot.TestMissingCoord.test_none.1": "fa856a85957a957ac17e954ac17a1f06954ac07f3e81c07f3e817a85c0fd3f80", + "iris.tests.test_plot.TestPcolor.test_tx.0": "ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa", + "iris.tests.test_plot.TestPcolor.test_ty.0": 
"ea953f83954ac2fc956ac07e956a3509c0de61796ab57a916a854a916ab590fb", + "iris.tests.test_plot.TestPcolor.test_tz.0": "fa81857e857a7a84857a7a81857e7a813e2f7a817a85857a7a85857a7a85857a", + "iris.tests.test_plot.TestPcolor.test_yx.0": "e97a387e968596319697c3c19284a62c93ad60c36933393a6c7a793b6c6b31cd", + "iris.tests.test_plot.TestPcolor.test_zx.0": "fa81857e857a6e05857e7a81857e7a81e0577a816a8585fa7a85857e7a81857e", + "iris.tests.test_plot.TestPcolor.test_zy.0": "fa81857e857e7e80857e7a81857e7a812d577a816a85857e7a81857e7a80857e", + "iris.tests.test_plot.TestPcolorNoBounds.test_tx.0": "ea858782957a603f957a3878957a7a7d957a6bc06ae56f806ad50fd06a859c50", + "iris.tests.test_plot.TestPcolorNoBounds.test_ty.0": "ea85857a857e7e81957a7a81957a6a85857acaa6c1fb6aa67a81956e6a81b506", + "iris.tests.test_plot.TestPcolorNoBounds.test_tz.0": "fa817e81857e857a857e7a81857e6a85817b81e63e813e857e81c17e7a81956e", + "iris.tests.test_plot.TestPcolorNoBounds.test_yx.0": "e96ac78796953c4c9685383996c538e69792637063696b49693ac796693ac71b", + "iris.tests.test_plot.TestPcolorNoBounds.test_zx.0": "fa817a81857e857e857e7a81857e6a84c17f95786aa77a807e81c17c7e819558", + "iris.tests.test_plot.TestPcolorNoBounds.test_zy.0": "fa817a80857e857e857e7a81817e3e81817e857f6aa07a857e80c17f7e80c15f", + "iris.tests.test_plot.TestPcolormesh.test_tx.0": "ea817a81957e857e957e953e957e857e857e6aa06a816ac16a017a816a9585fa", + "iris.tests.test_plot.TestPcolormesh.test_ty.0": "ea953f83954ac2fc956ac07e956a3509c0de71796ab57a816a854a916ab590fb", + "iris.tests.test_plot.TestPcolormesh.test_tz.0": "fa81857e857a7e84857a7a81857e7a813a0f7a817a85857b7a85857a7a85857a", + "iris.tests.test_plot.TestPcolormesh.test_yx.0": "e97a387e968596319697c3c19284a62c93ad60c36933393a6c7e793a6c6b31cd", + "iris.tests.test_plot.TestPcolormesh.test_zx.0": "fa81857e857a7e01857e7a81857e7a81e0577a816a8585fa7a85857e7a81857e", + "iris.tests.test_plot.TestPcolormesh.test_zy.0": "fa81857e857e7e80857e7a81857e7a8125577a817a85817f7a81857e7a80857e", + 
"iris.tests.test_plot.TestPcolormeshNoBounds.test_tx.0": "ea858782957a603f957a387a957a7a6d957a6bc06ae56f806ad50fd06a859c50", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_ty.0": "ea85857a857e3e81957a7a81957a6a85857acae6c1fb6aa67a81956e6a81b506", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_tz.0": "fa813e81857e857a857e7a81857e6a85817b0aa63e993e857e81c17e7a81956e", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_yx.0": "e96ac79796953c4c9685383996c538e69692637261696b49693ac796693ac71b", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_zx.0": "fa817a85857a857e857e7a81857e7a81c17f95506aaf7a807e81c17c7a81857a", + "iris.tests.test_plot.TestPcolormeshNoBounds.test_zy.0": "fa817a80857a857e857e7a81817e3e81817e2f756aa47a817e80c17f7e80c17f", + "iris.tests.test_plot.TestPlot.test_t.0": "8ffe9c1a7e05e718f305d9d2e463127181380c9e824e2fa781db2bed76b4fe00", + "iris.tests.test_plot.TestPlot.test_t_dates.0": "87fc9d8b7e044d81f5037bd4c14324749279a73e8d9d864f09e4a7b348dc2769", + "iris.tests.test_plot.TestPlot.test_x.0": "8bfe956b7c01c2f26300929dfc1e3c6690736f91817e3b0c84be6be5d1603ed1", + "iris.tests.test_plot.TestPlot.test_y.0": "aff8946c7a14c99fb193d263e42432d8d00c2d27944a3f8dc5223ef703ff6b90", + "iris.tests.test_plot.TestPlot.test_z.0": "8fffc1dc7e019c70f001b70ee4386de1814e7938837b6a7f84d07c9f15b02f21", + "iris.tests.test_plot.TestPlotCitation.test.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", + "iris.tests.test_plot.TestPlotCitation.test_axes.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", + "iris.tests.test_plot.TestPlotCitation.test_figure.0": "abf895467a1d9506f811783485437abd85427ab995067ab9f00687f96afe87c8", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_non_cube_coordinate.0": "fa81857e857e3e85857e7a81857e7a81857e7a817e81780b7a81c56a7a81857e", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.0": "ea853f10956ac1e1957a854e957a207e955e6aa76ae17aa16a856aaf6ab19e12", + 
"iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.1": "ea853a85857a857a957a857a957ed05a857b3e946a606b917a816f247a853af4", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.2": "eafdcec9f4219530b696a56694c3852a95656b7b85986acdc06516adad186e9a", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.3": "aff24ab7fd05952dbd0f950f910fed48c47868f2e1b9329094266e345a850f6c", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.4": "eaa9b5699556854e9456854ed05625f9d0a92bfdc0a90afd81f97e00855e7ab6", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_tx.5": "eaf73e0d9503852c950395ac9528c1fad06cc0f2d1ec6af2c0fc6a536a1797f3", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_x.0": "afea950ddb13c03e34359ad8a4c86f24913f2693806e3ff1f4087b4285fd2af2", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_y.0": "afee9632de05c9d9f180d168c454a53e931b3e84954a3b8c85f94ce703ff7284", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.0": "ea853f00957ac07c957ac0bf951a69f3c47c7a5f3a4127816b953e646b813761", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.1": "e97a346c9685cb899685c9c39695c79396ec634969ce2c74697a3864697b3c8c", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.2": "ebffca44f102b3609c309c9b940d19add1bb63b3a7843e4acc5a6aa56acc6b64", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.3": "e85a6b6c86a595a791c9349b94b63b69c7926b5bccca66646b3869b831a52ca6", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.4": "ea153e0395aac0f895eac1f8941e69e56a743e5d7a432787691ef860c3c1938f", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_yx.5": "e96930749696cb9d9697cdc39692671b696c306969eb3c76697319942a0d8699", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.0": "be813ea0c17ec55ac17ed23dc07e295ac57e2b653f803f813e816e853e85b542", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.1": "ea85856e857e4893957a7aa1956a7b81954b3b817a856fd46a85847c6e85857e", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.2": 
"cbedcd25bc02a4929c103a5bf03fdbbc81cb364d84e46da70f86899b3a0f6ec1", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.3": "aee1793a6b168569b852d697913c622cc5ca2e4b952d3bb4c2b66bd1426b3c71", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.4": "bec13a81c13ec54ac13e5afdd11e256a3e412afd3e4002ff2ee0fe0035fa817e", + "iris.tests.test_plot.TestPlotCoordinatesGiven.test_zx.5": "ea1594ec95ea6c1d95ea7b0595ab3b13950f6a536a1cc6f26a0cc4f26e0c85f2", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.0": "b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coord_names.1": "b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.0": "b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_coords.1": "b8a53b59c71ac5a6b8791c1867876b63d9e0e65c96199d871cc23339633664ce", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_default.0": "b87830b0c786cf269ec766c99399cce998d3b3166f2530d3658c692d30ec6735", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.0": "fa85978e837e68f094d3673089626ad792073985659a9b1a7a15b52869f19f56", + "iris.tests.test_plot.TestPlotDimAndAuxCoordsKwarg.test_yx_order.1": "ea95969c874a63d39ca3ad2a231cdbc9c4973631cd6336c633182cbc61c3d3f2", + "iris.tests.test_plot.TestPlotOtherCoordSystems.test_plot_tmerc.0": "e665326d999ecc92b399b32466269326b369cccccccd64d96199631364f33333", + "iris.tests.test_plot.TestQuickplotPlot.test_t.0": "83ffb59a7f00e59a2205d9d6e4619a74d9388c8e884e8da799d30b6dddb47e00", + "iris.tests.test_plot.TestQuickplotPlot.test_t_dates.0": "82fe958b7e046f89a0033bd4d9632c74d8799d3e8d8d826789e487b348dc2f69", + "iris.tests.test_plot.TestQuickplotPlot.test_x.0": "82ff950b7f81c0d6620199bcfc5e986695734da1816e1b2c85be2b65d96276d1", + "iris.tests.test_plot.TestQuickplotPlot.test_y.0": 
"a2fbb46e7f10c99f2013d863e46498dcd06c0d2798421fa5dd221e7789ff6f10", + "iris.tests.test_plot.TestQuickplotPlot.test_z.0": "a3ffc1de7e009c7030019786f438cde3810fd93c9b734a778ce47c9799b02731", + "iris.tests.test_plot.TestSimple.test_bounds.0": "ea856a85954a957ac17e954ac17a9d3a956ac07e3e80c07f3e857aa5c27d3f80", + "iris.tests.test_plot.TestSimple.test_points.0": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_plot.TestSymbols.test_cloud_cover.0": "eb5291e494ad6e136b5291ec94ad6e136b5291ec94ad6e136b5291ec94ad6e13", + "iris.tests.test_quickplot.TestLabels.test_alignment.0": "be813fe0954ac07fc0ff3e81c03fc97a6d0094af3f80c17f36a53240d97f2d82", + "iris.tests.test_quickplot.TestLabels.test_contour.0": "a7fd955a7a016d1a3217c962e4819a56c96f3c859b624d2584de3a6999b662db", + "iris.tests.test_quickplot.TestLabels.test_contour.1": "bf802f85c17fc17fc07eb42ac17f3f929130c06e3f80c07f7aa02e85c07f3e81", + "iris.tests.test_quickplot.TestLabels.test_contourf.0": "be816a95957a957ac0fe1e8bc07f7f806e01c07f3f80c07f3fa23f00c07f3d00", + "iris.tests.test_quickplot.TestLabels.test_contourf.1": "bf802f85c17fc17fc07eb42ac17f3f929130c06e3f80c07f7aa02e85c07f3e81", + "iris.tests.test_quickplot.TestLabels.test_contourf.2": "be816a95907ae508c17e955ac07f3fa0945bc07f3f80c07f3aa36f01c0ff3f80", + "iris.tests.test_quickplot.TestLabels.test_contourf_nameless.0": "be816af5907ee508c17e955ac03f3f809419c07f3f80c07f3a8b6f81c0ff3f80", + "iris.tests.test_quickplot.TestLabels.test_map.0": "e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6", + "iris.tests.test_quickplot.TestLabels.test_map.1": "e85a636c86a597a793c9349b94b69969c396c95bcce69a64d938c9b039a58ca6", + "iris.tests.test_quickplot.TestLabels.test_pcolor.0": "eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80", + "iris.tests.test_quickplot.TestLabels.test_pcolormesh.0": "eea16affc05ab500956e974ac53f3d80925ac03f2f81c07e3fa12da1c2fe3f80", + 
"iris.tests.test_quickplot.TestLabels.test_pcolormesh_str_symbol.0": "eea16affc05ab500956e974ac53f3d80925ac03f3f80c07e3fa12d21c2ff3f80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_non_cube_coordinate.0": "fe816a85857a957ac07f957ac07f3e80956ac07f3e80c07f3e813e85c07e3f80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.0": "ea856a95955a956ac17f950a807e3f4e951ac07e3f81c0ff3ea16aa1c0bd3e81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.1": "ea856a85957a957ac17e954ac17e1ea2950bc07e3e80c07f3e807a85c1ff3f81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.2": "eaf9eec9f729943032168d66d4db896e9567497b81304aedc96514ad8d18669a", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.3": "a6fb4b967f00950eb00f9d0f900fcd62dc7868f2c1bb3a909c266e34daa52f6c", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.4": "eaa9b549f756854ea0168d6ed556896fd8a909ed88290afdd9e97e008d6e2296", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_tx.5": "aad73e0df78085ac840195ac9528d9fad56cd8f2906c48f2d0ec7a536a1737f3", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_x.0": "a6fb958dff50c03e203598dca4c9cd26933f9cf3886e1de1dc047b4289ec2672", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_y.0": "a2ffb6127f0dc9992085d960c6748d3edb121ca49d6a1b048df34ce789ff7205", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.0": "ea856a95957a957ac07e954ac17e3e86950bc17f3ea4c27d3e833ac1c1e03f80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.1": "e5a761a79a589e58c07d1e48c07c3f819e41c07f3d84c17e3fa62585c0fe3f83", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.2": "aaffead4f7cab16490109c9b946d99add1b74bb385a41c4acd526a254acc6325", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.3": "e85a634c86a597a793c9349b94b79969c396c95bcce69a64d938c9b039a58ca6", + 
"iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.4": "ea153f0395eac1f895eac9fa941c79e56a741e4f68430f876916f860c9c1938d", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_yx.5": "e96930749696cf9d9697cdc39692670b696c386969eb3866696399a41a0d8e99", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.0": "be816a81d17ec57ac07e952ac07f3aa0955ec17e3f80c07f3f803f80c0bf3f81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.1": "fa816a85957a957ac03f957ac07f3ba1954ac07e3e81c07f3ea47a85c07e3e80", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.2": "a3eded04ff11a492b000985af07fdbb4d1eb366d8c644da79fa68993180f6e81", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.3": "aef9793a770085e9205fd696d03ccb2485ca1e43952f1934daa66bd1ca6b3c71", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.4": "bec13e81c5bec55ac03dd8b4d17a8d6a1e4108f7384008ff1de6fe0099ee237b", + "iris.tests.test_quickplot.TestQuickplotCoordinatesGiven.test_zx.5": "ea1595ac95e8689d95fb7b0595291943916f3b73487fccf2680484f2486ec7f0", + "iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_not_reference_time_units.0": "82f8a19e7f51888c6001dda6855fd9e2dd7f986281ee19f389ef03ffdc007e00", + "iris.tests.test_quickplot.TestTimeReferenceUnitsLabels.test_reference_time_units.0": "82fa80997f547799a0037a00d52f0956ddaf9f7e98a1816e09f5d8260bfffe00" } \ No newline at end of file diff --git a/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl index 1f6bc36832..2873f68205 100644 --- a/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl +++ b/lib/iris/tests/results/integration/climatology/TestClimatology/reference_simpledata.cdl @@ -13,7 +13,7 @@ variables: time:climatology = "time_climatology" ; time:units = "days since 1970-01-01 00:00:00-00" ; 
time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_climatology(time, bnds) ; double latitude(latitude) ; latitude:axis = "Y" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_different_saves_on_variables.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/multiple_same_saves_as_global.cdl rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/multiple_same_saves_as_global.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl b/lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestUmVersionAttribute/single_saves_as_global.cdl rename to lib/iris/tests/results/integration/netcdf/attributes/TestUmVersionAttribute/single_saves_as_global.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl similarity index 98% rename from lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl index 
cfb3143050..762226192c 100644 --- a/lib/iris/tests/results/integration/netcdf/TestAtmosphereSigma/save.cdl +++ b/lib/iris/tests/results/integration/netcdf/aux_factories/TestAtmosphereSigma/save.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl similarity index 98% rename from lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl index 88c5fc18fe..6fed33430a 100644 --- a/lib/iris/tests/results/integration/netcdf/TestHybridPressure/save.cdl +++ b/lib/iris/tests/results/integration/netcdf/aux_factories/TestHybridPressure/save.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl similarity index 99% rename from lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl index 5ff22a679b..d813ab98dc 100644 --- 
a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl +++ b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_and_pressure.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml similarity index 98% rename from lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml rename to lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml index 4d37f856ad..09d54a1b19 100644 --- a/lib/iris/tests/results/integration/netcdf/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml +++ b/lib/iris/tests/results/integration/netcdf/aux_factories/TestSaveMultipleAuxFactories/hybrid_height_cubes.cml @@ -58,7 +58,7 @@ [124, 125, 126, 127, 128, 129]]" shape="(5, 6)" standard_name="surface_altitude" units="Unit('m')" value_type="int64" var_name="surface_altitude"/> - + @@ -122,7 +122,7 @@ [1240, 1250, 1260, 1270, 1280, 1290]]" shape="(5, 6)" units="Unit('m')" value_type="int64" var_name="surface_altitude_0"/> - + diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_multi_dtype.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_multi_dtype.cdl diff --git 
a/lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl similarity index 100% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/multi_packed_single_dtype.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/multi_packed_single_dtype.cdl diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl similarity index 95% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl index 65da679ad0..fece18b1f3 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_manual.cdl +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_manual.cdl @@ -32,7 +32,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -41,7 +41,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl similarity index 95% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl rename to 
lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl index 65da679ad0..fece18b1f3 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_signed.cdl +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_signed.cdl @@ -32,7 +32,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -41,7 +41,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl similarity index 95% rename from lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl rename to lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl index d7a39d72de..c85ba6aadd 100644 --- a/lib/iris/tests/results/integration/netcdf/TestPackedData/single_packed_unsigned.cdl +++ b/lib/iris/tests/results/integration/netcdf/general/TestPackedData/single_packed_unsigned.cdl @@ -32,7 +32,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -41,7 +41,7 @@ variables: time:bounds = "time_bnds" ; time:units = 
"hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/merge/dec.cml b/lib/iris/tests/results/merge/dec.cml index ea72b506f0..4efd40910f 100644 --- a/lib/iris/tests/results/merge/dec.cml +++ b/lib/iris/tests/results/merge/dec.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -151,7 +151,7 @@ - + @@ -270,7 +270,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -291,7 +291,7 @@ - + @@ -411,7 +411,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + @@ -432,7 +432,7 @@ - + - + diff --git a/lib/iris/tests/results/merge/theta.cml b/lib/iris/tests/results/merge/theta.cml index 293e40cc3a..0e5b02be51 100644 --- a/lib/iris/tests/results/merge/theta.cml +++ b/lib/iris/tests/results/merge/theta.cml @@ -11,7 +11,7 @@ - + @@ -130,7 +130,7 @@ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]" shape="(38,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/merge/theta_two_times.cml b/lib/iris/tests/results/merge/theta_two_times.cml index 0dd396e337..d1c9f59ace 100644 --- a/lib/iris/tests/results/merge/theta_two_times.cml +++ b/lib/iris/tests/results/merge/theta_two_times.cml @@ -399,7 +399,7 @@ - + - + diff --git a/lib/iris/tests/results/name/NAMEIII_field.cml b/lib/iris/tests/results/name/NAMEIII_field.cml index 97b3189bba..c419a2760d 100644 --- a/lib/iris/tests/results/name/NAMEIII_field.cml +++ b/lib/iris/tests/results/name/NAMEIII_field.cml @@ -48,7 +48,7 @@ - + @@ -113,7 +113,7 @@ - + @@ -177,7 +177,7 @@ - + @@ -241,7 +241,7 @@ - + @@ -305,7 +305,7 @@ - + diff --git a/lib/iris/tests/results/name/NAMEIII_timeseries.cml b/lib/iris/tests/results/name/NAMEIII_timeseries.cml index c4e70590a2..3776bfc27f 100644 --- 
a/lib/iris/tests/results/name/NAMEIII_timeseries.cml +++ b/lib/iris/tests/results/name/NAMEIII_timeseries.cml @@ -58,7 +58,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -129,7 +129,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -199,7 +199,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -269,7 +269,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', 
calendar='standard')" value_type="float64"/> @@ -339,7 +339,7 @@ 358342.0, 358343.0, 358344.0, 358345.0, 358346.0, 358347.0, 358348.0, 358349.0, 358350.0, 358351.0, 358352.0, 358353.0, - 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 358354.0, 358355.0, 358356.0, 358357.0]" shape="(72,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEIII_trajectory.cml b/lib/iris/tests/results/name/NAMEIII_trajectory.cml index c514d589ca..20a0ec3b82 100644 --- a/lib/iris/tests/results/name/NAMEIII_trajectory.cml +++ b/lib/iris/tests/results/name/NAMEIII_trajectory.cml @@ -16,7 +16,7 @@ - + @@ -39,7 +39,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -61,7 +61,7 @@ - + @@ -84,7 +84,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -106,7 +106,7 @@ - + @@ -129,7 +129,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -151,7 +151,7 @@ - + @@ -174,7 +174,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -196,7 +196,7 @@ - + @@ -219,7 +219,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -241,7 +241,7 @@ - + @@ -264,7 +264,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -286,7 +286,7 @@ - + @@ -309,7 
+309,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -331,7 +331,7 @@ - + @@ -354,7 +354,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -376,7 +376,7 @@ - + @@ -399,7 +399,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -421,7 +421,7 @@ - + @@ -444,7 +444,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -466,7 +466,7 @@ - + @@ -489,7 +489,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -511,7 +511,7 @@ - + @@ -534,7 +534,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -556,7 +556,7 @@ - + @@ -579,7 +579,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -601,7 +601,7 @@ - + @@ -624,7 +624,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -646,7 +646,7 @@ - + @@ -669,7 +669,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -691,7 +691,7 @@ - + @@ -714,7 +714,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -736,7 +736,7 @@ - + @@ -759,7 +759,7 @@ + 366886.75, 366887.0]" 
shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEIII_trajectory0.cml b/lib/iris/tests/results/name/NAMEIII_trajectory0.cml index 5f10016f39..d337ca9454 100644 --- a/lib/iris/tests/results/name/NAMEIII_trajectory0.cml +++ b/lib/iris/tests/results/name/NAMEIII_trajectory0.cml @@ -16,7 +16,7 @@ - + @@ -39,7 +39,7 @@ + 366886.75, 366887.0]" shape="(836,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEIII_version2.cml b/lib/iris/tests/results/name/NAMEIII_version2.cml index 95b9db7d5b..0ad0c883a2 100644 --- a/lib/iris/tests/results/name/NAMEIII_version2.cml +++ b/lib/iris/tests/results/name/NAMEIII_version2.cml @@ -76,7 +76,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -158,7 +158,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -240,7 +240,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" 
shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -322,7 +322,7 @@ 402921.0, 402922.0, 402923.0, 402924.0, 402925.0, 402926.0, 402927.0, 402928.0, 402929.0, 402930.0, 402931.0, 402932.0, - 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 402933.0, 402934.0, 402935.0, 402936.0]" shape="(24,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/name/NAMEII_field.cml b/lib/iris/tests/results/name/NAMEII_field.cml index 664669ef62..7d88c06eff 100644 --- a/lib/iris/tests/results/name/NAMEII_field.cml +++ b/lib/iris/tests/results/name/NAMEII_field.cml @@ -51,7 +51,7 @@ - + @@ -112,7 +112,7 @@ - + @@ -166,7 +166,7 @@ - + @@ -227,7 +227,7 @@ - + @@ -288,7 +288,7 @@ - + diff --git a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml new file mode 100644 index 0000000000..9bc2c0d1ac --- /dev/null +++ b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging.cml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml new file mode 100644 index 0000000000..8d1ad620d0 --- /dev/null +++ b/lib/iris/tests/results/name/NAMEII_field__no_time_averaging_0.cml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/name/NAMEII_timeseries.cml 
b/lib/iris/tests/results/name/NAMEII_timeseries.cml index 52aaa8b809..39af8a6288 100644 --- a/lib/iris/tests/results/name/NAMEII_timeseries.cml +++ b/lib/iris/tests/results/name/NAMEII_timeseries.cml @@ -36,7 +36,7 @@ [370473.5, 370474.5], [370474.5, 370475.5], [370475.5, 370476.5]]" id="cb784457" points="[370345.0, 370346.0, 370347.0, ..., 370474.0, - 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> @@ -85,7 +85,7 @@ [370473.5, 370474.5], [370474.5, 370475.5], [370475.5, 370476.5]]" id="cb784457" points="[370345.0, 370346.0, 370347.0, ..., 370474.0, - 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='gregorian')" value_type="float64"/> + 370475.0, 370476.0]" shape="(132,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl index e6a18dd2e4..da0d1d10db 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/aliases.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl index 22ee23e2f6..ef1ef973e2 100644 --- 
a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/flag.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl index 50ebd1abc9..1d33942464 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/fulldims.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl index 9ae68a1112..5a0edc7528 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/multiple.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl index 4d54fe36f0..81d32bf80c 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl +++ 
b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/partialdims.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl index 84516e186f..c6b29c5bda 100644 --- a/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl +++ b/lib/iris/tests/results/netcdf/TestNetCDFSave__ancillaries/shared.cdl @@ -20,7 +20,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double grid_latitude(grid_latitude) ; grid_latitude:axis = "Y" ; grid_latitude:units = "degrees" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml b/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml index ca4a0eb017..c748853c5c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml +++ b/lib/iris/tests/results/netcdf/netcdf_cell_methods.cml @@ -9,7 +9,7 @@ - + @@ -29,7 +29,7 @@ - + @@ -50,7 +50,7 @@ - + @@ -75,7 +75,7 @@ - + @@ -98,7 +98,7 @@ - + @@ -121,7 +121,7 @@ - + @@ -140,7 +140,7 @@ - + @@ -159,7 +159,7 @@ - + @@ -179,7 +179,7 @@ - + @@ -199,7 +199,7 @@ - + @@ -222,7 +222,7 @@ - + @@ -241,7 +241,7 @@ - + @@ -261,7 +261,7 @@ - + @@ -281,7 +281,7 @@ - + @@ -304,7 +304,7 @@ - + @@ -323,7 +323,7 @@ - + @@ -336,7 +336,7 @@ - + @@ -349,7 +349,7 @@ - + @@ -362,7 +362,7 @@ - + @@ -375,7 +375,7 @@ - + @@ -394,7 +394,7 @@ - + @@ -413,7 +413,7 @@ - + @@ -433,7 +433,7 @@ - + @@ -450,7 +450,7 @@ - + @@ -463,7 +463,7 @@ - + @@ -476,7 +476,7 @@ - + @@ -489,7 +489,7 @@ - + @@ -502,7 +502,7 @@ - + diff --git 
a/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml index a11d593684..3847d5a417 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_0.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml index 30e6844591..89ee5ac195 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_1.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml index 6f9446582a..b3c7709dae 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_index_2.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml index 12def7cea4..ea5e42150e 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_0.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml index b20281c53e..b028ee6cf8 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_mix_1.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml index 0d126109cf..76f66e1bc4 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_0.cml @@ -17,7 +17,7 @@ + 929298, 929304]" shape="(20,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" 
value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml index 8cfb4a0b5f..133cc4f659 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_1.cml @@ -15,7 +15,7 @@ + 929226, 929232, 929238, 929244]" shape="(10,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml index 9259a07563..1d7025751e 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_slice_2.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml index 6bc1a094e3..1f5a990bd4 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_0.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml index 0535339c7e..9c32197e56 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_1.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml index 6a0f9a90bf..100ab1257c 100644 --- a/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml +++ b/lib/iris/tests/results/netcdf/netcdf_deferred_tuple_2.cml @@ -14,7 +14,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml index bda7f9ed9f..22a4ff1989 100644 --- 
a/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyt_hires.cml @@ -82,7 +82,7 @@ 71603.5, 71604.5, 71605.5, 71606.5, 71607.5, 71608.5, 71609.5, 71610.5, 71611.5, 71612.5, 71613.5, 71614.5, 71615.5, 71616.5, 71617.5, - 71618.5]" shape="(31,)" standard_name="time" units="Unit('days since 1850-01-01', calendar='gregorian')" value_type="float64" var_name="time"/> + 71618.5]" shape="(31,)" standard_name="time" units="Unit('days since 1850-01-01', calendar='standard')" value_type="float64" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml index 1204fd0d39..fc6772e5f0 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyt_total.cml @@ -19,7 +19,7 @@ 929262, 929268, 929274, 929280, 929286, 929292, 929298, 929304, 929310, 929316, 929322, 929328, 929334, 929340, 929346, 929352, 929358, 929364, - 929370]" shape="(31,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='gregorian')" value_type="int32" var_name="time"/> + 929370]" shape="(31,)" standard_name="time" units="Unit('hours since 1900-01-01 00:00:0.0', calendar='standard')" value_type="int32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml index ac41f4a8b8..9d6b3c1e43 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems.cml @@ -20,7 +20,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + @@ -46,7 +46,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml 
b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml index 4234b5cc84..15ab300757 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_0.cml @@ -20,7 +20,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + diff --git a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml index 17d87a0190..29ff3b9bd9 100644 --- a/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml +++ b/lib/iris/tests/results/netcdf/netcdf_global_xyzt_gems_iter_1.cml @@ -20,7 +20,7 @@ 51, 52, 53, 54, 55, 56, 57, 58, 59, 60]" shape="(60,)" units="Unit('unknown')" value_type="int32" var_name="levelist"/> - + diff --git a/lib/iris/tests/results/netcdf/netcdf_laea.cml b/lib/iris/tests/results/netcdf/netcdf_laea.cml index ad23114038..799f40522b 100644 --- a/lib/iris/tests/results/netcdf/netcdf_laea.cml +++ b/lib/iris/tests/results/netcdf/netcdf_laea.cml @@ -11,7 +11,7 @@ - + @@ -63,7 +63,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_lcc.cml b/lib/iris/tests/results/netcdf/netcdf_lcc.cml index 7ea53e6600..592c33d534 100644 --- a/lib/iris/tests/results/netcdf/netcdf_lcc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_lcc.cml @@ -88,7 +88,7 @@ [273.0, 303.0], [304.0, 333.0], [334.0, 364.0]]" id="1c4a69ce" long_name="time" points="[15.0, 44.5, 74.0, 104.5, 135.0, 165.5, 196.0, - 227.0, 257.5, 288.0, 318.5, 349.0]" shape="(12,)" standard_name="time" units="Unit('days since 2010-01-01 12:00:00', calendar='gregorian')" value_type="float64" var_name="time"/> + 227.0, 257.5, 288.0, 318.5, 349.0]" shape="(12,)" standard_name="time" units="Unit('days since 2010-01-01 12:00:00', calendar='standard')" value_type="float64" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_merc.cml 
b/lib/iris/tests/results/netcdf/netcdf_merc.cml index 02fc4e7c34..c06a2efe88 100644 --- a/lib/iris/tests/results/netcdf/netcdf_merc.cml +++ b/lib/iris/tests/results/netcdf/netcdf_merc.cml @@ -53,19 +53,19 @@ 45.5158, 45.9993]]" shape="(192, 192)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="lon"/> - - + - - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_false.cml b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml new file mode 100644 index 0000000000..1e50aa6e65 --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_merc_false.cml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml b/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml new file mode 100644 index 0000000000..c9ad4ca33f --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_merc_scale_factor.cml @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/netcdf/netcdf_monotonic.cml b/lib/iris/tests/results/netcdf/netcdf_monotonic.cml index 578b2b6d96..3385ecd6fe 100644 --- a/lib/iris/tests/results/netcdf/netcdf_monotonic.cml +++ b/lib/iris/tests/results/netcdf/netcdf_monotonic.cml @@ -12,7 +12,7 @@ - + @@ -30,7 +30,7 @@ - + @@ -48,7 +48,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/netcdf_polar.cml b/lib/iris/tests/results/netcdf/netcdf_polar.cml new file mode 100644 index 0000000000..15c1a90da9 --- /dev/null +++ b/lib/iris/tests/results/netcdf/netcdf_polar.cml @@ -0,0 +1,45 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml b/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml index b236a3677d..05e5fe475d 100644 --- a/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml +++ b/lib/iris/tests/results/netcdf/netcdf_rotated_xyt_precipitation.cml @@ -54,7 +54,7 
@@ + [2925.5, 2926.5]]" id="2306ff47" long_name="Julian Day" points="[2922.5, 2923.5, 2924.5, 2925.5]" shape="(4,)" standard_name="time" units="Unit('days since 1950-01-01 00:00:00.0', calendar='standard')" value_type="float32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl b/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl index 1863d1ee7d..74a83c9714 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_hybrid_height.cdl @@ -22,7 +22,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; @@ -46,7 +46,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; float level_height(model_level_number) ; level_height:bounds = "level_height_bnds" ; level_height:units = "m" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml index 8e4a005d44..fbecdf97d3 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_hybrid_height.cml @@ -418,7 +418,7 @@ 0.666666666686, 0.833333333314, 1.0]" shape="(6,)" standard_name="forecast_period" units="Unit('hours')" value_type="float64" var_name="forecast_period"/> - + + 347926.666667, 347926.833333, 347927.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64" var_name="time"/> diff --git 
a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml index 13582b3106..54bcc8a686 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml +++ b/lib/iris/tests/results/netcdf/netcdf_save_load_ndim_auxiliary.cml @@ -54,7 +54,7 @@ + [2925.5, 2926.5]]" id="2306ff47" long_name="Julian Day" points="[2922.5, 2923.5, 2924.5, 2925.5]" shape="(4,)" standard_name="time" units="Unit('days since 1950-01-01 00:00:00.0', calendar='standard')" value_type="float32" var_name="time"/> diff --git a/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl b/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl index 32d4163d01..f8180d4ea8 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_ndim_auxiliary.cdl @@ -22,7 +22,7 @@ variables: time:units = "days since 1950-01-01 00:00:00.0" ; time:standard_name = "time" ; time:long_name = "Julian Day" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; float time_bnds(time, bnds) ; float rlat(rlat) ; rlat:axis = "Y" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl index 0e3ae7e715..642e46a905 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_0d.cdl @@ -50,7 +50,7 @@ variables: double time ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; // global attributes: :source = "Iris test case" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl index 601ea11719..d49e775024 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d.cdl @@ -21,7 
+21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl index b86a77aa62..8353df60e9 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_realistic_4d_no_hybrid.cdl @@ -21,7 +21,7 @@ variables: time:axis = "T" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; int model_level_number(model_level_number) ; model_level_number:axis = "Z" ; model_level_number:units = "1" ; diff --git a/lib/iris/tests/results/netcdf/netcdf_save_single.cdl b/lib/iris/tests/results/netcdf/netcdf_save_single.cdl index e45496521c..9847532001 100644 --- a/lib/iris/tests/results/netcdf/netcdf_save_single.cdl +++ b/lib/iris/tests/results/netcdf/netcdf_save_single.cdl @@ -30,7 +30,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -39,7 +39,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/netcdf/netcdf_stereo.cml b/lib/iris/tests/results/netcdf/netcdf_stereo.cml index b07304cd62..fae7ff027b 100644 --- 
a/lib/iris/tests/results/netcdf/netcdf_stereo.cml +++ b/lib/iris/tests/results/netcdf/netcdf_stereo.cml @@ -54,19 +54,19 @@ 10.449, 10.5996]]" shape="(160, 256)" standard_name="longitude" units="Unit('degrees')" value_type="float32" var_name="lon"/> - - + - - + - + diff --git a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml index 2d909ba57e..0575c684a9 100644 --- a/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml +++ b/lib/iris/tests/results/netcdf/netcdf_tmerc_and_climatology.cml @@ -62,7 +62,7 @@ - + diff --git a/lib/iris/tests/results/netcdf/save_load_traj.cml b/lib/iris/tests/results/netcdf/save_load_traj.cml index 7f8b3d7e99..9b225d127f 100644 --- a/lib/iris/tests/results/netcdf/save_load_traj.cml +++ b/lib/iris/tests/results/netcdf/save_load_traj.cml @@ -1,6 +1,6 @@ - + @@ -36,6 +36,6 @@ - + diff --git a/lib/iris/tests/results/nimrod/load_2flds.cml b/lib/iris/tests/results/nimrod/load_2flds.cml index b068657d40..41e92dd48b 100644 --- a/lib/iris/tests/results/nimrod/load_2flds.cml +++ b/lib/iris/tests/results/nimrod/load_2flds.cml @@ -14,7 +14,7 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/period_of_interest.cml b/lib/iris/tests/results/nimrod/period_of_interest.cml index 258e5bcbbc..4c495b212a 100644 --- a/lib/iris/tests/results/nimrod/period_of_interest.cml +++ b/lib/iris/tests/results/nimrod/period_of_interest.cml @@ -3,7 +3,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/probability_fields.cml b/lib/iris/tests/results/nimrod/probability_fields.cml index 7add3e75a4..184d205132 100644 --- a/lib/iris/tests/results/nimrod/probability_fields.cml +++ b/lib/iris/tests/results/nimrod/probability_fields.cml @@ -17,7 +17,7 @@ - + + @@ -62,7 +62,7 @@ - + + @@ -111,7 +111,7 @@ - + @@ -131,7 +131,7 @@ - + @@ -158,7 +158,7 @@ - + @@ -186,7 +186,7 @@ - + @@ -210,7 +210,7 @@ - + @@ -237,7 +237,7 @@ - + @@ -271,7 +271,7 @@ - + @@ -291,7 +291,7 @@ - + @@ -315,7 +315,7 
@@ - + @@ -331,7 +331,7 @@ - + @@ -358,7 +358,7 @@ - + @@ -378,7 +378,7 @@ - + @@ -402,7 +402,7 @@ - + @@ -422,7 +422,7 @@ - + @@ -448,7 +448,7 @@ - + @@ -461,7 +461,7 @@ - + @@ -484,7 +484,7 @@ - + @@ -497,7 +497,7 @@ - + @@ -521,7 +521,7 @@ - + @@ -537,7 +537,7 @@ - + @@ -560,7 +560,7 @@ - + @@ -573,7 +573,7 @@ - + @@ -600,7 +600,7 @@ - + @@ -620,7 +620,7 @@ - + @@ -644,7 +644,7 @@ - + @@ -664,7 +664,7 @@ - + @@ -688,7 +688,7 @@ - + @@ -701,7 +701,7 @@ - + @@ -726,7 +726,7 @@ - + @@ -742,7 +742,7 @@ - + @@ -766,7 +766,7 @@ - + @@ -779,7 +779,7 @@ - + @@ -807,7 +807,7 @@ - + @@ -828,7 +828,7 @@ - + @@ -853,7 +853,7 @@ - + @@ -873,7 +873,7 @@ - + @@ -901,7 +901,7 @@ - + @@ -922,7 +922,7 @@ - + @@ -949,7 +949,7 @@ - + @@ -969,7 +969,7 @@ - + @@ -992,7 +992,7 @@ - + @@ -1005,7 +1005,7 @@ - + @@ -1029,7 +1029,7 @@ - + @@ -1042,7 +1042,7 @@ - + @@ -1067,7 +1067,7 @@ - + @@ -1083,7 +1083,7 @@ - + @@ -1110,7 +1110,7 @@ - + @@ -1130,7 +1130,7 @@ - + @@ -1153,7 +1153,7 @@ - + @@ -1166,7 +1166,7 @@ - + @@ -1190,7 +1190,7 @@ - + @@ -1213,7 +1213,7 @@ - + @@ -1236,7 +1236,7 @@ - + @@ -1256,7 +1256,7 @@ - + @@ -1280,7 +1280,7 @@ - + @@ -1303,7 +1303,7 @@ - + @@ -1326,7 +1326,7 @@ - + @@ -1346,7 +1346,7 @@ - + @@ -1369,7 +1369,7 @@ - + @@ -1389,7 +1389,7 @@ - + @@ -1417,7 +1417,7 @@ - + @@ -1444,7 +1444,7 @@ - + @@ -1468,7 +1468,7 @@ - + @@ -1495,7 +1495,7 @@ - + @@ -1518,7 +1518,7 @@ - + @@ -1538,7 +1538,7 @@ - + @@ -1562,7 +1562,7 @@ - + @@ -1585,7 +1585,7 @@ - + @@ -1608,7 +1608,7 @@ - + @@ -1628,7 +1628,7 @@ - + @@ -1656,7 +1656,7 @@ - + @@ -1683,7 +1683,7 @@ - + @@ -1707,7 +1707,7 @@ - + @@ -1734,7 +1734,7 @@ - + @@ -1757,7 +1757,7 @@ - + @@ -1777,7 +1777,7 @@ - + @@ -1800,7 +1800,7 @@ - + @@ -1820,7 +1820,7 @@ - + @@ -1844,7 +1844,7 @@ - + @@ -1867,7 +1867,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml index 31518dd321..a6ed9068ca 100644 --- 
a/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bmr04_precip_2km.cml @@ -19,7 +19,7 @@ [6300, 7200]]" id="b40ecfd3" points="[7200, 7200]" shape="(2,)" standard_name="forecast_period" units="Unit('second')" value_type="int32"/> - + @@ -36,7 +36,7 @@ + [1580193900, 1580194800]]" id="90a3bd1c" points="[1580194800, 1580194800]" shape="(2,)" standard_name="time" units="Unit('seconds since 1970-01-01 00:00:00', calendar='standard')" value_type="int64"/> diff --git a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml index 80cb1834c0..cf3232d548 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_bsr05_precip_accum60_2km.cml @@ -18,7 +18,7 @@ - + @@ -34,7 +34,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml index 68ec95555c..2aa1576fad 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud3d0060_2km.cml @@ -17,7 +17,7 @@ - + - + @@ -73,7 +73,7 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml index c6bc6f0419..3dc62cc8e9 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_cloud_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -96,7 +96,7 @@ - + @@ -112,7 +112,7 @@ - + @@ -136,7 +136,7 @@ - + @@ -152,7 +152,7 @@ - + @@ -175,7 +175,7 @@ - + - + @@ -226,7 +226,7 @@ - + @@ -242,7 +242,7 @@ - + @@ -268,7 +268,7 @@ - + @@ -284,7 +284,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml 
b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml index e6c99f9e50..9be61d489c 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convection_2km.cml @@ -24,7 +24,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -79,7 +79,7 @@ - + @@ -102,7 +102,7 @@ - + @@ -118,7 +118,7 @@ - + @@ -141,7 +141,7 @@ - + @@ -157,7 +157,7 @@ - + @@ -185,7 +185,7 @@ - + @@ -208,7 +208,7 @@ - + @@ -231,7 +231,7 @@ - + @@ -247,7 +247,7 @@ - + @@ -270,7 +270,7 @@ - + @@ -286,7 +286,7 @@ - + @@ -309,7 +309,7 @@ - + @@ -328,7 +328,7 @@ - + @@ -351,7 +351,7 @@ - + @@ -370,7 +370,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml index 2f52a93277..734beb7f47 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_convwind_2km.cml @@ -24,7 +24,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -70,7 +70,7 @@ - + @@ -86,7 +86,7 @@ - + @@ -116,7 +116,7 @@ - + @@ -132,7 +132,7 @@ - + @@ -162,7 +162,7 @@ - + @@ -178,7 +178,7 @@ - + @@ -208,7 +208,7 @@ - + @@ -224,7 +224,7 @@ - + @@ -247,7 +247,7 @@ - + @@ -270,7 +270,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml index b2b47715a2..56bfecc1b4 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_frzlev_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -95,7 +95,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -135,7 +135,7 @@ - + @@ -151,7 +151,7 @@ - + @@ -175,7 +175,7 @@ - + @@ -191,7 +191,7 @@ - + @@ -214,7 +214,7 @@ - + @@ -230,7 +230,7 @@ - + @@ -254,7 +254,7 @@ - + @@ -270,7 +270,7 @@ - + @@ -294,7 +294,7 @@ - + @@ -310,7 +310,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml 
b/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml index 4fb1371250..2eb83d787b 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_height_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml index 59776b5b74..4f4c986a39 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precip_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -57,7 +57,7 @@ - + @@ -73,7 +73,7 @@ - + @@ -97,7 +97,7 @@ - + @@ -113,7 +113,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml index 0fa98e3bb6..dd6102ea7f 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_precipaccum_2km.cml @@ -19,7 +19,7 @@ [6300, 7200]]" id="b40ecfd3" points="[7200, 7200]" shape="(2,)" standard_name="forecast_period" units="Unit('second')" value_type="int32"/> - + @@ -36,7 +36,7 @@ + [1580186700, 1580187600]]" id="90a3bd1c" points="[1580187600, 1580187600]" shape="(2,)" standard_name="time" units="Unit('seconds since 1970-01-01 00:00:00', calendar='standard')" value_type="int64"/> diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml index 3fdf646e70..be1e89a53d 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_preciptype_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -57,7 +57,7 @@ - + @@ -73,7 +73,7 @@ - + @@ -97,7 +97,7 @@ - + @@ -113,7 +113,7 @@ - + @@ -136,7 +136,7 @@ - + @@ -152,7 +152,7 @@ - + @@ -176,7 +176,7 @@ - + @@ -192,7 +192,7 @@ - + @@ -216,7 +216,7 @@ - + @@ -232,7 +232,7 @@ - + 
@@ -255,7 +255,7 @@ - + @@ -271,7 +271,7 @@ - + @@ -295,7 +295,7 @@ - + @@ -311,7 +311,7 @@ - + @@ -335,7 +335,7 @@ - + @@ -351,7 +351,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml index edb0862676..9a3ff88df8 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_pressure_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml index 38f076f232..00bc65f236 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiation_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -95,7 +95,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -134,7 +134,7 @@ - + @@ -150,7 +150,7 @@ - + @@ -173,7 +173,7 @@ - + @@ -189,7 +189,7 @@ - + @@ -212,7 +212,7 @@ - + @@ -228,7 +228,7 @@ - + @@ -251,7 +251,7 @@ - + @@ -267,7 +267,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml index 35bed38591..b2cf624214 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_radiationuv_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -95,7 +95,7 @@ - + @@ -111,7 +111,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml index 4411ff9dd5..aaed20394f 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_refl_2km.cml @@ -26,7 +26,7 @@ - + @@ -42,7 +42,7 @@ - + diff --git 
a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml index 8759dac5c7..3a25dc86fc 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity3d0060_2km.cml @@ -17,7 +17,7 @@ - + - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml index 9b7e7582d0..fa4ab30a58 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_relhumidity_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml index ce549ab3cd..918a0c7ae5 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_snow_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -57,7 +57,7 @@ - + @@ -73,7 +73,7 @@ - + @@ -96,7 +96,7 @@ - + @@ -112,7 +112,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml index 9385bfc9ae..3a6c3bf53c 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil3d0060_2km.cml @@ -24,7 +24,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -70,7 +70,7 @@ - + @@ -86,7 +86,7 @@ - + @@ -116,7 +116,7 @@ - + @@ -132,7 +132,7 @@ - + @@ -162,7 +162,7 @@ - + @@ -178,7 +178,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml index a76971a1ed..eab889a8af 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_soil_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - 
+ @@ -77,7 +77,7 @@ urban_roof, water]" shape="(10,)" standard_name="soil_type" units="Unit('unknown')" value_type="string"/> - + @@ -100,7 +100,7 @@ - + @@ -121,7 +121,7 @@ urban_roof, water]" shape="(10,)" standard_name="soil_type" units="Unit('unknown')" value_type="string"/> - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml index 09677ff57a..6ff6359046 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_temperature_2km.cml @@ -18,7 +18,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -65,7 +65,7 @@ - + @@ -88,7 +88,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -134,7 +134,7 @@ - + @@ -157,7 +157,7 @@ - + @@ -180,7 +180,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml index 8a0f50700c..037cb5c2b6 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_visibility_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -86,7 +86,7 @@ - + @@ -109,7 +109,7 @@ - + @@ -132,7 +132,7 @@ - + @@ -155,7 +155,7 @@ - + @@ -178,7 +178,7 @@ - + @@ -201,7 +201,7 @@ - + @@ -224,7 +224,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml index df2054e8af..5ca9920172 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_wind_2km.cml @@ -18,7 +18,7 @@ - + @@ -41,7 +41,7 @@ - + @@ -64,7 +64,7 @@ - + @@ -87,7 +87,7 @@ - + @@ -110,7 +110,7 @@ - + @@ -133,7 +133,7 @@ - + @@ -156,7 +156,7 @@ - + @@ -179,7 +179,7 @@ - + @@ -202,7 +202,7 @@ - + @@ -225,7 +225,7 @@ - + @@ -249,7 +249,7 @@ - + @@ -272,7 +272,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml 
b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml index 331ff59c74..91c40ea6d0 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv3d0015_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -86,7 +86,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml index aa14346e2f..3252dbf047 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek00_winduv_2km.cml @@ -17,7 +17,7 @@ - + @@ -40,7 +40,7 @@ - + @@ -63,7 +63,7 @@ - + @@ -86,7 +86,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml index 1756ac0205..d39fa0e367 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek01_cape_2km.cml @@ -17,7 +17,7 @@ - + @@ -33,7 +33,7 @@ - + @@ -56,7 +56,7 @@ - + @@ -72,7 +72,7 @@ - + @@ -98,7 +98,7 @@ - + @@ -114,7 +114,7 @@ - + @@ -137,7 +137,7 @@ - + @@ -153,7 +153,7 @@ - + @@ -176,7 +176,7 @@ - + @@ -192,7 +192,7 @@ - + @@ -215,7 +215,7 @@ - + @@ -231,7 +231,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml b/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml index f4710dd36d..4a5783ecb3 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_ek07_precip0540_accum180_18km.cml @@ -18,7 +18,7 @@ - + @@ -34,7 +34,7 @@ - + diff --git a/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml b/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml index 57756ccc1d..d2c7e72848 100644 --- a/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml +++ b/lib/iris/tests/results/nimrod/u1096_ng_umqv_fog_2km.cml @@ -17,7 +17,7 @@ - + @@ 
-37,7 +37,7 @@ - + - + diff --git a/lib/iris/tests/results/pandas/as_cube/series_datetime_gregorian.cml b/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml similarity index 86% rename from lib/iris/tests/results/pandas/as_cube/series_datetime_gregorian.cml rename to lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml index 7e2e6f4166..5cb621d5f3 100644 --- a/lib/iris/tests/results/pandas/as_cube/series_datetime_gregorian.cml +++ b/lib/iris/tests/results/pandas/as_cube/series_datetime_standard.cml @@ -4,7 +4,7 @@ + 300292.067778, 309797.084722]" shape="(5,)" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/pp_load_rules/global.cml b/lib/iris/tests/results/pp_load_rules/global.cml index 2df84a8606..a69e633e26 100644 --- a/lib/iris/tests/results/pp_load_rules/global.cml +++ b/lib/iris/tests/results/pp_load_rules/global.cml @@ -10,7 +10,7 @@ - + - + diff --git a/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml b/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml index 9e4b6d31f5..ecf51190c7 100644 --- a/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml +++ b/lib/iris/tests/results/pp_load_rules/lbproc_mean_max_min.cml @@ -11,7 +11,7 @@ - + @@ -31,7 +31,7 @@ - + @@ -48,7 +48,7 @@ - + @@ -68,7 +68,7 @@ - + @@ -89,7 +89,7 @@ - + @@ -109,7 +109,7 @@ - + @@ -130,7 +130,7 @@ - + @@ -150,7 +150,7 @@ - + @@ -171,7 +171,7 @@ - + @@ -190,7 +190,7 @@ 850.0, 925.0, 950.0, 1000.0]" shape="(28,)" units="Unit('hPa')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/pp_load_rules/rotated_uk.cml b/lib/iris/tests/results/pp_load_rules/rotated_uk.cml index 51b4682ebf..ece399df4e 100644 --- a/lib/iris/tests/results/pp_load_rules/rotated_uk.cml +++ b/lib/iris/tests/results/pp_load_rules/rotated_uk.cml @@ -11,7 +11,7 @@ - + @@ -35,7 +35,7 @@ - + diff --git a/lib/iris/tests/results/stock/realistic_4d.cml 
b/lib/iris/tests/results/stock/realistic_4d.cml index 88adbc43de..6640c54360 100644 --- a/lib/iris/tests/results/stock/realistic_4d.cml +++ b/lib/iris/tests/results/stock/realistic_4d.cml @@ -498,7 +498,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/system/supported_filetype_.grib2.cml b/lib/iris/tests/results/system/supported_filetype_.grib2.cml index f334b13863..5376af2fe1 100644 --- a/lib/iris/tests/results/system/supported_filetype_.grib2.cml +++ b/lib/iris/tests/results/system/supported_filetype_.grib2.cml @@ -9,7 +9,7 @@ - + - + diff --git a/lib/iris/tests/results/system/supported_filetype_.nc.cml b/lib/iris/tests/results/system/supported_filetype_.nc.cml index 595cd287ae..6ad0a3b176 100644 --- a/lib/iris/tests/results/system/supported_filetype_.nc.cml +++ b/lib/iris/tests/results/system/supported_filetype_.nc.cml @@ -36,7 +36,7 @@ - + diff --git a/lib/iris/tests/results/system/supported_filetype_.pp.cml b/lib/iris/tests/results/system/supported_filetype_.pp.cml index 838b9fad50..e457b2921e 100644 --- a/lib/iris/tests/results/system/supported_filetype_.pp.cml +++ b/lib/iris/tests/results/system/supported_filetype_.pp.cml @@ -6,7 +6,7 @@ - + - + diff --git a/lib/iris/tests/results/trajectory/constant_latitude.cml b/lib/iris/tests/results/trajectory/constant_latitude.cml index 7990edada5..38c208b825 100644 --- a/lib/iris/tests/results/trajectory/constant_latitude.cml +++ b/lib/iris/tests/results/trajectory/constant_latitude.cml @@ -1,6 +1,6 @@ - + @@ -12,17 +12,17 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> + -0.1188, -0.1188]" shape="(100,)" standard_name="grid_latitude" units="Unit('degrees')" value_type="float64"> - + @@ -90,10 +90,10 @@ + 347921.666667, 
347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/trajectory/hybrid_height.cml b/lib/iris/tests/results/trajectory/hybrid_height.cml index 63de9366dc..28e821b900 100644 --- a/lib/iris/tests/results/trajectory/hybrid_height.cml +++ b/lib/iris/tests/results/trajectory/hybrid_height.cml @@ -54,13 +54,13 @@ [124, 125, 126, 127, 128, 129]]" shape="(5, 6)" units="Unit('m')" value_type="int64"/> - + - + - + - + diff --git a/lib/iris/tests/results/trajectory/single_point.cml b/lib/iris/tests/results/trajectory/single_point.cml index 393ad5e335..64c71e0394 100644 --- a/lib/iris/tests/results/trajectory/single_point.cml +++ b/lib/iris/tests/results/trajectory/single_point.cml @@ -1,6 +1,6 @@ - + @@ -12,15 +12,15 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="forecast_reference_time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + - + @@ -88,10 +88,10 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> - + diff --git a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml index 750d597493..7b5bbfc086 100644 --- a/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml +++ b/lib/iris/tests/results/trajectory/tri_polar_latitude_slice.cml @@ -1,6 +1,6 @@ - + @@ -144,6 +144,6 @@ - + diff --git a/lib/iris/tests/results/trajectory/zigzag.cml b/lib/iris/tests/results/trajectory/zigzag.cml index 250500786c..8a578c4ab4 100644 --- a/lib/iris/tests/results/trajectory/zigzag.cml +++ b/lib/iris/tests/results/trajectory/zigzag.cml @@ -1,6 +1,6 @@ - + @@ -11,22 +11,29 @@ - + - + - + @@ -48,10 +55,10 @@ - + - + diff --git 
a/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml b/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml index e2a1ef2ea6..2592307cda 100644 --- a/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml +++ b/lib/iris/tests/results/unit/analysis/cartography/project/TestAll/cube.cml @@ -79,7 +79,7 @@ [0.996162, 0.993097]]" id="a5c170db" long_name="sigma" points="[0.999424, 0.997504, 0.99482]" shape="(3,)" units="Unit('1')" value_type="float32"/> - + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_all_dims.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_last_dims.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ 
b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_middle_dim.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/collapse_zeroth_dim.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/slice.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml new file mode 100644 index 0000000000..1e74c9bc9c --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__derived_coords/TestBroadcastingDerived/transposed.cml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml 
b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml new file mode 100644 index 0000000000..e318abad67 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_all_dims.cml @@ -0,0 +1,112 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml new file mode 100644 index 0000000000..e318abad67 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_last_dims.cml @@ -0,0 +1,112 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_middle_dim.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/collapse_zeroth_dim.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/slice.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMesh/transposed.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml new file mode 100644 index 0000000000..e318abad67 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_all_dims.cml @@ -0,0 +1,112 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml new file mode 100644 index 0000000000..e318abad67 --- /dev/null +++ 
b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_last_dims.cml @@ -0,0 +1,112 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_middle_dim.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/collapse_zeroth_dim.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/slice.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml new file mode 100644 index 0000000000..82c79a7577 --- /dev/null +++ b/lib/iris/tests/results/unit/analysis/maths/_arith__meshcoords/TestBroadcastingWithMeshAndDerived/transposed.cml @@ -0,0 +1,106 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml index bea6795b38..8467544d44 100644 --- 
a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" 
standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git 
a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml index d4a90d37ac..86d7855b1b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml index 7ae36e51c3..73d6073a4b 100644 --- 
a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml +++ 
b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml index 7ae36e51c3..73d6073a4b 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 
00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git 
a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml index bea6795b38..8467544d44 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml @@ -110,7 +110,7 @@ + 347921.666667, 347921.833333, 347922.0]" shape="(6,)" standard_name="time" units="Unit('hours since 1970-01-01 00:00:00', calendar='standard')" value_type="float64"/> diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content/mesh_result.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content/mesh_result.txt new file mode 100644 index 0000000000..e20527cb49 --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content/mesh_result.txt @@ -0,0 +1,24 @@ + + Mesh coordinates + + + + latitude + x + + + longitude + x + + + Mesh + + + + name + unknown + + + location + face + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/embedded_newlines_string_attribute.txt 
b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/embedded_newlines_string_attribute.txt new file mode 100644 index 0000000000..e886d25e60 --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/embedded_newlines_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + newlines-string + 'string\nwith\nnewlines' + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/long_string_attribute.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/long_string_attribute.txt new file mode 100644 index 0000000000..e972e1d6df --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/long_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + long-string + 'long string.. long string.. long string.. long string.. long string.. long ...' 
+ \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/multi_string_attribute.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/multi_string_attribute.txt new file mode 100644 index 0000000000..1736a083d6 --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/multi_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + multi-string + ['vector', 'of', 'strings'] + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/simple_string_attribute.txt b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/simple_string_attribute.txt new file mode 100644 index 0000000000..8726d1f6ea --- /dev/null +++ b/lib/iris/tests/results/unit/experimental/representation/CubeRepresentation/_make_content__string_attrs/simple_string_attribute.txt @@ -0,0 +1,8 @@ + + Attributes + + + + single-string + 'single string' + \ No newline at end of file diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl index 1559cd2bff..ea9a1c283b 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator.cdl @@ -13,7 +13,7 @@ variables: mercator:longitude_of_projection_origin = 49. ; mercator:false_easting = 0. ; mercator:false_northing = 0. ; - mercator:scale_factor_at_projection_origin = 1. ; + mercator:standard_parallel = 0. 
; int64 projection_y_coordinate(projection_y_coordinate) ; projection_y_coordinate:axis = "Y" ; projection_y_coordinate:units = "m" ; diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl index 8db60ca952..73b692ed63 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/mercator_no_ellipsoid.cdl @@ -10,7 +10,7 @@ variables: mercator:longitude_of_projection_origin = 49. ; mercator:false_easting = 0. ; mercator:false_northing = 0. ; - mercator:scale_factor_at_projection_origin = 1. ; + mercator:standard_parallel = 0. ; int64 projection_y_coordinate(projection_y_coordinate) ; projection_y_coordinate:axis = "Y" ; projection_y_coordinate:units = "m" ; diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl new file mode 100644 index 0000000000..a11dc60c30 --- /dev/null +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/stereographic_scale_factor.cdl @@ -0,0 +1,23 @@ +dimensions: + projection_x_coordinate = 4 ; + projection_y_coordinate = 3 ; +variables: + int64 air_pressure_anomaly(projection_y_coordinate, projection_x_coordinate) ; + air_pressure_anomaly:standard_name = "air_pressure_anomaly" ; + air_pressure_anomaly:grid_mapping = "stereographic" ; + int stereographic ; + stereographic:grid_mapping_name = "stereographic" ; + stereographic:longitude_of_projection_origin = 20. ; + stereographic:latitude_of_projection_origin = -10. ; + stereographic:false_easting = 500000. ; + stereographic:false_northing = -200000. 
; + stereographic:scale_factor_at_projection_origin = 1.3 ; + int64 projection_y_coordinate(projection_y_coordinate) ; + projection_y_coordinate:axis = "Y" ; + projection_y_coordinate:units = "m" ; + projection_y_coordinate:standard_name = "projection_y_coordinate" ; + int64 projection_x_coordinate(projection_x_coordinate) ; + projection_x_coordinate:axis = "X" ; + projection_x_coordinate:units = "m" ; + projection_x_coordinate:standard_name = "projection_x_coordinate" ; +} diff --git a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl index 3c1033c17e..2159123553 100644 --- a/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl +++ b/lib/iris/tests/results/unit/fileformats/netcdf/Saver/write/with_climatology.cdl @@ -13,7 +13,7 @@ variables: time:climatology = "time_climatology" ; time:units = "days since 1970-01-01 00:00:00-00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_climatology(time, bnds) ; double latitude(latitude) ; latitude:axis = "Y" ; diff --git a/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml b/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml new file mode 100644 index 0000000000..52aae1eb5e --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/TestCubeMask/mask_cube_2d_create_new_dim.cml @@ -0,0 +1,23 @@ + + + + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml new file mode 100644 index 0000000000..abaebd51d6 --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_full2d_global.cml @@ -0,0 +1,123 @@ + + + + + + + + + + + + + + + diff --git 
a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml new file mode 100644 index 0000000000..bf8902bcb2 --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_1d.cml @@ -0,0 +1,22 @@ + + + + + + + + + + + + diff --git a/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml new file mode 100644 index 0000000000..e1760775f9 --- /dev/null +++ b/lib/iris/tests/results/unit/util/mask_cube/original_cube_simple_2d.cml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml index 1f9dfb0a14..e7c799f397 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml index 06c192f8a4..66cbc7206b 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml index 9b654f6c6e..af298945f0 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cml @@ -12,7 +12,7 @@ - + @@ -41,7 +41,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml index d5d05f15fd..44999e85b7 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.000128.1990.12.01.00.00.b.cml @@ -10,7 +10,7 @@ - + @@ -39,7 +39,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml index 1f4d8a4b2c..990fa0d7fe 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.004224.1990.12.01.00.00.b.cml @@ -10,7 +10,7 @@ - + @@ -39,7 +39,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml index 359cba997f..43789498c1 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml +++ 
b/lib/iris/tests/results/usecases/pp_to_cf_conversion/from_pp/000003000000.03.236.008320.1990.12.01.00.00.b.cml @@ -11,7 +11,7 @@ - + @@ -40,7 +40,7 @@ - + diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl index 429da0807b..ddbbee5d34 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.000128.1990.12.01.00.00.b_0.cdl @@ -31,7 +31,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -40,7 +40,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl index 429da0807b..ddbbee5d34 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.004224.1990.12.01.00.00.b_0.cdl @@ -31,7 +31,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = 
"forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -40,7 +40,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl index f1c94dc834..cb026fd7ae 100644 --- a/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl +++ b/lib/iris/tests/results/usecases/pp_to_cf_conversion/to_netcdf/000003000000.03.236.008320.1990.12.01.00.00.b_0.cdl @@ -31,7 +31,7 @@ variables: double forecast_reference_time ; forecast_reference_time:units = "hours since 1970-01-01 00:00:00" ; forecast_reference_time:standard_name = "forecast_reference_time" ; - forecast_reference_time:calendar = "gregorian" ; + forecast_reference_time:calendar = "standard" ; double height ; height:units = "m" ; height:standard_name = "height" ; @@ -40,7 +40,7 @@ variables: time:bounds = "time_bnds" ; time:units = "hours since 1970-01-01 00:00:00" ; time:standard_name = "time" ; - time:calendar = "gregorian" ; + time:calendar = "standard" ; double time_bnds(bnds) ; // global attributes: diff --git a/lib/iris/tests/runner/_runner.py b/lib/iris/tests/runner/_runner.py index 3ef961d000..7f9439d4b6 100644 --- a/lib/iris/tests/runner/_runner.py +++ b/lib/iris/tests/runner/_runner.py @@ -10,17 +10,16 @@ # Because this file is imported by setup.py, there may be additional runtime # imports later in the file. 
-import multiprocessing import os import sys # NOTE: Do not inherit from object as distutils does not like it. class TestRunner: - """Run the Iris tests under nose and multiprocessor for performance""" + """Run the Iris tests under pytest and pytest-xdist for performance""" description = ( - "Run tests under nose and multiprocessor for performance. " + "Run tests under pytest and pytest-xdist for performance. " "Default behaviour is to run all non-gallery tests. " "Specifying one or more test flags will run *only* those " "tests." @@ -36,18 +35,13 @@ class TestRunner: ("system-tests", "s", "Run the limited subset of system tests."), ("gallery-tests", "e", "Run the gallery code tests."), ("default-tests", "d", "Run the default tests."), - ( - "coding-tests", - "c", - "Run the coding standards tests. (These are a " - "subset of the default tests.)", - ), ( "num-processors=", "p", "The number of processors used for running " "the tests.", ), ("create-missing", "m", "Create missing test result files."), + ("coverage", "c", "Enable coverage testing"), ] boolean_options = [ "no-data", @@ -55,8 +49,8 @@ class TestRunner: "stop", "gallery-tests", "default-tests", - "coding-tests", "create-missing", + "coverage", ] def initialize_options(self): @@ -65,13 +59,13 @@ def initialize_options(self): self.system_tests = False self.gallery_tests = False self.default_tests = False - self.coding_tests = False self.num_processors = None self.create_missing = False + self.coverage = False def finalize_options(self): - # These enviroment variables will be propagated to all the - # processes that nose.run creates. + # These environment variables will be propagated to all the + # processes that pytest-xdist creates. 
if self.no_data: print("Running tests in no-data mode...") import iris.config @@ -85,8 +79,6 @@ def finalize_options(self): tests.append("system") if self.default_tests: tests.append("default") - if self.coding_tests: - tests.append("coding") if self.gallery_tests: tests.append("gallery") if not tests: @@ -95,25 +87,21 @@ def finalize_options(self): if self.stop: print("Stopping tests after the first error or failure") if self.num_processors is None: - # Choose a magic number that works reasonably well for the default - # number of processes. - self.num_processors = (multiprocessing.cpu_count() + 1) // 4 + 1 + self.num_processors = "auto" else: self.num_processors = int(self.num_processors) def run(self): - import nose + import pytest if hasattr(self, "distribution") and self.distribution.tests_require: self.distribution.fetch_build_eggs(self.distribution.tests_require) tests = [] if self.system_tests: - tests.append("iris.tests.system_test") + tests.append("lib/iris/tests/system_test.py") if self.default_tests: - tests.append("iris.tests") - if self.coding_tests: - tests.append("iris.tests.test_coding_standards") + tests.append("lib/iris/tests") if self.gallery_tests: import iris.config @@ -129,35 +117,27 @@ def run(self): "WARNING: Gallery path %s does not exist." % (gallery_path) ) if not tests: - tests.append("iris.tests") - - regexp_pat = r"--match=^([Tt]est(?![Mm]ixin)|[Ss]ystem)" - - n_processors = max(self.num_processors, 1) + tests.append("lib/iris/tests") args = [ - "", None, - "--processes=%s" % n_processors, - "--verbosity=2", - regexp_pat, - "--process-timeout=180", + f"-n={self.num_processors}", ] if self.stop: - args.append("--stop") + args.append("-x") + + if self.coverage: + args.extend(["--cov=lib/iris", "--cov-report=xml"]) result = True for test in tests: - args[1] = test + args[0] = test print() print( - "Running test discovery on %s with %s processors." 
- % (test, n_processors) + f"Running test discovery on {test} with {self.num_processors} processors." ) - # run the tests at module level i.e. my_module.tests - # - test must start with test/Test and must not contain the - # word Mixin. - result &= nose.run(argv=args) + retcode = pytest.main(args=args) + result &= retcode.value == 0 if result is False: exit(1) diff --git a/lib/iris/tests/stock/__init__.py b/lib/iris/tests/stock/__init__.py index a46a5510f6..632dc95e20 100644 --- a/lib/iris/tests/stock/__init__.py +++ b/lib/iris/tests/stock/__init__.py @@ -20,7 +20,13 @@ from iris.coord_systems import GeogCS, RotatedGeogCS import iris.coords import iris.coords as icoords -from iris.coords import AuxCoord, CellMethod, DimCoord +from iris.coords import ( + AncillaryVariable, + AuxCoord, + CellMeasure, + CellMethod, + DimCoord, +) from iris.cube import Cube from ._stock_2d_latlons import ( # noqa @@ -99,7 +105,12 @@ def simple_1d(with_bounds=True): bounds = np.column_stack( [np.arange(11, dtype=np.int32), np.arange(11, dtype=np.int32) + 1] ) - coord = DimCoord(points, long_name="foo", units="1", bounds=bounds) + coord = DimCoord( + points, + long_name="foo", + units="1", + bounds=bounds if with_bounds else None, + ) cube.add_dim_coord(coord, 0) return cube @@ -399,6 +410,35 @@ def simple_2d_w_multidim_and_scalars(): return cube +def simple_2d_w_cell_measure_ancil_var(): + """ + Returns a two dimensional cube with a CellMeasure and AncillaryVariable. 
+ + >>> print(simple_2d_w_cell_measure_ancil_var()) + thingness / (1) (bar: 3; foo: 4) + Dimension coordinates: + bar x - + foo - x + Cell measures: + cell_area x x + Ancillary variables: + quality_flag x - + Scalar coordinates: + wibble 1 + + """ + cube = simple_2d() + cube.add_aux_coord(AuxCoord([1], long_name="wibble"), None) + cube.add_ancillary_variable( + AncillaryVariable([1, 2, 3], standard_name="quality_flag"), 0 + ) + cube.add_cell_measure( + CellMeasure(np.arange(12).reshape(3, 4), standard_name="cell_area"), + (0, 1), + ) + return cube + + def hybrid_height(): """ Returns a two-dimensional (Z, X), hybrid-height cube. diff --git a/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl b/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl index b135546f2d..1e5522854e 100644 --- a/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl +++ b/lib/iris/tests/stock/file_headers/xios_2D_face_half_levels.cdl @@ -39,7 +39,7 @@ variables: double time_instant(time_counter) ; time_instant:standard_name = "time" ; time_instant:long_name = "Time axis" ; - time_instant:calendar = "gregorian" ; + time_instant:calendar = "standard" ; time_instant:units = "seconds since 2016-01-01 15:00:00" ; time_instant:time_origin = "2016-01-01 15:00:00" ; time_instant:bounds = "time_instant_bounds" ; diff --git a/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl b/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl index e4f32de7b7..9159bf6e46 100644 --- a/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl +++ b/lib/iris/tests/stock/file_headers/xios_3D_face_full_levels.cdl @@ -42,7 +42,7 @@ variables: double time_instant(time_counter) ; time_instant:standard_name = "time" ; time_instant:long_name = "Time axis" ; - time_instant:calendar = "gregorian" ; + time_instant:calendar = "standard" ; time_instant:units = "seconds since 2016-01-01 15:00:00" ; time_instant:time_origin = "2016-01-01 15:00:00" ; time_instant:bounds = 
"time_instant_bounds" ; diff --git a/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl b/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl index a193dbe451..f79ae0bdaf 100644 --- a/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl +++ b/lib/iris/tests/stock/file_headers/xios_3D_face_half_levels.cdl @@ -42,7 +42,7 @@ variables: double time_instant(time_counter) ; time_instant:standard_name = "time" ; time_instant:long_name = "Time axis" ; - time_instant:calendar = "gregorian" ; + time_instant:calendar = "standard" ; time_instant:units = "seconds since 2016-01-01 15:00:00" ; time_instant:time_origin = "2016-01-01 15:00:00" ; time_instant:bounds = "time_instant_bounds" ; diff --git a/lib/iris/tests/stock/mesh.py b/lib/iris/tests/stock/mesh.py index ca15ee1c97..da226a3790 100644 --- a/lib/iris/tests/stock/mesh.py +++ b/lib/iris/tests/stock/mesh.py @@ -61,7 +61,11 @@ def sample_mesh(n_nodes=None, n_faces=None, n_edges=None, lazy_values=False): units="degrees_east", long_name="long-name", var_name="var-name", - attributes={"a": 1, "b": "c"}, + attributes={ + # N.B. cast this so that a save-load roundtrip preserves it + "a": np.int64(1), + "b": "c", + }, ) node_y = AuxCoord(1200 + arr.arange(n_nodes), standard_name="latitude") diff --git a/lib/iris/tests/stock/netcdf.py b/lib/iris/tests/stock/netcdf.py index 030e90a0f3..bf93f01f6b 100644 --- a/lib/iris/tests/stock/netcdf.py +++ b/lib/iris/tests/stock/netcdf.py @@ -8,10 +8,60 @@ from pathlib import Path from string import Template import subprocess +from typing import Optional -import netCDF4 +import dask +from dask import array as da import numpy as np +from iris.fileformats.netcdf import _thread_safe_nc +from iris.tests import env_bin_path + +NCGEN_PATHSTR = str(env_bin_path("ncgen")) + + +def ncgen_from_cdl( + cdl_str: Optional[str], cdl_path: Optional[str], nc_path: str +): + """ + Generate a test netcdf file from cdl. + + Source is CDL in either a string or a file. 
+ If given a string, will either save a CDL file, or pass text directly. + A netcdf output file is always created, at the given path. + + Parameters + ---------- + cdl_str : str or None + String containing a CDL description of a netcdf file. + If None, 'cdl_path' must be an existing file. + cdl_path : str or None + Path of temporary text file where cdl_str is written. + If None, 'cdl_str' must be given, and is piped direct to ncgen. + nc_path : str + Path of temporary netcdf file where converted result is put. + + Notes + ----- + For legacy reasons, the path args are 'str's not 'Path's. + + """ + if cdl_str and cdl_path: + with open(cdl_path, "w") as f_out: + f_out.write(cdl_str) + if cdl_path: + # Create netcdf from stored CDL file. + call_args = [NCGEN_PATHSTR, "-k3", "-o", nc_path, cdl_path] + call_kwargs = {} + else: + # No CDL file : pipe 'cdl_str' directly into the ncgen program. + if not cdl_str: + raise ValueError("Must provide either 'cdl_str' or 'cdl_path'.") + call_args = [NCGEN_PATHSTR, "-k3", "-o", nc_path] + call_kwargs = dict(input=cdl_str, encoding="ascii") + + subprocess.run(call_args, check=True, **call_kwargs) + def _file_from_cdl_template( temp_file_dir, dataset_name, dataset_type, template_subs @@ -37,12 +87,7 @@ def _file_from_cdl_template( # Spawn an "ncgen" command to create an actual NetCDF file from the # CDL string. 
- subprocess.run( - ["ncgen", "-o" + str(nc_write_path)], - input=cdl, - encoding="ascii", - check=True, - ) + ncgen_from_cdl(cdl_str=cdl, cdl_path=None, nc_path=nc_write_path) return nc_write_path @@ -55,10 +100,10 @@ def _add_standard_data(nc_path, unlimited_dim_size=0): """ - ds = netCDF4.Dataset(nc_path, "r+") + ds = _thread_safe_nc.DatasetWrapper(nc_path, "r+") unlimited_dim_names = [ - dim for dim in ds.dimensions if ds.dimensions[dim].size == 0 + dim for dim in ds.dimensions if ds.dimensions[dim].isunlimited() ] # Data addition dependent on this assumption: assert len(unlimited_dim_names) < 2 @@ -79,11 +124,13 @@ def _add_standard_data(nc_path, unlimited_dim_size=0): # so it can be a dim-coord. data_size = np.prod(shape) data = np.arange(1, data_size + 1, dtype=var.dtype).reshape(shape) + var[:] = data else: # Fill with a plain value. But avoid zeros, so we can simulate # valid ugrid connectivities even when start_index=1. - data = np.ones(shape, dtype=var.dtype) # Do not use zero - var[:] = data + with dask.config.set({"array.chunk-size": "2048MiB"}): + data = da.ones(shape, dtype=var.dtype) # Do not use zero + da.store(data, var) ds.close() diff --git a/lib/iris/tests/system_test.py b/lib/iris/tests/system_test.py index 36573362dd..745163b485 100644 --- a/lib/iris/tests/system_test.py +++ b/lib/iris/tests/system_test.py @@ -21,7 +21,7 @@ import iris -class SystemInitialTest(tests.IrisTest): +class TestSystemInitial(tests.IrisTest): def test_supported_filetypes(self): nx, ny = 60, 60 data = np.arange(nx * ny, dtype=">f4").reshape(nx, ny) @@ -51,7 +51,7 @@ def horiz_cs(): ) ) hours_since_epoch = cf_units.Unit( - "hours since epoch", cf_units.CALENDAR_GREGORIAN + "hours since epoch", cf_units.CALENDAR_STANDARD ) cm.add_aux_coord( iris.coords.AuxCoord( diff --git a/lib/iris/tests/test_aggregate_by.py b/lib/iris/tests/test_aggregate_by.py index 4e479f40f7..90bf0e5d4e 100644 --- a/lib/iris/tests/test_aggregate_by.py +++ b/lib/iris/tests/test_aggregate_by.py @@ 
-138,6 +138,101 @@ def setUp(self): self.cube_multi.add_dim_coord(coord_lon.copy(), 1) self.cube_multi.add_dim_coord(coord_lat.copy(), 2) + # + # masked cubes to test handling of masks + # + mask_single = np.vstack( + ( + np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( + 26, axis=0 + ), + np.zeros([10, 3, 3]), + ) + ) + self.cube_single_masked = self.cube_single.copy( + ma.array(self.cube_single.data, mask=mask_single) + ) + mask_multi = np.vstack( + ( + np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( + 16, axis=0 + ), + np.ones([2, 3, 3]), + np.zeros([2, 3, 3]), + ) + ) + self.cube_multi_masked = self.cube_multi.copy( + ma.array(self.cube_multi.data, mask=mask_multi) + ) + + # + # simple cubes for further tests + # + data_easy = np.array( + [[6, 10, 12, 18], [8, 12, 14, 20], [18, 12, 10, 6]], + dtype=np.float32, + ) + self.cube_easy = iris.cube.Cube( + data_easy, long_name="temperature", units="kelvin" + ) + + llcs = iris.coord_systems.GeogCS(6371229) + self.cube_easy.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 0, 10], dtype=np.float32), + "latitude", + units="degrees", + coord_system=llcs, + ), + 0, + ) + self.cube_easy.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 0, 10, 10], dtype=np.float32), + "longitude", + units="degrees", + coord_system=llcs, + ), + 1, + ) + + data_easy_weighted = np.array( + [[3, 5, 7, 9], [0, 2, 4, 6]], + dtype=np.float32, + ) + self.cube_easy_weighted = iris.cube.Cube( + data_easy_weighted, long_name="temperature", units="kelvin" + ) + llcs = iris.coord_systems.GeogCS(6371229) + self.cube_easy_weighted.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 10], dtype=np.float32), + "latitude", + units="degrees", + coord_system=llcs, + ), + 0, + ) + self.cube_easy_weighted.add_aux_coord( + iris.coords.AuxCoord( + np.array([0, 0, 10, 10], dtype=np.float32), + "longitude", + units="degrees", + coord_system=llcs, + ), + 1, + ) + + # + # weights for weighted aggregate-by + # + self.weights_single = 
np.ones_like(z_points, dtype=np.float64) + self.weights_single[2] = 0.0 + self.weights_single[4:6] = 0.0 + + self.weights_multi = np.ones_like(z1_points, dtype=np.float64) + self.weights_multi[1:4] = 0.0 + # # expected data results # @@ -166,6 +261,31 @@ def setUp(self): ], dtype=np.float64, ) + self.weighted_single_expected = np.array( + [ + [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], + [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]], + [[3.0, 6.0, 9.0], [12.0, 15.0, 18.0], [21.0, 24.0, 27.0]], + [[7.5, 15.0, 22.5], [30.0, 37.5, 45.0], [52.5, 60.0, 67.5]], + [[12.0, 24.0, 36.0], [48.0, 60.0, 72.0], [84.0, 96.0, 108.0]], + [ + [17.5, 35.0, 52.5], + [70.0, 87.5, 105.0], + [122.5, 140.0, 157.5], + ], + [ + [24.0, 48.0, 72.0], + [96.0, 120.0, 144.0], + [168.0, 192.0, 216.0], + ], + [ + [31.5, 63.0, 94.5], + [126.0, 157.5, 189.0], + [220.5, 252.0, 283.5], + ], + ], + dtype=np.float64, + ) row1 = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]] row2 = [ @@ -229,6 +349,28 @@ def setUp(self): ], dtype=np.float64, ) + self.weighted_multi_expected = np.array( + [ + [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], + [[4.0, 8.0, 12.0], [16.0, 20.0, 24.0], [28.0, 32.0, 36.0]], + [[14.0, 28.0, 42.0], [56.0, 70.0, 84.0], [98.0, 112.0, 126.0]], + [[7.0, 14.0, 21.0], [28.0, 35.0, 42.0], [49.0, 56.0, 63.0]], + [[9.0, 18.0, 27.0], [36.0, 45.0, 54.0], [63.0, 72.0, 81.0]], + [[10.5, 21.0, 31.5], [42.0, 52.5, 63.0], [73.5, 84.0, 94.5]], + [[13.0, 26.0, 39.0], [52.0, 65.0, 78.0], [91.0, 104.0, 117.0]], + [ + [15.0, 30.0, 45.0], + [60.0, 75.0, 90.0], + [105.0, 120.0, 135.0], + ], + [ + [16.5, 33.0, 49.5], + [66.0, 82.5, 99.0], + [115.5, 132.0, 148.5], + ], + ], + dtype=np.float64, + ) def test_single(self): # mean group-by with single coordinate name. @@ -271,6 +413,34 @@ def test_single(self): aggregateby_cube.data, self.single_rms_expected ) + def test_weighted_single(self): + # weighted mean group-by with single coordinate name. 
+ aggregateby_cube = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single.cml"), + ) + + # weighted mean group-by with single coordinate. + aggregateby_cube = self.cube_single.aggregated_by( + self.coord_z_single, + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_single_expected, + ) + def test_single_shared(self): z2_points = np.arange(36, dtype=np.int32) coord_z2 = iris.coords.AuxCoord( @@ -300,6 +470,38 @@ def test_single_shared(self): aggregateby_cube.data, self.single_expected ) + def test_weighted_single_shared(self): + z2_points = np.arange(36, dtype=np.int32) + coord_z2 = iris.coords.AuxCoord( + z2_points, long_name="wibble", units="1" + ) + self.cube_single.add_aux_coord(coord_z2, 0) + + # weighted group-by with single coordinate name on shared axis. + aggregateby_cube = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single_shared.cml"), + ) + + # weighted group-by with single coordinate on shared axis. 
+ aggregateby_cube = self.cube_single.aggregated_by( + self.coord_z_single, + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single_shared.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, self.weighted_single_expected + ) + def test_single_shared_circular(self): points = np.arange(36) * 10.0 circ_coord = iris.coords.DimCoord( @@ -329,6 +531,48 @@ def test_single_shared_circular(self): aggregateby_cube.data, self.single_expected ) + def test_weighted_single_shared_circular(self): + points = np.arange(36) * 10.0 + circ_coord = iris.coords.DimCoord( + points, long_name="circ_height", units="degrees", circular=True + ) + self.cube_single.add_aux_coord(circ_coord, 0) + + # weighted group-by with single coordinate name on shared axis. + aggregateby_cube = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ( + "analysis", + "aggregated_by", + "weighted_single_shared_circular.cml", + ), + ) + + # weighted group-by with single coordinate on shared axis. + coord = self.cube_single.coords("height") + aggregateby_cube = self.cube_single.aggregated_by( + coord, + iris.analysis.MEAN, + weights=self.weights_single, + ) + self.assertCML( + aggregateby_cube, + ( + "analysis", + "aggregated_by", + "weighted_single_shared_circular.cml", + ), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_single_expected, + ) + def test_multi(self): # group-by with multiple coordinate names. aggregateby_cube = self.cube_multi.aggregated_by( @@ -366,6 +610,55 @@ def test_multi(self): aggregateby_cube.data, self.multi_expected ) + def test_weighted_multi(self): + # weighted group-by with multiple coordinate names. 
+ aggregateby_cube = self.cube_multi.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + # weighted group-by with multiple coordinate names (different order). + aggregateby_cube = self.cube_multi.aggregated_by( + ["level", "height"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + # weighted group-by with multiple coordinates. + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z1_multi, self.coord_z2_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + # weighted group-by with multiple coordinates (different order). + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z2_multi, self.coord_z1_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_multi_expected, + ) + def test_multi_shared(self): z3_points = np.arange(20, dtype=np.int32) coord_z3 = iris.coords.AuxCoord( @@ -416,50 +709,91 @@ def test_multi_shared(self): aggregateby_cube.data, self.multi_expected ) - def test_easy(self): - data = np.array( - [[6, 10, 12, 18], [8, 12, 14, 20], [18, 12, 10, 6]], - dtype=np.float32, + def test_weighted_multi_shared(self): + z3_points = np.arange(20, dtype=np.int32) + coord_z3 = iris.coords.AuxCoord( + z3_points, long_name="sigma", units="1" + ) + z4_points = np.arange(19, -1, -1, dtype=np.int32) + coord_z4 = iris.coords.AuxCoord( + z4_points, long_name="gamma", units="1" ) - cube = iris.cube.Cube(data, long_name="temperature", units="kelvin") - llcs = iris.coord_systems.GeogCS(6371229) - cube.add_aux_coord( 
- iris.coords.AuxCoord( - np.array([0, 0, 10], dtype=np.float32), - "latitude", - units="degrees", - coord_system=llcs, - ), - 0, + self.cube_multi.add_aux_coord(coord_z3, 0) + self.cube_multi.add_aux_coord(coord_z4, 0) + + # weighted group-by with multiple coordinate names on shared axis. + aggregateby_cube = self.cube_multi.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + weights=self.weights_multi, ) - cube.add_aux_coord( - iris.coords.AuxCoord( - np.array([0, 0, 10, 10], dtype=np.float32), - "longitude", - units="degrees", - coord_system=llcs, - ), - 1, + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), ) + # weighted group-by with multiple coordinate names on shared axis + # (different order). + aggregateby_cube = self.cube_multi.aggregated_by( + ["level", "height"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), + ) + + # weighted group-by with multiple coordinates on shared axis. + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z1_multi, self.coord_z2_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), + ) + + # weighted group-by with multiple coordinates on shared axis (different + # order). + aggregateby_cube = self.cube_multi.aggregated_by( + [self.coord_z2_multi, self.coord_z1_multi], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_shared.cml"), + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + self.weighted_multi_expected, + ) + + def test_easy(self): # # Easy mean aggregate test by each coordinate. 
# - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.MEAN) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.MEAN + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( [[8.0, 15.0], [10.0, 17.0], [15.0, 8.0]], dtype=np.float32 ), ) + self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "easy.cml"), - checksum=False, ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.MEAN) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.MEAN + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -471,7 +805,9 @@ def test_easy(self): # # Easy max aggregate test by each coordinate. # - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.MAX) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.MAX + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -479,7 +815,9 @@ def test_easy(self): ), ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.MAX) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.MAX + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -491,7 +829,9 @@ def test_easy(self): # # Easy sum aggregate test by each coordinate. # - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.SUM) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.SUM + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -499,7 +839,9 @@ def test_easy(self): ), ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.SUM) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.SUM + ) np.testing.assert_almost_equal( aggregateby_cube.data, np.array( @@ -511,7 +853,7 @@ def test_easy(self): # # Easy percentile aggregate test by each coordinate. 
# - aggregateby_cube = cube.aggregated_by( + aggregateby_cube = self.cube_easy.aggregated_by( "longitude", iris.analysis.PERCENTILE, percent=25 ) np.testing.assert_almost_equal( @@ -521,7 +863,7 @@ def test_easy(self): ), ) - aggregateby_cube = cube.aggregated_by( + aggregateby_cube = self.cube_easy.aggregated_by( "latitude", iris.analysis.PERCENTILE, percent=25 ) np.testing.assert_almost_equal( @@ -535,7 +877,9 @@ def test_easy(self): # # Easy root mean square aggregate test by each coordinate. # - aggregateby_cube = cube.aggregated_by("longitude", iris.analysis.RMS) + aggregateby_cube = self.cube_easy.aggregated_by( + "longitude", iris.analysis.RMS + ) row = [ list(np.sqrt([68.0, 234.0])), list(np.sqrt([104.0, 298.0])), @@ -545,7 +889,9 @@ def test_easy(self): aggregateby_cube.data, np.array(row, dtype=np.float32) ) - aggregateby_cube = cube.aggregated_by("latitude", iris.analysis.RMS) + aggregateby_cube = self.cube_easy.aggregated_by( + "latitude", iris.analysis.RMS + ) row = [ list(np.sqrt([50.0, 122.0, 170.0, 362.0])), [18.0, 12.0, 10.0, 6.0], @@ -554,17 +900,109 @@ def test_easy(self): aggregateby_cube.data, np.array(row, dtype=np.float32) ) + def test_weighted_easy(self): + # Use different weights for lat and lon to avoid division by zero. + lon_weights = np.array( + [[1, 0, 1, 1], [9, 1, 2, 0]], + dtype=np.float32, + ) + lat_weights = np.array([2.0, 2.0]) + + # + # Easy weighted mean aggregate test by each coordinate. 
+ # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", iris.analysis.MEAN, weights=lon_weights + ) + + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array([[3.0, 8.0], [0.2, 4.0]], dtype=np.float32), + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_easy.cml"), + ) + + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "latitude", + iris.analysis.MEAN, + weights=lat_weights, + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[3.0, 5.0, 7.0, 9.0], [0.0, 2.0, 4.0, 6.0]], + dtype=np.float32, + ), + ) + + # + # Easy weighted sum aggregate test by each coordinate. + # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", iris.analysis.SUM, weights=lon_weights + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array([[3.0, 16.0], [2.0, 8.0]], dtype=np.float32), + ) + + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "latitude", + iris.analysis.SUM, + weights=lat_weights, + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[6.0, 10.0, 14.0, 18.0], [0.0, 4.0, 8.0, 12.0]], + dtype=np.float32, + ), + ) + + # + # Easy weighted percentile aggregate test for longitude. + # Note: Not possible for latitude since at least two values for each + # category are necessary. + # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", + iris.analysis.WPERCENTILE, + percent=50, + weights=lon_weights, + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array([[3.0, 8.0], [0.2, 4.0]], dtype=np.float32), + ) + + # + # Easy weighted root mean square aggregate test by each coordinate. 
+ # + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "longitude", iris.analysis.RMS, weights=lon_weights + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[3.0, np.sqrt(65.0)], [np.sqrt(0.4), 4.0]], dtype=np.float32 + ), + ) + + aggregateby_cube = self.cube_easy_weighted.aggregated_by( + "latitude", iris.analysis.RMS, weights=lat_weights + ) + np.testing.assert_almost_equal( + aggregateby_cube.data, + np.array( + [[3.0, 5.0, 7.0, 9.0], [0.0, 2.0, 4.0, 6.0]], + dtype=np.float32, + ), + ) + def test_single_missing(self): # aggregation correctly handles masked data - mask = np.vstack( - ( - np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( - 26, axis=0 - ), - np.zeros([10, 3, 3]), - ) - ) - self.cube_single.data = ma.array(self.cube_single.data, mask=mask) single_expected = ma.masked_invalid( [ [ @@ -609,30 +1047,81 @@ def test_single_missing(self): ], ] ) - aggregateby_cube = self.cube_single.aggregated_by( + aggregateby_cube = self.cube_single_masked.aggregated_by( "height", iris.analysis.MEAN ) + self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "single_missing.cml"), - checksum=False, ) self.assertMaskedArrayAlmostEqual( aggregateby_cube.data, single_expected ) + def test_weighted_single_missing(self): + # weighted aggregation correctly handles masked data + weighted_single_expected = ma.masked_invalid( + [ + [ + [0.0, np.nan, 0.0], + [np.nan, 0.0, np.nan], + [0.0, np.nan, 0.0], + ], + [ + [1.0, np.nan, 3.0], + [np.nan, 5.0, np.nan], + [7.0, np.nan, 9.0], + ], + [ + [3.0, np.nan, 9.0], + [np.nan, 15.0, np.nan], + [21.0, np.nan, 27.0], + ], + [ + [7.5, np.nan, 22.5], + [np.nan, 37.5, np.nan], + [52.5, np.nan, 67.5], + ], + [ + [12.0, np.nan, 36.0], + [np.nan, 60.0, np.nan], + [84.0, np.nan, 108.0], + ], + [ + [17.5, np.nan, 52.5], + [np.nan, 87.5, np.nan], + [122.5, np.nan, 157.5], + ], + [ + [24.0, 53.0, 72.0], + [106.0, 120.0, 159.0], + [168.0, 212.0, 216.0], + ], + [ + [31.5, 63.0, 94.5], + [126.0, 
157.5, 189.0], + [220.5, 252.0, 283.5], + ], + ] + ) + aggregateby_cube = self.cube_single_masked.aggregated_by( + "height", + iris.analysis.MEAN, + weights=self.weights_single, + ) + + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single_missing.cml"), + ) + self.assertMaskedArrayAlmostEqual( + aggregateby_cube.data, + weighted_single_expected, + ) + def test_multi_missing(self): # aggregation correctly handles masked data - mask = np.vstack( - ( - np.array([[[0, 1, 0], [1, 0, 1], [0, 1, 0]]]).repeat( - 16, axis=0 - ), - np.ones([2, 3, 3]), - np.zeros([2, 3, 3]), - ) - ) - self.cube_multi.data = ma.array(self.cube_multi.data, mask=mask) multi_expected = ma.masked_invalid( [ [ @@ -682,32 +1171,160 @@ def test_multi_missing(self): ], ] ) - aggregateby_cube = self.cube_multi.aggregated_by( + aggregateby_cube = self.cube_multi_masked.aggregated_by( ["height", "level"], iris.analysis.MEAN ) + self.assertCML( aggregateby_cube, ("analysis", "aggregated_by", "multi_missing.cml"), - checksum=False, ) self.assertMaskedArrayAlmostEqual( aggregateby_cube.data, multi_expected ) - def test_returned_weights(self): + def test_weighted_multi_missing(self): + # weighted aggregation correctly handles masked data + weighted_multi_expected = ma.masked_invalid( + [ + [ + [0.0, np.nan, 0.0], + [np.nan, 0.0, np.nan], + [0.0, np.nan, 0.0], + ], + [ + [4.0, np.nan, 12.0], + [np.nan, 20.0, np.nan], + [28.0, np.nan, 36.0], + ], + [ + [14.0, 37.0, 42.0], + [74.0, 70.0, 111.0], + [98.0, 148.0, 126.0], + ], + [ + [7.0, np.nan, 21.0], + [np.nan, 35.0, np.nan], + [49.0, np.nan, 63.0], + ], + [ + [9.0, np.nan, 27.0], + [np.nan, 45.0, np.nan], + [63.0, np.nan, 81.0], + ], + [ + [10.5, np.nan, 31.5], + [np.nan, 52.5, np.nan], + [73.5, np.nan, 94.5], + ], + [ + [13.0, np.nan, 39.0], + [np.nan, 65.0, np.nan], + [91.0, np.nan, 117.0], + ], + [ + [15.0, np.nan, 45.0], + [np.nan, 75.0, np.nan], + [105.0, np.nan, 135.0], + ], + [ + [np.nan, np.nan, np.nan], + [np.nan, 
np.nan, np.nan], + [np.nan, np.nan, np.nan], + ], + ] + ) + aggregateby_cube = self.cube_multi_masked.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + weights=self.weights_multi, + ) + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi_missing.cml"), + ) + self.assertMaskedArrayAlmostEqual( + aggregateby_cube.data, + weighted_multi_expected, + ) + + def test_returned_true_single(self): + aggregateby_output = self.cube_single.aggregated_by( + "height", + iris.analysis.MEAN, + returned=True, + weights=self.weights_single, + ) + self.assertTrue(isinstance(aggregateby_output, tuple)) + + aggregateby_cube = aggregateby_output[0] + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_single.cml"), + ) + + aggregateby_weights = aggregateby_output[1] + expected_weights = np.array( + [ + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[4.0, 4.0, 4.0], [4.0, 4.0, 4.0], [4.0, 4.0, 4.0]], + [[5.0, 5.0, 5.0], [5.0, 5.0, 5.0], [5.0, 5.0, 5.0]], + [[6.0, 6.0, 6.0], [6.0, 6.0, 6.0], [6.0, 6.0, 6.0]], + [[7.0, 7.0, 7.0], [7.0, 7.0, 7.0], [7.0, 7.0, 7.0]], + [[8.0, 8.0, 8.0], [8.0, 8.0, 8.0], [8.0, 8.0, 8.0]], + ] + ) + np.testing.assert_almost_equal(aggregateby_weights, expected_weights) + + def test_returned_true_multi(self): + aggregateby_output = self.cube_multi.aggregated_by( + ["height", "level"], + iris.analysis.MEAN, + returned=True, + weights=self.weights_multi, + ) + self.assertTrue(isinstance(aggregateby_output, tuple)) + + aggregateby_cube = aggregateby_output[0] + self.assertCML( + aggregateby_cube, + ("analysis", "aggregated_by", "weighted_multi.cml"), + ) + + aggregateby_weights = aggregateby_output[1] + expected_weights = np.array( + [ + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], 
[3.0, 3.0, 3.0]], + [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], + [[3.0, 3.0, 3.0], [3.0, 3.0, 3.0], [3.0, 3.0, 3.0]], + [[1.0, 1.0, 1.0], [1.0, 1.0, 1.0], [1.0, 1.0, 1.0]], + [[2.0, 2.0, 2.0], [2.0, 2.0, 2.0], [2.0, 2.0, 2.0]], + ] + ) + np.testing.assert_almost_equal(aggregateby_weights, expected_weights) + + def test_returned_fails_with_non_weighted_aggregator(self): self.assertRaises( - ValueError, + TypeError, self.cube_single.aggregated_by, "height", - iris.analysis.MEAN, + iris.analysis.MAX, returned=True, ) + + def test_weights_fail_with_non_weighted_aggregator(self): self.assertRaises( - ValueError, + TypeError, self.cube_single.aggregated_by, "height", - iris.analysis.MEAN, - weights=[1, 2, 3, 4, 5], + iris.analysis.MAX, + weights=self.weights_single, ) diff --git a/lib/iris/tests/test_analysis.py b/lib/iris/tests/test_analysis.py index d5a810d2fa..e0a5d0971e 100644 --- a/lib/iris/tests/test_analysis.py +++ b/lib/iris/tests/test_analysis.py @@ -9,6 +9,7 @@ import iris.tests as tests # isort:skip import cf_units +import dask.array as da import numpy as np import numpy.ma as ma @@ -19,6 +20,7 @@ import iris.coords import iris.cube import iris.tests.stock +import iris.util class TestAnalysisCubeCoordComparison(tests.IrisTest): @@ -931,6 +933,106 @@ def test_count_2d(self): gt6, ("analysis", "count_foo_bar_2d.cml"), checksum=False ) + def test_max_run_1d(self): + cube = tests.stock.simple_1d() + # [ 0 1 2 3 4 5 6 7 8 9 10] + result = cube.collapsed( + "foo", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 8, 9]), + ) + self.assertArrayEqual(result.data, np.array(3)) + self.assertEqual(result.units, 1) + self.assertTupleEqual(result.cell_methods, ()) + self.assertCML( + result, ("analysis", "max_run_foo_1d.cml"), checksum=False + ) + + def test_max_run_lazy(self): + cube = tests.stock.simple_1d() + # [ 0 1 2 3 4 5 
6 7 8 9 10] + # Make data lazy + cube.data = da.from_array(cube.data) + result = cube.collapsed( + "foo", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 8, 9]), + ) + self.assertTrue(result.has_lazy_data()) + # Realise data + _ = result.data + self.assertArrayEqual(result.data, np.array(3)) + self.assertEqual(result.units, 1) + self.assertTupleEqual(result.cell_methods, ()) + self.assertCML( + result, ("analysis", "max_run_foo_1d.cml"), checksum=False + ) + + def test_max_run_2d(self): + cube = tests.stock.simple_2d() + # [[ 0 1 2 3] + # [ 4 5 6 7] + # [ 8 9 10 11]] + foo_result = cube.collapsed( + "foo", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), + ) + self.assertArrayEqual( + foo_result.data, np.array([1, 2, 1], dtype=np.float32) + ) + self.assertEqual(foo_result.units, 1) + self.assertTupleEqual(foo_result.cell_methods, ()) + self.assertCML( + foo_result, ("analysis", "max_run_foo_2d.cml"), checksum=False + ) + + bar_result = cube.collapsed( + "bar", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), + ) + self.assertArrayEqual( + bar_result.data, np.array([2, 2, 0, 3], dtype=np.float32) + ) + self.assertEqual(bar_result.units, 1) + self.assertTupleEqual(bar_result.cell_methods, ()) + self.assertCML( + bar_result, ("analysis", "max_run_bar_2d.cml"), checksum=False + ) + + with self.assertRaises(ValueError): + _ = cube.collapsed( + ("foo", "bar"), + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 3, 4, 5, 7, 9, 11]), + ) + + def test_max_run_masked(self): + cube = tests.stock.simple_2d() + # [[ 0 1 2 3] + # [ 4 5 6 7] + # [ 8 9 10 11]] + iris.util.mask_cube( + cube, np.isin(cube.data, [0, 2, 3, 5, 7, 11]), in_place=True + ) + # [[-- 1 -- --] + # [ 4 -- 6 --] + # [ 8 9 10 --]] + result = cube.collapsed( + "bar", + iris.analysis.MAX_RUN, + function=lambda val: np.isin(val, [0, 1, 4, 5, 6, 9, 10, 11]), + ) + self.assertArrayEqual( + result.data, 
np.array([1, 1, 2, 0], dtype=np.float32) + ) + self.assertEqual(result.units, 1) + self.assertTupleEqual(result.cell_methods, ()) + self.assertCML( + result, ("analysis", "max_run_bar_2d_masked.cml"), checksum=False + ) + def test_weighted_sum_consistency(self): # weighted sum with unit weights should be the same as a sum cube = tests.stock.simple_1d() @@ -1562,5 +1664,43 @@ def test_mean_with_weights(self): self.assertArrayAlmostEqual(expected_result, res_cube.data) +class TestCreateWeightedAggregatorFn(tests.IrisTest): + @staticmethod + def aggregator_fn(data, axis, **kwargs): + return (data, axis, kwargs) + + def test_no_weights_supplied(self): + aggregator_fn = iris.analysis.create_weighted_aggregator_fn( + self.aggregator_fn, 42, test_kwarg="test" + ) + output = aggregator_fn("dummy_array", None) + self.assertEqual(len(output), 3) + self.assertEqual(output[0], "dummy_array") + self.assertEqual(output[1], 42) + self.assertEqual(output[2], {"test_kwarg": "test"}) + + def test_weights_supplied(self): + aggregator_fn = iris.analysis.create_weighted_aggregator_fn( + self.aggregator_fn, 42, test_kwarg="test" + ) + output = aggregator_fn("dummy_array", "w") + self.assertEqual(len(output), 3) + self.assertEqual(output[0], "dummy_array") + self.assertEqual(output[1], 42) + self.assertEqual(output[2], {"test_kwarg": "test", "weights": "w"}) + + def test_weights_in_kwargs(self): + kwargs = {"test_kwarg": "test", "weights": "ignored"} + aggregator_fn = iris.analysis.create_weighted_aggregator_fn( + self.aggregator_fn, 42, **kwargs + ) + output = aggregator_fn("dummy_array", "w") + self.assertEqual(len(output), 3) + self.assertEqual(output[0], "dummy_array") + self.assertEqual(output[1], 42) + self.assertEqual(output[2], {"test_kwarg": "test", "weights": "w"}) + self.assertEqual(kwargs, {"test_kwarg": "test", "weights": "ignored"}) + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index 
e753adbae8..6c08dc1f9e 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -249,7 +249,7 @@ def test_apply_ufunc(self): np.square, a, new_name="squared temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) self.assertCMLApproxData(a, ("analysis", "apply_ufunc_original.cml")) @@ -259,14 +259,14 @@ def test_apply_ufunc(self): np.square, a, new_name="squared temperature", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=True, ) self.assertCMLApproxData(b, ("analysis", "apply_ufunc.cml")) self.assertCMLApproxData(a, ("analysis", "apply_ufunc.cml")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -295,7 +295,7 @@ def test_apply_ufunc_fail(self): def test_ifunc(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) b = my_ifunc(a, new_name="squared temperature", in_place=False) self.assertCMLApproxData(a, ("analysis", "apply_ifunc_original.cml")) @@ -307,7 +307,7 @@ def test_ifunc(self): self.assertCMLApproxData(a, ("analysis", "apply_ifunc.cml")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -347,7 +347,7 @@ def test_ifunc_init_fail(self): def test_ifunc_call_fail(self): a = self.cube - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units**2) # should now NOT fail because giving 2 arguments to an ifunc that # expects only one will now ignore the surplus argument and raise @@ -367,7 +367,7 @@ def test_ifunc_call_fail(self): my_ifunc(a) my_ifunc = iris.analysis.maths.IFunc( - lambda a: (a, a ** 2.0), lambda cube: cf_units.Unit("1") + lambda a: (a, a**2.0), lambda cube: cf_units.Unit("1") ) # should fail because data function returns a tuple @@ -553,9 +553,9 @@ def 
test_square_root(self): a.data = abs(a.data) a.units **= 2 - e = a ** 0.5 + e = a**0.5 - self.assertArrayAllClose(e.data, a.data ** 0.5) + self.assertArrayAllClose(e.data, a.data**0.5) self.assertCML(e, ("analysis", "sqrt.cml"), checksum=False) self.assertRaises(ValueError, iris.analysis.maths.exponentiate, a, 0.3) @@ -585,26 +585,26 @@ def test_apply_ufunc(self): np.square, a, new_name="more_thingness", - new_unit=a.units ** 2, + new_unit=a.units**2, in_place=False, ) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 vec_mag_ufunc = np.frompyfunc(vec_mag, 2, 1) b = iris.analysis.maths.apply_ufunc(vec_mag_ufunc, a, c) - ans = a.data ** 2 + c.data ** 2 - b2 = b ** 2 + ans = a.data**2 + c.data**2 + b2 = b**2 self.assertArrayAlmostEqual(b2.data, ans) @@ -617,17 +617,17 @@ def test_ifunc(self): a = self.cube a.units = cf_units.Unit("meters") - my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units ** 2) + my_ifunc = iris.analysis.maths.IFunc(np.square, lambda x: x.units**2) b = my_ifunc(a, new_name="more_thingness", in_place=False) - ans = a.data ** 2 + ans = a.data**2 self.assertArrayEqual(b.data, ans) self.assertEqual(b.name(), "more_thingness") self.assertEqual(b.units, cf_units.Unit("m^2")) def vec_mag(u, v): - return math.sqrt(u ** 2 + v ** 2) + return math.sqrt(u**2 + v**2) c = a.copy() + 2 @@ -637,12 +637,12 @@ def vec_mag(u, v): ) b = my_ifunc(a, c) - ans = (a.data ** 2 + c.data ** 2) ** 0.5 + ans = (a.data**2 + c.data**2) ** 0.5 self.assertArrayAlmostEqual(b.data, ans) def vec_mag_data_func(u_data, v_data): - return np.sqrt(u_data ** 2 + v_data ** 2) + return np.sqrt(u_data**2 + v_data**2) vec_mag_ifunc = iris.analysis.maths.IFunc( vec_mag_data_func, lambda a, b: (a + b).units @@ -687,12 +687,12 @@ def setUp(self): self.data_1u = 
np.array([[9, 9, 9], [8, 8, 8]], dtype=np.uint64) self.data_2u = np.array([[3, 3, 3], [2, 2, 2]], dtype=np.uint64) - self.cube_1f = Cube(self.data_1f) - self.cube_2f = Cube(self.data_2f) - self.cube_1i = Cube(self.data_1i) - self.cube_2i = Cube(self.data_2i) - self.cube_1u = Cube(self.data_1u) - self.cube_2u = Cube(self.data_2u) + self.cube_1f = Cube(self.data_1f.copy()) + self.cube_2f = Cube(self.data_2f.copy()) + self.cube_1i = Cube(self.data_1i.copy()) + self.cube_2i = Cube(self.data_2i.copy()) + self.cube_1u = Cube(self.data_1u.copy()) + self.cube_2u = Cube(self.data_2u.copy()) self.ops = (operator.add, operator.sub, operator.mul, operator.truediv) self.iops = ( diff --git a/lib/iris/tests/test_cdm.py b/lib/iris/tests/test_cdm.py index 64a7924ce4..8f2a9b474d 100644 --- a/lib/iris/tests/test_cdm.py +++ b/lib/iris/tests/test_cdm.py @@ -349,7 +349,6 @@ def test_similar_coord(self): ) def test_cube_summary_cell_methods(self): - cube = self.cube_2d.copy() # Create a list of values used to create cell methods @@ -692,7 +691,7 @@ def test_cube_iteration(self): pass def test_not_iterable(self): - self.assertFalse(isinstance(self.t, collections.Iterable)) + self.assertFalse(isinstance(self.t, collections.abc.Iterable)) class Test2dSlicing(TestCube2d): diff --git a/lib/iris/tests/test_cell.py b/lib/iris/tests/test_cell.py index 03d3fa7d7c..21d2603072 100644 --- a/lib/iris/tests/test_cell.py +++ b/lib/iris/tests/test_cell.py @@ -169,9 +169,35 @@ def test_coord_bounds_cmp(self): self.assertTrue(self.e < 2) def test_cell_cell_cmp(self): + self.e = iris.coords.Cell(1) + self.f = iris.coords.Cell(1) + + self.assertTrue(self.e == self.f) + self.assertEqual(hash(self.e), hash(self.f)) + + self.e = iris.coords.Cell(1) + self.f = iris.coords.Cell(1, [0, 2]) + + self.assertFalse(self.e == self.f) + self.assertNotEqual(hash(self.e), hash(self.f)) + + self.e = iris.coords.Cell(1, [0, 2]) + self.f = iris.coords.Cell(1, [0, 2]) + + self.assertTrue(self.e == self.f) + 
self.assertEqual(hash(self.e), hash(self.f)) + + self.e = iris.coords.Cell(1, [0, 2]) + self.f = iris.coords.Cell(1, [2, 0]) + + self.assertTrue(self.e == self.f) + self.assertEqual(hash(self.e), hash(self.f)) + self.e = iris.coords.Cell(0.7, [1.1, 1.9]) self.f = iris.coords.Cell(0.8, [1.1, 1.9]) + self.assertFalse(self.e == self.f) + self.assertNotEqual(hash(self.e), hash(self.f)) self.assertFalse(self.e > self.f) self.assertTrue(self.e <= self.f) self.assertTrue(self.f >= self.e) diff --git a/lib/iris/tests/test_cf.py b/lib/iris/tests/test_cf.py index 89fa2d20c6..3abd6b981b 100644 --- a/lib/iris/tests/test_cf.py +++ b/lib/iris/tests/test_cf.py @@ -11,8 +11,12 @@ # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests # isort:skip +import contextlib +import io from unittest import mock +import pytest + import iris import iris.fileformats.cf as cf @@ -50,11 +54,14 @@ def test_cached(self): @tests.skip_data class TestCFReader(tests.IrisTest): - def setUp(self): + @pytest.fixture(autouse=True) + def set_up(self): filename = tests.get_data_path( ("NetCDF", "rotated", "xyt", "small_rotPole_precipitation.nc") ) self.cfr = cf.CFReader(filename) + with self.cfr: + yield def test_ancillary_variables_pass_0(self): self.assertEqual(self.cfr.cf_group.ancillary_variables, {}) @@ -267,6 +274,32 @@ def test_variable_attribute_touch_pass_0(self): ), ) + def test_destructor(self): + """Test the destructor when reading the dataset fails. + Related to issue #3312: previously, the `CFReader` would + always call `close()` on its `_dataset` attribute, even if it + didn't exist because opening the dataset had failed. 
+ """ + with self.temp_filename(suffix=".nc") as fn: + with open(fn, "wb+") as fh: + fh.write( + b"\x89HDF\r\n\x1a\nBroken file with correct signature" + ) + fh.flush() + + with io.StringIO() as buf: + with contextlib.redirect_stderr(buf): + try: + _ = cf.CFReader(fn) + except OSError: + pass + try: + _ = iris.load_cubes(fn) + except OSError: + pass + buf.seek(0) + self.assertMultiLineEqual("", buf.read()) + @tests.skip_data class TestLoad(tests.IrisTest): @@ -320,7 +353,8 @@ def test_cell_methods(self): @tests.skip_data class TestClimatology(tests.IrisTest): - def setUp(self): + @pytest.fixture(autouse=True) + def set_up(self): filename = tests.get_data_path( ( "NetCDF", @@ -329,6 +363,8 @@ def setUp(self): ) ) self.cfr = cf.CFReader(filename) + with self.cfr: + yield def test_bounds(self): time = self.cfr.cf_group["temp_dmax_tmean_abs"].cf_group.coordinates[ @@ -345,7 +381,8 @@ def test_bounds(self): @tests.skip_data class TestLabels(tests.IrisTest): - def setUp(self): + @pytest.fixture(autouse=True) + def set_up(self): filename = tests.get_data_path( ( "NetCDF", @@ -360,6 +397,10 @@ def setUp(self): ) self.cfr_end = cf.CFReader(filename) + with self.cfr_start: + with self.cfr_end: + yield + def test_label_dim_start(self): cf_data_var = self.cfr_start.cf_group["temp_dmax_tmean_abs"] diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 01f6f777fa..b52934c568 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -12,9 +12,12 @@ from fnmatch import fnmatch from glob import glob import os +from pathlib import Path import subprocess import iris +from iris.fileformats.netcdf import _thread_safe_nc +from iris.tests import system_test LICENSE_TEMPLATE = """# Copyright Iris contributors # @@ -40,6 +43,29 @@ IRIS_REPO_DIRPATH = os.environ.get("IRIS_REPO_DIR", IRIS_INSTALL_DIR) +def test_netcdf4_import(): + """Use of netCDF4 must be via iris.fileformats.netcdf._thread_safe_nc 
.""" + # Please avoid including these phrases in any comments/strings throughout + # Iris (e.g. use "from the netCDF4 library" instead) - this allows the + # below search to remain quick and simple. + import_strings = ("import netCDF4", "from netCDF4") + + files_including_import = [] + for file_path in Path(IRIS_DIR).rglob("*.py"): + with file_path.open("r") as open_file: + file_text = open_file.read() + + if any([i in file_text for i in import_strings]): + files_including_import.append(file_path) + + expected = [ + Path(_thread_safe_nc.__file__), + Path(system_test.__file__), + Path(__file__), + ] + assert set(files_including_import) == set(expected) + + class TestLicenseHeaders(tests.IrisTest): @staticmethod def whatchanged_parse(whatchanged_output): diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index 4f9e48fb83..e568105f91 100644 --- a/lib/iris/tests/test_constraints.py +++ b/lib/iris/tests/test_constraints.py @@ -67,6 +67,27 @@ def test_constraints(self): sub_list = self.slices.extract(constraint) self.assertEqual(len(sub_list), 70 * 6) + def test_coord_availability(self): + # "model_level_number" coordinate available + constraint = iris.Constraint(model_level_number=lambda x: True) + result = self.slices.extract(constraint) + self.assertTrue(result) + + # "wibble" coordinate is not available + constraint = iris.Constraint(wibble=lambda x: False) + result = self.slices.extract(constraint) + self.assertFalse(result) + + # "wibble" coordinate is not available + constraint = iris.Constraint(wibble=lambda x: True) + result = self.slices.extract(constraint) + self.assertFalse(result) + + # "lambda x: False" always (confusingly) throws away the cube + constraint = iris.Constraint(model_level_number=lambda x: False) + result = self.slices.extract(constraint) + self.assertFalse(result) + def test_mismatched_type(self): constraint = iris.Constraint(model_level_number="aardvark") sub_list = self.slices.extract(constraint) @@ -91,7 
+112,7 @@ def test_cell_different_bounds(self): self.assertEqual(len(sub_list), 0) -class TestMixin: +class ConstraintMixin: """ Mix-in class for attributes & utilities common to the "normal" and "strict" test cases. @@ -134,7 +155,7 @@ def setUp(self): self.lat_gt_45 = iris.Constraint(latitude=lambda c: c > 45) -class RelaxedConstraintMixin(TestMixin): +class RelaxedConstraintMixin(ConstraintMixin): @staticmethod def fixup_sigma_to_be_aux(cubes): # XXX Fix the cubes such that the sigma coordinate is always an AuxCoord. Pending gh issue #18 @@ -296,11 +317,11 @@ def load_match(self, files, constraints): @tests.skip_data -class TestCubeExtract__names(TestMixin, tests.IrisTest): +class TestCubeExtract__names(ConstraintMixin, tests.IrisTest): def setUp(self): fname = iris.sample_data_path("atlantic_profiles.nc") self.cubes = iris.load(fname) - TestMixin.setUp(self) + ConstraintMixin.setUp(self) cube = iris.load_cube(self.theta_path) # Expected names... self.standard_name = "air_potential_temperature" @@ -353,11 +374,11 @@ def test_unknown(self): @tests.skip_data -class TestCubeExtract__name_constraint(TestMixin, tests.IrisTest): +class TestCubeExtract__name_constraint(ConstraintMixin, tests.IrisTest): def setUp(self): fname = iris.sample_data_path("atlantic_profiles.nc") self.cubes = iris.load(fname) - TestMixin.setUp(self) + ConstraintMixin.setUp(self) cube = iris.load_cube(self.theta_path) # Expected names... 
self.standard_name = "air_potential_temperature" @@ -579,9 +600,9 @@ def test_unknown(self): @tests.skip_data -class TestCubeExtract(TestMixin, tests.IrisTest): +class TestCubeExtract(ConstraintMixin, tests.IrisTest): def setUp(self): - TestMixin.setUp(self) + ConstraintMixin.setUp(self) self.cube = iris.load_cube(self.theta_path) def test_attribute_constraint(self): @@ -644,7 +665,7 @@ def test_non_existent_coordinate(self): @tests.skip_data -class TestConstraints(TestMixin, tests.IrisTest): +class TestConstraints(ConstraintMixin, tests.IrisTest): def test_constraint_expressions(self): rt = repr(self.theta) rl10 = repr(self.level_10) diff --git a/lib/iris/tests/test_coord_api.py b/lib/iris/tests/test_coord_api.py index 87270b524c..ea99ae06df 100644 --- a/lib/iris/tests/test_coord_api.py +++ b/lib/iris/tests/test_coord_api.py @@ -49,7 +49,11 @@ def test_slice_multiple_indices(self): def test_slice_reverse(self): b = self.lat[::-1] np.testing.assert_array_equal(b.points, self.lat.points[::-1]) - np.testing.assert_array_equal(b.bounds, self.lat.bounds[::-1, :]) + np.testing.assert_array_equal(b.bounds, self.lat.bounds[::-1, ::-1]) + + # Check contiguity is preserved. 
+ self.assertTrue(self.lat.is_contiguous()) + self.assertTrue(b.is_contiguous()) c = b[::-1] self.assertEqual(self.lat, c) diff --git a/lib/iris/tests/test_coord_categorisation.py b/lib/iris/tests/test_coord_categorisation.py index 616da882f5..0206ba66a5 100644 --- a/lib/iris/tests/test_coord_categorisation.py +++ b/lib/iris/tests/test_coord_categorisation.py @@ -52,7 +52,7 @@ def setUp(self): time_coord = iris.coords.DimCoord( day_numbers, standard_name="time", - units=cf_units.Unit("days since epoch", "gregorian"), + units=cf_units.Unit("days since epoch", "standard"), ) cube.add_dim_coord(time_coord, 0) diff --git a/lib/iris/tests/test_coordsystem.py b/lib/iris/tests/test_coordsystem.py index 4229125969..7cd15297cc 100644 --- a/lib/iris/tests/test_coordsystem.py +++ b/lib/iris/tests/test_coordsystem.py @@ -14,7 +14,6 @@ GeogCS, LambertConformal, RotatedGeogCS, - Stereographic, TransverseMercator, ) import iris.coords @@ -33,16 +32,6 @@ def osgb(): ) -def stereo(): - return Stereographic( - central_lat=-90, - central_lon=-45, - false_easting=100, - false_northing=200, - ellipsoid=GeogCS(6377563.396, 6356256.909), - ) - - class TestCoordSystemLookup(tests.IrisTest): def setUp(self): self.cube = iris.tests.stock.lat_lon_cube() @@ -87,7 +76,7 @@ def test_simple(self): def test_different_class(self): a = self.cs1 b = self.cs3 - self.assertNotEquals(a, b) + self.assertNotEqual(a, b) def test_different_public_attributes(self): a = self.cs1 @@ -98,7 +87,7 @@ def test_different_public_attributes(self): self.assertEqual(a.foo, "a") # a and b should not be the same - self.assertNotEquals(a, b) + self.assertNotEqual(a, b) # a and b should be the same b.foo = "a" @@ -106,7 +95,7 @@ def test_different_public_attributes(self): b.foo = "b" # a and b should not be the same - self.assertNotEquals(a, b) + self.assertNotEqual(a, b) class Test_CoordSystem_xml_element(tests.IrisTest): @@ -216,12 +205,152 @@ def test_as_cartopy_crs(self): cs = GeogCS(6543210, 6500000) res = 
cs.as_cartopy_crs() globe = ccrs.Globe( - semimajor_axis=6543210.0, semiminor_axis=6500000.0, ellipse=None + semimajor_axis=6543210.0, + semiminor_axis=6500000.0, + ellipse=None, ) expected = ccrs.Geodetic(globe) self.assertEqual(res, expected) +class Test_GeogCS_equality(tests.IrisTest): + """Test cached values don't break GeogCS equality""" + + def test_as_cartopy_globe(self): + cs_const = GeogCS(6543210, 6500000) + cs_mut = GeogCS(6543210, 6500000) + initial_globe = cs_mut.as_cartopy_globe() + new_globe = cs_mut.as_cartopy_globe() + + self.assertIs(new_globe, initial_globe) + self.assertEqual(cs_const, cs_mut) + + def test_as_cartopy_projection(self): + cs_const = GeogCS(6543210, 6500000) + cs_mut = GeogCS(6543210, 6500000) + initial_projection = cs_mut.as_cartopy_projection() + initial_globe = initial_projection.globe + new_projection = cs_mut.as_cartopy_projection() + new_globe = new_projection.globe + + self.assertIs(new_globe, initial_globe) + self.assertEqual(cs_const, cs_mut) + + def test_as_cartopy_crs(self): + cs_const = GeogCS(6543210, 6500000) + cs_mut = GeogCS(6543210, 6500000) + initial_crs = cs_mut.as_cartopy_crs() + initial_globe = initial_crs.globe + new_crs = cs_mut.as_cartopy_crs() + new_globe = new_crs.globe + + self.assertIs(new_crs, initial_crs) + self.assertIs(new_globe, initial_globe) + self.assertEqual(cs_const, cs_mut) + + def test_update_to_equivalent(self): + cs_const = GeogCS(6500000, 6000000) + # Cause caching + _ = cs_const.as_cartopy_crs() + + cs_mut = GeogCS(6543210, 6000000) + # Cause caching + _ = cs_mut.as_cartopy_crs() + # Set value + cs_mut.semi_major_axis = 6500000 + cs_mut.inverse_flattening = 13 + + self.assertEqual(cs_const.semi_major_axis, 6500000) + self.assertEqual(cs_mut.semi_major_axis, 6500000) + self.assertEqual(cs_const, cs_mut) + + +class Test_GeogCS_mutation(tests.IrisTest): + "Test that altering attributes of a GeogCS instance behaves as expected" + + def test_semi_major_axis_change(self): + # Clear datum + # 
Clear caches + cs = GeogCS.from_datum("OSGB 1936") + _ = cs.as_cartopy_crs() + self.assertEqual(cs.datum, "OSGB 1936") + cs.semi_major_axis = 6000000 + self.assertIsNone(cs.datum) + self.assertEqual(cs.as_cartopy_globe().semimajor_axis, 6000000) + + def test_semi_major_axis_no_change(self): + # Datum untouched + # Caches untouched + cs = GeogCS.from_datum("OSGB 1936") + initial_crs = cs.as_cartopy_crs() + self.assertEqual(cs.datum, "OSGB 1936") + cs.semi_major_axis = 6377563.396 + self.assertEqual(cs.datum, "OSGB 1936") + new_crs = cs.as_cartopy_crs() + self.assertIs(new_crs, initial_crs) + + def test_semi_minor_axis_change(self): + # Clear datum + # Clear caches + cs = GeogCS.from_datum("OSGB 1936") + _ = cs.as_cartopy_crs() + self.assertEqual(cs.datum, "OSGB 1936") + cs.semi_minor_axis = 6000000 + self.assertIsNone(cs.datum) + self.assertEqual(cs.as_cartopy_globe().semiminor_axis, 6000000) + + def test_semi_minor_axis_no_change(self): + # Datum untouched + # Caches untouched + cs = GeogCS.from_datum("OSGB 1936") + initial_crs = cs.as_cartopy_crs() + self.assertEqual(cs.datum, "OSGB 1936") + cs.semi_minor_axis = 6356256.909237285 + self.assertEqual(cs.datum, "OSGB 1936") + new_crs = cs.as_cartopy_crs() + self.assertIs(new_crs, initial_crs) + + def test_datum_change(self): + # Semi-major axis changes + # All internal ellipoid values set to None + # CRS changes + cs = GeogCS(6543210, 6500000) + _ = cs.as_cartopy_crs() + self.assertTrue("_globe" in cs.__dict__) + self.assertTrue("_crs" in cs.__dict__) + self.assertEqual(cs.semi_major_axis, 6543210) + cs.datum = "OSGB 1936" + self.assertEqual(cs.as_cartopy_crs().datum, "OSGB 1936") + self.assertIsNone(cs.__dict__["_semi_major_axis"]) + self.assertIsNone(cs.__dict__["_semi_minor_axis"]) + self.assertIsNone(cs.__dict__["_inverse_flattening"]) + self.assertEqual(cs.semi_major_axis, 6377563.396) + + def test_datum_no_change(self): + # Caches untouched + cs = GeogCS.from_datum("OSGB 1936") + initial_crs = 
cs.as_cartopy_crs() + cs.datum = "OSGB 1936" + new_crs = cs.as_cartopy_crs() + self.assertIs(new_crs, initial_crs) + + def test_inverse_flattening_change(self): + # Caches untouched + # Axes unchanged (this behaviour is odd, but matches existing behaviour) + # Warning about lack of effect on other aspects + cs = GeogCS(6543210, 6500000) + initial_crs = cs.as_cartopy_crs() + with self.assertWarnsRegex( + UserWarning, + "Setting inverse_flattening does not affect other properties of the GeogCS object.", + ): + cs.inverse_flattening = cs.inverse_flattening + 1 + new_crs = cs.as_cartopy_crs() + self.assertIs(new_crs, initial_crs) + self.assertEqual(cs.semi_major_axis, 6543210) + self.assertEqual(cs.semi_minor_axis, 6500000) + + class Test_RotatedGeogCS_construction(tests.IrisTest): def test_init(self): rcs = RotatedGeogCS( @@ -243,7 +372,10 @@ def test_init(self): class Test_RotatedGeogCS_repr(tests.IrisTest): def test_repr(self): rcs = RotatedGeogCS( - 30, 40, north_pole_grid_longitude=50, ellipsoid=GeogCS(6371229) + 30, + 40, + north_pole_grid_longitude=50, + ellipsoid=GeogCS(6371229), ) expected = ( "RotatedGeogCS(30.0, 40.0, " @@ -263,7 +395,10 @@ def test_repr(self): class Test_RotatedGeogCS_str(tests.IrisTest): def test_str(self): rcs = RotatedGeogCS( - 30, 40, north_pole_grid_longitude=50, ellipsoid=GeogCS(6371229) + 30, + 40, + north_pole_grid_longitude=50, + ellipsoid=GeogCS(6371229), ) expected = ( "RotatedGeogCS(30.0, 40.0, " @@ -373,85 +508,6 @@ def test_as_cartopy_projection(self): self.assertEqual(res, expected) -class Test_Stereographic_construction(tests.IrisTest): - def test_stereo(self): - st = stereo() - self.assertXMLElement(st, ("coord_systems", "Stereographic.xml")) - - -class Test_Stereographic_repr(tests.IrisTest): - def test_stereo(self): - st = stereo() - expected = ( - "Stereographic(central_lat=-90.0, central_lon=-45.0, " - "false_easting=100.0, false_northing=200.0, true_scale_lat=None, " - "ellipsoid=GeogCS(semi_major_axis=6377563.396, 
semi_minor_axis=6356256.909))" - ) - self.assertEqual(expected, repr(st)) - - -class Test_Stereographic_as_cartopy_crs(tests.IrisTest): - def test_as_cartopy_crs(self): - latitude_of_projection_origin = -90.0 - longitude_of_projection_origin = -45.0 - false_easting = 100.0 - false_northing = 200.0 - ellipsoid = GeogCS(6377563.396, 6356256.909) - - st = Stereographic( - central_lat=latitude_of_projection_origin, - central_lon=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=ellipsoid, - ) - expected = ccrs.Stereographic( - central_latitude=latitude_of_projection_origin, - central_longitude=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - globe=ccrs.Globe( - semimajor_axis=6377563.396, - semiminor_axis=6356256.909, - ellipse=None, - ), - ) - - res = st.as_cartopy_crs() - self.assertEqual(res, expected) - - -class Test_Stereographic_as_cartopy_projection(tests.IrisTest): - def test_as_cartopy_projection(self): - latitude_of_projection_origin = -90.0 - longitude_of_projection_origin = -45.0 - false_easting = 100.0 - false_northing = 200.0 - ellipsoid = GeogCS(6377563.396, 6356256.909) - - st = Stereographic( - central_lat=latitude_of_projection_origin, - central_lon=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=ellipsoid, - ) - expected = ccrs.Stereographic( - central_latitude=latitude_of_projection_origin, - central_longitude=longitude_of_projection_origin, - false_easting=false_easting, - false_northing=false_northing, - globe=ccrs.Globe( - semimajor_axis=6377563.396, - semiminor_axis=6356256.909, - ellipse=None, - ), - ) - - res = st.as_cartopy_projection() - self.assertEqual(res, expected) - - class Test_LambertConformal(tests.GraphicsTest): def test_fail_secant_latitudes_none(self): emsg = "secant latitudes" @@ -488,5 +544,23 @@ def test_south_cutoff(self): self.assertEqual(ccrs.cutoff, 30) +class 
Test_Datums(tests.IrisTest): + def test_default_none(self): + cs = GeogCS(6543210, 6500000) # Arbitrary radii + cartopy_crs = cs.as_cartopy_crs() + self.assertMultiLineEqual(cartopy_crs.datum.name, "unknown") + + def test_set_persist(self): + cs = GeogCS.from_datum(datum="WGS84") + cartopy_crs = cs.as_cartopy_crs() + self.assertMultiLineEqual( + cartopy_crs.datum.name, "World Geodetic System 1984" + ) + + cs = GeogCS.from_datum(datum="OSGB36") + cartopy_crs = cs.as_cartopy_crs() + self.assertMultiLineEqual(cartopy_crs.datum.name, "OSGB 1936") + + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_file_save.py b/lib/iris/tests/test_file_save.py index 3b751cfcbe..216637202a 100644 --- a/lib/iris/tests/test_file_save.py +++ b/lib/iris/tests/test_file_save.py @@ -201,7 +201,7 @@ def test_bytesio(self): data = infile.read() # Compare files - self.assertEquals( + self.assertEqual( data, sio.getvalue(), "Mismatch in data when comparing iris bytesio save " diff --git a/lib/iris/tests/test_image_json.py b/lib/iris/tests/test_image_json.py index 7c5c824ffe..b5213156f8 100644 --- a/lib/iris/tests/test_image_json.py +++ b/lib/iris/tests/test_image_json.py @@ -8,56 +8,42 @@ # importing anything else import iris.tests as tests # isort:skip -import codecs -import itertools -import json -import os +from pathlib import Path -import requests +import iris.tests.graphics as graphics -@tests.skip_inet +@tests.skip_data class TestImageFile(tests.IrisTest): - def test_resolve(self): - listingfile_uri = ( - "https://raw.githubusercontent.com/SciTools/test-iris-imagehash" - "/gh-pages/v4_files_listing.txt" - ) - req = requests.get(listingfile_uri) - if req.status_code != 200: - raise ValueError( - "GET failed on image listings file: {}".format(listingfile_uri) - ) - - listings_text = req.content.decode("utf-8") - reference_image_filenames = [ - line.strip() for line in listings_text.split("\n") + def test_json(self): + # get test names from json + repo_names = 
[*graphics.read_repo_json().keys()] + # get file names from test data + test_data_names = [ + pp.stem for pp in Path(tests.get_data_path(["images"])).iterdir() ] - base = "https://scitools.github.io/test-iris-imagehash/images/v4" - reference_image_uris = set( - "{}/{}".format(base, name) for name in reference_image_filenames - ) - - imagerepo_json_filepath = os.path.join( - os.path.dirname(__file__), "results", "imagerepo.json" - ) - with open(imagerepo_json_filepath, "rb") as fi: - imagerepo = json.load(codecs.getreader("utf-8")(fi)) - - # "imagerepo" maps key: list-of-uris. Put all the uris in one big set. - tests_uris = set(itertools.chain.from_iterable(imagerepo.values())) - - missing_refs = list(tests_uris - reference_image_uris) - n_missing_refs = len(missing_refs) - if n_missing_refs > 0: + # compare + repo_name_set = set(repo_names) + self.assertEqual(len(repo_names), len(repo_name_set)) + test_data_name_set = set(test_data_names) + self.assertEqual(len(test_data_names), len(test_data_name_set)) + missing_from_json = test_data_name_set - repo_name_set + if missing_from_json: + amsg = ( + "Missing images: Images are present in the iris-test-data " + "repo, that are not referenced in imagerepo.json" + ) + # Always fails when we get here: report the problem. + self.assertEqual(missing_from_json, set(), msg=amsg) + missing_from_test_data = repo_name_set - test_data_name_set + if missing_from_test_data: amsg = ( - "Missing images: These {} image uris are referenced in " - "imagerepo.json, but not listed in {} : " + "Missing images: Image names are referenced in " + "imagerepo.json, that are not present in the iris-test-data " + "repo" ) - amsg = amsg.format(n_missing_refs, listingfile_uri) - amsg += "".join("\n {}".format(uri) for uri in missing_refs) # Always fails when we get here: report the problem. 
- self.assertEqual(n_missing_refs, 0, msg=amsg) + self.assertEqual(missing_from_test_data, set(), msg=amsg) if __name__ == "__main__": diff --git a/lib/iris/tests/test_io_init.py b/lib/iris/tests/test_io_init.py index d33b76ddeb..82da82cfa9 100644 --- a/lib/iris/tests/test_io_init.py +++ b/lib/iris/tests/test_io_init.py @@ -126,7 +126,7 @@ def test_format_picker(self): ] # test that each filespec is identified as the expected format - for (expected_format_name, file_spec) in test_specs: + for expected_format_name, file_spec in test_specs: test_path = tests.get_data_path(file_spec) with open(test_path, "rb") as test_file: a = iff.FORMAT_AGENT.get_spec(test_path, test_file) diff --git a/lib/iris/tests/test_lazy_aggregate_by.py b/lib/iris/tests/test_lazy_aggregate_by.py new file mode 100644 index 0000000000..d1ebc9a36a --- /dev/null +++ b/lib/iris/tests/test_lazy_aggregate_by.py @@ -0,0 +1,48 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. 
+import unittest + +from iris._lazy_data import as_lazy_data +from iris.tests import test_aggregate_by + + +# Simply redo the tests of test_aggregate_by.py with lazy data +class TestLazyAggregateBy(test_aggregate_by.TestAggregateBy): + def setUp(self): + super().setUp() + + self.cube_single.data = as_lazy_data(self.cube_single.data) + self.cube_multi.data = as_lazy_data(self.cube_multi.data) + self.cube_single_masked.data = as_lazy_data( + self.cube_single_masked.data + ) + self.cube_multi_masked.data = as_lazy_data(self.cube_multi_masked.data) + self.cube_easy.data = as_lazy_data(self.cube_easy.data) + self.cube_easy_weighted.data = as_lazy_data( + self.cube_easy_weighted.data + ) + + assert self.cube_single.has_lazy_data() + assert self.cube_multi.has_lazy_data() + assert self.cube_single_masked.has_lazy_data() + assert self.cube_multi_masked.has_lazy_data() + assert self.cube_easy.has_lazy_data() + assert self.cube_easy_weighted.has_lazy_data() + + def tearDown(self): + super().tearDown() + + # Note: weighted easy cube is not expected to have lazy data since + # WPERCENTILE is not lazy. 
+ assert self.cube_single.has_lazy_data() + assert self.cube_multi.has_lazy_data() + assert self.cube_single_masked.has_lazy_data() + assert self.cube_multi_masked.has_lazy_data() + assert self.cube_easy.has_lazy_data() + + +if __name__ == "__main__": + unittest.main() diff --git a/lib/iris/tests/test_load.py b/lib/iris/tests/test_load.py index 86ff2f1ece..adb33924e5 100644 --- a/lib/iris/tests/test_load.py +++ b/lib/iris/tests/test_load.py @@ -12,8 +12,10 @@ import iris.tests as tests # isort:skip import pathlib +from unittest import mock import iris +from iris.fileformats.netcdf import _thread_safe_nc import iris.io @@ -148,19 +150,20 @@ def test_path_object(self): self.assertEqual(len(cubes), 1) -class TestOpenDAP(tests.IrisTest): - def test_load(self): - # Check that calling iris.load_* with a http URI triggers a call to - # ``iris.io.load_http`` +class TestOPeNDAP(tests.IrisTest): + def setUp(self): + self.url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" - url = "http://geoport.whoi.edu:80/thredds/dodsC/bathy/gom15" + def test_load_http_called(self): + # Check that calling iris.load_* with an http URI triggers a call to + # ``iris.io.load_http`` class LoadHTTPCalled(Exception): pass def new_load_http(passed_urls, *args, **kwargs): self.assertEqual(len(passed_urls), 1) - self.assertEqual(url, passed_urls[0]) + self.assertEqual(self.url, passed_urls[0]) raise LoadHTTPCalled() try: @@ -174,11 +177,30 @@ def new_load_http(passed_urls, *args, **kwargs): iris.load_cubes, ]: with self.assertRaises(LoadHTTPCalled): - fn(url) + fn(self.url) finally: iris.io.load_http = orig + @tests.skip_data + def test_netCDF_Dataset_call(self): + # Check that load_http calls netCDF4.Dataset and supplies the expected URL. + + # To avoid making a request to an OPeNDAP server in a test, instead + # mock the call to netCDF.Dataset so that it returns a dataset for a + # local file. 
+ filename = tests.get_data_path( + ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") + ) + fake_dataset = _thread_safe_nc.DatasetWrapper(filename) + + with mock.patch( + "iris.fileformats.netcdf._thread_safe_nc.DatasetWrapper", + return_value=fake_dataset, + ) as dataset_loader: + next(iris.io.load_http([self.url], callback=None)) + dataset_loader.assert_called_with(self.url, mode="r") + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_mapping.py b/lib/iris/tests/test_mapping.py index a71385b5bc..202c319b61 100644 --- a/lib/iris/tests/test_mapping.py +++ b/lib/iris/tests/test_mapping.py @@ -242,12 +242,6 @@ def test_pcolormesh(self): iplt.pcolormesh(self.cube) self.check_graphic() - def test_grid(self): - iplt.pcolormesh(self.cube, facecolors="none", edgecolors="blue") - # the result is a graphic which has coloured edges. This is a mpl bug, - # see https://github.com/matplotlib/matplotlib/issues/1302 - self.check_graphic() - def test_outline(self): iplt.outline(self.cube) self.check_graphic() diff --git a/lib/iris/tests/test_merge.py b/lib/iris/tests/test_merge.py index c209d68da0..e53bbfb5f3 100644 --- a/lib/iris/tests/test_merge.py +++ b/lib/iris/tests/test_merge.py @@ -190,7 +190,7 @@ def setUp(self): ) def test__ndarray_ndarray(self): - for (lazy0, lazy1) in self.lazy_combos: + for lazy0, lazy1 in self.lazy_combos: cubes = iris.cube.CubeList() cubes.append(self._make_cube(0, dtype=self.dtype, lazy=lazy0)) cubes.append(self._make_cube(1, dtype=self.dtype, lazy=lazy1)) diff --git a/lib/iris/tests/test_name.py b/lib/iris/tests/test_name.py index 2843673da8..b4e91bafd7 100644 --- a/lib/iris/tests/test_name.py +++ b/lib/iris/tests/test_name.py @@ -8,6 +8,9 @@ # import iris tests first so that some things can be initialised before # importing anything else import iris.tests as tests # isort:skip + +import tempfile + import iris @@ -39,7 +42,7 @@ def test_NAMEIII_version2(self): ) self.assertCMLApproxData(cubes, ("name", 
"NAMEIII_version2.cml")) - def test_NAMEII_trajectory(self): + def test_NAMEIII_trajectory(self): cubes = iris.load( tests.get_data_path(("NAME", "NAMEIII_trajectory.txt")) ) @@ -48,6 +51,32 @@ def test_NAMEII_trajectory(self): cubes, ("name", "NAMEIII_trajectory.cml"), checksum=False ) + def test_NAMEII__no_time_averaging(self): + cubes = iris.load( + tests.get_data_path(("NAME", "NAMEII_no_time_averaging.txt")) + ) + + # Also check that it saves without error. + # This was previously failing, see https://github.com/SciTools/iris/issues/3288 + with tempfile.TemporaryDirectory() as temp_dirpath: + iris.save(cubes, temp_dirpath + "/tmp.nc") + + self.assertCML( + cubes[0], + ( + "name", + "NAMEII_field__no_time_averaging_0.cml", + ), + ) + self.assertCML( + cubes, + ( + "name", + "NAMEII_field__no_time_averaging.cml", + ), + checksum=False, + ) + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 2c22c6d088..d182de84f6 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -16,11 +16,9 @@ import os.path import shutil import stat -from subprocess import check_call import tempfile from unittest import mock -import netCDF4 as nc import numpy as np import numpy.ma as ma @@ -30,9 +28,11 @@ import iris.coord_systems as icoord_systems from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf +from iris.fileformats.netcdf import _thread_safe_nc from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names import iris.tests.stock as stock +from iris.tests.stock.netcdf import ncgen_from_cdl import iris.util @@ -81,7 +81,7 @@ def test_missing_time_bounds(self): ("NetCDF", "global", "xyt", "SMALL_hires_wind_u_for_ipcc4.nc") ) shutil.copyfile(src, filename) - dataset = nc.Dataset(filename, mode="a") + dataset = _thread_safe_nc.DatasetWrapper(filename, mode="a") dataset.renameVariable("time_bnds", "foo") dataset.close() 
_ = iris.load_cube(filename, "eastward_wind") @@ -204,7 +204,7 @@ def test_missing_climatology(self): ("NetCDF", "transverse_mercator", "tmean_1910_1910.nc") ) shutil.copyfile(src, filename) - dataset = nc.Dataset(filename, mode="a") + dataset = _thread_safe_nc.DatasetWrapper(filename, mode="a") dataset.renameVariable("climatology_bounds", "foo") dataset.close() _ = iris.load_cube(filename, "Mean temperature") @@ -218,6 +218,26 @@ def test_load_merc_grid(self): ) self.assertCML(cube, ("netcdf", "netcdf_merc.cml")) + def test_load_complex_merc_grid(self): + # Test loading a single CF-netCDF file with a Mercator grid_mapping that + # includes false easting and northing and a standard parallel + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "mercator", "false_east_north_merc.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_merc_false.cml")) + + def test_load_merc_grid_non_unit_scale_factor(self): + # Test loading a single CF-netCDF file with a Mercator grid_mapping that + # includes a non-unit scale factor at projection origin + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "mercator", "non_unit_scale_factor_merc.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_merc_scale_factor.cml")) + def test_load_stereographic_grid(self): # Test loading a single CF-netCDF file with a stereographic # grid_mapping. @@ -228,6 +248,16 @@ def test_load_stereographic_grid(self): ) self.assertCML(cube, ("netcdf", "netcdf_stereo.cml")) + def test_load_polar_stereographic_grid(self): + # Test loading a single CF-netCDF file with a polar stereographic + # grid_mapping. + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "polar", "toa_brightness_temperature.nc") + ) + ) + self.assertCML(cube, ("netcdf", "netcdf_polar.cml")) + def test_cell_methods(self): # Test exercising CF-netCDF cell method parsing. 
cubes = iris.load( @@ -283,9 +313,7 @@ def test_deferred_loading(self): cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)], ("netcdf", "netcdf_deferred_tuple_1.cml"), ) - subcube = cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)][ - (1, 3), - ] + subcube = cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)][(1, 3),] self.assertCML(subcube, ("netcdf", "netcdf_deferred_tuple_2.cml")) # Consecutive mixture on same dimension. @@ -333,12 +361,8 @@ def test_um_stash_source(self): self.tmpdir = tempfile.mkdtemp() cdl_path = os.path.join(self.tmpdir, "tst.cdl") nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) + # Create a temporary netcdf file from the CDL string. + ncgen_from_cdl(ref_cdl, cdl_path, nc_path) # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(nc_load_cubes(nc_path)) self.assertEqual(len(cubes), 1) @@ -382,12 +406,8 @@ def test_ukmo__um_stash_source_priority(self): self.tmpdir = tempfile.mkdtemp() cdl_path = os.path.join(self.tmpdir, "tst.cdl") nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) + # Create a temporary netcdf file from the CDL string. + ncgen_from_cdl(ref_cdl, cdl_path, nc_path) # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
cubes = list(nc_load_cubes(nc_path)) self.assertEqual(len(cubes), 1) @@ -427,12 +447,8 @@ def test_bad_um_stash_source(self): self.tmpdir = tempfile.mkdtemp() cdl_path = os.path.join(self.tmpdir, "tst.cdl") nc_path = os.path.join(self.tmpdir, "tst.nc") - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(ref_cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) + # Create a temporary netcdf file from the CDL string. + ncgen_from_cdl(ref_cdl, cdl_path, nc_path) # Load with iris.fileformats.netcdf.load_cubes, and check expected content. cubes = list(nc_load_cubes(nc_path)) self.assertEqual(len(cubes), 1) @@ -616,7 +632,7 @@ def test_netcdf_save_format(self): with self.temp_filename(suffix=".nc") as file_out: # Test default NETCDF4 file format saving. iris.save(cube, file_out) - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertEqual( ds.file_format, "NETCDF4", "Failed to save as NETCDF4 format" ) @@ -624,7 +640,7 @@ def test_netcdf_save_format(self): # Test NETCDF4_CLASSIC file format saving. iris.save(cube, file_out, netcdf_format="NETCDF4_CLASSIC") - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertEqual( ds.file_format, "NETCDF4_CLASSIC", @@ -634,7 +650,7 @@ def test_netcdf_save_format(self): # Test NETCDF3_CLASSIC file format saving. iris.save(cube, file_out, netcdf_format="NETCDF3_CLASSIC") - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertEqual( ds.file_format, "NETCDF3_CLASSIC", @@ -644,7 +660,7 @@ def test_netcdf_save_format(self): # Test NETCDF4_64BIT file format saving. 
iris.save(cube, file_out, netcdf_format="NETCDF3_64BIT") - ds = nc.Dataset(file_out) + ds = _thread_safe_nc.DatasetWrapper(file_out) self.assertTrue( ds.file_format in ["NETCDF3_64BIT", "NETCDF3_64BIT_OFFSET"], "Failed to save as NETCDF3_64BIT format", @@ -1031,7 +1047,7 @@ def test_attributes(self): with self.temp_filename(suffix=".nc") as filename: iris.save(self.cube, filename) # Load the dataset. - ds = nc.Dataset(filename, "r") + ds = _thread_safe_nc.DatasetWrapper(filename, "r") exceptions = [] # Should be global attributes. for gkey in aglobals: @@ -1195,7 +1211,7 @@ def test_shared(self): self.assertCDL(filename) # Also check that only one, shared ancillary variable was written. - ds = nc.Dataset(filename) + ds = _thread_safe_nc.DatasetWrapper(filename) self.assertIn("air_potential_temperature", ds.variables) self.assertIn("alternate_data", ds.variables) self.assertEqual( @@ -1399,7 +1415,6 @@ def test_process_flags(self): } for bits, descriptions in multiple_map.items(): - ll_cube = stock.lat_lon_cube() ll_cube.attributes["ukmo__process_flags"] = descriptions diff --git a/lib/iris/tests/test_nimrod.py b/lib/iris/tests/test_nimrod.py index a1d7bb298f..6d62623198 100644 --- a/lib/iris/tests/test_nimrod.py +++ b/lib/iris/tests/test_nimrod.py @@ -80,7 +80,8 @@ def test_huge_field_load(self): @tests.skip_data def test_load_kwarg(self): """Tests that the handle_metadata_errors kwarg is effective by setting it to - False with a file with known incomplete meta-data (missing ellipsoid).""" + False with a file with known incomplete meta-data (missing ellipsoid). + """ datafile = "u1096_ng_ek00_pressure_2km" with self.assertRaisesRegex( TranslationError, diff --git a/lib/iris/tests/test_pandas.py b/lib/iris/tests/test_pandas.py deleted file mode 100644 index af62ad23d3..0000000000 --- a/lib/iris/tests/test_pandas.py +++ /dev/null @@ -1,504 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import copy -import datetime -import unittest - -import cf_units -import cftime -import matplotlib.units -import numpy as np - -# Importing pandas has the side-effect of messing with the formatters -# used by matplotlib for handling dates. -default_units_registry = copy.copy(matplotlib.units.registry) -try: - import pandas -except ImportError: - # Disable all these tests if pandas is not installed. - pandas = None -matplotlib.units.registry = default_units_registry - -skip_pandas = unittest.skipIf( - pandas is None, 'Test(s) require "pandas", ' "which is not available." -) - -if pandas is not None: - from iris.coords import DimCoord - from iris.cube import Cube - import iris.pandas - - -@skip_pandas -class TestAsSeries(tests.IrisTest): - """Test conversion of 1D cubes to Pandas using as_series()""" - - def test_no_dim_coord(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - series = iris.pandas.as_series(cube) - expected_index = np.array([0, 1, 2, 3, 4]) - self.assertArrayEqual(series, cube.data) - self.assertArrayEqual(series.index, expected_index) - - def test_simple(self): - cube = Cube(np.array([0, 1, 2, 3, 4.4]), long_name="foo") - dim_coord = DimCoord([5, 6, 7, 8, 9], long_name="bar") - cube.add_dim_coord(dim_coord, 0) - expected_index = np.array([5, 6, 7, 8, 9]) - series = iris.pandas.as_series(cube) - self.assertArrayEqual(series, cube.data) - self.assertArrayEqual(series.index, expected_index) - - def test_masked(self): - data = np.ma.MaskedArray([0, 1, 2, 3, 4.4], mask=[0, 1, 0, 1, 0]) - cube = Cube(data, long_name="foo") - series = iris.pandas.as_series(cube) - self.assertArrayEqual(series, cube.data.astype("f").filled(np.nan)) - - def test_time_gregorian(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), 
long_name="ts") - time_coord = DimCoord( - [0, 100.1, 200.2, 300.3, 400.4], - long_name="time", - units="days since 2000-01-01 00:00", - ) - cube.add_dim_coord(time_coord, 0) - expected_index = [ - datetime.datetime(2000, 1, 1, 0, 0), - datetime.datetime(2000, 4, 10, 2, 24), - datetime.datetime(2000, 7, 19, 4, 48), - datetime.datetime(2000, 10, 27, 7, 12), - datetime.datetime(2001, 2, 4, 9, 36), - ] - series = iris.pandas.as_series(cube) - self.assertArrayEqual(series, cube.data) - self.assertListEqual(list(series.index), expected_index) - - def test_time_360(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts") - time_unit = cf_units.Unit( - "days since 2000-01-01 00:00", calendar=cf_units.CALENDAR_360_DAY - ) - time_coord = DimCoord( - [0, 100.1, 200.2, 300.3, 400.4], long_name="time", units=time_unit - ) - cube.add_dim_coord(time_coord, 0) - expected_index = [ - cftime.Datetime360Day(2000, 1, 1, 0, 0), - cftime.Datetime360Day(2000, 4, 11, 2, 24), - cftime.Datetime360Day(2000, 7, 21, 4, 48), - cftime.Datetime360Day(2000, 11, 1, 7, 12), - cftime.Datetime360Day(2001, 2, 11, 9, 36), - ] - - series = iris.pandas.as_series(cube) - self.assertArrayEqual(series, cube.data) - self.assertArrayEqual(series.index, expected_index) - - def test_copy_true(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - series = iris.pandas.as_series(cube) - series[0] = 99 - self.assertEqual(cube.data[0], 0) - - def test_copy_int32_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - self.assertEqual(cube.data[0], 99) - - def test_copy_int64_false(self): - cube = Cube(np.array([0, 1, 2, 3, 4], dtype=np.int32), long_name="foo") - series = iris.pandas.as_series(cube, copy=False) - series[0] = 99 - self.assertEqual(cube.data[0], 99) - - def test_copy_float_false(self): - cube = Cube(np.array([0, 1, 2, 3.3, 4]), long_name="foo") - series = iris.pandas.as_series(cube, 
copy=False) - series[0] = 99 - self.assertEqual(cube.data[0], 99) - - def test_copy_masked_true(self): - data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) - cube = Cube(data, long_name="foo") - series = iris.pandas.as_series(cube) - series[0] = 99 - self.assertEqual(cube.data[0], 0) - - def test_copy_masked_false(self): - data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) - cube = Cube(data, long_name="foo") - with self.assertRaises(ValueError): - _ = iris.pandas.as_series(cube, copy=False) - - -@skip_pandas -class TestAsDataFrame(tests.IrisTest): - """Test conversion of 2D cubes to Pandas using as_data_frame()""" - - def test_no_dim_coords(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) - expected_index = [0, 1] - expected_columns = [0, 1, 2, 3, 4] - data_frame = iris.pandas.as_data_frame(cube) - self.assertArrayEqual(data_frame, cube.data) - self.assertArrayEqual(data_frame.index, expected_index) - self.assertArrayEqual(data_frame.columns, expected_columns) - - def test_no_x_coord(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) - y_coord = DimCoord([10, 11], long_name="bar") - cube.add_dim_coord(y_coord, 0) - expected_index = [10, 11] - expected_columns = [0, 1, 2, 3, 4] - data_frame = iris.pandas.as_data_frame(cube) - self.assertArrayEqual(data_frame, cube.data) - self.assertArrayEqual(data_frame.index, expected_index) - self.assertArrayEqual(data_frame.columns, expected_columns) - - def test_no_y_coord(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) - x_coord = DimCoord([10, 11, 12, 13, 14], long_name="bar") - cube.add_dim_coord(x_coord, 1) - expected_index = [0, 1] - expected_columns = [10, 11, 12, 13, 14] - data_frame = iris.pandas.as_data_frame(cube) - self.assertArrayEqual(data_frame, cube.data) - self.assertArrayEqual(data_frame.index, expected_index) - self.assertArrayEqual(data_frame.columns, 
expected_columns) - - def test_simple(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="foo" - ) - x_coord = DimCoord([10, 11, 12, 13, 14], long_name="bar") - y_coord = DimCoord([15, 16], long_name="milk") - cube.add_dim_coord(x_coord, 1) - cube.add_dim_coord(y_coord, 0) - expected_index = [15, 16] - expected_columns = [10, 11, 12, 13, 14] - data_frame = iris.pandas.as_data_frame(cube) - self.assertArrayEqual(data_frame, cube.data) - self.assertArrayEqual(data_frame.index, expected_index) - self.assertArrayEqual(data_frame.columns, expected_columns) - - def test_masked(self): - data = np.ma.MaskedArray( - [[0, 1, 2, 3, 4.4], [5, 6, 7, 8, 9]], - mask=[[0, 1, 0, 1, 0], [1, 0, 1, 0, 1]], - ) - cube = Cube(data, long_name="foo") - expected_index = [0, 1] - expected_columns = [0, 1, 2, 3, 4] - data_frame = iris.pandas.as_data_frame(cube) - self.assertArrayEqual(data_frame, cube.data.astype("f").filled(np.nan)) - self.assertArrayEqual(data_frame.index, expected_index) - self.assertArrayEqual(data_frame.columns, expected_columns) - - def test_time_gregorian(self): - cube = Cube( - np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]), long_name="ts" - ) - day_offsets = [0, 100.1, 200.2, 300.3, 400.4] - time_coord = DimCoord( - day_offsets, long_name="time", units="days since 2000-01-01 00:00" - ) - cube.add_dim_coord(time_coord, 1) - data_frame = iris.pandas.as_data_frame(cube) - self.assertArrayEqual(data_frame, cube.data) - nanoseconds_per_day = 24 * 60 * 60 * 1000000000 - days_to_2000 = 365 * 30 + 7 - # pandas Timestamp class cannot handle floats in pandas None: - # Prepare a cube and a template - cube_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_xyt.nc"] - ) - self.cube = load_cube(cube_file_path) - - template_file_path = tests.get_data_path( - ["NetCDF", "regrid", "regrid_template_global_latlon.nc"] - ) - self.template_cube = load_cube(template_file_path) - - # Chunked data makes the regridder run repeatedly - self.cube.data = 
self.cube.lazy_data().rechunk((1, -1, -1)) - - def test_src_stays_lazy(self) -> None: - cube = self.cube.copy() - # Regrid the cube onto the template. - regridder = AreaWeightedRegridder(cube, self.template_cube) - regridder(cube) - # Base cube stays lazy - self.assertTrue(cube.has_lazy_data()) - - def test_output_lazy(self) -> None: - cube = self.cube.copy() - # Regrid the cube onto the template. - regridder = AreaWeightedRegridder(cube, self.template_cube) - out = regridder(cube) - # Lazy base cube means lazy output - self.assertTrue(out.has_lazy_data()) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/__init__.py b/lib/iris/tests/unit/analysis/cartography/__init__.py deleted file mode 100644 index 625a6fa141..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.analysis.cartography` module.""" diff --git a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py b/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py deleted file mode 100644 index a44661292f..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/test__quadrant_area.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -"""Unit tests for the `iris.analysis.cartography._quadrant_area` function""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
- -import iris.tests as tests # isort:skip - -import cf_units -import numpy as np - -from iris.analysis.cartography import ( - DEFAULT_SPHERICAL_EARTH_RADIUS, - _quadrant_area, -) - - -class TestExampleCases(tests.IrisTest): - def _radian_bounds(self, coord_list, dtype): - bound_deg = np.array(coord_list, dtype=dtype) - bound_deg = np.atleast_2d(bound_deg) - degrees = cf_units.Unit("degrees") - radians = cf_units.Unit("radians") - return degrees.convert(bound_deg, radians) - - def _as_bounded_coords(self, lats, lons, dtype=np.float64): - return ( - self._radian_bounds(lats, dtype=dtype), - self._radian_bounds(lons, dtype=dtype), - ) - - def test_area_in_north(self): - lats, lons = self._as_bounded_coords([0, 10], [0, 10]) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - self.assertArrayAllClose(area, [[1228800593851.443115234375]]) - - def test_area_in_far_north(self): - lats, lons = self._as_bounded_coords([70, 80], [0, 10]) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - self.assertArrayAllClose(area, [[319251845980.7646484375]]) - - def test_area_in_far_south(self): - lats, lons = self._as_bounded_coords([-80, -70], [0, 10]) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - self.assertArrayAllClose(area, [[319251845980.763671875]]) - - def test_area_in_north_with_reversed_lats(self): - lats, lons = self._as_bounded_coords([10, 0], [0, 10]) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - self.assertArrayAllClose(area, [[1228800593851.443115234375]]) - - def test_area_multiple_lats(self): - lats, lons = self._as_bounded_coords( - [[-80, -70], [0, 10], [70, 80]], [0, 10] - ) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - - self.assertArrayAllClose( - area, - [ - [319251845980.763671875], - [1228800593851.443115234375], - [319251845980.7646484375], - ], - ) - - def test_area_multiple_lats_and_lons(self): - lats, lons = self._as_bounded_coords( - [[-80, -70], 
[0, 10], [70, 80]], [[0, 10], [10, 30]] - ) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - - self.assertArrayAllClose( - area, - [ - [3.19251846e11, 6.38503692e11], - [1.22880059e12, 2.45760119e12], - [3.19251846e11, 6.38503692e11], - ], - ) - - def test_symmetric_64_bit(self): - lats, lons = self._as_bounded_coords( - [[-90, -89.375], [89.375, 90]], [0, 10], dtype=np.float64 - ) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - self.assertArrayAllClose(area, area[::-1]) - - def test_symmetric_32_bit(self): - lats, lons = self._as_bounded_coords( - [[-90, -89.375], [89.375, 90]], [0, 10], dtype=np.float32 - ) - area = _quadrant_area(lats, lons, DEFAULT_SPHERICAL_EARTH_RADIUS) - self.assertArrayAllClose(area, area[::-1]) - - -class TestErrorHandling(tests.IrisTest): - def test_lat_bounds_1d_error(self): - self._assert_error_on_malformed_bounds([0, 10], [[0, 10]]) - - def test_lon_bounds_1d_error(self): - self._assert_error_on_malformed_bounds([[0, 10]], [0, 10]) - - def test_too_many_lat_bounds_error(self): - self._assert_error_on_malformed_bounds([[0, 10, 20]], [[0, 10]]) - - def test_too_many_lon_bounds_error(self): - self._assert_error_on_malformed_bounds([[0, 10]], [[0, 10, 20]]) - - def _assert_error_on_malformed_bounds(self, lat_bnds, lon_bnds): - with self.assertRaisesRegex( - ValueError, r"Bounds must be \[n,2\] array" - ): - _quadrant_area(np.array(lat_bnds), np.array(lon_bnds), 1.0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py b/lib/iris/tests/unit/analysis/cartography/test__xy_range.py deleted file mode 100644 index 009c97fc34..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/test__xy_range.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
- -"""Unit tests for :func:`iris.analysis.cartography._xy_range`""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. - -import iris.tests as tests # isort:skip -import numpy as np - -from iris.analysis.cartography import _xy_range -import iris.tests.stock as stock - - -class Test(tests.IrisTest): - def test_bounds_mismatch(self): - cube = stock.realistic_3d() - cube.coord("grid_longitude").guess_bounds() - - with self.assertRaisesRegex(ValueError, "bounds"): - _ = _xy_range(cube) - - def test_non_circular(self): - cube = stock.realistic_3d() - assert not cube.coord("grid_longitude").circular - - result_non_circ = _xy_range(cube) - self.assertEqual(result_non_circ, ((-5.0, 5.0), (-4.0, 4.0))) - - @tests.skip_data - def test_geog_cs_circular(self): - cube = stock.global_pp() - assert cube.coord("longitude").circular - - result = _xy_range(cube) - np.testing.assert_array_almost_equal( - result, ((0, 360), (-90, 90)), decimal=0 - ) - - @tests.skip_data - def test_geog_cs_regional(self): - cube = stock.global_pp() - cube = cube[10:20, 20:30] - assert not cube.coord("longitude").circular - - result = _xy_range(cube) - np.testing.assert_array_almost_equal( - result, ((75, 108.75), (42.5, 65)), decimal=0 - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py b/lib/iris/tests/unit/analysis/cartography/test_area_weights.py deleted file mode 100644 index 696841ddd6..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/test_area_weights.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -"""Unit tests for the `iris.analysis.cartography.area_weights` function""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip -import iris.analysis.cartography -import iris.tests.stock as stock - - -class TestInvalidUnits(tests.IrisTest): - def test_latitude_no_units(self): - cube = stock.lat_lon_cube() - cube.coord("longitude").guess_bounds() - cube.coord("latitude").guess_bounds() - cube.coord("latitude").units = None - with self.assertRaisesRegex( - ValueError, "Units of degrees or " "radians required" - ): - iris.analysis.cartography.area_weights(cube) - - def test_longitude_no_units(self): - cube = stock.lat_lon_cube() - cube.coord("latitude").guess_bounds() - cube.coord("longitude").guess_bounds() - cube.coord("longitude").units = None - with self.assertRaisesRegex( - ValueError, "Units of degrees or " "radians required" - ): - iris.analysis.cartography.area_weights(cube) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py b/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py deleted file mode 100644 index 6b957baec6..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/test_gridcell_angles.py +++ /dev/null @@ -1,346 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function -:func:`iris.analysis.cartography.gridcell_angles`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from cf_units import Unit -import numpy as np - -from iris.analysis.cartography import gridcell_angles -from iris.coords import AuxCoord -from iris.cube import Cube -from iris.tests.stock import lat_lon_cube, sample_2d_latlons - - -def _2d_multicells_testcube(cellsize_degrees=1.0): - """ - Create a test cube with a grid of X and Y points, where each gridcell - is independent (disjoint), arranged at an angle == the x-coord point. - - """ - # Setup np.linspace arguments to make the coordinate points. - x0, x1, nx = -164, 164, 9 - y0, y1, ny = -75, 75, 7 - - lats = np.linspace(y0, y1, ny, endpoint=True) - lons_angles = np.linspace(x0, x1, nx, endpoint=True) - x_pts_2d, y_pts_2d = np.meshgrid(lons_angles, lats) - - # Make gridcells rectangles surrounding these centrepoints, but also - # tilted at various angles (= same as x-point lons, as that's easy). - - # Calculate centrepoint lons+lats : in radians, and shape (ny, nx, 1). - xangs, yangs = np.deg2rad(x_pts_2d), np.deg2rad(y_pts_2d) - xangs, yangs = [arr[..., None] for arr in (xangs, yangs)] - # Program which corners are up+down on each gridcell axis. - dx_corners = [[[-1, 1, 1, -1]]] - dy_corners = [[[-1, -1, 1, 1]]] - # Calculate the relative offsets in x+y at the 4 corners. - x_ofs_2d = cellsize_degrees * np.cos(xangs) * dx_corners - x_ofs_2d -= cellsize_degrees * np.sin(xangs) * dy_corners - y_ofs_2d = cellsize_degrees * np.cos(xangs) * dy_corners - y_ofs_2d += cellsize_degrees * np.sin(xangs) * dx_corners - # Apply a latitude stretch to make correct angles on the globe. - y_ofs_2d *= np.cos(yangs) - # Make bounds arrays by adding the corner offsets to the centrepoints. - x_bds_2d = x_pts_2d[..., None] + x_ofs_2d - y_bds_2d = y_pts_2d[..., None] + y_ofs_2d - - # Create a cube with these points + bounds in its 'X' and 'Y' coords. 
- co_x = AuxCoord( - points=x_pts_2d, - bounds=x_bds_2d, - standard_name="longitude", - units="degrees", - ) - co_y = AuxCoord( - points=y_pts_2d, - bounds=y_bds_2d, - standard_name="latitude", - units="degrees", - ) - cube = Cube(np.zeros((ny, nx))) - cube.add_aux_coord(co_x, (0, 1)) - cube.add_aux_coord(co_y, (0, 1)) - return cube - - -class TestGridcellAngles(tests.IrisTest): - def setUp(self): - # Make a small "normal" contiguous-bounded cube to test on. - # This one is regional. - self.standard_regional_cube = sample_2d_latlons( - regional=True, transformed=True - ) - # Record the standard correct angle answers. - result_cube = gridcell_angles(self.standard_regional_cube) - result_cube.convert_units("degrees") - self.standard_result_cube = result_cube - self.standard_small_cube_results = result_cube.data - - def _check_multiple_orientations_and_latitudes( - self, - method="mid-lhs, mid-rhs", - atol_degrees=0.005, - cellsize_degrees=1.0, - ): - - cube = _2d_multicells_testcube(cellsize_degrees=cellsize_degrees) - - # Calculate gridcell angles at each point. - angles_cube = gridcell_angles(cube, cell_angle_boundpoints=method) - - # Check that the results are a close match to the original intended - # gridcell orientation angles. - # NOTE: neither the above gridcell construction nor the calculation - # itself are exact : Errors scale as the square of gridcell sizes. - angles_cube.convert_units("degrees") - angles_calculated = angles_cube.data - - # Note: the gridcell angles **should** just match the longitudes at - # each point - angles_expected = cube.coord("longitude").points - - # Wrap both into standard range for comparison. - angles_calculated = (angles_calculated + 360.0) % 360.0 - angles_expected = (angles_expected + 360.0) % 360.0 - - # Assert (toleranced) equality, and return results. 
- self.assertArrayAllClose( - angles_calculated, angles_expected, atol=atol_degrees - ) - - return angles_calculated, angles_expected - - def test_various_orientations_and_locations(self): - self._check_multiple_orientations_and_latitudes() - - def test_result_form(self): - # Check properties of the result cube *other than* the data values. - test_cube = self.standard_regional_cube - result_cube = self.standard_result_cube - self.assertEqual( - result_cube.long_name, "gridcell_angle_from_true_east" - ) - self.assertEqual(result_cube.units, Unit("degrees")) - self.assertEqual(len(result_cube.coords()), 2) - self.assertEqual( - result_cube.coord(axis="x"), test_cube.coord(axis="x") - ) - self.assertEqual( - result_cube.coord(axis="y"), test_cube.coord(axis="y") - ) - - def test_bottom_edge_method(self): - # Get results with the "other" calculation method + check to tolerance. - # A smallish cellsize should yield similar results in both cases. - r1, _ = self._check_multiple_orientations_and_latitudes() - r2, _ = self._check_multiple_orientations_and_latitudes( - method="lower-left, lower-right", - cellsize_degrees=0.1, - atol_degrees=0.1, - ) - - # Not *exactly* the same : this checks we tested the 'other' method ! - self.assertFalse(np.allclose(r1, r2)) - # Note: results are a bit different in places. This is acceptable. - self.assertArrayAllClose(r1, r2, atol=0.1) - - def test_bounded_coord_args(self): - # Check that passing the coords gives the same result as the cube. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - result = gridcell_angles(co_x, co_y) - self.assertArrayAllClose(result.data, self.standard_small_cube_results) - - def test_coords_radians_args(self): - # Check it still works with coords converted to radians. 
- co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - for coord in (co_x, co_y): - coord.convert_units("radians") - result = gridcell_angles(co_x, co_y) - self.assertArrayAllClose(result.data, self.standard_small_cube_results) - - def test_bounds_array_args(self): - # Check we can calculate from bounds values alone. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - # Results drawn from coord bounds should be nearly the same, - # but not exactly, because of the different 'midpoint' values. - result = gridcell_angles(co_x.bounds, co_y.bounds) - self.assertArrayAllClose( - result.data, self.standard_small_cube_results, atol=0.1 - ) - - def test_unbounded_regional_coord_args(self): - # Remove the coord bounds to check points-based calculation. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - for coord in (co_x, co_y): - coord.bounds = None - result = gridcell_angles(co_x, co_y) - # Note: in this case, we can expect the leftmost and rightmost columns - # to be rubbish, because the data is not global. - # But the rest should match okay. - self.assertArrayAllClose( - result.data[:, 1:-1], self.standard_small_cube_results[:, 1:-1] - ) - - def test_points_array_args(self): - # Check we can calculate from points arrays alone (no coords). - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - # As previous, the leftmost and rightmost columns are not good. - result = gridcell_angles(co_x.points, co_y.points) - self.assertArrayAllClose( - result.data[:, 1:-1], self.standard_small_cube_results[:, 1:-1] - ) - - def test_unbounded_global(self): - # For a contiguous global grid, a result based on points, i.e. with the - # bounds removed, should be a reasonable match for the 'ideal' one - # based on the bounds. - - # Make a global cube + calculate ideal bounds-based results. 
- global_cube = sample_2d_latlons(transformed=True) - result_cube = gridcell_angles(global_cube) - result_cube.convert_units("degrees") - global_cube_results = result_cube.data - - # Check a points-based calculation on the same basic grid. - co_x, co_y = (global_cube.coord(axis=ax) for ax in ("x", "y")) - for coord in (co_x, co_y): - coord.bounds = None - result = gridcell_angles(co_x, co_y) - # In this case, the match is actually rather poor (!). - self.assertArrayAllClose(result.data, global_cube_results, atol=7.5) - # Leaving off first + last columns again gives a decent result. - self.assertArrayAllClose( - result.data[:, 1:-1], global_cube_results[:, 1:-1] - ) - - # NOTE: although this looks just as bad as 'test_points_array_args', - # maximum errors there in the end columns are actually > 100 degrees ! - - def test_nonlatlon_coord_system(self): - # Check with points specified in an unexpected coord system. - cube = sample_2d_latlons(regional=True, rotated=True) - result = gridcell_angles(cube) - self.assertArrayAllClose(result.data, self.standard_small_cube_results) - # Check that the result has transformed (true-latlon) coordinates. - self.assertEqual(len(result.coords()), 2) - x_coord = result.coord(axis="x") - y_coord = result.coord(axis="y") - self.assertEqual(x_coord.shape, cube.shape) - self.assertEqual(y_coord.shape, cube.shape) - self.assertIsNotNone(cube.coord_system) - self.assertIsNone(x_coord.coord_system) - self.assertIsNone(y_coord.coord_system) - - def test_fail_coords_bad_units(self): - # Check error with bad coords units. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - co_y.units = "m" - with self.assertRaisesRegex(ValueError, "must have angular units"): - gridcell_angles(co_x, co_y) - - def test_fail_nonarraylike(self): - # Check error with bad args. 
- co_x, co_y = 1, 2 - with self.assertRaisesRegex( - ValueError, "must have array shape property" - ): - gridcell_angles(co_x, co_y) - - def test_fail_non2d_coords(self): - # Check error with bad args. - cube = lat_lon_cube() - with self.assertRaisesRegex( - ValueError, "inputs must have 2-dimensional shape" - ): - gridcell_angles(cube) - - def test_fail_different_shapes(self): - # Check error with mismatched shapes. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - co_y = co_y[1:] - with self.assertRaisesRegex(ValueError, "must have same shape"): - gridcell_angles(co_x, co_y) - - def test_fail_different_coord_system(self): - # Check error with mismatched coord systems. - cube = sample_2d_latlons(regional=True, rotated=True) - cube.coord(axis="x").coord_system = None - with self.assertRaisesRegex( - ValueError, "must have same coordinate system" - ): - gridcell_angles(cube) - - def test_fail_cube_dims(self): - # Check error with mismatched cube dims. - cube = self.standard_regional_cube - # Make 5x6 into 5x5. - cube = cube[:, :-1] - co_x = cube.coord(axis="x") - pts, bds = co_x.points, co_x.bounds - co_new_x = co_x.copy( - points=pts.transpose((1, 0)), bounds=bds.transpose((1, 0, 2)) - ) - cube.remove_coord(co_x) - cube.add_aux_coord(co_new_x, (1, 0)) - with self.assertRaisesRegex( - ValueError, "must have the same cube dimensions" - ): - gridcell_angles(cube) - - def test_fail_coord_noncoord(self): - # Check that passing a coord + an array gives an error. - co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - with self.assertRaisesRegex( - ValueError, "is a Coordinate, but .* is not" - ): - gridcell_angles(co_x, co_y.bounds) - - def test_fail_noncoord_coord(self): - # Check that passing an array + a coord gives an error. 
- co_x, co_y = ( - self.standard_regional_cube.coord(axis=ax) for ax in ("x", "y") - ) - with self.assertRaisesRegex( - ValueError, "is a Coordinate, but .* is not" - ): - gridcell_angles(co_x.points, co_y) - - def test_fail_bad_method(self): - with self.assertRaisesRegex( - ValueError, "unrecognised cell_angle_boundpoints" - ): - self._check_multiple_orientations_and_latitudes( - method="something_unknown" - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/test_project.py b/lib/iris/tests/unit/analysis/cartography/test_project.py deleted file mode 100644 index 8649cc55ea..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/test_project.py +++ /dev/null @@ -1,169 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :func:`iris.analysis.cartography.project`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs -import numpy as np - -from iris.analysis.cartography import project -import iris.coord_systems -import iris.coords -import iris.cube -import iris.tests -import iris.tests.stock - -ROBINSON = ccrs.Robinson() - - -def low_res_4d(): - cube = iris.tests.stock.realistic_4d_no_derived() - cube = cube[0:2, 0:3, ::10, ::10] - cube.remove_coord("surface_altitude") - return cube - - -class TestAll(tests.IrisTest): - def setUp(self): - cs = iris.coord_systems.GeogCS(6371229) - self.cube = iris.cube.Cube(np.zeros(25).reshape(5, 5)) - self.cube.add_dim_coord( - iris.coords.DimCoord( - np.arange(5), - standard_name="latitude", - units="degrees", - coord_system=cs, - ), - 0, - ) - self.cube.add_dim_coord( - iris.coords.DimCoord( - np.arange(5), - standard_name="longitude", - units="degrees", - coord_system=cs, - ), - 1, - ) - - self.tcs = iris.coord_systems.GeogCS(6371229) - - def test_is_iris_coord_system(self): - res, _ = project(self.cube, self.tcs) - self.assertEqual( - res.coord("projection_y_coordinate").coord_system, self.tcs - ) - self.assertEqual( - res.coord("projection_x_coordinate").coord_system, self.tcs - ) - - self.assertIsNot( - res.coord("projection_y_coordinate").coord_system, self.tcs - ) - self.assertIsNot( - res.coord("projection_x_coordinate").coord_system, self.tcs - ) - - @tests.skip_data - def test_bad_resolution_negative(self): - cube = low_res_4d() - with self.assertRaises(ValueError): - project(cube, ROBINSON, nx=-200, ny=200) - - @tests.skip_data - def test_bad_resolution_non_numeric(self): - cube = low_res_4d() - with self.assertRaises(TypeError): - project(cube, ROBINSON, nx=200, ny="abc") - - @tests.skip_data - def test_missing_lat(self): - cube = low_res_4d() - cube.remove_coord("grid_latitude") - with self.assertRaises(ValueError): - project(cube, ROBINSON) - - @tests.skip_data - def test_missing_lon(self): - cube = low_res_4d() - cube.remove_coord("grid_longitude") - with 
self.assertRaises(ValueError): - project(cube, ROBINSON) - - @tests.skip_data - def test_missing_latlon(self): - cube = low_res_4d() - cube.remove_coord("grid_longitude") - cube.remove_coord("grid_latitude") - with self.assertRaises(ValueError): - project(cube, ROBINSON) - - @tests.skip_data - def test_default_resolution(self): - cube = low_res_4d() - new_cube, extent = project(cube, ROBINSON) - self.assertEqual(new_cube.shape, cube.shape) - - @tests.skip_data - def test_explicit_resolution(self): - cube = low_res_4d() - nx, ny = 5, 4 - new_cube, extent = project(cube, ROBINSON, nx=nx, ny=ny) - self.assertEqual(new_cube.shape, cube.shape[:2] + (ny, nx)) - - @tests.skip_data - def test_explicit_resolution_single_point(self): - cube = low_res_4d() - nx, ny = 1, 1 - new_cube, extent = project(cube, ROBINSON, nx=nx, ny=ny) - self.assertEqual(new_cube.shape, cube.shape[:2] + (ny, nx)) - - @tests.skip_data - def test_mismatched_coord_systems(self): - cube = low_res_4d() - cube.coord("grid_longitude").coord_system = None - with self.assertRaises(ValueError): - project(cube, ROBINSON) - - @tests.skip_data - def test_extent(self): - cube = low_res_4d() - _, extent = project(cube, ROBINSON) - self.assertEqual( - extent, - [ - -17005833.33052523, - 17005833.33052523, - -8625154.6651, - 8625154.6651, - ], - ) - - @tests.skip_data - def test_cube(self): - cube = low_res_4d() - new_cube, _ = project(cube, ROBINSON) - self.assertCMLApproxData(new_cube) - - @tests.skip_data - def test_no_coord_system(self): - cube = low_res_4d() - cube.coord("grid_longitude").coord_system = None - cube.coord("grid_latitude").coord_system = None - with iris.tests.mock.patch("warnings.warn") as warn: - _, _ = project(cube, ROBINSON) - warn.assert_called_once_with( - "Coordinate system of latitude and " - "longitude coordinates is not specified. " - "Assuming WGS84 Geodetic." 
- ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py deleted file mode 100644 index f5c882a983..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_grid_vectors.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function -:func:`iris.analysis.cartography.rotate_grid_vectors`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest.mock import Mock -from unittest.mock import call as mock_call - -import numpy as np - -from iris.analysis.cartography import rotate_grid_vectors -from iris.cube import Cube -from iris.tests.stock import sample_2d_latlons - - -class TestRotateGridVectors(tests.IrisTest): - def _check_angles_calculation( - self, angles_in_degrees=True, nan_angles_mask=None - ): - # Check basic maths on a 2d latlon grid. - u_cube = sample_2d_latlons(regional=True, transformed=True) - u_cube.units = "ms-1" - u_cube.rename("dx") - u_cube.data[...] = 0 - v_cube = u_cube.copy() - v_cube.rename("dy") - - # Define 6 different vectors, repeated in each data row. - in_vu = np.array([(0, 1), (2, -1), (-1, -1), (-3, 1), (2, 0), (0, 0)]) - in_angs = np.rad2deg(np.arctan2(in_vu[..., 0], in_vu[..., 1])) - in_mags = np.sqrt(np.sum(in_vu * in_vu, axis=1)) - v_cube.data[...] = in_vu[..., 0] - u_cube.data[...] = in_vu[..., 1] - - # Define 5 different test rotation angles, one for each data row. 
- rotation_angles = np.array([0.0, -45.0, 135, -140.0, 90.0]) - ang_cube_data = np.broadcast_to(rotation_angles[:, None], u_cube.shape) - ang_cube = u_cube.copy() - if angles_in_degrees: - ang_cube.units = "degrees" - else: - ang_cube.units = "radians" - ang_cube_data = np.deg2rad(ang_cube_data) - ang_cube.data[:] = ang_cube_data - - if nan_angles_mask is not None: - ang_cube.data[nan_angles_mask] = np.nan - - # Rotate all vectors by all the given angles. - result = rotate_grid_vectors(u_cube, v_cube, ang_cube) - out_u, out_v = [cube.data for cube in result] - - # Check that vector magnitudes were unchanged. - out_mags = np.sqrt(out_u * out_u + out_v * out_v) - expect_mags = in_mags[None, :] - self.assertArrayAllClose(out_mags, expect_mags) - - # Check that vector angles are all as expected. - out_angs = np.rad2deg(np.arctan2(out_v, out_u)) - expect_angs = in_angs[None, :] + rotation_angles[:, None] - ang_diffs = out_angs - expect_angs - # Fix for null vectors, and +/-360 differences. - ang_diffs[np.abs(out_mags) < 0.001] = 0.0 - ang_diffs[np.isclose(np.abs(ang_diffs), 360.0)] = 0.0 - # Check that any differences are very small. - self.assertArrayAllClose(ang_diffs, 0.0) - - # Check that results are always masked arrays, masked at NaN angles. - self.assertTrue(np.ma.isMaskedArray(out_u)) - self.assertTrue(np.ma.isMaskedArray(out_v)) - if nan_angles_mask is not None: - self.assertArrayEqual(out_u.mask, nan_angles_mask) - self.assertArrayEqual(out_v.mask, nan_angles_mask) - - def test_angles_calculation(self): - self._check_angles_calculation() - - def test_angles_in_radians(self): - self._check_angles_calculation(angles_in_degrees=False) - - def test_angles_from_grid(self): - # Check it will gets angles from 'u_cube', and pass any kwargs on to - # the angles routine. - u_cube = sample_2d_latlons(regional=True, transformed=True) - u_cube = u_cube[:2, :3] - u_cube.units = "ms-1" - u_cube.rename("dx") - u_cube.data[...] 
= 1.0 - v_cube = u_cube.copy() - v_cube.rename("dy") - v_cube.data[...] = 0.0 - - # Setup a fake angles result from the inner call to 'gridcell_angles'. - angles_result_data = np.array( - [[0.0, 90.0, 180.0], [-180.0, -90.0, 270.0]] - ) - angles_result_cube = Cube(angles_result_data, units="degrees") - angles_kwargs = {"this": 2} - angles_call_patch = self.patch( - "iris.analysis._grid_angles.gridcell_angles", - Mock(return_value=angles_result_cube), - ) - - # Call the routine. - result = rotate_grid_vectors( - u_cube, v_cube, grid_angles_kwargs=angles_kwargs - ) - - self.assertEqual( - angles_call_patch.call_args_list, [mock_call(u_cube, this=2)] - ) - - out_u, out_v = [cube.data for cube in result] - # Records what results should be for the various n*90deg rotations. - expect_u = np.array([[1.0, 0.0, -1.0], [-1.0, 0.0, 0.0]]) - expect_v = np.array([[0.0, 1.0, 0.0], [0.0, -1.0, -1.0]]) - # Check results are as expected. - self.assertArrayAllClose(out_u, expect_u) - self.assertArrayAllClose(out_v, expect_v) - - def test_nan_vectors(self): - bad_angle_points = np.zeros((5, 6), dtype=bool) - bad_angle_points[2, 3] = True - self._check_angles_calculation(nan_angles_mask=bad_angle_points) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py b/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py deleted file mode 100644 index 9e3af90603..0000000000 --- a/lib/iris/tests/unit/analysis/cartography/test_rotate_winds.py +++ /dev/null @@ -1,497 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function -:func:`iris.analysis.cartography.rotate_winds`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs -import numpy as np -import numpy.ma as ma - -from iris.analysis.cartography import rotate_winds, unrotate_pole -import iris.coord_systems -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube - - -def uv_cubes(x=None, y=None): - """Return u, v cubes with a grid in a rotated pole CRS.""" - cs = iris.coord_systems.RotatedGeogCS( - grid_north_pole_latitude=37.5, grid_north_pole_longitude=177.5 - ) - if x is None: - x = np.linspace(311.9, 391.1, 6) - if y is None: - y = np.linspace(-23.6, 24.8, 5) - - x2d, y2d = np.meshgrid(x, y) - u = 10 * (2 * np.cos(2 * np.deg2rad(x2d) + 3 * np.deg2rad(y2d + 30)) ** 2) - v = 20 * np.cos(6 * np.deg2rad(x2d)) - lon = DimCoord( - x, standard_name="grid_longitude", units="degrees", coord_system=cs - ) - lat = DimCoord( - y, standard_name="grid_latitude", units="degrees", coord_system=cs - ) - u_cube = Cube(u, standard_name="x_wind", units="m/s") - v_cube = Cube(v, standard_name="y_wind", units="m/s") - for cube in (u_cube, v_cube): - cube.add_dim_coord(lat.copy(), 0) - cube.add_dim_coord(lon.copy(), 1) - return u_cube, v_cube - - -def uv_cubes_3d(ref_cube, n_realization=3): - """ - Return 3d u, v cubes with a grid in a rotated pole CRS taken from - the provided 2d cube, by adding a realization dimension - coordinate bound to teh zeroth dimension. - - """ - lat = ref_cube.coord("grid_latitude") - lon = ref_cube.coord("grid_longitude") - x2d, y2d = np.meshgrid(lon.points, lat.points) - u = 10 * (2 * np.cos(2 * np.deg2rad(x2d) + 3 * np.deg2rad(y2d + 30)) ** 2) - v = 20 * np.cos(6 * np.deg2rad(x2d)) - # Multiply slices by factor to give variation over 0th dim. 
- factor = np.arange(1, n_realization + 1).reshape(n_realization, 1, 1) - u = factor * u - v = factor * v - realization = DimCoord(np.arange(n_realization), "realization") - u_cube = Cube(u, standard_name="x_wind", units="m/s") - v_cube = Cube(v, standard_name="y_wind", units="m/s") - for cube in (u_cube, v_cube): - cube.add_dim_coord(realization.copy(), 0) - cube.add_dim_coord(lat.copy(), 1) - cube.add_dim_coord(lon.copy(), 2) - return u_cube, v_cube - - -class TestPrerequisites(tests.IrisTest): - def test_different_coord_systems(self): - u, v = uv_cubes() - v.coord("grid_latitude").coord_system = iris.coord_systems.GeogCS(1) - with self.assertRaisesRegex( - ValueError, "Coordinates differ between u and v cubes" - ): - rotate_winds(u, v, iris.coord_systems.OSGB()) - - def test_different_xy_coord_systems(self): - u, v = uv_cubes() - u.coord("grid_latitude").coord_system = iris.coord_systems.GeogCS(1) - v.coord("grid_latitude").coord_system = iris.coord_systems.GeogCS(1) - with self.assertRaisesRegex( - ValueError, "Coordinate systems of x and y coordinates differ" - ): - rotate_winds(u, v, iris.coord_systems.OSGB()) - - def test_different_shape(self): - x = np.linspace(311.9, 391.1, 6) - y = np.linspace(-23.6, 24.8, 5) - u, _ = uv_cubes(x, y) - _, v = uv_cubes(x[:-1], y) - with self.assertRaisesRegex(ValueError, "same shape"): - rotate_winds(u, v, iris.coord_systems.OSGB()) - - def test_xy_dimensionality(self): - u, v = uv_cubes() - # Replace 1d lat with 2d lat. 
- x = u.coord("grid_longitude").points - y = u.coord("grid_latitude").points - x2d, y2d = np.meshgrid(x, y) - lat_2d = AuxCoord( - y2d, - "grid_latitude", - units="degrees", - coord_system=u.coord("grid_latitude").coord_system, - ) - for cube in (u, v): - cube.remove_coord("grid_latitude") - cube.add_aux_coord(lat_2d.copy(), (0, 1)) - - with self.assertRaisesRegex( - ValueError, - "x and y coordinates must have the same number of dimensions", - ): - rotate_winds(u, v, iris.coord_systems.OSGB()) - - def test_dim_mapping(self): - x = np.linspace(311.9, 391.1, 3) - y = np.linspace(-23.6, 24.8, 3) - u, v = uv_cubes(x, y) - v.transpose() - with self.assertRaisesRegex(ValueError, "Dimension mapping"): - rotate_winds(u, v, iris.coord_systems.OSGB()) - - -class TestAnalyticComparison(tests.IrisTest): - @staticmethod - def _unrotate_equation( - rotated_lons, rotated_lats, rotated_us, rotated_vs, pole_lon, pole_lat - ): - # Perform a rotated-pole 'unrotate winds' transformation on arrays of - # rotated-lat, rotated-lon, u and v. - # This can be defined as an analytic function : cf. UMDP015 - - # Work out the rotation angles. - lambda_angle = np.radians(pole_lon - 180.0) - phi_angle = np.radians(90.0 - pole_lat) - - # Get the locations in true lats+lons. - trueLongitude, trueLatitude = unrotate_pole( - rotated_lons, rotated_lats, pole_lon, pole_lat - ) - - # Calculate inter-coordinate rotation coefficients. - cos_rot = np.cos(np.radians(rotated_lons)) * np.cos( - np.radians(trueLongitude) - lambda_angle - ) + np.sin(np.radians(rotated_lons)) * np.sin( - np.radians(trueLongitude) - lambda_angle - ) * np.cos( - phi_angle - ) - sin_rot = -( - ( - np.sin(np.radians(trueLongitude) - lambda_angle) - * np.sin(phi_angle) - ) - / np.cos(np.radians(rotated_lats)) - ) - - # Matrix-multiply to rotate the vectors. 
- u_true = rotated_us * cos_rot - rotated_vs * sin_rot - v_true = rotated_vs * cos_rot + rotated_us * sin_rot - - return u_true, v_true - - def _check_rotated_to_true(self, u_rot, v_rot, target_cs, **kwds): - # Run test calculation (numeric). - u_true, v_true = rotate_winds(u_rot, v_rot, target_cs) - - # Perform same calculation via the reference method (equations). - cs_rot = u_rot.coord("grid_longitude").coord_system - pole_lat = cs_rot.grid_north_pole_latitude - pole_lon = cs_rot.grid_north_pole_longitude - rotated_lons = u_rot.coord("grid_longitude").points - rotated_lats = u_rot.coord("grid_latitude").points - rotated_lons_2d, rotated_lats_2d = np.meshgrid( - rotated_lons, rotated_lats - ) - rotated_u, rotated_v = u_rot.data, v_rot.data - u_ref, v_ref = self._unrotate_equation( - rotated_lons_2d, - rotated_lats_2d, - rotated_u, - rotated_v, - pole_lon, - pole_lat, - ) - - # Check that all the numerical results are within given tolerances. - self.assertArrayAllClose(u_true.data, u_ref, **kwds) - self.assertArrayAllClose(v_true.data, v_ref, **kwds) - - def test_rotated_to_true__small(self): - # Check for a small field with varying data. - target_cs = iris.coord_systems.GeogCS(6371229) - u_rot, v_rot = uv_cubes() - self._check_rotated_to_true( - u_rot, v_rot, target_cs, rtol=1e-5, atol=0.0005 - ) - - def test_rotated_to_true_global(self): - # Check for global fields with various constant wind values - # - constant in the rotated pole system, that is. - # We expect less accuracy where this gets close to the true poles. - target_cs = iris.coord_systems.GeogCS(6371229) - u_rot, v_rot = uv_cubes( - x=np.arange(0, 360.0, 15), y=np.arange(-89, 89, 10) - ) - for vector in ((1, 0), (0, 1), (1, 1), (-3, -1.5)): - u_rot.data[...] = vector[0] - v_rot.data[...] 
= vector[1] - self._check_rotated_to_true( - u_rot, - v_rot, - target_cs, - rtol=5e-4, - atol=5e-4, - err_msg="vector={}".format(vector), - ) - - -class TestRotatedToOSGB(tests.IrisTest): - # Define some coordinate ranges for the uv_cubes 'standard' RotatedPole - # system, that exceed the OSGB margins, but not by "too much". - _rp_x_min, _rp_x_max = -5.0, 5.0 - _rp_y_min, _rp_y_max = -5.0, 15.0 - - def _uv_cubes_limited_extent(self): - # Make test cubes suitable for transforming to OSGB, as the standard - # 'uv_cubes' result goes too far outside, leading to errors. - x = np.linspace(self._rp_x_min, self._rp_x_max, 6) - y = np.linspace(self._rp_y_min, self._rp_y_max, 5) - return uv_cubes(x=x, y=y) - - def test_name(self): - u, v = self._uv_cubes_limited_extent() - u.rename("bob") - v.rename("alice") - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - self.assertEqual(ut.name(), "transformed_" + u.name()) - self.assertEqual(vt.name(), "transformed_" + v.name()) - - def test_new_coords(self): - u, v = self._uv_cubes_limited_extent() - x = u.coord("grid_longitude").points - y = u.coord("grid_latitude").points - x2d, y2d = np.meshgrid(x, y) - src_crs = ccrs.RotatedPole(pole_longitude=177.5, pole_latitude=37.5) - tgt_crs = ccrs.OSGB() - xyz_tran = tgt_crs.transform_points(src_crs, x2d, y2d) - - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - - points = xyz_tran[..., 0].reshape(x2d.shape) - expected_x = AuxCoord( - points, - standard_name="projection_x_coordinate", - units="m", - coord_system=iris.coord_systems.OSGB(), - ) - self.assertEqual(ut.coord("projection_x_coordinate"), expected_x) - self.assertEqual(vt.coord("projection_x_coordinate"), expected_x) - - points = xyz_tran[..., 1].reshape(y2d.shape) - expected_y = AuxCoord( - points, - standard_name="projection_y_coordinate", - units="m", - coord_system=iris.coord_systems.OSGB(), - ) - self.assertEqual(ut.coord("projection_y_coordinate"), expected_y) - 
self.assertEqual(vt.coord("projection_y_coordinate"), expected_y) - - def test_new_coords_transposed(self): - u, v = self._uv_cubes_limited_extent() - # Transpose cubes so that cube is in xy order rather than the - # typical yx order of meshgrid. - u.transpose() - v.transpose() - x = u.coord("grid_longitude").points - y = u.coord("grid_latitude").points - x2d, y2d = np.meshgrid(x, y) - src_crs = ccrs.RotatedPole(pole_longitude=177.5, pole_latitude=37.5) - tgt_crs = ccrs.OSGB() - xyz_tran = tgt_crs.transform_points(src_crs, x2d, y2d) - - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - - points = xyz_tran[..., 0].reshape(x2d.shape) - expected_x = AuxCoord( - points, - standard_name="projection_x_coordinate", - units="m", - coord_system=iris.coord_systems.OSGB(), - ) - self.assertEqual(ut.coord("projection_x_coordinate"), expected_x) - self.assertEqual(vt.coord("projection_x_coordinate"), expected_x) - - points = xyz_tran[..., 1].reshape(y2d.shape) - expected_y = AuxCoord( - points, - standard_name="projection_y_coordinate", - units="m", - coord_system=iris.coord_systems.OSGB(), - ) - self.assertEqual(ut.coord("projection_y_coordinate"), expected_y) - self.assertEqual(vt.coord("projection_y_coordinate"), expected_y) - # Check dim mapping for 2d coords is yx. 
- expected_dims = u.coord_dims("grid_latitude") + u.coord_dims( - "grid_longitude" - ) - self.assertEqual( - ut.coord_dims("projection_x_coordinate"), expected_dims - ) - self.assertEqual( - ut.coord_dims("projection_y_coordinate"), expected_dims - ) - self.assertEqual( - vt.coord_dims("projection_x_coordinate"), expected_dims - ) - self.assertEqual( - vt.coord_dims("projection_y_coordinate"), expected_dims - ) - - def test_orig_coords(self): - u, v = self._uv_cubes_limited_extent() - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - self.assertEqual(u.coord("grid_latitude"), ut.coord("grid_latitude")) - self.assertEqual(v.coord("grid_latitude"), vt.coord("grid_latitude")) - self.assertEqual(u.coord("grid_longitude"), ut.coord("grid_longitude")) - self.assertEqual(v.coord("grid_longitude"), vt.coord("grid_longitude")) - - def test_magnitude_preservation(self): - u, v = self._uv_cubes_limited_extent() - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - orig_sq_mag = u.data ** 2 + v.data ** 2 - res_sq_mag = ut.data ** 2 + vt.data ** 2 - self.assertArrayAllClose(orig_sq_mag, res_sq_mag, rtol=5e-4) - - def test_data_values(self): - u, v = self._uv_cubes_limited_extent() - # Slice out 4 points that lie in and outside OSGB extent. - u = u[1:3, 3:5] - v = v[1:3, 3:5] - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - # Values precalculated and checked. - expected_ut_data = np.array( - [[0.16285514, 0.35323639], [1.82650698, 2.62455840]] - ) - expected_vt_data = np.array( - [[19.88979966, 19.01921346], [19.88018847, 19.01424281]] - ) - # Compare u and v data values against previously calculated values. 
- self.assertArrayAllClose(ut.data, expected_ut_data, rtol=1e-5) - self.assertArrayAllClose(vt.data, expected_vt_data, rtol=1e-5) - - def test_nd_data(self): - u2d, y2d = self._uv_cubes_limited_extent() - u, v = uv_cubes_3d(u2d) - u = u[:, 1:3, 3:5] - v = v[:, 1:3, 3:5] - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - # Values precalculated and checked (as test_data_values above), - # then scaled by factor [1, 2, 3] along 0th dim (see uv_cubes_3d()). - expected_ut_data = np.array( - [[0.16285514, 0.35323639], [1.82650698, 2.62455840]] - ) - expected_vt_data = np.array( - [[19.88979966, 19.01921346], [19.88018847, 19.01424281]] - ) - factor = np.array([1, 2, 3]).reshape(3, 1, 1) - expected_ut_data = factor * expected_ut_data - expected_vt_data = factor * expected_vt_data - # Compare u and v data values against previously calculated values. - self.assertArrayAlmostEqual(ut.data, expected_ut_data) - self.assertArrayAlmostEqual(vt.data, expected_vt_data) - - def test_transposed(self): - # Test case where the coordinates are not ordered yx in the cube. - u, v = self._uv_cubes_limited_extent() - # Slice out 4 points that lie in and outside OSGB extent. - u = u[1:3, 3:5] - v = v[1:3, 3:5] - # Transpose cubes (in-place) - u.transpose() - v.transpose() - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - # Values precalculated and checked. - expected_ut_data = np.array( - [[0.16285514, 0.35323639], [1.82650698, 2.62455840]] - ).T - expected_vt_data = np.array( - [[19.88979966, 19.01921346], [19.88018847, 19.01424281]] - ).T - # Compare u and v data values against previously calculated values. - self.assertArrayAllClose(ut.data, expected_ut_data, rtol=1e-5) - self.assertArrayAllClose(vt.data, expected_vt_data, rtol=1e-5) - - -class TestMasking(tests.IrisTest): - def test_rotated_to_osgb(self): - # Rotated Pole data with large extent. 
- x = np.linspace(311.9, 391.1, 10) - y = np.linspace(-23.6, 24.8, 8) - u, v = uv_cubes(x, y) - ut, vt = rotate_winds(u, v, iris.coord_systems.OSGB()) - - # Ensure cells with discrepancies in magnitude are masked. - self.assertTrue(ma.isMaskedArray(ut.data)) - self.assertTrue(ma.isMaskedArray(vt.data)) - - # Snapshot of mask with fixed tolerance of atol=2e-3 - expected_mask = np.array( - [ - [1, 1, 1, 0, 0, 0, 0, 0, 0, 1], - [1, 1, 1, 0, 0, 0, 0, 0, 0, 1], - [1, 1, 1, 1, 0, 0, 0, 0, 1, 1], - [1, 1, 1, 1, 0, 0, 0, 0, 1, 1], - [1, 1, 1, 1, 0, 0, 0, 0, 1, 1], - [1, 1, 1, 1, 1, 0, 0, 1, 1, 1], - [1, 1, 1, 1, 1, 0, 0, 1, 1, 1], - [1, 1, 1, 1, 1, 0, 0, 1, 1, 1], - ], - np.bool_, - ) - self.assertArrayEqual(expected_mask, ut.data.mask) - self.assertArrayEqual(expected_mask, vt.data.mask) - - # Check unmasked values have sufficiently small error in mag. - expected_mag = np.sqrt(u.data ** 2 + v.data ** 2) - # Use underlying data to ignore mask in calculation. - res_mag = np.sqrt(ut.data.data ** 2 + vt.data.data ** 2) - # Calculate percentage error (note there are no zero magnitudes - # so we can divide safely). - anom = 100.0 * np.abs(res_mag - expected_mag) / expected_mag - self.assertTrue(anom[~ut.data.mask].max() < 0.1) - - def test_rotated_to_unrotated(self): - # Suffiently accurate so that no mask is introduced. - u, v = uv_cubes() - ut, vt = rotate_winds(u, v, iris.coord_systems.GeogCS(6371229)) - self.assertFalse(ma.isMaskedArray(ut.data)) - self.assertFalse(ma.isMaskedArray(vt.data)) - - -class TestRoundTrip(tests.IrisTest): - def test_rotated_to_unrotated(self): - # Check ability to use 2d coords as input. - u, v = uv_cubes() - ut, vt = rotate_winds(u, v, iris.coord_systems.GeogCS(6371229)) - # Remove grid lat and lon, leaving 2d projection coords. - ut.remove_coord("grid_latitude") - vt.remove_coord("grid_latitude") - ut.remove_coord("grid_longitude") - vt.remove_coord("grid_longitude") - # Change back. 
- orig_cs = u.coord("grid_latitude").coord_system - res_u, res_v = rotate_winds(ut, vt, orig_cs) - # Check data values - limited accuracy due to numerical approx. - self.assertArrayAlmostEqual(res_u.data, u.data, decimal=3) - self.assertArrayAlmostEqual(res_v.data, v.data, decimal=3) - # Check coords locations. - x2d, y2d = np.meshgrid( - u.coord("grid_longitude").points, u.coord("grid_latitude").points - ) - # Shift longitude from 0 to 360 -> -180 to 180. - x2d = np.where(x2d > 180, x2d - 360, x2d) - res_x = res_u.coord( - "projection_x_coordinate", coord_system=orig_cs - ).points - res_y = res_u.coord( - "projection_y_coordinate", coord_system=orig_cs - ).points - self.assertArrayAlmostEqual(res_x, x2d) - self.assertArrayAlmostEqual(res_y, y2d) - res_x = res_v.coord( - "projection_x_coordinate", coord_system=orig_cs - ).points - res_y = res_v.coord( - "projection_y_coordinate", coord_system=orig_cs - ).points - self.assertArrayAlmostEqual(res_x, x2d) - self.assertArrayAlmostEqual(res_y, y2d) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/geometry/__init__.py b/lib/iris/tests/unit/analysis/geometry/__init__.py deleted file mode 100644 index c57f5e246a..0000000000 --- a/lib/iris/tests/unit/analysis/geometry/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.analysis.geometry` module.""" diff --git a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py b/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py deleted file mode 100644 index 2509ac1a92..0000000000 --- a/lib/iris/tests/unit/analysis/geometry/test__extract_relevant_cube_slice.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.analysis.geometry._extract_relevant_cube_slice`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. - -import iris.tests as tests # isort:skip -import shapely.geometry - -from iris.analysis.geometry import _extract_relevant_cube_slice -import iris.tests.stock as stock - - -class Test(tests.IrisTest): - def test_polygon_smaller_than_cube(self): - cube = stock.lat_lon_cube() - cube.dim_coords[0].guess_bounds() - cube.dim_coords[1].guess_bounds() - geometry = shapely.geometry.box(-0.4, -0.4, 0.4, 0.4) - actual = _extract_relevant_cube_slice(cube, geometry) - target = ( - cube[1, 1], - cube[1, 1].coords(axis="x")[0], - cube[1, 1].coords(axis="y")[0], - (1, 1, 1, 1), - ) - self.assertEqual(target, actual) - - def test_polygon_larger_than_cube(self): - cube = stock.lat_lon_cube() - cube.dim_coords[0].guess_bounds() - cube.dim_coords[1].guess_bounds() - geometry = shapely.geometry.box(-0.6, -0.6, 0.6, 0.6) - actual = _extract_relevant_cube_slice(cube, geometry) - target = ( - cube[:, :3], - cube[:, :3].coords(axis="x")[0], - cube[:, :3].coords(axis="y")[0], - (0, 0, 2, 2), - ) - self.assertEqual(target, actual) - - def test_polygon_on_cube_boundary(self): - cube = stock.lat_lon_cube() - cube.dim_coords[0].guess_bounds() - cube.dim_coords[1].guess_bounds() - geometry = shapely.geometry.box(-0.5, -0.5, 
0.5, 0.5) - actual = _extract_relevant_cube_slice(cube, geometry) - target = ( - cube[1, 1], - cube[1, 1].coords(axis="x")[0], - cube[1, 1].coords(axis="y")[0], - (1, 1, 1, 1), - ) - self.assertEqual(target, actual) - - def test_rotated_polygon_on_cube_boundary(self): - cube = stock.lat_lon_cube() - cube.dim_coords[0].guess_bounds() - cube.dim_coords[1].guess_bounds() - geometry = shapely.geometry.Polygon( - ((0.0, -0.5), (-0.5, 0.0), (0.0, 0.5), (0.5, 0.0)) - ) - actual = _extract_relevant_cube_slice(cube, geometry) - target = ( - cube[1, 1], - cube[1, 1].coords(axis="x")[0], - cube[1, 1].coords(axis="y")[0], - (1, 1, 1, 1), - ) - self.assertEqual(target, actual) - - def test_rotated_polygon_larger_than_cube_boundary(self): - cube = stock.lat_lon_cube() - cube.dim_coords[0].guess_bounds() - cube.dim_coords[1].guess_bounds() - geometry = shapely.geometry.Polygon( - ((0.0, -0.6), (-0.6, 0.0), (0.0, 0.6), (0.6, 0.0)) - ) - actual = _extract_relevant_cube_slice(cube, geometry) - target = ( - cube[:, :3], - cube[:, :3].coords(axis="x")[0], - cube[:, :3].coords(axis="y")[0], - (0, 0, 2, 2), - ) - self.assertEqual(target, actual) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py b/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py deleted file mode 100644 index 49e03a1174..0000000000 --- a/lib/iris/tests/unit/analysis/geometry/test_geometry_area_weights.py +++ /dev/null @@ -1,171 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.analysis.geometry.geometry_area_weights` -function. - - """ - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
- -import iris.tests as tests # isort:skip -import warnings - -import numpy as np -import shapely.geometry - -from iris.analysis.geometry import geometry_area_weights -from iris.coords import DimCoord -from iris.cube import Cube -import iris.tests.stock as stock - - -class Test(tests.IrisTest): - def setUp(self): - x_coord = DimCoord([1.0, 3.0], "longitude", bounds=[[0, 2], [2, 4]]) - y_coord = DimCoord([1.0, 3.0], "latitude", bounds=[[0, 2], [2, 4]]) - self.data = np.empty((4, 2, 2)) - dim_coords_and_dims = [(y_coord, (1,)), (x_coord, (2,))] - self.cube = Cube(self.data, dim_coords_and_dims=dim_coords_and_dims) - self.geometry = shapely.geometry.Polygon( - [(3, 3), (3, 50), (50, 50), (50, 3)] - ) - - def test_no_overlap(self): - geometry = shapely.geometry.Polygon([(4, 4), (4, 6), (6, 6), (6, 4)]) - weights = geometry_area_weights(self.cube, geometry) - self.assertEqual(np.sum(weights), 0) - - def test_overlap(self): - weights = geometry_area_weights(self.cube, self.geometry) - expected = np.repeat( - [[[0.0, 0.0], [0.0, 1.0]]], self.data.shape[0], axis=0 - ) - self.assertArrayEqual(weights, expected) - - def test_overlap_normalize(self): - weights = geometry_area_weights( - self.cube, self.geometry, normalize=True - ) - expected = np.repeat( - [[[0.0, 0.0], [0.0, 0.25]]], self.data.shape[0], axis=0 - ) - self.assertArrayEqual(weights, expected) - - @tests.skip_data - def test_distinct_xy(self): - cube = stock.simple_pp() - cube = cube[:4, :4] - lon = cube.coord("longitude") - lat = cube.coord("latitude") - lon.guess_bounds() - lat.guess_bounds() - from iris.util import regular_step - - quarter = abs(regular_step(lon) * regular_step(lat) * 0.25) - half = abs(regular_step(lon) * regular_step(lat) * 0.5) - minx = 3.7499990463256836 - maxx = 7.499998092651367 - miny = 84.99998474121094 - maxy = 89.99998474121094 - geometry = shapely.geometry.box(minx, miny, maxx, maxy) - weights = geometry_area_weights(cube, geometry) - target = np.array( - [ - [0, quarter, quarter, 
0], - [0, half, half, 0], - [0, quarter, quarter, 0], - [0, 0, 0, 0], - ] - ) - self.assertTrue(np.allclose(weights, target)) - - @tests.skip_data - def test_distinct_xy_bounds(self): - # cases where geometry bnds are outside cube bnds correctly handled? - cube = stock.simple_pp() - cube = cube[:4, :4] - lon = cube.coord("longitude") - lat = cube.coord("latitude") - lon.guess_bounds() - lat.guess_bounds() - from iris.util import regular_step - - quarter = abs(regular_step(lon) * regular_step(lat) * 0.25) - half = abs(regular_step(lon) * regular_step(lat) * 0.5) - full = abs(regular_step(lon) * regular_step(lat)) - minx = 3.7499990463256836 - maxx = 13.12499619 - maxx_overshoot = 15.0 - miny = 84.99998474121094 - maxy = 89.99998474121094 - geometry = shapely.geometry.box(minx, miny, maxx, maxy) - geometry_overshoot = shapely.geometry.box( - minx, miny, maxx_overshoot, maxy - ) - weights = geometry_area_weights(cube, geometry) - weights_overshoot = geometry_area_weights(cube, geometry_overshoot) - target = np.array( - [ - [0, quarter, half, half], - [0, half, full, full], - [0, quarter, half, half], - [0, 0, 0, 0], - ] - ) - self.assertTrue(np.allclose(weights, target)) - self.assertTrue(np.allclose(weights_overshoot, target)) - - @tests.skip_data - def test_distinct_xy_bounds_pole(self): - # is UserWarning issued for out-of-bounds? results will be unexpected! 
- cube = stock.simple_pp() - cube = cube[:4, :4] - lon = cube.coord("longitude") - lat = cube.coord("latitude") - lon.guess_bounds() - lat.guess_bounds() - from iris.util import regular_step - - quarter = abs(regular_step(lon) * regular_step(lat) * 0.25) - half = abs(regular_step(lon) * regular_step(lat) * 0.5) - top_cell_half = abs(regular_step(lon) * (90 - lat.bounds[0, 1]) * 0.5) - minx = 3.7499990463256836 - maxx = 7.499998092651367 - miny = 84.99998474121094 - maxy = 99.99998474121094 - geometry = shapely.geometry.box(minx, miny, maxx, maxy) - # see http://stackoverflow.com/a/3892301 to assert warnings - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") # always trigger all warnings - weights = geometry_area_weights(cube, geometry) - self.assertEqual( - str(w[-1].message), - "The geometry exceeds the " - "cube's y dimension at the upper end.", - ) - self.assertTrue(issubclass(w[-1].category, UserWarning)) - target = np.array( - [ - [0, top_cell_half, top_cell_half, 0], - [0, half, half, 0], - [0, quarter, quarter, 0], - [0, 0, 0, 0], - ] - ) - self.assertTrue(np.allclose(weights, target)) - - def test_shared_xy(self): - cube = stock.track_1d() - geometry = shapely.geometry.box(1, 4, 3.5, 7) - weights = geometry_area_weights(cube, geometry) - target = np.array([0, 0, 2, 0.5, 0, 0, 0, 0, 0, 0, 0]) - self.assertTrue(np.allclose(weights, target)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/interpolation/__init__.py b/lib/iris/tests/unit/analysis/interpolation/__init__.py deleted file mode 100644 index 3825dacda3..0000000000 --- a/lib/iris/tests/unit/analysis/interpolation/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.analysis._interpolation` package.""" diff --git a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py b/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py deleted file mode 100644 index 6c3999a6f4..0000000000 --- a/lib/iris/tests/unit/analysis/interpolation/test_RectilinearInterpolator.py +++ /dev/null @@ -1,612 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :class:`iris.analysis._interpolation.RectilinearInterpolator`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import datetime - -import numpy as np - -import iris -from iris._lazy_data import as_lazy_data -from iris.analysis._interpolation import RectilinearInterpolator -import iris.coords -import iris.cube -import iris.exceptions -import iris.tests.stock as stock - -LINEAR = "linear" -NEAREST = "nearest" - -EXTRAPOLATE = "extrapolate" - - -class ThreeDimCube(tests.IrisTest): - def setUp(self): - cube = stock.simple_3d_w_multidim_coords() - cube.add_aux_coord( - iris.coords.DimCoord(np.arange(2), "height", units="1"), 0 - ) - cube.add_dim_coord( - iris.coords.DimCoord(np.arange(3), "latitude", units="1"), 1 - ) - cube.add_dim_coord( - iris.coords.DimCoord(np.arange(4), "longitude", units="1"), 2 - ) - self.data = np.arange(24).reshape(2, 3, 4).astype(np.float32) - cube.data = self.data - self.cube = cube - - -class Test___init__(ThreeDimCube): - def test_properties(self): - interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, EXTRAPOLATE - ) - - self.assertEqual(interpolator.method, LINEAR) - self.assertEqual(interpolator.extrapolation_mode, EXTRAPOLATE) - - # Access to cube property of the RectilinearInterpolator instance. 
- self.assertEqual(interpolator.cube, self.cube) - - # Access to the resulting coordinate which we are interpolating over. - self.assertEqual(interpolator.coords, [self.cube.coord("latitude")]) - - -class Test___init____validation(ThreeDimCube): - def test_interpolator_overspecified(self): - # Over specification by means of interpolating over two coordinates - # mapped to the same dimension. - msg = ( - "Coordinates repeat a data dimension - " - "the interpolation would be over-specified" - ) - with self.assertRaisesRegex(ValueError, msg): - RectilinearInterpolator( - self.cube, ["wibble", "height"], LINEAR, EXTRAPOLATE - ) - - def test_interpolator_overspecified_scalar(self): - # Over specification by means of interpolating over one dimension - # coordinate and a scalar coordinate (not mapped to a dimension). - self.cube.add_aux_coord( - iris.coords.AuxCoord(1, long_name="scalar"), None - ) - - msg = ( - "Coordinates repeat a data dimension - " - "the interpolation would be over-specified" - ) - with self.assertRaisesRegex(ValueError, msg): - RectilinearInterpolator( - self.cube, ["wibble", "scalar"], LINEAR, EXTRAPOLATE - ) - - def test_interpolate__decreasing(self): - def check_expected(): - # Check a simple case is equivalent to extracting the first row. - self.interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, EXTRAPOLATE - ) - expected = self.data[:, 0:1, :] - result = self.interpolator([[0]]) - self.assertArrayEqual(result.data, expected) - - # Check with normal cube. - check_expected() - # Check same result from a cube inverted in the latitude dimension. - self.cube = self.cube[:, ::-1] - check_expected() - - def test_interpolate_non_monotonic(self): - self.cube.add_aux_coord( - iris.coords.AuxCoord([0, 3, 2], long_name="non-monotonic"), 1 - ) - msg = ( - "Cannot interpolate over the non-monotonic coordinate " - "non-monotonic." 
- ) - with self.assertRaisesRegex(ValueError, msg): - RectilinearInterpolator( - self.cube, ["non-monotonic"], LINEAR, EXTRAPOLATE - ) - - -class Test___call___1D(ThreeDimCube): - def setUp(self): - ThreeDimCube.setUp(self) - self.interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, EXTRAPOLATE - ) - - def test_interpolate_bad_coord_name(self): - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - RectilinearInterpolator( - self.cube, ["doesnt exist"], LINEAR, EXTRAPOLATE - ) - - def test_interpolate_data_single(self): - # Single sample point. - result = self.interpolator([[1.5]]) - expected = self.data[:, 1:, :].mean(axis=1).reshape(2, 1, 4) - self.assertArrayEqual(result.data, expected) - - foo_res = result.coord("foo").points - bar_res = result.coord("bar").points - expected_foo = ( - self.cube[:, 1:, :].coord("foo").points.mean(axis=0).reshape(1, 4) - ) - expected_bar = ( - self.cube[:, 1:, :].coord("bar").points.mean(axis=0).reshape(1, 4) - ) - - self.assertArrayEqual(foo_res, expected_foo) - self.assertArrayEqual(bar_res, expected_bar) - - def test_interpolate_data_multiple(self): - # Multiple sample points for a single coordinate (these points are not - # interpolated). - result = self.interpolator([[1, 2]]) - self.assertArrayEqual(result.data, self.data[:, 1:3, :]) - - foo_res = result.coord("foo").points - bar_res = result.coord("bar").points - expected_foo = self.cube[:, 1:, :].coord("foo").points - expected_bar = self.cube[:, 1:, :].coord("bar").points - - self.assertArrayEqual(foo_res, expected_foo) - self.assertArrayEqual(bar_res, expected_bar) - - def test_interpolate_data_linear_extrapolation(self): - # Sample point outside the coordinate range. 
- result = self.interpolator([[-1]]) - expected = self.data[:, 0:1] - (self.data[:, 1:2] - self.data[:, 0:1]) - self.assertArrayEqual(result.data, expected) - - def _extrapolation_dtype(self, dtype): - self.cube.data = self.cube.data.astype(dtype) - interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, extrapolation_mode="nan" - ) - result = interpolator([[-1]]) - self.assertTrue(np.all(np.isnan(result.data))) - - def test_extrapolation_nan_float32(self): - # Ensure np.nan in a float32 array results. - self._extrapolation_dtype(np.float32) - - def test_extrapolation_nan_float64(self): - # Ensure np.nan in a float64 array results. - self._extrapolation_dtype(np.float64) - - def test_interpolate_data_error_on_extrapolation(self): - msg = "One of the requested xi is out of bounds in dimension 0" - interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, extrapolation_mode="error" - ) - with self.assertRaisesRegex(ValueError, msg): - interpolator([[-1]]) - - def test_interpolate_data_unsupported_extrapolation(self): - msg = "Extrapolation mode 'unsupported' not supported" - with self.assertRaisesRegex(ValueError, msg): - RectilinearInterpolator( - self.cube, - ["latitude"], - LINEAR, - extrapolation_mode="unsupported", - ) - - def test_multi_points_array(self): - # Providing a multidimensional sample points for a 1D interpolation. - # i.e. points given for two coordinates where there are only one - # specified. - msg = "Expected sample points for 1 coordinates, got 2." 
- with self.assertRaisesRegex(ValueError, msg): - self.interpolator([[1, 2], [1]]) - - def test_interpolate_data_dtype_casting(self): - data = self.data.astype(int) - self.cube.data = data - self.interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, EXTRAPOLATE - ) - result = self.interpolator([[0.125]]) - self.assertEqual(result.data.dtype, np.float64) - - def test_default_collapse_scalar(self): - interpolator = RectilinearInterpolator( - self.cube, ["wibble"], LINEAR, EXTRAPOLATE - ) - result = interpolator([0]) - self.assertEqual(result.shape, (3, 4)) - - def test_collapse_scalar(self): - interpolator = RectilinearInterpolator( - self.cube, ["wibble"], LINEAR, EXTRAPOLATE - ) - result = interpolator([0], collapse_scalar=True) - self.assertEqual(result.shape, (3, 4)) - - def test_no_collapse_scalar(self): - interpolator = RectilinearInterpolator( - self.cube, ["wibble"], LINEAR, EXTRAPOLATE - ) - result = interpolator([0], collapse_scalar=False) - self.assertEqual(result.shape, (1, 3, 4)) - - def test_unsorted_datadim_mapping(self): - # Currently unsorted data dimension mapping is not supported as the - # indexing is not yet clever enough to remap the interpolated - # coordinates. - self.cube.transpose((0, 2, 1)) - interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, EXTRAPOLATE - ) - msg = "Currently only increasing data_dims is supported." - with self.assertRaisesRegex(NotImplementedError, msg): - interpolator([0]) - - -class Test___call___1D_circular(ThreeDimCube): - # Note: all these test data interpolation. 
- def setUp(self): - ThreeDimCube.setUp(self) - self.cube.coord("longitude")._points = np.linspace( - 0, 360, 4, endpoint=False - ) - self.cube.coord("longitude").circular = True - self.cube.coord("longitude").units = "degrees" - self.interpolator = RectilinearInterpolator( - self.cube, ["longitude"], LINEAR, extrapolation_mode="nan" - ) - self.cube_reverselons = self.cube[:, :, ::-1] - self.interpolator_reverselons = RectilinearInterpolator( - self.cube_reverselons, - ["longitude"], - LINEAR, - extrapolation_mode="nan", - ) - - self.testpoints_fully_wrapped = ([[180, 270]], [[-180, -90]]) - self.testpoints_partially_wrapped = ([[180, 90]], [[-180, 90]]) - self.testpoints_fully_wrapped_twice = ( - [np.linspace(-360, 360, 100)], - [(np.linspace(-360, 360, 100) + 360) % 360], - ) - - def test_fully_wrapped(self): - points, points_wrapped = self.testpoints_fully_wrapped - expected = self.interpolator(points) - result = self.interpolator(points_wrapped) - self.assertArrayEqual(expected.data, result.data) - - def test_fully_wrapped_reversed_mainpoints(self): - points, _ = self.testpoints_fully_wrapped - expected = self.interpolator(points) - result = self.interpolator_reverselons(points) - self.assertArrayEqual(expected.data, result.data) - - def test_fully_wrapped_reversed_testpoints(self): - _, points = self.testpoints_fully_wrapped - expected = self.interpolator(points) - result = self.interpolator_reverselons(points) - self.assertArrayEqual(expected.data, result.data) - - def test_partially_wrapped(self): - points, points_wrapped = self.testpoints_partially_wrapped - expected = self.interpolator(points) - result = self.interpolator(points_wrapped) - self.assertArrayEqual(expected.data, result.data) - - def test_partially_wrapped_reversed_mainpoints(self): - points, _ = self.testpoints_partially_wrapped - expected = self.interpolator(points) - result = self.interpolator_reverselons(points) - self.assertArrayEqual(expected.data, result.data) - - def 
test_partially_wrapped_reversed_testpoints(self): - points, _ = self.testpoints_partially_wrapped - expected = self.interpolator(points) - result = self.interpolator_reverselons(points) - self.assertArrayEqual(expected.data, result.data) - - def test_fully_wrapped_twice(self): - xs, xs_not_wrapped = self.testpoints_fully_wrapped_twice - expected = self.interpolator(xs) - result = self.interpolator(xs_not_wrapped) - self.assertArrayEqual(expected.data, result.data) - - def test_fully_wrapped_twice_reversed_mainpoints(self): - _, points = self.testpoints_fully_wrapped_twice - expected = self.interpolator(points) - result = self.interpolator_reverselons(points) - self.assertArrayEqual(expected.data, result.data) - - def test_fully_wrapped_not_circular(self): - cube = stock.lat_lon_cube() - new_long = cube.coord("longitude").copy( - cube.coord("longitude").points + 710 - ) - cube.remove_coord("longitude") - cube.add_dim_coord(new_long, 1) - - interpolator = RectilinearInterpolator( - cube, ["longitude"], LINEAR, EXTRAPOLATE - ) - res = interpolator([-10]) - self.assertArrayEqual(res.data, cube[:, 1].data) - - -class Test___call___1D_singlelendim(ThreeDimCube): - def setUp(self): - """ - thingness / (1) (wibble: 2; latitude: 1) - Dimension coordinates: - wibble x - - latitude - x - Auxiliary coordinates: - height x - - bar - x - foo - x - Scalar coordinates: - longitude: 0 - """ - ThreeDimCube.setUp(self) - self.cube = self.cube[:, 0:1, 0] - self.interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, EXTRAPOLATE - ) - - def test_interpolate_data_linear_extrapolation(self): - # Linear extrapolation of a single valued element. 
- result = self.interpolator([[1001]]) - self.assertArrayEqual(result.data, self.cube.data) - - def test_interpolate_data_nan_extrapolation(self): - interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, extrapolation_mode="nan" - ) - result = interpolator([[1001]]) - self.assertTrue(np.all(np.isnan(result.data))) - - def test_interpolate_data_nan_extrapolation_not_needed(self): - # No extrapolation for a single length dimension. - interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, extrapolation_mode="nan" - ) - result = interpolator([[0]]) - self.assertArrayEqual(result.data, self.cube.data) - - -class Test___call___masked(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_4d_with_hybrid_height() - mask = np.isnan(self.cube.data) - mask[::3, ::3] = True - self.cube.data = np.ma.masked_array(self.cube.data, mask=mask) - - def test_orthogonal_cube(self): - interpolator = RectilinearInterpolator( - self.cube, ["grid_latitude"], LINEAR, EXTRAPOLATE - ) - result_cube = interpolator([1]) - - # Explicit mask comparison to ensure mask retention. 
- # Masked value input - self.assertTrue(self.cube.data.mask[0, 0, 0, 0]) - # Mask retention on output - self.assertTrue(result_cube.data.mask[0, 0, 0]) - - self.assertCML( - result_cube, - ( - "experimental", - "analysis", - "interpolate", - "LinearInterpolator", - "orthogonal_cube_with_factory.cml", - ), - ) - - -class Test___call___2D(ThreeDimCube): - def setUp(self): - ThreeDimCube.setUp(self) - self.interpolator = RectilinearInterpolator( - self.cube, ["latitude", "longitude"], LINEAR, EXTRAPOLATE - ) - - def test_interpolate_data(self): - result = self.interpolator([[1, 2], [2]]) - expected = self.data[:, 1:3, 2:3] - self.assertArrayEqual(result.data, expected) - - index = (slice(None), slice(1, 3, 1), slice(2, 3, 1)) - for coord in self.cube.coords(): - coord_res = result.coord(coord).points - coord_expected = self.cube[index].coord(coord).points - - self.assertArrayEqual(coord_res, coord_expected) - - def test_orthogonal_points(self): - result = self.interpolator([[1, 2], [1, 2]]) - expected = self.data[:, 1:3, 1:3] - self.assertArrayEqual(result.data, expected) - - index = (slice(None), slice(1, 3, 1), slice(1, 3, 1)) - for coord in self.cube.coords(): - coord_res = result.coord(coord).points - coord_expected = self.cube[index].coord(coord).points - - self.assertArrayEqual(coord_res, coord_expected) - - def test_multi_dim_coord_interpolation(self): - msg = "Interpolation coords must be 1-d for rectilinear interpolation." 
- with self.assertRaisesRegex(ValueError, msg): - interpolator = RectilinearInterpolator( - self.cube, ["foo", "bar"], LINEAR, EXTRAPOLATE - ) - interpolator([[15], [10]]) - - -class Test___call___2D_non_contiguous(ThreeDimCube): - def setUp(self): - ThreeDimCube.setUp(self) - coords = ["height", "longitude"] - self.interpolator = RectilinearInterpolator( - self.cube, coords, LINEAR, EXTRAPOLATE - ) - - def test_interpolate_data_multiple(self): - result = self.interpolator([[1], [1, 2]]) - expected = self.data[1:2, :, 1:3] - self.assertArrayEqual(result.data, expected) - - index = (slice(1, 2), slice(None), slice(1, 3, 1)) - for coord in self.cube.coords(): - coord_res = result.coord(coord).points - coord_expected = self.cube[index].coord(coord).points - - self.assertArrayEqual(coord_res, coord_expected) - - def test_orthogonal_cube(self): - result_cube = self.interpolator( - [np.int64([0, 1, 1]), np.int32([0, 1])] - ) - result_path = ( - "experimental", - "analysis", - "interpolate", - "LinearInterpolator", - "basic_orthogonal_cube.cml", - ) - self.assertCMLApproxData(result_cube, result_path) - self.assertEqual(result_cube.coord("longitude").dtype, np.int32) - self.assertEqual(result_cube.coord("height").dtype, np.int64) - - def test_orthogonal_cube_squash(self): - result_cube = self.interpolator([np.int64(0), np.int32([0, 1])]) - result_path = ( - "experimental", - "analysis", - "interpolate", - "LinearInterpolator", - "orthogonal_cube_1d_squashed.cml", - ) - self.assertCMLApproxData(result_cube, result_path) - self.assertEqual(result_cube.coord("longitude").dtype, np.int32) - self.assertEqual(result_cube.coord("height").dtype, np.int64) - - non_collapsed_cube = self.interpolator( - [[np.int64(0)], np.int32([0, 1])], collapse_scalar=False - ) - result_path = ( - "experimental", - "analysis", - "interpolate", - "LinearInterpolator", - "orthogonal_cube_1d_squashed_2.cml", - ) - self.assertCML(non_collapsed_cube[0, ...], result_path) - self.assertCML(result_cube, 
result_path) - self.assertEqual(result_cube, non_collapsed_cube[0, ...]) - - -class Test___call___lazy_data(ThreeDimCube): - def test_src_cube_data_loaded(self): - # RectilinearInterpolator operates using a snapshot of the source cube. - # If the source cube has lazy data when the interpolator is - # instantiated we want to make sure the source cube's data is - # loaded as a consequence of interpolation to avoid the risk - # of loading it again and again. - - # Modify self.cube to have lazy data. - self.cube.data = as_lazy_data(self.data) - self.assertTrue(self.cube.has_lazy_data()) - - # Perform interpolation and check the data has been loaded. - interpolator = RectilinearInterpolator( - self.cube, ["latitude"], LINEAR, EXTRAPOLATE - ) - interpolator([[1.5]]) - self.assertFalse(self.cube.has_lazy_data()) - - -class Test___call___time(tests.IrisTest): - def interpolator(self, method=LINEAR): - data = np.arange(12).reshape(4, 3) - cube = iris.cube.Cube(data) - time_coord = iris.coords.DimCoord( - np.arange(0.0, 48.0, 12.0), "time", units="hours since epoch" - ) - height_coord = iris.coords.DimCoord( - np.arange(3), "altitude", units="m" - ) - cube.add_dim_coord(time_coord, 0) - cube.add_dim_coord(height_coord, 1) - return RectilinearInterpolator(cube, ["time"], method, EXTRAPOLATE) - - def test_number_at_existing_value(self): - interpolator = self.interpolator() - result = interpolator([12]) - self.assertArrayEqual(result.data, [3, 4, 5]) - - def test_datetime_at_existing_value(self): - interpolator = self.interpolator() - result = interpolator([datetime.datetime(1970, 1, 1, 12)]) - self.assertArrayEqual(result.data, [3, 4, 5]) - - def test_datetime_between_existing_values(self): - interpolator = self.interpolator() - result = interpolator([datetime.datetime(1970, 1, 1, 18)]) - self.assertArrayEqual(result.data, [4.5, 5.5, 6.5]) - - def test_mixed_numbers_and_datetimes(self): - interpolator = self.interpolator() - result = interpolator( - [ - ( - 12, - 
datetime.datetime(1970, 1, 1, 18), - datetime.datetime(1970, 1, 2, 0), - 26, - ) - ] - ) - self.assertEqual(result.coord("time").points.dtype, float) - self.assertArrayEqual( - result.data, - [[3, 4, 5], [4.5, 5.5, 6.5], [6, 7, 8], [6.5, 7.5, 8.5]], - ) - - def test_mixed_numbers_and_datetimes_nearest(self): - interpolator = self.interpolator(NEAREST) - result = interpolator( - [ - ( - 12, - datetime.datetime(1970, 1, 1, 18), - datetime.datetime(1970, 1, 2, 0), - 26, - ) - ] - ) - self.assertEqual(result.coord("time").points.dtype, float) - self.assertArrayEqual( - result.data, [[3, 4, 5], [3, 4, 5], [6, 7, 8], [6, 7, 8]] - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py b/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py deleted file mode 100644 index 54e54bc304..0000000000 --- a/lib/iris/tests/unit/analysis/interpolation/test_get_xy_dim_coords.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.analysis._interpolation.get_xy_dim_coords`. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - -import copy - -import numpy as np - -from iris.analysis._interpolation import get_xy_dim_coords -import iris.coord_systems -import iris.coords -import iris.experimental.regrid -import iris.tests.stock - - -class TestGetXYCoords(tests.IrisTest): - @tests.skip_data - def test_grid_lat_lon(self): - cube = iris.tests.stock.realistic_4d() - x, y = get_xy_dim_coords(cube) - self.assertIs(x, cube.coord("grid_longitude")) - self.assertIs(y, cube.coord("grid_latitude")) - - def test_lat_lon(self): - cube = iris.tests.stock.lat_lon_cube() - x, y = get_xy_dim_coords(cube) - self.assertIs(x, cube.coord("longitude")) - self.assertIs(y, cube.coord("latitude")) - - def test_projection_coords(self): - cube = iris.tests.stock.lat_lon_cube() - cube.coord("longitude").rename("projection_x_coordinate") - cube.coord("latitude").rename("projection_y_coordinate") - x, y = get_xy_dim_coords(cube) - self.assertIs(x, cube.coord("projection_x_coordinate")) - self.assertIs(y, cube.coord("projection_y_coordinate")) - - @tests.skip_data - def test_missing_x_coord(self): - cube = iris.tests.stock.realistic_4d() - cube.remove_coord("grid_longitude") - with self.assertRaises(ValueError): - get_xy_dim_coords(cube) - - @tests.skip_data - def test_missing_y_coord(self): - cube = iris.tests.stock.realistic_4d() - cube.remove_coord("grid_latitude") - with self.assertRaises(ValueError): - get_xy_dim_coords(cube) - - @tests.skip_data - def test_multiple_coords(self): - cube = iris.tests.stock.realistic_4d() - cs = iris.coord_systems.GeogCS(6371229) - time_coord = cube.coord("time") - time_dims = cube.coord_dims(time_coord) - lat_coord = iris.coords.DimCoord( - np.arange(time_coord.shape[0]), - standard_name="latitude", - units="degrees", - coord_system=cs, - ) - cube.remove_coord(time_coord) - cube.add_dim_coord(lat_coord, time_dims) - model_level_coord = cube.coord("model_level_number") - model_level_dims = cube.coord_dims(model_level_coord) - lon_coord = 
iris.coords.DimCoord( - np.arange(model_level_coord.shape[0]), - standard_name="longitude", - units="degrees", - coord_system=cs, - ) - cube.remove_coord(model_level_coord) - cube.add_dim_coord(lon_coord, model_level_dims) - - with self.assertRaises(ValueError): - get_xy_dim_coords(cube) - - cube.remove_coord("grid_latitude") - cube.remove_coord("grid_longitude") - - x, y = get_xy_dim_coords(cube) - self.assertIs(x, lon_coord) - self.assertIs(y, lat_coord) - - def test_no_coordsystem(self): - cube = iris.tests.stock.lat_lon_cube() - for coord in cube.coords(): - coord.coord_system = None - x, y = get_xy_dim_coords(cube) - self.assertIs(x, cube.coord("longitude")) - self.assertIs(y, cube.coord("latitude")) - - def test_one_coordsystem(self): - cube = iris.tests.stock.lat_lon_cube() - cube.coord("longitude").coord_system = None - with self.assertRaises(ValueError): - get_xy_dim_coords(cube) - - def test_different_coordsystem(self): - cube = iris.tests.stock.lat_lon_cube() - - lat_cs = copy.copy(cube.coord("latitude").coord_system) - lat_cs.semi_major_axis = 7000000 - cube.coord("latitude").coord_system = lat_cs - - lon_cs = copy.copy(cube.coord("longitude").coord_system) - lon_cs.semi_major_axis = 7000001 - cube.coord("longitude").coord_system = lon_cs - - with self.assertRaises(ValueError): - get_xy_dim_coords(cube) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/__init__.py b/lib/iris/tests/unit/analysis/maths/__init__.py deleted file mode 100644 index 7d11c54660..0000000000 --- a/lib/iris/tests/unit/analysis/maths/__init__.py +++ /dev/null @@ -1,217 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.analysis.maths` module.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from abc import ABCMeta, abstractmethod - -import numpy as np -from numpy import ma - -from iris.analysis import MEAN -from iris.coords import DimCoord -from iris.cube import Cube -import iris.tests.stock as stock - - -class CubeArithmeticBroadcastingTestMixin(metaclass=ABCMeta): - # A framework for testing the broadcasting behaviour of the various cube - # arithmetic operations. (A test for each operation inherits this). - @property - @abstractmethod - def data_op(self): - # Define an operator to be called, I.E. 'operator.xx'. - pass - - @property - @abstractmethod - def cube_func(self): - # Define an iris arithmetic function to be called - # I.E. 'iris.analysis.maths.xx'. - pass - - def test_transposed(self): - cube = stock.realistic_4d_no_derived() - other = cube.copy() - other.transpose() - res = self.cube_func(cube, other) - self.assertCML(res, checksum=False) - expected_data = self.data_op(cube.data, other.data.T) - self.assertArrayEqual(res.data, expected_data) - - def test_collapse_zeroth_dim(self): - cube = stock.realistic_4d_no_derived() - other = cube.collapsed("time", MEAN) - res = self.cube_func(cube, other) - self.assertCML(res, checksum=False) - # No modification to other.data is needed as numpy broadcasting - # should be sufficient. - expected_data = self.data_op(cube.data, other.data) - # Use assertMaskedArrayEqual as collapsing with MEAN results - # in a cube with a masked data array. - self.assertMaskedArrayEqual(res.data, expected_data) - - def test_collapse_all_dims(self): - cube = stock.realistic_4d_no_derived() - other = cube.collapsed(cube.coords(dim_coords=True), MEAN) - res = self.cube_func(cube, other) - self.assertCML(res, checksum=False) - # No modification to other.data is needed as numpy broadcasting - # should be sufficient. 
- expected_data = self.data_op(cube.data, other.data) - # Use assertArrayEqual rather than assertMaskedArrayEqual as - # collapsing all dims does not result in a masked array. - self.assertArrayEqual(res.data, expected_data) - - def test_collapse_last_dims(self): - cube = stock.realistic_4d_no_derived() - other = cube.collapsed(["grid_latitude", "grid_longitude"], MEAN) - res = self.cube_func(cube, other) - self.assertCML(res, checksum=False) - # Transpose the dimensions in self.cube that have been collapsed in - # other to lie at the front, thereby enabling numpy broadcasting to - # function when applying data operator. Finish by transposing back - # again to restore order. - expected_data = self.data_op( - cube.data.transpose((2, 3, 0, 1)), other.data - ).transpose(2, 3, 0, 1) - self.assertMaskedArrayEqual(res.data, expected_data) - - def test_collapse_middle_dim(self): - cube = stock.realistic_4d_no_derived() - other = cube.collapsed(["model_level_number"], MEAN) - res = self.cube_func(cube, other) - self.assertCML(res, checksum=False) - # Add the collapsed dimension back in via np.newaxis to enable - # numpy broadcasting to function. - expected_data = self.data_op(cube.data, other.data[:, np.newaxis, ...]) - self.assertMaskedArrayEqual(res.data, expected_data) - - def test_slice(self): - cube = stock.realistic_4d_no_derived() - for dim in range(cube.ndim): - keys = [slice(None)] * cube.ndim - keys[dim] = 3 - other = cube[tuple(keys)] - res = self.cube_func(cube, other) - self.assertCML(res, checksum=False) - # Add the collapsed dimension back in via np.newaxis to enable - # numpy broadcasting to function. - keys[dim] = np.newaxis - expected_data = self.data_op(cube.data, other.data[tuple(keys)]) - msg = "Problem broadcasting cubes when sliced on dimension {}." 
- self.assertArrayEqual( - res.data, expected_data, err_msg=msg.format(dim) - ) - - -class CubeArithmeticMaskingTestMixin(metaclass=ABCMeta): - # A framework for testing the mask handling behaviour of the various cube - # arithmetic operations. (A test for each operation inherits this). - @property - @abstractmethod - def data_op(self): - # Define an operator to be called, I.E. 'operator.xx'. - pass - - @property - @abstractmethod - def cube_func(self): - # Define an iris arithmetic function to be called - # I.E. 'iris.analysis.maths.xx'. - pass - - def _test_partial_mask(self, in_place): - # Helper method for masked data tests. - dat_a = ma.array([2.0, 2.0, 2.0, 2.0], mask=[1, 0, 1, 0]) - dat_b = ma.array([2.0, 2.0, 2.0, 2.0], mask=[1, 1, 0, 0]) - - cube_a = Cube(dat_a) - cube_b = Cube(dat_b) - - com = self.data_op(dat_b, dat_a) - res = self.cube_func(cube_b, cube_a, in_place=in_place) - - return com, res, cube_b - - def test_partial_mask_in_place(self): - # Cube in_place arithmetic operation. - com, res, orig_cube = self._test_partial_mask(True) - - self.assertMaskedArrayEqual(com, res.data, strict=True) - self.assertIs(res, orig_cube) - - def test_partial_mask_not_in_place(self): - # Cube arithmetic not an in_place operation. - com, res, orig_cube = self._test_partial_mask(False) - - self.assertMaskedArrayEqual(com, res.data) - self.assertIsNot(res, orig_cube) - - -class CubeArithmeticCoordsTest(tests.IrisTest): - # This class sets up pairs of cubes to test iris' ability to reject - # arithmetic operations on coordinates which do not match. - def SetUpNonMatching(self): - # On this cube pair, the coordinates to perform operations on do not - # match in either points array or name. 
- data = np.zeros((3, 4)) - a = DimCoord([1, 2, 3], long_name="a") - b = DimCoord([1, 2, 3, 4], long_name="b") - x = DimCoord([4, 5, 6], long_name="x") - y = DimCoord([5, 6, 7, 8], long_name="y") - - nomatch1 = Cube(data, dim_coords_and_dims=[(a, 0), (b, 1)]) - nomatch2 = Cube(data, dim_coords_and_dims=[(x, 0), (y, 1)]) - - return nomatch1, nomatch2 - - def SetUpReversed(self): - # On this cube pair, the coordinates to perform operations on have - # matching long names but the points array on one cube is reversed - # with respect to that on the other. - data = np.zeros((3, 4)) - a1 = DimCoord([1, 2, 3], long_name="a") - b1 = DimCoord([1, 2, 3, 4], long_name="b") - a2 = DimCoord([3, 2, 1], long_name="a") - b2 = DimCoord([1, 2, 3, 4], long_name="b") - - reversed1 = Cube(data, dim_coords_and_dims=[(a1, 0), (b1, 1)]) - reversed2 = Cube(data, dim_coords_and_dims=[(a2, 0), (b2, 1)]) - - return reversed1, reversed2 - - -class CubeArithmeticMaskedConstantTestMixin(metaclass=ABCMeta): - @property - @abstractmethod - def cube_func(self): - # Define an iris arithmetic function to be called - # I.E. 'iris.analysis.maths.xx'. - pass - - def test_masked_constant_in_place(self): - # Cube in_place arithmetic operation. - dtype = np.int64 - dat = ma.masked_array(0, 1, dtype) - cube = Cube(dat) - res = self.cube_func(cube, 5, in_place=True) - self.assertMaskedArrayEqual(ma.masked_array(0, 1), res.data) - self.assertEqual(dtype, res.dtype) - self.assertIs(res, cube) - - def test_masked_constant_not_in_place(self): - # Cube in_place arithmetic operation. 
- dtype = np.int64 - dat = ma.masked_array(0, 1, dtype) - cube = Cube(dat) - res = self.cube_func(cube, 5, in_place=False) - self.assertMaskedArrayEqual(ma.masked_array(0, 1), res.data) - self.assertEqual(dtype, res.dtype) - self.assertIsNot(res, cube) diff --git a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py b/lib/iris/tests/unit/analysis/maths/test__get_dtype.py deleted file mode 100644 index 220b728b32..0000000000 --- a/lib/iris/tests/unit/analysis/maths/test__get_dtype.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function :func:`iris.analysis.maths._get_dtype`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -from numpy import ma - -from iris.analysis.maths import _get_dtype -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube - - -class Test(tests.IrisTest): - def _check_call(self, obj, expected_dtype): - result = _get_dtype(obj) - self.assertEqual(expected_dtype, result) - - def test_int8(self): - n = -128 - self._check_call(n, np.int8) - - def test_int16(self): - n = -129 - self._check_call(n, np.int16) - - def test_uint8(self): - n = 255 - self._check_call(n, np.uint8) - - def test_uint16(self): - n = 256 - self._check_call(n, np.uint16) - - def test_float16(self): - n = 60000.0 - self._check_call(n, np.float16) - - def test_float32(self): - n = 65000.0 - self._check_call(n, np.float32) - - def test_float64(self): - n = 1e40 - self._check_call(n, np.float64) - - def test_scalar_demote(self): - n = np.int64(10) - self._check_call(n, np.uint8) - - def test_array(self): - a = np.array([1, 2, 3], dtype=np.int16) - self._check_call(a, np.int16) - - def test_scalar_array(self): - dtype = np.int32 - a = 
np.array(1, dtype=dtype) - self._check_call(a, dtype) - - def test_masked_array(self): - dtype = np.float16 - m = ma.masked_array([1, 2, 3], [1, 0, 1], dtype=dtype) - self._check_call(m, dtype) - - def test_masked_constant(self): - m = ma.masked - self._check_call(m, m.dtype) - - def test_cube(self): - dtype = np.float32 - data = np.array([1, 2, 3], dtype=dtype) - cube = Cube(data) - self._check_call(cube, dtype) - - def test_aux_coord(self): - dtype = np.int64 - points = np.array([1, 2, 3], dtype=dtype) - aux_coord = AuxCoord(points) - self._check_call(aux_coord, dtype) - - def test_dim_coord(self): - dtype = np.float16 - points = np.array([1, 2, 3], dtype=dtype) - dim_coord = DimCoord(points) - self._check_call(dim_coord, dtype) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py b/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py deleted file mode 100644 index bd81a96fbd..0000000000 --- a/lib/iris/tests/unit/analysis/maths/test__inplace_common_checks.py +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function :func:`iris.analysis.maths._inplace_common_checks`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.analysis.maths import _inplace_common_checks -from iris.cube import Cube - - -class Test(tests.IrisTest): - # `_inplace_common_checks` is a pass-through function that does not return - # anything but will fail iff `cube` and `other` have integer dtype. Thus in - # a sense we only want to test the failing cases. Doing so, however, leaves - # us open to the case where currently known good cases fail silently. 
- # To avoid this all the known good cases are also tested by relying on the - # fact that functions with no return value implicitly return `None`. If - # these currently known good cases ever changed these tests would start - # failing and indicate something was wrong. - def setUp(self): - self.scalar_int = 5 - self.scalar_float = 5.5 - - self.float_data = np.array([8, 9], dtype=np.float64) - self.int_data = np.array([9, 8], dtype=np.int64) - self.uint_data = np.array([9, 8], dtype=np.uint64) - - self.float_cube = Cube(self.float_data) - self.int_cube = Cube(self.int_data) - self.uint_cube = Cube(self.uint_data) - - self.op = "addition" - self.emsg = "Cannot perform inplace {}".format(self.op) - - def test_float_cubes(self): - result = _inplace_common_checks( - self.float_cube, self.float_cube, self.op - ) - self.assertIsNone(result) - - def test_int_cubes(self): - result = _inplace_common_checks(self.int_cube, self.int_cube, self.op) - self.assertIsNone(result) - - def test_uint_cubes(self): - result = _inplace_common_checks( - self.uint_cube, self.uint_cube, self.op - ) - self.assertIsNone(result) - - def test_float_cube_int_cube(self): - result = _inplace_common_checks( - self.float_cube, self.int_cube, self.op - ) - self.assertIsNone(result) - - def test_float_cube_uint_cube(self): - result = _inplace_common_checks( - self.float_cube, self.uint_cube, self.op - ) - self.assertIsNone(result) - - def test_int_cube_float_cube(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.int_cube, self.float_cube, self.op) - - def test_uint_cube_float_cube(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.uint_cube, self.float_cube, self.op) - - def test_float_cube__scalar_int(self): - result = _inplace_common_checks( - self.float_cube, self.scalar_int, self.op - ) - self.assertIsNone(result) - - def test_float_cube__scalar_float(self): - result = _inplace_common_checks( - 
self.float_cube, self.scalar_float, self.op - ) - self.assertIsNone(result) - - def test_float_cube__int_array(self): - result = _inplace_common_checks( - self.float_cube, self.int_data, self.op - ) - self.assertIsNone(result) - - def test_float_cube__float_array(self): - result = _inplace_common_checks( - self.float_cube, self.float_data, self.op - ) - self.assertIsNone(result) - - def test_int_cube__scalar_int(self): - result = _inplace_common_checks( - self.int_cube, self.scalar_int, self.op - ) - self.assertIsNone(result) - - def test_int_cube_uint_cube(self): - result = _inplace_common_checks(self.int_cube, self.uint_cube, self.op) - self.assertIsNone(result) - - def test_uint_cube_uint_cube(self): - result = _inplace_common_checks( - self.uint_cube, self.uint_cube, self.op - ) - self.assertIsNone(result) - - def test_uint_cube_int_cube(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.uint_cube, self.int_cube, self.op) - - def test_int_cube__scalar_float(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.int_cube, self.scalar_float, self.op) - - def test_int_cube__int_array(self): - result = _inplace_common_checks(self.int_cube, self.int_cube, self.op) - self.assertIsNone(result) - - def test_int_cube__float_array(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.int_cube, self.float_data, self.op) - - def test_uint_cube__scalar_float(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.uint_cube, self.scalar_float, self.op) - - def test_uint_cube__int_array(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.uint_cube, self.int_cube, self.op) - - def test_uint_cube__float_array(self): - with self.assertRaisesRegex(ArithmeticError, self.emsg): - _inplace_common_checks(self.uint_cube, self.float_data, self.op) - - -if __name__ == 
"__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py b/lib/iris/tests/unit/analysis/maths/test__output_dtype.py deleted file mode 100644 index c422e366be..0000000000 --- a/lib/iris/tests/unit/analysis/maths/test__output_dtype.py +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function :func:`iris.analysis.maths._output_dtype`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from itertools import product -import operator - -import numpy as np - -from iris.analysis.maths import _output_dtype - - -class Test(tests.IrisTest): - def setUp(self): - # Operators which result in a value of the same dtype as their - # arguments when the arguments' dtypes are the same. - self.same_result_ops = [ - operator.add, - operator.sub, - operator.mul, - operator.pow, - operator.floordiv, - np.add, - np.subtract, - np.multiply, - np.power, - np.floor_divide, - ] - - self.unary_same_result_ops = [np.abs] - - # Operators which always result in a float. - self.float_ops = [operator.truediv, np.true_divide] - - self.unary_float_ops = [np.log, np.log2, np.log10, np.exp] - - self.all_binary_ops = self.same_result_ops + self.float_ops - self.all_unary_ops = self.unary_same_result_ops + self.unary_float_ops - - self.dtypes = [ - np.dtype("i2"), - np.dtype("i4"), - np.dtype("i8"), - np.dtype("f2"), - np.dtype("f4"), - np.dtype("f8"), - ] - - def _binary_error_message( - self, - op, - first_dtype, - second_dtype, - expected_dtype, - result_dtype, - in_place=False, - ): - msg = ( - "Output for {op.__class__.__name__} {op.__name__!r} and " - "arguments ({dt1!r}, {dt2!r}, in_place={in_place}) " - "was {res!r}. Expected {exp!r}." 
- ) - return msg.format( - op=op, - dt1=first_dtype, - dt2=second_dtype, - exp=expected_dtype, - res=result_dtype, - in_place=in_place, - ) - - def _unary_error_message( - self, op, dtype, expected_dtype, result_dtype, in_place=False - ): - msg = ( - "Output for {op.__class__.__name__} {op.__name__!r} and " - "arguments ({dt!r}, in_place={in_place}) was {res!r}. " - "Expected {exp!r}." - ) - return msg.format( - op=op, - dt=dtype, - exp=expected_dtype, - res=result_dtype, - in_place=in_place, - ) - - def test_same_result(self): - # Check that the result dtype is the same as the input dtypes for - # relevant operators. - for dtype in self.dtypes: - for op in self.same_result_ops: - result_dtype = _output_dtype(op, dtype, dtype) - self.assertEqual( - dtype, - result_dtype, - self._binary_error_message( - op, dtype, dtype, dtype, result_dtype - ), - ) - for op in self.unary_same_result_ops: - result_dtype = _output_dtype(op, dtype) - self.assertEqual( - dtype, - result_dtype, - self._unary_error_message(op, dtype, dtype, result_dtype), - ) - - def test_binary_float(self): - # Check that the result dtype is a float for relevant operators. - # Perform checks for a selection of cases. 
- cases = [ - (np.dtype("i2"), np.dtype("i2"), np.dtype("f8")), - (np.dtype("i2"), np.dtype("i4"), np.dtype("f8")), - (np.dtype("i4"), np.dtype("i4"), np.dtype("f8")), - (np.dtype("i2"), np.dtype("f2"), np.dtype("f4")), - (np.dtype("i2"), np.dtype("f4"), np.dtype("f4")), - (np.dtype("i8"), np.dtype("f2"), np.dtype("f8")), - (np.dtype("f2"), np.dtype("f2"), np.dtype("f2")), - (np.dtype("f4"), np.dtype("f4"), np.dtype("f4")), - (np.dtype("f2"), np.dtype("f4"), np.dtype("f4")), - ] - for dtype1, dtype2, expected_dtype in cases: - for op in self.float_ops: - result_dtype = _output_dtype(op, dtype1, dtype2) - self.assertEqual( - expected_dtype, - result_dtype, - self._binary_error_message( - op, dtype1, dtype2, expected_dtype, result_dtype - ), - ) - - def test_unary_float(self): - cases = [ - (np.dtype("i2"), np.dtype("f4")), - (np.dtype("i4"), np.dtype("f8")), - (np.dtype("i8"), np.dtype("f8")), - (np.dtype("f2"), np.dtype("f2")), - (np.dtype("f4"), np.dtype("f4")), - (np.dtype("f8"), np.dtype("f8")), - ] - for dtype, expected_dtype in cases: - for op in self.unary_float_ops: - result_dtype = _output_dtype(op, dtype) - self.assertEqual( - expected_dtype, - result_dtype, - self._unary_error_message( - op, dtype, expected_dtype, result_dtype - ), - ) - - def test_binary_float_argument(self): - # Check that when one argument is a float dtype, a float dtype results - # Unary operators are covered by other tests. 
- dtypes = [ - np.dtype("i2"), - np.dtype("i4"), - np.dtype("i8"), - np.dtype("f2"), - np.dtype("f4"), - np.dtype("f8"), - ] - expected_dtypes = [ - np.dtype("f4"), - np.dtype("f8"), - np.dtype("f8"), - np.dtype("f2"), - np.dtype("f4"), - np.dtype("f8"), - ] - for op in self.all_binary_ops: - for dtype, expected_dtype in zip(dtypes, expected_dtypes): - result_dtype = _output_dtype(op, dtype, np.dtype("f2")) - self.assertEqual( - expected_dtype, - result_dtype, - self._binary_error_message( - op, dtype, np.dtype("f2"), expected_dtype, result_dtype - ), - ) - - def test_in_place(self): - # Check that when the in_place argument is True, the result is always - # the same as first operand. - for dtype1, dtype2 in product(self.dtypes, self.dtypes): - for op in self.all_binary_ops: - result_dtype = _output_dtype(op, dtype1, dtype2, in_place=True) - self.assertEqual( - result_dtype, - dtype1, - self._binary_error_message( - op, dtype1, dtype2, dtype1, result_dtype, in_place=True - ), - ) - for dtype in self.dtypes: - for op in self.all_unary_ops: - result_dtype = _output_dtype(op, dtype, in_place=True) - self.assertEqual( - result_dtype, - dtype, - self._unary_error_message( - op, dtype, dtype, result_dtype, in_place=True - ), - ) - - def test_commuative(self): - # Check that the operation is commutative if in_place is not specified. 
- for dtype1, dtype2 in product(self.dtypes, self.dtypes): - for op in self.all_binary_ops: - result_dtype1 = _output_dtype(op, dtype1, dtype2) - result_dtype2 = _output_dtype(op, dtype2, dtype1) - self.assertEqual( - result_dtype1, - result_dtype2, - "_output_dtype is not commutative with arguments " - "{!r} and {!r}: {!r} != {!r}".format( - dtype1, dtype2, result_dtype1, result_dtype2 - ), - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/test_add.py b/lib/iris/tests/unit/analysis/maths/test_add.py deleted file mode 100644 index 77dd7877bf..0000000000 --- a/lib/iris/tests/unit/analysis/maths/test_add.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :func:`iris.analysis.maths.add` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import operator - -from iris.analysis.maths import add -from iris.tests.unit.analysis.maths import ( - CubeArithmeticBroadcastingTestMixin, - CubeArithmeticCoordsTest, - CubeArithmeticMaskedConstantTestMixin, - CubeArithmeticMaskingTestMixin, -) - - -@tests.skip_data -@tests.iristest_timing_decorator -class TestBroadcasting( - tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin -): - @property - def data_op(self): - return operator.add - - @property - def cube_func(self): - return add - - -@tests.iristest_timing_decorator -class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin): - @property - def data_op(self): - return operator.add - - @property - def cube_func(self): - return add - - -class TestCoordMatch(CubeArithmeticCoordsTest): - def test_no_match(self): - cube1, cube2 = self.SetUpNonMatching() - with self.assertRaises(ValueError): - add(cube1, cube2) - - def test_reversed_points(self): - cube1, cube2 = self.SetUpReversed() - with self.assertRaises(ValueError): - add(cube1, cube2) - - -@tests.iristest_timing_decorator -class TestMaskedConstant( - tests.IrisTest_nometa, CubeArithmeticMaskedConstantTestMixin -): - @property - def data_op(self): - return operator.add - - @property - def cube_func(self): - return add - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/test_divide.py b/lib/iris/tests/unit/analysis/maths/test_divide.py deleted file mode 100644 index 1763f223b0..0000000000 --- a/lib/iris/tests/unit/analysis/maths/test_divide.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :func:`iris.analysis.maths.divide` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import operator - -import numpy as np - -from iris.analysis.maths import divide -from iris.cube import Cube -from iris.tests.unit.analysis.maths import ( - CubeArithmeticBroadcastingTestMixin, - CubeArithmeticCoordsTest, - CubeArithmeticMaskingTestMixin, -) - - -@tests.skip_data -@tests.iristest_timing_decorator -class TestBroadcasting( - tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin -): - @property - def data_op(self): - return operator.truediv - - @property - def cube_func(self): - return divide - - -@tests.iristest_timing_decorator -class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin): - @property - def data_op(self): - return operator.truediv - - @property - def cube_func(self): - return divide - - def test_unmasked_div_zero(self): - # Ensure cube behaviour matches numpy operator behaviour for the - # handling of arrays containing 0. - dat_a = np.array([0.0, 0.0, 0.0, 0.0]) - dat_b = np.array([2.0, 2.0, 2.0, 2.0]) - - cube_a = Cube(dat_a) - cube_b = Cube(dat_b) - - com = self.data_op(dat_b, dat_a) - res = self.cube_func(cube_b, cube_a).data - - self.assertArrayEqual(com, res) - - def test_masked_div_zero(self): - # Ensure cube behaviour matches numpy operator behaviour for the - # handling of arrays containing 0. 
- dat_a = np.ma.array([0.0, 0.0, 0.0, 0.0], mask=False) - dat_b = np.ma.array([2.0, 2.0, 2.0, 2.0], mask=False) - - cube_a = Cube(dat_a) - cube_b = Cube(dat_b) - - com = self.data_op(dat_b, dat_a) - res = self.cube_func(cube_b, cube_a).data - - self.assertMaskedArrayEqual(com, res, strict=True) - - -class TestCoordMatch(CubeArithmeticCoordsTest): - def test_no_match(self): - cube1, cube2 = self.SetUpNonMatching() - with self.assertRaises(ValueError): - divide(cube1, cube2) - - def test_reversed_points(self): - cube1, cube2 = self.SetUpReversed() - with self.assertRaises(ValueError): - divide(cube1, cube2) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/test_multiply.py b/lib/iris/tests/unit/analysis/maths/test_multiply.py deleted file mode 100644 index 600593c64b..0000000000 --- a/lib/iris/tests/unit/analysis/maths/test_multiply.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :func:`iris.analysis.maths.multiply` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import operator - -from iris.analysis.maths import multiply -from iris.tests.unit.analysis.maths import ( - CubeArithmeticBroadcastingTestMixin, - CubeArithmeticCoordsTest, - CubeArithmeticMaskedConstantTestMixin, - CubeArithmeticMaskingTestMixin, -) - - -@tests.skip_data -@tests.iristest_timing_decorator -class TestBroadcasting( - tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin -): - @property - def data_op(self): - return operator.mul - - @property - def cube_func(self): - return multiply - - -@tests.iristest_timing_decorator -class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin): - @property - def data_op(self): - return operator.mul - - @property - def cube_func(self): - return multiply - - -class TestCoordMatch(CubeArithmeticCoordsTest): - def test_no_match(self): - cube1, cube2 = self.SetUpNonMatching() - with self.assertRaises(ValueError): - multiply(cube1, cube2) - - def test_reversed_points(self): - cube1, cube2 = self.SetUpReversed() - with self.assertRaises(ValueError): - multiply(cube1, cube2) - - -@tests.iristest_timing_decorator -class TestMaskedConstant( - tests.IrisTest_nometa, CubeArithmeticMaskedConstantTestMixin -): - @property - def data_op(self): - return operator.mul - - @property - def cube_func(self): - return multiply - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/maths/test_subtract.py b/lib/iris/tests/unit/analysis/maths/test_subtract.py deleted file mode 100644 index 964e8c04c7..0000000000 --- a/lib/iris/tests/unit/analysis/maths/test_subtract.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :func:`iris.analysis.maths.subtract` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import operator - -from iris.analysis.maths import subtract -from iris.tests.unit.analysis.maths import ( - CubeArithmeticBroadcastingTestMixin, - CubeArithmeticCoordsTest, - CubeArithmeticMaskedConstantTestMixin, - CubeArithmeticMaskingTestMixin, -) - - -@tests.skip_data -@tests.iristest_timing_decorator -class TestBroadcasting( - tests.IrisTest_nometa, CubeArithmeticBroadcastingTestMixin -): - @property - def data_op(self): - return operator.sub - - @property - def cube_func(self): - return subtract - - -@tests.iristest_timing_decorator -class TestMasking(tests.IrisTest_nometa, CubeArithmeticMaskingTestMixin): - @property - def data_op(self): - return operator.sub - - @property - def cube_func(self): - return subtract - - -class TestCoordMatch(CubeArithmeticCoordsTest): - def test_no_match(self): - cube1, cube2 = self.SetUpNonMatching() - with self.assertRaises(ValueError): - subtract(cube1, cube2) - - def test_reversed_points(self): - cube1, cube2 = self.SetUpReversed() - with self.assertRaises(ValueError): - subtract(cube1, cube2) - - -@tests.iristest_timing_decorator -class TestMaskedConstant( - tests.IrisTest_nometa, CubeArithmeticMaskedConstantTestMixin -): - @property - def data_op(self): - return operator.sub - - @property - def cube_func(self): - return subtract - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/regrid/__init__.py b/lib/iris/tests/unit/analysis/regrid/__init__.py deleted file mode 100644 index a0a0fd0a6b..0000000000 --- a/lib/iris/tests/unit/analysis/regrid/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.analysis._regrid` module.""" diff --git a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py deleted file mode 100644 index f0dba83748..0000000000 --- a/lib/iris/tests/unit/analysis/regrid/test_RectilinearRegridder.py +++ /dev/null @@ -1,1463 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.analysis._regrid.RectilinearRegridder`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import dask.array as da -import numpy as np -import numpy.ma as ma - -from iris.analysis._regrid import RectilinearRegridder as Regridder -from iris.aux_factory import HybridHeightFactory -from iris.coord_systems import OSGB, GeogCS -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube -from iris.tests.stock import global_pp, lat_lon_cube, realistic_4d - -RESULT_DIR = ("analysis", "regrid") - -# Convenience to access Regridder static method. -regrid = Regridder._regrid - - -class Test__regrid__linear(tests.IrisTest): - def setUp(self): - self.x = DimCoord(np.linspace(-2, 57, 60)) - self.y = DimCoord(np.linspace(0, 49, 50)) - self.xs, self.ys = np.meshgrid(self.x.points, self.y.points) - - def transformation(x, y): - return x + y ** 2 - - # Construct a function which adds dimensions to the 2D data array - # so that we can test higher dimensional functionality. 
- def dim_extender(arr): - return arr[np.newaxis, ..., np.newaxis] * [1, 2] - - self.data = dim_extender(transformation(self.xs, self.ys)) - - target_x = np.linspace(-3, 60, 4) - target_y = np.linspace(0.5, 51, 3) - self.target_x, self.target_y = np.meshgrid(target_x, target_y) - - #: Expected values, which not quite the analytical value, but - #: representative of the bilinear interpolation scheme. - self.expected = np.array( - [ - [ - [ - [np.nan, np.nan], - [18.5, 37.0], - [39.5, 79.0], - [np.nan, np.nan], - ], - [ - [np.nan, np.nan], - [681.25, 1362.5], - [702.25, 1404.5], - [np.nan, np.nan], - ], - [ - [np.nan, np.nan], - [np.nan, np.nan], - [np.nan, np.nan], - [np.nan, np.nan], - ], - ] - ] - ) - - self.x_dim = 2 - self.y_dim = 1 - - def assert_values(self, values): - # values is a list of [x, y, [val1, val2]] - xs, ys, expecteds = zip(*values) - expecteds = np.array(expecteds)[None, None, ...] - result = regrid( - self.data, - self.x_dim, - self.y_dim, - self.x, - self.y, - np.array([xs]), - np.array([ys]), - ) - self.assertArrayAllClose(result, expecteds, rtol=1e-04, equal_nan=True) - - # Check that transposing the input data results in the same values - ndim = self.data.ndim - result2 = regrid( - self.data.T, - ndim - self.x_dim - 1, - ndim - self.y_dim - 1, - self.x, - self.y, - np.array([xs]), - np.array([ys]), - ) - self.assertArrayEqual(result.T, result2) - - def test_single_values(self): - # Check that the values are sensible e.g. 
(3 + 4**2 == 19) - self.assert_values( - [ - [3, 4, [19, 38]], - [-2, 0, [-2, -4]], - [-2.01, 0, [np.nan, np.nan]], - [2, -0.01, [np.nan, np.nan]], - [57, 0, [57, 114]], - [57.01, 0, [np.nan, np.nan]], - [57, 49, [2458, 4916]], - [57, 49.01, [np.nan, np.nan]], - ] - ) - - def test_simple_result(self): - result = regrid( - self.data, - self.x_dim, - self.y_dim, - self.x, - self.y, - self.target_x, - self.target_y, - ) - self.assertArrayEqual(result, self.expected) - - def test_simple_masked(self): - data = ma.MaskedArray(self.data, mask=True) - data.mask[:, 1:30, 1:30] = False - result = regrid( - data, - self.x_dim, - self.y_dim, - self.x, - self.y, - self.target_x, - self.target_y, - ) - expected_mask = np.array( - [ - [ - [[True, True], [True, True], [True, True], [True, True]], - [[True, True], [False, False], [True, True], [True, True]], - [[True, True], [True, True], [True, True], [True, True]], - ] - ], - dtype=bool, - ) - expected = ma.MaskedArray(self.expected, mask=expected_mask) - self.assertMaskedArrayEqual(result, expected) - - def test_simple_masked_no_mask(self): - data = ma.MaskedArray(self.data, mask=False) - result = regrid( - data, - self.x_dim, - self.y_dim, - self.x, - self.y, - self.target_x, - self.target_y, - ) - self.assertIsInstance(result, ma.MaskedArray) - - def test_result_transpose_shape(self): - ndim = self.data.ndim - result = regrid( - self.data.T, - ndim - self.x_dim - 1, - ndim - self.y_dim - 1, - self.x, - self.y, - self.target_x, - self.target_y, - ) - self.assertArrayEqual(result, self.expected.T) - - def test_reverse_x_coord(self): - index = [slice(None)] * self.data.ndim - index[self.x_dim] = slice(None, None, -1) - result = regrid( - self.data[tuple(index)], - self.x_dim, - self.y_dim, - self.x[::-1], - self.y, - self.target_x, - self.target_y, - ) - self.assertArrayEqual(result, self.expected) - - def test_circular_x_coord(self): - # Check that interpolation of a circular src coordinate doesn't result - # in an out of bounds 
value. - self.x.circular = True - self.x.units = "degree" - result = regrid( - self.data, - self.x_dim, - self.y_dim, - self.x, - self.y, - np.array([[58]]), - np.array([[0]]), - ) - self.assertArrayAlmostEqual( - result, np.array([56.80398671, 113.60797342], ndmin=self.data.ndim) - ) - - -# Check what happens to NaN values, extrapolated values, and -# masked values. -class Test__regrid__extrapolation_modes(tests.IrisTest): - values_by_method = { - "linear": [ - [np.nan, np.nan, 2, 3, np.nan], - [np.nan, np.nan, 6, 7, np.nan], - [8, 9, 10, 11, np.nan], - ], - "nearest": [ - [np.nan, 1, 2, 3, np.nan], - [4, 5, 6, 7, np.nan], - [8, 9, 10, 11, np.nan], - ], - } - - extrapolate_values_by_method = { - "linear": [ - [np.nan, np.nan, 2, 3, 4], - [np.nan, np.nan, 6, 7, 8], - [8, 9, 10, 11, 12], - ], - "nearest": [[np.nan, 1, 2, 3, 3], [4, 5, 6, 7, 7], [8, 9, 10, 11, 11]], - } - - def setUp(self): - self.methods = ("linear", "nearest") - self.test_dtypes = [ - np.dtype(spec) - for spec in ("i1", "i2", "i4", "i8", "f2", "f4", "f8") - ] - - def _regrid(self, data, method, extrapolation_mode=None): - x = np.arange(4) - y = np.arange(3) - x_coord = DimCoord(x) - y_coord = DimCoord(y) - x_dim, y_dim = 1, 0 - grid_x, grid_y = np.meshgrid(np.arange(5), y) - kwargs = dict(method=method) - if extrapolation_mode is not None: - kwargs["extrapolation_mode"] = extrapolation_mode - result = regrid( - data, x_dim, y_dim, x_coord, y_coord, grid_x, grid_y, **kwargs - ) - return result - - def test_default_ndarray(self): - # NaN -> NaN - # Extrapolated -> NaN - data = np.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - for method in self.methods: - result = self._regrid(data, method) - self.assertNotIsInstance(result, ma.MaskedArray) - expected = self.values_by_method[method] - self.assertArrayEqual(result, expected) - - def test_default_maskedarray(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> Masked - data = ma.arange(12, dtype=np.float64).reshape(3, 4) - 
data[0, 0] = np.nan - data[2, 3] = ma.masked - for method in self.methods: - result = self._regrid(data, method) - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 1], [0, 0, 0, 0, 1], [0, 0, 0, 1, 1]] - values = self.values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_default_maskedarray_none_masked(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> N/A - data = ma.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - for method in self.methods: - result = self._regrid(data, method) - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 1], [0, 0, 0, 0, 1], [0, 0, 0, 0, 1]] - values = self.values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_default_maskedarray_none_masked_expanded(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> N/A - data = ma.arange(12, dtype=np.float64).reshape(3, 4) - # Make sure the mask has been expanded - data.mask = False - data[0, 0] = np.nan - for method in self.methods: - result = self._regrid(data, method) - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 1], [0, 0, 0, 0, 1], [0, 0, 0, 0, 1]] - values = self.values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_method_ndarray(self): - # NaN -> NaN - # Extrapolated -> linear - data = np.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - for method in self.methods: - result = self._regrid(data, method, "extrapolate") - self.assertNotIsInstance(result, ma.MaskedArray) - expected = self.extrapolate_values_by_method[method] - self.assertArrayEqual(result, expected) - - def test_method_maskedarray(self): - # NaN -> NaN - # Extrapolated -> linear - # Masked -> Masked - data = ma.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - data[2, 3] = 
ma.masked - for method in self.methods: - result = self._regrid(data, method, "extrapolate") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 1, 1]] - values = self.extrapolate_values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_nan_ndarray(self): - # NaN -> NaN - # Extrapolated -> NaN - data = np.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - for method in self.methods: - result = self._regrid(data, method, "nan") - self.assertNotIsInstance(result, ma.MaskedArray) - expected = self.values_by_method[method] - self.assertArrayEqual(result, expected) - - def test_nan_maskedarray(self): - # NaN -> NaN - # Extrapolated -> NaN - # Masked -> Masked - data = ma.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - data[2, 3] = ma.masked - for method in self.methods: - result = self._regrid(data, method, "nan") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 1, 0]] - values = self.values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_error_ndarray(self): - # Values irrelevant - the function raises an error. - data = np.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - for method in self.methods: - with self.assertRaisesRegex(ValueError, "out of bounds"): - self._regrid(data, method, "error") - - def test_error_maskedarray(self): - # Values irrelevant - the function raises an error. 
- data = ma.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - data[2, 3] = ma.masked - for method in self.methods: - with self.assertRaisesRegex(ValueError, "out of bounds"): - self._regrid(data, method, "error") - - def test_mask_ndarray(self): - # NaN -> NaN - # Extrapolated -> Masked (this is different from all the other - # modes) - data = np.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - for method in self.methods: - result = self._regrid(data, method, "mask") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 1], [0, 0, 0, 0, 1], [0, 0, 0, 0, 1]] - values = self.values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_mask_maskedarray(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> Masked - data = ma.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - data[2, 3] = ma.masked - for method in self.methods: - result = self._regrid(data, method, "mask") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 1], [0, 0, 0, 0, 1], [0, 0, 0, 1, 1]] - values = self.values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_nanmask_ndarray(self): - # NaN -> NaN - # Extrapolated -> NaN - data = np.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - for method in self.methods: - result = self._regrid(data, method, "nanmask") - self.assertNotIsInstance(result, ma.MaskedArray) - expected = self.values_by_method[method] - self.assertArrayEqual(result, expected) - - def test_nanmask_maskedarray(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> Masked - data = ma.arange(12, dtype=np.float64).reshape(3, 4) - data[0, 0] = np.nan - data[2, 3] = ma.masked - for method in self.methods: - result = self._regrid(data, method, "nanmask") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0, 1], [0, 0, 
0, 0, 1], [0, 0, 0, 1, 1]] - values = self.values_by_method[method] - expected = ma.MaskedArray(values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_invalid(self): - data = np.arange(12, dtype=np.float64).reshape(3, 4) - emsg = "Invalid extrapolation mode" - for method in self.methods: - with self.assertRaisesRegex(ValueError, emsg): - self._regrid(data, method, "BOGUS") - - def test_method_result_types(self): - # Check return types from basic calculation on floats and ints. - for method in self.methods: - result_dtypes = {} - for source_dtype in self.test_dtypes: - data = np.arange(12, dtype=source_dtype).reshape(3, 4) - result = self._regrid(data, method) - result_dtypes[source_dtype] = result.dtype - if method == "linear": - # Linear results are promoted to float. - expected_types_mapping = { - test_dtype: np.promote_types(test_dtype, np.float16) - for test_dtype in self.test_dtypes - } - if method == "nearest": - # Nearest results are the same as the original data. - expected_types_mapping = { - test_dtype: test_dtype for test_dtype in self.test_dtypes - } - self.assertEqual(result_dtypes, expected_types_mapping) - - -class Test___call___lazy(tests.IrisTest): - def setUp(self): - self.cube = lat_lon_cube() - # Regridder method and extrapolation-mode. - self.args = ("linear", "mask") - self.regridder = Regridder(self.cube, self.cube, *self.args) - self.lazy_cube = self.cube.copy(da.asarray(self.cube.data)) - self.lazy_regridder = Regridder( - self.lazy_cube, self.lazy_cube, *self.args - ) - - def test_lazy_regrid(self): - result = self.lazy_regridder(self.lazy_cube) - self.assertTrue(result.has_lazy_data()) - expected = self.regridder(self.cube) - self.assertTrue(result == expected) - - -class Test___call____invalid_types(tests.IrisTest): - def setUp(self): - self.cube = lat_lon_cube() - # Regridder method and extrapolation-mode. 
- self.args = ("linear", "mask") - self.regridder = Regridder(self.cube, self.cube, *self.args) - - def test_src_as_array(self): - arr = np.zeros((3, 4)) - with self.assertRaises(TypeError): - Regridder(arr, self.cube, *self.args) - with self.assertRaises(TypeError): - self.regridder(arr) - - def test_grid_as_array(self): - with self.assertRaises(TypeError): - Regridder(self.cube, np.zeros((3, 4)), *self.args) - - def test_src_as_int(self): - with self.assertRaises(TypeError): - Regridder(42, self.cube, *self.args) - with self.assertRaises(TypeError): - self.regridder(42) - - def test_grid_as_int(self): - with self.assertRaises(TypeError): - Regridder(self.cube, 42, *self.args) - - -class Test___call____missing_coords(tests.IrisTest): - def setUp(self): - self.args = ("linear", "mask") - - def ok_bad(self, coord_names): - # Deletes the named coords from `bad`. - ok = lat_lon_cube() - bad = lat_lon_cube() - for name in coord_names: - bad.remove_coord(name) - return ok, bad - - def test_src_missing_lat(self): - ok, bad = self.ok_bad(["latitude"]) - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - regridder = Regridder(ok, ok, *self.args) - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_missing_lat(self): - ok, bad = self.ok_bad(["latitude"]) - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - def test_src_missing_lon(self): - ok, bad = self.ok_bad(["longitude"]) - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - regridder = Regridder(ok, ok, *self.args) - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_missing_lon(self): - ok, bad = self.ok_bad(["longitude"]) - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - def test_src_missing_lat_lon(self): - ok, bad = self.ok_bad(["latitude", "longitude"]) - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - regridder = Regridder(ok, ok, *self.args) - with 
self.assertRaises(ValueError): - regridder(bad) - - def test_grid_missing_lat_lon(self): - ok, bad = self.ok_bad(["latitude", "longitude"]) - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - -class Test___call____not_dim_coord(tests.IrisTest): - def setUp(self): - self.args = ("linear", "mask") - - def ok_bad(self, coord_name): - # Demotes the named DimCoord on `bad` to an AuxCoord. - ok = lat_lon_cube() - bad = lat_lon_cube() - coord = bad.coord(coord_name) - dims = bad.coord_dims(coord) - bad.remove_coord(coord_name) - aux_coord = AuxCoord.from_coord(coord) - bad.add_aux_coord(aux_coord, dims) - return ok, bad - - def test_src_with_aux_lat(self): - ok, bad = self.ok_bad("latitude") - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - regridder = Regridder(ok, ok, *self.args) - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_with_aux_lat(self): - ok, bad = self.ok_bad("latitude") - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - def test_src_with_aux_lon(self): - ok, bad = self.ok_bad("longitude") - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - regridder = Regridder(ok, ok, *self.args) - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_with_aux_lon(self): - ok, bad = self.ok_bad("longitude") - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - -class Test___call____not_dim_coord_share(tests.IrisTest): - def setUp(self): - self.args = ("linear", "mask") - - def ok_bad(self): - # Make lat/lon share a single dimension on `bad`. 
- ok = lat_lon_cube() - bad = lat_lon_cube() - lat = bad.coord("latitude") - bad = bad[0, : lat.shape[0]] - bad.remove_coord("latitude") - bad.add_aux_coord(lat, 0) - return ok, bad - - def test_src_shares_dim(self): - ok, bad = self.ok_bad() - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - regridder = Regridder(ok, ok, *self.args) - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_shares_dim(self): - ok, bad = self.ok_bad() - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - -class Test___call____bad_georeference(tests.IrisTest): - def setUp(self): - self.args = ("linear", "mask") - - def ok_bad(self, lat_cs, lon_cs): - # Updates `bad` to use the given coordinate systems. - ok = lat_lon_cube() - bad = lat_lon_cube() - bad.coord("latitude").coord_system = lat_cs - bad.coord("longitude").coord_system = lon_cs - return ok, bad - - def test_src_no_cs(self): - ok, bad = self.ok_bad(None, None) - regridder = Regridder(bad, ok, *self.args) - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_no_cs(self): - ok, bad = self.ok_bad(None, None) - regridder = Regridder(ok, bad, *self.args) - with self.assertRaises(ValueError): - regridder(ok) - - def test_src_one_cs(self): - ok, bad = self.ok_bad(None, GeogCS(6371000)) - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - - def test_grid_one_cs(self): - ok, bad = self.ok_bad(None, GeogCS(6371000)) - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - def test_src_inconsistent_cs(self): - ok, bad = self.ok_bad(GeogCS(6370000), GeogCS(6371000)) - with self.assertRaises(ValueError): - Regridder(bad, ok, *self.args) - - def test_grid_inconsistent_cs(self): - ok, bad = self.ok_bad(GeogCS(6370000), GeogCS(6371000)) - with self.assertRaises(ValueError): - Regridder(ok, bad, *self.args) - - -class Test___call____bad_angular_units(tests.IrisTest): - def ok_bad(self): - # Changes the longitude coord to radians 
on `bad`. - ok = lat_lon_cube() - bad = lat_lon_cube() - bad.coord("longitude").units = "radians" - return ok, bad - - def test_src_radians(self): - ok, bad = self.ok_bad() - regridder = Regridder(bad, ok, "linear", "mask") - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_radians(self): - ok, bad = self.ok_bad() - with self.assertRaises(ValueError): - Regridder(ok, bad, "linear", "mask") - - -def uk_cube(): - data = np.arange(12, dtype=np.float32).reshape(3, 4) - uk = Cube(data) - cs = OSGB() - y_coord = DimCoord( - np.arange(3), "projection_y_coordinate", units="m", coord_system=cs - ) - x_coord = DimCoord( - np.arange(4), "projection_x_coordinate", units="m", coord_system=cs - ) - uk.add_dim_coord(y_coord, 0) - uk.add_dim_coord(x_coord, 1) - surface = AuxCoord(data * 10, "surface_altitude", units="m") - uk.add_aux_coord(surface, (0, 1)) - uk.add_aux_factory(HybridHeightFactory(orography=surface)) - return uk - - -class Test___call____bad_linear_units(tests.IrisTest): - def ok_bad(self): - # Defines `bad` with an x coordinate in km. - ok = lat_lon_cube() - bad = uk_cube() - bad.coord(axis="x").units = "km" - return ok, bad - - def test_src_km(self): - ok, bad = self.ok_bad() - regridder = Regridder(bad, ok, "linear", "mask") - with self.assertRaises(ValueError): - regridder(bad) - - def test_grid_km(self): - ok, bad = self.ok_bad() - with self.assertRaises(ValueError): - Regridder(ok, bad, "linear", "mask") - - -class Test___call____no_coord_systems(tests.IrisTest): - # Test behaviour in the absence of any coordinate systems. - - def setUp(self): - self.mode = "mask" - self.methods = ("linear", "nearest") - - def remove_coord_systems(self, cube): - for coord in cube.coords(): - coord.coord_system = None - - def test_ok(self): - # Ensure regridding is supported when the coordinate definitions match. - # NB. We change the coordinate *values* to ensure that does not - # prevent the regridding operation. 
- src = uk_cube() - self.remove_coord_systems(src) - grid = src.copy() - for coord in grid.dim_coords: - coord.points = coord.points + 1 - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - for coord in result.dim_coords: - self.assertEqual(coord, grid.coord(coord)) - expected = ma.arange(12).reshape((3, 4)) + 5 - expected[:, 3] = ma.masked - expected[2, :] = ma.masked - self.assertMaskedArrayEqual(result.data, expected) - - def test_matching_units(self): - # Check we are insensitive to the units provided they match. - # NB. We change the coordinate *values* to ensure that does not - # prevent the regridding operation. - src = uk_cube() - self.remove_coord_systems(src) - # Move to unusual units (i.e. not metres or degrees). - for coord in src.dim_coords: - coord.units = "feet" - grid = src.copy() - for coord in grid.dim_coords: - coord.points = coord.points + 1 - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - for coord in result.dim_coords: - self.assertEqual(coord, grid.coord(coord)) - expected = ma.arange(12).reshape((3, 4)) + 5 - expected[:, 3] = ma.masked - expected[2, :] = ma.masked - self.assertMaskedArrayEqual(result.data, expected) - - def test_different_units(self): - src = uk_cube() - self.remove_coord_systems(src) - # Move to unusual units (i.e. not metres or degrees). - for coord in src.coords(): - coord.units = "feet" - grid = src.copy() - grid.coord("projection_y_coordinate").units = "yards" - # We change the coordinate *values* to ensure that does not - # prevent the regridding operation. 
- for coord in grid.dim_coords: - coord.points = coord.points + 1 - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - emsg = "matching coordinate metadata" - with self.assertRaisesRegex(ValueError, emsg): - regridder(src) - - def test_coord_metadata_mismatch(self): - # Check for failure when coordinate definitions differ. - uk = uk_cube() - self.remove_coord_systems(uk) - lat_lon = lat_lon_cube() - self.remove_coord_systems(lat_lon) - for method in self.methods: - regridder = Regridder(uk, lat_lon, method, self.mode) - with self.assertRaises(ValueError): - regridder(uk) - - -class Test___call____extrapolation_modes(tests.IrisTest): - values = [ - [np.nan, 6, 7, np.nan], - [9, 10, 11, np.nan], - [np.nan, np.nan, np.nan, np.nan], - ] - - extrapolate_values_by_method = { - "linear": [[np.nan, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]], - "nearest": [[np.nan, 6, 7, 7], [9, 10, 11, 11], [9, 10, 11, 11]], - } - - surface_values = [ - [50, 60, 70, np.nan], - [90, 100, 110, np.nan], - [np.nan, np.nan, np.nan, np.nan], - ] - - def setUp(self): - self.methods = ("linear", "nearest") - - def _ndarray_cube(self, method): - assert method in self.methods - src = uk_cube() - index = (0, 0) if method == "linear" else (1, 1) - src.data[index] = np.nan - return src - - def _masked_cube(self, method): - assert method in self.methods - src = uk_cube() - src.data = ma.asarray(src.data) - nan_index = (0, 0) if method == "linear" else (1, 1) - mask_index = (2, 3) - src.data[nan_index] = np.nan - src.data[mask_index] = ma.masked - return src - - def _regrid(self, src, method, extrapolation_mode="mask"): - grid = src.copy() - for coord in grid.dim_coords: - coord.points = coord.points + 1 - regridder = Regridder(src, grid, method, extrapolation_mode) - result = regridder(src) - - surface = result.coord("surface_altitude").points - self.assertNotIsInstance(surface, ma.MaskedArray) - self.assertArrayEqual(surface, self.surface_values) - - return 
result.data - - def test_default_ndarray(self): - # NaN -> NaN - # Extrapolated -> Masked - for method in self.methods: - src = self._ndarray_cube(method) - result = self._regrid(src, method) - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 1], [0, 0, 0, 1], [1, 1, 1, 1]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_default_maskedarray(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> Masked - for method in self.methods: - src = self._masked_cube(method) - result = self._regrid(src, method) - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 1], [0, 0, 1, 1], [1, 1, 1, 1]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_default_maskedarray_none_masked(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> N/A - for method in self.methods: - src = uk_cube() - src.data = ma.asarray(src.data) - index = (0, 0) if method == "linear" else (1, 1) - src.data[index] = np.nan - result = self._regrid(src, method) - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 1], [0, 0, 0, 1], [1, 1, 1, 1]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_default_maskedarray_none_masked_expanded(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> N/A - for method in self.methods: - src = uk_cube() - src.data = ma.asarray(src.data) - # Make sure the mask has been expanded - src.data.mask = False - index = (0, 0) if method == "linear" else (1, 1) - src.data[index] = np.nan - result = self._regrid(src, method) - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 1], [0, 0, 0, 1], [1, 1, 1, 1]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_method_ndarray(self): - # NaN -> NaN - # Extrapolated -> linear - for method in self.methods: - src = 
self._ndarray_cube(method) - result = self._regrid(src, method, "extrapolate") - self.assertNotIsInstance(result, ma.MaskedArray) - expected = self.extrapolate_values_by_method[method] - self.assertArrayEqual(result, expected) - - def test_nan_ndarray(self): - # NaN -> NaN - # Extrapolated -> NaN - for method in self.methods: - src = self._ndarray_cube(method) - result = self._regrid(src, method, "nan") - self.assertNotIsInstance(result, ma.MaskedArray) - self.assertArrayEqual(result, self.values) - - def test_nan_maskedarray(self): - # NaN -> NaN - # Extrapolated -> NaN - # Masked -> Masked - for method in self.methods: - src = self._masked_cube(method) - result = self._regrid(src, method, "nan") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_error_ndarray(self): - # Values irrelevant - the function raises an error. - for method in self.methods: - src = self._ndarray_cube(method) - with self.assertRaisesRegex(ValueError, "out of bounds"): - self._regrid(src, method, "error") - - def test_error_maskedarray(self): - # Values irrelevant - the function raises an error. 
- for method in self.methods: - src = self._masked_cube(method) - with self.assertRaisesRegex(ValueError, "out of bounds"): - self._regrid(src, method, "error") - - def test_mask_ndarray(self): - # NaN -> NaN - # Extrapolated -> Masked (this is different from all the other - # modes) - for method in self.methods: - src = self._ndarray_cube(method) - result = self._regrid(src, method, "mask") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 1], [0, 0, 0, 1], [1, 1, 1, 1]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_mask_maskedarray(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> Masked - for method in self.methods: - src = self._masked_cube(method) - result = self._regrid(src, method, "mask") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 1], [0, 0, 1, 1], [1, 1, 1, 1]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_nanmask_ndarray(self): - # NaN -> NaN - # Extrapolated -> NaN - for method in self.methods: - src = self._ndarray_cube(method) - result = self._regrid(src, method, "nanmask") - self.assertNotIsInstance(result, ma.MaskedArray) - self.assertArrayEqual(result, self.values) - - def test_nanmask_maskedarray(self): - # NaN -> NaN - # Extrapolated -> Masked - # Masked -> Masked - for method in self.methods: - src = self._masked_cube(method) - result = self._regrid(src, method, "nanmask") - self.assertIsInstance(result, ma.MaskedArray) - mask = [[0, 0, 0, 1], [0, 0, 1, 1], [1, 1, 1, 1]] - expected = ma.MaskedArray(self.values, mask) - self.assertMaskedArrayEqual(result, expected) - - def test_invalid(self): - src = uk_cube() - emsg = "Invalid extrapolation mode" - for method in self.methods: - with self.assertRaisesRegex(ValueError, emsg): - self._regrid(src, method, "BOGUS") - - -@tests.skip_data -class Test___call____rotated_to_lat_lon(tests.IrisTest): - def setUp(self): - self.src = 
realistic_4d()[:5, :2, ::40, ::30] - self.mode = "mask" - self.methods = ("linear", "nearest") - - def test_single_point(self): - src = self.src[0, 0] - grid = global_pp()[:1, :1] - # These coordinate values have been derived by converting the - # rotated coordinates of src[1, 1] into lat/lon by using cs2cs. - grid.coord("longitude").points = -3.144870 - grid.coord("latitude").points = 52.406444 - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - self.assertEqual(src.data[1, 1], result.data) - - def test_transposed_src(self): - # The source dimensions are in a non-standard order. - src = self.src - src.transpose([3, 1, 2, 0]) - grid = self._grid_subset() - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - result.transpose([3, 1, 2, 0]) - cml = RESULT_DIR + ("{}_subset.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def _grid_subset(self): - # The destination grid points are entirely contained within the - # src grid points. 
- grid = global_pp()[:4, :5] - grid.coord("longitude").points = np.linspace(-3.182, -3.06, 5) - grid.coord("latitude").points = np.linspace(52.372, 52.44, 4) - return grid - - def test_reversed(self): - src = self.src - grid = self._grid_subset() - - for method in self.methods: - cml = RESULT_DIR + ("{}_subset.cml".format(method),) - regridder = Regridder(src, grid[::-1], method, self.mode) - result = regridder(src) - self.assertCMLApproxData(result[:, :, ::-1], cml) - - sample = src[:, :, ::-1] - regridder = Regridder(sample, grid[::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, ::-1], cml) - - sample = src[:, :, :, ::-1] - regridder = Regridder(sample, grid[::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, ::-1], cml) - - sample = src[:, :, ::-1, ::-1] - regridder = Regridder(sample, grid[::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, ::-1], cml) - - regridder = Regridder(src, grid[:, ::-1], method, self.mode) - result = regridder(src) - self.assertCMLApproxData(result[:, :, :, ::-1], cml) - - sample = src[:, :, ::-1] - regridder = Regridder(sample, grid[:, ::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, :, ::-1], cml) - - sample = src[:, :, :, ::-1] - regridder = Regridder(sample, grid[:, ::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, :, ::-1], cml) - - sample = src[:, :, ::-1, ::-1] - regridder = Regridder(sample, grid[:, ::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, :, ::-1], cml) - - regridder = Regridder(src, grid[::-1, ::-1], method, self.mode) - result = regridder(src) - self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml) - - sample = src[:, :, ::-1] - regridder = Regridder(sample, grid[::-1, ::-1], method, self.mode) - result = regridder(sample) - 
self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml) - - sample = src[:, :, :, ::-1] - regridder = Regridder(sample, grid[::-1, ::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml) - - sample = src[:, :, ::-1, ::-1] - regridder = Regridder(sample, grid[::-1, ::-1], method, self.mode) - result = regridder(sample) - self.assertCMLApproxData(result[:, :, ::-1, ::-1], cml) - - def test_grid_subset(self): - # The destination grid points are entirely contained within the - # src grid points. - grid = self._grid_subset() - for method in self.methods: - regridder = Regridder(self.src, grid, method, self.mode) - result = regridder(self.src) - cml = RESULT_DIR + ("{}_subset.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def _big_grid(self): - grid = self._grid_subset() - big_grid = Cube(np.zeros((5, 10, 3, 4, 5))) - big_grid.add_dim_coord(grid.coord("latitude"), 3) - big_grid.add_dim_coord(grid.coord("longitude"), 4) - return big_grid - - def test_grid_subset_big(self): - # Add some extra dimensions to the destination Cube and - # these should be safely ignored. - big_grid = self._big_grid() - for method in self.methods: - regridder = Regridder(self.src, big_grid, method, self.mode) - result = regridder(self.src) - cml = RESULT_DIR + ("{}_subset.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def test_grid_subset_big_transposed(self): - # The order of the grid's dimensions (including the X and Y - # dimensions) must not affect the result. - big_grid = self._big_grid() - big_grid.transpose([4, 0, 3, 1, 2]) - for method in self.methods: - regridder = Regridder(self.src, big_grid, method, self.mode) - result = regridder(self.src) - cml = RESULT_DIR + ("{}_subset.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def test_grid_subset_anon(self): - # Must cope OK with anonymous source dimensions. 
- src = self.src - src.remove_coord("time") - grid = self._grid_subset() - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - cml = RESULT_DIR + ("{}_subset_anon.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def test_grid_subset_missing_data_1(self): - # The destination grid points are entirely contained within the - # src grid points AND we have missing data. - src = self.src - src.data = ma.MaskedArray(src.data) - src.data[:, :, 0, 0] = ma.masked - grid = self._grid_subset() - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - cml = RESULT_DIR + ("{}_subset_masked_1.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def test_grid_subset_missing_data_2(self): - # The destination grid points are entirely contained within the - # src grid points AND we have missing data. - src = self.src - src.data = ma.MaskedArray(src.data) - src.data[:, :, 1, 2] = ma.masked - grid = self._grid_subset() - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - cml = RESULT_DIR + ("{}_subset_masked_2.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def test_grid_partial_overlap(self): - # The destination grid points are partially contained within the - # src grid points. - grid = global_pp()[:4, :4] - grid.coord("longitude").points = np.linspace(-3.3, -3.06, 4) - grid.coord("latitude").points = np.linspace(52.377, 52.43, 4) - for method in self.methods: - regridder = Regridder(self.src, grid, method, self.mode) - result = regridder(self.src) - cml = RESULT_DIR + ("{}_partial_overlap.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def test_grid_no_overlap(self): - # The destination grid points are NOT contained within the - # src grid points. 
- grid = global_pp()[:4, :4] - grid.coord("longitude").points = np.linspace(-3.3, -3.2, 4) - grid.coord("latitude").points = np.linspace(52.377, 52.43, 4) - for method in self.methods: - regridder = Regridder(self.src, grid, method, self.mode) - result = regridder(self.src) - self.assertCMLApproxData(result, RESULT_DIR + ("no_overlap.cml",)) - - def test_grid_subset_missing_data_aux(self): - # The destination grid points are entirely contained within the - # src grid points AND we have missing data on the aux coordinate. - src = self.src - src.coord("surface_altitude").points[1, 2] = ma.masked - grid = self._grid_subset() - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - cml = RESULT_DIR + ("{}_masked_altitude.cml".format(method),) - self.assertCMLApproxData(result, cml) - - -@tests.skip_data -class Test___call____NOP(tests.IrisTest): - def setUp(self): - # The destination grid points are exactly the same as the - # src grid points. 
- self.src = realistic_4d()[:5, :2, ::40, ::30] - self.lazy_src = self.src.copy( - da.asarray(self.src.data, chunks=(1, 2) + self.src.shape[2:]) - ) - self.grid = self.src.copy() - - def test_nop__linear(self): - regridder = Regridder(self.src, self.grid, "linear", "mask") - result = regridder(self.src) - self.assertEqual(result, self.src) - - def test_nop__nearest(self): - regridder = Regridder(self.src, self.grid, "nearest", "mask") - result = regridder(self.src) - self.assertEqual(result, self.src) - - def test_nop__linear_lazy(self): - regridder = Regridder(self.lazy_src, self.grid, "linear", "mask") - result = regridder(self.lazy_src) - self.assertEqual(result, self.lazy_src) - - def test_nop__nearest_lazy(self): - regridder = Regridder(self.lazy_src, self.grid, "nearest", "mask") - result = regridder(self.lazy_src) - self.assertEqual(result, self.lazy_src) - - -@tests.skip_data -class Test___call____circular(tests.IrisTest): - def setUp(self): - src = global_pp()[::10, ::10] - level_height = AuxCoord( - 0, - long_name="level_height", - units="m", - attributes={"positive": "up"}, - ) - sigma = AuxCoord(1, long_name="sigma", units="1") - surface_altitude = AuxCoord( - (src.data - src.data.min()) * 50, "surface_altitude", units="m" - ) - src.add_aux_coord(level_height) - src.add_aux_coord(sigma) - src.add_aux_coord(surface_altitude, [0, 1]) - hybrid_height = HybridHeightFactory( - level_height, sigma, surface_altitude - ) - src.add_aux_factory(hybrid_height) - self.src = src - - grid = global_pp()[:4, :4] - grid.coord("longitude").points = grid.coord("longitude").points - 5 - self.grid = grid - self.mode = "mask" - self.methods = ("linear", "nearest") - - def test_non_circular(self): - # Non-circular src -> non-circular grid - for method in self.methods: - regridder = Regridder(self.src, self.grid, method, self.mode) - result = regridder(self.src) - self.assertFalse(result.coord("longitude").circular) - cml = RESULT_DIR + ("{}_non_circular.cml".format(method),) 
- self.assertCMLApproxData(result, cml) - - def _check_circular_results(self, src_cube, missingmask=""): - results = [] - for method in self.methods: - regridder = Regridder(src_cube, self.grid, method, self.mode) - result = regridder(src_cube) - results.append(result) - self.assertFalse(result.coord("longitude").circular) - cml = RESULT_DIR + ( - "{}_circular_src{}.cml".format(method, missingmask), - ) - self.assertCMLApproxData(result, cml) - return results - - def test_circular_src(self): - # Circular src -> non-circular grid, standard test. - src = self.src - src.coord("longitude").circular = True - self._check_circular_results(src) - - def test_circular_src__masked_missingmask(self): - # Test the special case where src_cube.data.mask is just *False*, - # instead of being an array. - src = self.src - src.coord("longitude").circular = True - src.data = ma.MaskedArray(src.data) - self.assertEqual(src.data.mask, False) - method_results = self._check_circular_results(src, "missingmask") - for method_result in method_results: - self.assertIsInstance(method_result.data.mask, np.ndarray) - self.assertTrue(np.all(method_result.data.mask == np.array(False))) - - def test_circular_src__masked(self): - # Test that masked source points produce the expected masked results. - - # Define source + destination sample points. - # Note: these are chosen to avoid any marginal edge-cases, such as - # where a destination value matches a source point (for 'linear'), or a - # half-way point (for 'nearest'). - src_x = [0.0, 60.0, 120.0, 180.0, 240.0, 300.0] - dst_x = [20.0, 80.0, 140.0, 200.0, 260.0, 320.0] - src_y = [100.0, 200.0, 300.0, 400.0, 500.0] - dst_y = [40.0, 140.0, 240.0, 340.0, 440.0, 540.0] - - # Define the expected result masks for the tested methods, - # when just the middle source point is masked... 
- result_masks = { - "nearest": np.array( - [ - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 1, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - ], - dtype=bool, - ), - "linear": np.array( - [ - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 1, 1, 0, 0, 0], - [0, 1, 1, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0], - ], - dtype=bool, - ), - } - - # Cook up some distinctive data values. - src_nx, src_ny, dst_nx, dst_ny = ( - len(dd) for dd in (src_x, src_y, dst_x, dst_y) - ) - data_x = np.arange(src_nx).reshape((1, src_nx)) - data_y = np.arange(src_ny).reshape((src_ny, 1)) - data = 3.0 + data_x + 20.0 * data_y - - # Make src and dst test cubes. - def make_2d_cube(x_points, y_points, data): - cube = Cube(data) - y_coord = DimCoord( - y_points, standard_name="latitude", units="degrees" - ) - x_coord = DimCoord( - x_points, standard_name="longitude", units="degrees" - ) - x_coord.circular = True - cube.add_dim_coord(y_coord, 0) - cube.add_dim_coord(x_coord, 1) - return cube - - src_cube_full = make_2d_cube(src_x, src_y, data) - dst_cube = make_2d_cube(dst_x, dst_y, np.zeros((dst_ny, dst_nx))) - - src_cube_masked = src_cube_full.copy() - src_cube_masked.data = ma.array( - src_cube_masked.data, mask=np.zeros((src_ny, src_nx)) - ) - - # Mask the middle source point, and give it a huge underlying data - # value to ensure that it does not take any part in the results. - src_cube_masked.data[2, 2] = 1e19 - src_cube_masked.data.mask[2, 2] = True - - # Test results against the unmasked operation, for each method. - for method in self.methods: - regridder = Regridder( - src_cube_full, dst_cube, method, extrapolation_mode="nan" - ) - result_basic = regridder(src_cube_full) - result_masked = regridder(src_cube_masked) - # Check we get a masked result - self.assertIsInstance(result_masked.data, ma.MaskedArray) - # Check that the result matches the basic one, except for being - # masked at the specific expected points. 
- expected_result_data = ma.array(result_basic.data) - expected_result_data.mask = result_masks[method] - self.assertMaskedArrayEqual( - result_masked.data, expected_result_data - ) - - def test_circular_grid(self): - # Non-circular src -> circular grid - grid = self.grid - grid.coord("longitude").circular = True - for method in self.methods: - regridder = Regridder(self.src, grid, method, self.mode) - result = regridder(self.src) - self.assertTrue(result.coord("longitude").circular) - cml = RESULT_DIR + ("{}_circular_grid.cml".format(method),) - self.assertCMLApproxData(result, cml) - - def test_circular_src_and_grid(self): - # Circular src -> circular grid - src = self.src - src.coord("longitude").circular = True - grid = self.grid - grid.coord("longitude").circular = True - for method in self.methods: - regridder = Regridder(src, grid, method, self.mode) - result = regridder(src) - self.assertTrue(result.coord("longitude").circular) - cml = RESULT_DIR + ("{}_both_circular.cml".format(method),) - self.assertCMLApproxData(result, cml) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py b/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py deleted file mode 100644 index 68db839d06..0000000000 --- a/lib/iris/tests/unit/analysis/regrid/test__CurvilinearRegridder.py +++ /dev/null @@ -1,314 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.analysis._regrid.CurvilinearRegridder`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.analysis._regrid import CurvilinearRegridder as Regridder -from iris.analysis.cartography import rotate_pole -from iris.coord_systems import GeogCS, RotatedGeogCS -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube -from iris.fileformats.pp import EARTH_RADIUS -from iris.tests.stock import global_pp, lat_lon_cube - -RESULT_DIR = ("analysis", "regrid") - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.src_grid = lat_lon_cube() - self.bad = np.ones((3, 4)) - self.weights = np.ones(self.src_grid.shape, self.src_grid.dtype) - - def test_bad_src_type(self): - with self.assertRaisesRegex(TypeError, "'src_grid_cube'"): - Regridder(self.bad, self.src_grid, self.weights) - - def test_bad_grid_type(self): - with self.assertRaisesRegex(TypeError, "'target_grid_cube'"): - Regridder(self.src_grid, self.bad, self.weights) - - -@tests.skip_data -class Test___call__(tests.IrisTest): - def setUp(self): - self.func_setup = ( - "iris.analysis._regrid." - "_regrid_weighted_curvilinear_to_rectilinear__prepare" - ) - self.func_operate = ( - "iris.analysis._regrid." - "_regrid_weighted_curvilinear_to_rectilinear__perform" - ) - # Define a test source grid and target grid, basically the same. - self.src_grid = global_pp() - self.tgt_grid = global_pp() - # Modify the names so we can tell them apart. - self.src_grid.rename("src_grid") - self.tgt_grid.rename("TARGET_GRID") - # Replace the source-grid x and y coords with equivalent 2d versions. 
- x_coord = self.src_grid.coord("longitude") - y_coord = self.src_grid.coord("latitude") - (nx,) = x_coord.shape - (ny,) = y_coord.shape - xx, yy = np.meshgrid(x_coord.points, y_coord.points) - self.src_grid.remove_coord(x_coord) - self.src_grid.remove_coord(y_coord) - x_coord_2d = AuxCoord( - xx, - standard_name=x_coord.standard_name, - units=x_coord.units, - coord_system=x_coord.coord_system, - ) - y_coord_2d = AuxCoord( - yy, - standard_name=y_coord.standard_name, - units=y_coord.units, - coord_system=y_coord.coord_system, - ) - self.src_grid.add_aux_coord(x_coord_2d, (0, 1)) - self.src_grid.add_aux_coord(y_coord_2d, (0, 1)) - self.weights = np.ones(self.src_grid.shape, self.src_grid.dtype) - # Define an actual, dummy cube for the internal partial result, so we - # can do a cubelist merge on it, which is too complicated to mock out. - self.dummy_slice_result = Cube([1]) - - def test_same_src_as_init(self): - # Check the regridder call calls the underlying routines as expected. - src_grid = self.src_grid - target_grid = self.tgt_grid - regridder = Regridder(src_grid, target_grid, self.weights) - with mock.patch( - self.func_setup, return_value=mock.sentinel.regrid_info - ) as patch_setup: - with mock.patch( - self.func_operate, return_value=self.dummy_slice_result - ) as patch_operate: - result = regridder(src_grid) - patch_setup.assert_called_once_with( - src_grid, self.weights, target_grid - ) - patch_operate.assert_called_once_with( - src_grid, mock.sentinel.regrid_info - ) - # The result is a re-merged version of the internal result, so it is - # therefore '==' but not the same object. - self.assertEqual(result, self.dummy_slice_result) - - def test_no_weights(self): - # Check we can use the regridder without weights. 
- src_grid = self.src_grid - target_grid = self.tgt_grid - regridder = Regridder(src_grid, target_grid) - with mock.patch( - self.func_setup, return_value=mock.sentinel.regrid_info - ) as patch_setup: - with mock.patch( - self.func_operate, return_value=self.dummy_slice_result - ): - _ = regridder(src_grid) - patch_setup.assert_called_once_with(src_grid, None, target_grid) - - def test_diff_src_from_init(self): - # Check we can call the regridder with a different cube from the one we - # built it with. - src_grid = self.src_grid - target_grid = self.tgt_grid - regridder = Regridder(src_grid, target_grid, self.weights) - # Provide a "different" cube for the actual regrid. - different_src_cube = self.src_grid.copy() - # Rename so we can distinguish them. - different_src_cube.rename("Different_source") - with mock.patch( - self.func_setup, return_value=mock.sentinel.regrid_info - ): - with mock.patch( - self.func_operate, return_value=self.dummy_slice_result - ) as patch_operate: - _ = regridder(different_src_cube) - patch_operate.assert_called_once_with( - different_src_cube, mock.sentinel.regrid_info - ) - - def test_caching(self): - # Check that it calculates regrid info just once, and re-uses it in - # subsequent calls. 
- src_grid = self.src_grid - target_grid = self.tgt_grid - regridder = Regridder(src_grid, target_grid, self.weights) - different_src_cube = self.src_grid.copy() - different_src_cube.rename("Different_source") - with mock.patch( - self.func_setup, return_value=mock.sentinel.regrid_info - ) as patch_setup: - with mock.patch( - self.func_operate, return_value=self.dummy_slice_result - ) as patch_operate: - _ = regridder(src_grid) - _ = regridder(different_src_cube) - patch_setup.assert_called_once_with( - src_grid, self.weights, target_grid - ) - self.assertEqual(len(patch_operate.call_args_list), 2) - self.assertEqual( - patch_operate.call_args_list, - [ - mock.call(src_grid, mock.sentinel.regrid_info), - mock.call(different_src_cube, mock.sentinel.regrid_info), - ], - ) - - -@tests.skip_data -class Test___call____bad_src(tests.IrisTest): - def setUp(self): - self.src_grid = global_pp() - y = self.src_grid.coord("latitude") - x = self.src_grid.coord("longitude") - self.src_grid.remove_coord("latitude") - self.src_grid.remove_coord("longitude") - self.src_grid.add_aux_coord(y, 0) - self.src_grid.add_aux_coord(x, 1) - weights = np.ones(self.src_grid.shape, self.src_grid.dtype) - self.regridder = Regridder(self.src_grid, self.src_grid, weights) - - def test_bad_src_type(self): - with self.assertRaisesRegex(TypeError, "must be a Cube"): - self.regridder(np.ones((3, 4))) - - def test_bad_src_shape(self): - with self.assertRaisesRegex( - ValueError, "not defined on the same source grid" - ): - self.regridder(self.src_grid[::2, ::2]) - - -class Test__call__multidimensional(tests.IrisTest): - def test_multidim(self): - # Testing with >2D data to demonstrate correct operation over - # additional non-XY dimensions (including data masking), which is - # handled by the PointInCell wrapper class. - - # Define a simple target grid first, in plain latlon coordinates. 
- plain_latlon_cs = GeogCS(EARTH_RADIUS) - grid_x_coord = DimCoord( - points=[15.0, 25.0, 35.0], - bounds=[[10.0, 20.0], [20.0, 30.0], [30.0, 40.0]], - standard_name="longitude", - units="degrees", - coord_system=plain_latlon_cs, - ) - grid_y_coord = DimCoord( - points=[-30.0, -50.0], - bounds=[[-20.0, -40.0], [-40.0, -60.0]], - standard_name="latitude", - units="degrees", - coord_system=plain_latlon_cs, - ) - grid_cube = Cube(np.zeros((2, 3))) - grid_cube.add_dim_coord(grid_y_coord, 0) - grid_cube.add_dim_coord(grid_x_coord, 1) - - # Define some key points in true-lat/lon thta have known positions - # First 3x2 points in the centre of each output cell. - x_centres, y_centres = np.meshgrid( - grid_x_coord.points, grid_y_coord.points - ) - # An extra point also falling in cell 1, 1 - x_in11, y_in11 = 26.3, -48.2 - # An extra point completely outside the target grid - x_out, y_out = 70.0, -40.0 - - # Define a rotated coord system for the source data - pole_lon, pole_lat = -125.3, 53.4 - src_cs = RotatedGeogCS( - grid_north_pole_latitude=pole_lat, - grid_north_pole_longitude=pole_lon, - ellipsoid=plain_latlon_cs, - ) - - # Concatenate all the testpoints in a flat array, and find the rotated - # equivalents. - xx = list(x_centres.flat[:]) + [x_in11, x_out] - yy = list(y_centres.flat[:]) + [y_in11, y_out] - xx, yy = rotate_pole( - lons=np.array(xx), - lats=np.array(yy), - pole_lon=pole_lon, - pole_lat=pole_lat, - ) - # Define handy index numbers for all these. - i00, i01, i02, i10, i11, i12, i_in, i_out = range(8) - - # Build test data in the shape Z,YX = (3, 8) - data = [ - [1, 2, 3, 11, 12, 13, 7, 99], - [1, 2, 3, 11, 12, 13, 7, 99], - [7, 6, 5, 51, 52, 53, 12, 1], - ] - mask = [ - [0, 0, 0, 0, 0, 0, 0, 0], - [0, 1, 0, 0, 0, 0, 1, 0], - [0, 0, 0, 0, 0, 0, 0, 0], - ] - src_data = np.ma.array(data, mask=mask, dtype=float) - - # Make the source cube. 
- src_cube = Cube(src_data) - src_x = AuxCoord( - xx, - standard_name="grid_longitude", - units="degrees", - coord_system=src_cs, - ) - src_y = AuxCoord( - yy, - standard_name="grid_latitude", - units="degrees", - coord_system=src_cs, - ) - src_z = DimCoord(np.arange(3), long_name="z") - src_cube.add_dim_coord(src_z, 0) - src_cube.add_aux_coord(src_x, 1) - src_cube.add_aux_coord(src_y, 1) - # Add in some extra metadata, to ensure it gets copied over. - src_cube.add_aux_coord(DimCoord([0], long_name="extra_scalar_coord")) - src_cube.attributes["extra_attr"] = 12.3 - - # Define what the expected answers should be, shaped (3, 2, 3). - expected_result = [ - [[1.0, 2.0, 3.0], [11.0, 0.5 * (12 + 7), 13.0]], - [[1.0, -999, 3.0], [11.0, 12.0, 13.0]], - [[7.0, 6.0, 5.0], [51.0, 0.5 * (52 + 12), 53.0]], - ] - expected_result = np.ma.masked_less(expected_result, 0) - - # Perform the calculation with the regridder. - regridder = Regridder(src_cube, grid_cube) - - # Check all is as expected. - result = regridder(src_cube) - self.assertEqual(result.coord("z"), src_cube.coord("z")) - self.assertEqual( - result.coord("extra_scalar_coord"), - src_cube.coord("extra_scalar_coord"), - ) - self.assertEqual( - result.coord("longitude"), grid_cube.coord("longitude") - ) - self.assertEqual(result.coord("latitude"), grid_cube.coord("latitude")) - self.assertMaskedArrayAlmostEqual(result.data, expected_result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py b/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py deleted file mode 100644 index 67218194c2..0000000000 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.analysis.scipy_interpolate` module.""" diff --git a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py b/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py deleted file mode 100644 index f4c6623ad1..0000000000 --- a/lib/iris/tests/unit/analysis/scipy_interpolate/test__RegularGridInterpolator.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the -:func:`iris.analysis._scipy_interpolate._RegularGridInterpolator` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -from scipy.sparse.csr import csr_matrix - -from iris.analysis._scipy_interpolate import _RegularGridInterpolator -import iris.tests.stock as stock - - -class Test(tests.IrisTest): - def setUp(self): - # Load a source cube, then generate an interpolator instance, calculate - # the interpolation weights and set up a target grid. 
- self.cube = stock.simple_2d() - x_points = self.cube.coord("bar").points - y_points = self.cube.coord("foo").points - self.interpolator = _RegularGridInterpolator( - [x_points, y_points], - self.cube.data, - method="linear", - bounds_error=False, - fill_value=None, - ) - newx = x_points + 0.7 - newy = y_points + 0.7 - - d_0 = self.cube.data[0, 0] - d_1 = self.cube.data[0, 1] - d_2 = self.cube.data[1, 0] - d_3 = self.cube.data[1, 1] - px_0, px_1 = x_points[0], x_points[1] - py_0, py_1 = y_points[0], y_points[1] - px_t = px_0 + 0.7 - py_t = py_0 + 0.7 - dyt_0 = self._interpolate_point(py_t, py_0, py_1, d_0, d_1) - dyt_1 = self._interpolate_point(py_t, py_0, py_1, d_2, d_3) - self.test_increment = self._interpolate_point( - px_t, px_0, px_1, dyt_0, dyt_1 - ) - - xv, yv = np.meshgrid(newy, newx) - self.tgrid = np.dstack((yv, xv)) - self.weights = self.interpolator.compute_interp_weights(self.tgrid) - - @staticmethod - def _interpolate_point(p_t, p_0, p_1, d_0, d_1): - return d_0 + (d_1 - d_0) * ((p_t - p_0) / (p_1 - p_0)) - - def test_compute_interp_weights(self): - weights = self.weights - self.assertIsInstance(weights, tuple) - self.assertEqual(len(weights), 5) - self.assertEqual(weights[0], self.tgrid.shape) - self.assertEqual(weights[1], "linear") - self.assertIsInstance(weights[2], csr_matrix) - - def test__evaluate_linear_sparse(self): - interpolator = self.interpolator - weights = self.weights - output_data = interpolator._evaluate_linear_sparse(weights[2]) - test_data = self.cube.data.reshape(-1) + self.test_increment - self.assertArrayAlmostEqual(output_data, test_data) - - def test_interp_using_pre_computed_weights(self): - interpolator = self.interpolator - weights = self.weights - output_data = interpolator.interp_using_pre_computed_weights(weights) - test_data = self.cube.data + self.test_increment - self.assertEqual(output_data.shape, self.cube.data.shape) - self.assertArrayAlmostEqual(output_data, test_data) - - -if __name__ == "__main__": - 
tests.main() diff --git a/lib/iris/tests/unit/analysis/stats/__init__.py b/lib/iris/tests/unit/analysis/stats/__init__.py deleted file mode 100644 index 0b896d648d..0000000000 --- a/lib/iris/tests/unit/analysis/stats/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.analysis.stats` module.""" diff --git a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py b/lib/iris/tests/unit/analysis/stats/test_pearsonr.py deleted file mode 100644 index 63cf4e2abe..0000000000 --- a/lib/iris/tests/unit/analysis/stats/test_pearsonr.py +++ /dev/null @@ -1,176 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.analysis.stats.pearsonr` function.""" - -# Import iris tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -import iris -import iris.analysis.stats as stats -from iris.exceptions import CoordinateNotFoundError - - -@tests.skip_data -class Test(tests.IrisTest): - def setUp(self): - # 3D cubes: - cube_temp = iris.load_cube( - tests.get_data_path( - ("NetCDF", "global", "xyt", "SMALL_total_column_co2.nc") - ) - ) - self.cube_a = cube_temp[0:6] - self.cube_b = cube_temp[20:26] - self.cube_b.replace_coord(self.cube_a.coord("time").copy()) - cube_temp = self.cube_a.copy() - cube_temp.coord("latitude").guess_bounds() - cube_temp.coord("longitude").guess_bounds() - self.weights = iris.analysis.cartography.area_weights(cube_temp) - - def test_perfect_corr(self): - r = stats.pearsonr(self.cube_a, self.cube_a, ["latitude", "longitude"]) - self.assertArrayEqual(r.data, np.array([1.0] * 6)) - - def test_perfect_corr_all_dims(self): - r = stats.pearsonr(self.cube_a, self.cube_a) - self.assertArrayEqual(r.data, np.array([1.0])) - - def test_incompatible_cubes(self): - with self.assertRaises(ValueError): - stats.pearsonr( - self.cube_a[:, 0, :], self.cube_b[0, :, :], "longitude" - ) - - def test_compatible_cubes(self): - r = stats.pearsonr(self.cube_a, self.cube_b, ["latitude", "longitude"]) - self.assertArrayAlmostEqual( - r.data, - [ - 0.81114936, - 0.81690538, - 0.79833135, - 0.81118674, - 0.79745386, - 0.81278484, - ], - ) - - def test_broadcast_cubes(self): - r1 = stats.pearsonr( - self.cube_a, self.cube_b[0, :, :], ["latitude", "longitude"] - ) - r2 = stats.pearsonr( - self.cube_b[0, :, :], self.cube_a, ["latitude", "longitude"] - ) - r_by_slice = [ - stats.pearsonr( - self.cube_a[i, :, :], - self.cube_b[0, :, :], - ["latitude", "longitude"], - ).data - for i in range(6) - ] - self.assertArrayEqual(r1.data, np.array(r_by_slice)) - self.assertArrayEqual(r2.data, np.array(r_by_slice)) - - def test_compatible_cubes_weighted(self): - r = stats.pearsonr( - self.cube_a, self.cube_b, ["latitude", 
"longitude"], self.weights - ) - self.assertArrayAlmostEqual( - r.data, - [ - 0.79105429, - 0.79988078, - 0.78825089, - 0.79925653, - 0.79009810, - 0.80115292, - ], - ) - - def test_broadcast_cubes_weighted(self): - r = stats.pearsonr( - self.cube_a, - self.cube_b[0, :, :], - ["latitude", "longitude"], - weights=self.weights[0, :, :], - ) - r_by_slice = [ - stats.pearsonr( - self.cube_a[i, :, :], - self.cube_b[0, :, :], - ["latitude", "longitude"], - weights=self.weights[0, :, :], - ).data - for i in range(6) - ] - self.assertArrayAlmostEqual(r.data, np.array(r_by_slice)) - - def test_weight_error(self): - with self.assertRaises(ValueError): - stats.pearsonr( - self.cube_a, - self.cube_b[0, :, :], - ["latitude", "longitude"], - weights=self.weights, - ) - - def test_non_existent_coord(self): - with self.assertRaises(CoordinateNotFoundError): - stats.pearsonr(self.cube_a, self.cube_b, "bad_coord") - - def test_mdtol(self): - cube_small = self.cube_a[:, 0, 0] - cube_small_masked = cube_small.copy() - cube_small_masked.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 1, 1, 1], dtype=bool) - ) - r1 = stats.pearsonr(cube_small, cube_small_masked) - r2 = stats.pearsonr(cube_small, cube_small_masked, mdtol=0.49) - self.assertArrayAlmostEqual(r1.data, np.array([0.74586593])) - self.assertMaskedArrayEqual(r2.data, ma.array([0], mask=[True])) - - def test_common_mask_simple(self): - cube_small = self.cube_a[:, 0, 0] - cube_small_masked = cube_small.copy() - cube_small_masked.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 1, 1, 1], dtype=bool) - ) - r = stats.pearsonr(cube_small, cube_small_masked, common_mask=True) - self.assertArrayAlmostEqual(r.data, np.array([1.0])) - - def test_common_mask_broadcast(self): - cube_small = self.cube_a[:, 0, 0] - cube_small_2d = self.cube_a[:, 0:2, 0] - cube_small.data = ma.array( - cube_small.data, mask=np.array([0, 0, 0, 0, 0, 1], dtype=bool) - ) - cube_small_2d.data = ma.array( - np.tile(cube_small.data[:, 
np.newaxis], 2), - mask=np.zeros((6, 2), dtype=bool), - ) - # 2d mask varies on unshared coord: - cube_small_2d.data.mask[0, 1] = 1 - r = stats.pearsonr( - cube_small, - cube_small_2d, - weights=self.weights[:, 0, 0], - common_mask=True, - ) - self.assertArrayAlmostEqual(r.data, np.array([1.0, 1.0])) - # 2d mask does not vary on unshared coord: - cube_small_2d.data.mask[0, 0] = 1 - r = stats.pearsonr(cube_small, cube_small_2d, common_mask=True) - self.assertArrayAlmostEqual(r.data, np.array([1.0, 1.0])) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_Aggregator.py b/lib/iris/tests/unit/analysis/test_Aggregator.py deleted file mode 100644 index 08180e61d0..0000000000 --- a/lib/iris/tests/unit/analysis/test_Aggregator.py +++ /dev/null @@ -1,322 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.analysis.Aggregator` class instance.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np -import numpy.ma as ma - -from iris.analysis import Aggregator -from iris.exceptions import LazyAggregatorError - - -class Test_aggregate(tests.IrisTest): - # These unit tests don't call a data aggregation function, they call a - # mocked one i.e. the return values of the mocked data aggregation - # function don't matter, only how these are dealt with by the aggregate - # method. 
- def setUp(self): - self.TEST = Aggregator("test", None) - self.array = ma.array( - [[1, 2, 3], [4, 5, 6]], - mask=[[False, True, False], [True, False, False]], - dtype=np.float64, - ) - self.expected_result_axis0 = ma.array([1, 2, 3], mask=None) - self.expected_result_axis1 = ma.array([4, 5], mask=None) - - def test_masked_notol(self): - # Providing masked array with no tolerance keyword (mdtol) provided. - axis = 0 - mock_return = self.expected_result_axis0.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis) - self.assertMaskedArrayEqual(result, self.expected_result_axis0) - mock_method.assert_called_once_with(self.array, axis=axis) - - axis = 1 - mock_return = self.expected_result_axis1.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis) - self.assertMaskedArrayEqual(result, self.expected_result_axis1) - mock_method.assert_called_once_with(self.array, axis=axis) - - def test_masked_above_tol(self): - # Providing masked array with a high tolerance (mdtol) provided. 
- axis = 0 - mock_return = self.expected_result_axis0.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis, mdtol=0.55) - self.assertMaskedArrayEqual(result, self.expected_result_axis0) - mock_method.assert_called_once_with(self.array, axis=axis) - - axis = 1 - mock_return = self.expected_result_axis1.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis, mdtol=0.55) - self.assertMaskedArrayEqual(result, self.expected_result_axis1) - mock_method.assert_called_once_with(self.array, axis=axis) - - def test_masked_below_tol(self): - # Providing masked array with a tolerance on missing values, low - # enough to modify the resulting mask for axis 0. - axis = 0 - result_axis_0 = self.expected_result_axis0.copy() - result_axis_0.mask = np.array([True, True, False]) - mock_return = ma.array([1, 2, 3], mask=None) - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis, mdtol=0.45) - self.assertMaskedArrayAlmostEqual(result, result_axis_0) - mock_method.assert_called_once_with(self.array, axis=axis) - - axis = 1 - mock_return = self.expected_result_axis1.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis, mdtol=0.45) - self.assertMaskedArrayEqual(result, self.expected_result_axis1) - mock_method.assert_called_once_with(self.array, axis=axis) - - def test_masked_below_tol_alt(self): - # Providing masked array with a tolerance on missing values, low - # enough to modify the resulting mask for axis 1. 
- axis = 1 - result_axis_1 = self.expected_result_axis1.copy() - result_axis_1.mask = np.array([True, True]) - mock_return = self.expected_result_axis1.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis, mdtol=0.1) - self.assertMaskedArrayAlmostEqual(result, result_axis_1) - mock_method.assert_called_once_with(self.array, axis=axis) - - def test_unmasked_with_mdtol(self): - # Providing aggregator with an unmasked array and tolerance specified - # for missing data - ensure that result is unaffected. - data = self.array.data - - axis = 0 - mock_return = self.expected_result_axis0.data.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(data, axis, mdtol=0.5) - self.assertArrayAlmostEqual(result, mock_return.copy()) - mock_method.assert_called_once_with(data, axis=axis) - - axis = 1 - mock_return = self.expected_result_axis1.data.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(data, axis, mdtol=0.5) - self.assertArrayAlmostEqual(result, mock_return.copy()) - mock_method.assert_called_once_with(data, axis=axis) - - def test_unmasked(self): - # Providing aggregator with an unmasked array and no additional keyword - # arguments ensure that result is unaffected. 
- data = self.array.data - - axis = 0 - mock_return = self.expected_result_axis0.data.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(data, axis) - self.assertArrayAlmostEqual(result, mock_return.copy()) - mock_method.assert_called_once_with(data, axis=axis) - - axis = 1 - mock_return = self.expected_result_axis1.data.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(data, axis) - self.assertArrayAlmostEqual(result, mock_return.copy()) - mock_method.assert_called_once_with(data, axis=axis) - - def test_returning_scalar_mdtol(self): - # Test the case when the data aggregation function returns a scalar and - # turns it into a masked array. - axis = -1 - data = self.array.flatten() - mock_return = 2 - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(data, axis, mdtol=1) - self.assertMaskedArrayEqual(result, ma.array(2, mask=False)) - mock_method.assert_called_once_with(data, axis=axis) - - def test_returning_scalar_mdtol_alt(self): - # Test the case when the data aggregation function returns a scalar - # with no tolerance for missing data values and turns it into a masked - # array. - axis = -1 - data = self.array.flatten() - mock_return = 2 - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(data, axis, mdtol=0) - self.assertMaskedArrayEqual(result, ma.array(2, mask=True)) - mock_method.assert_called_once_with(data, axis=axis) - - def test_returning_non_masked_array_from_masked_array(self): - # Providing a masked array, call_func returning a non-masked array, - # resulting in a masked array output. 
- axis = 0 - mock_return = self.expected_result_axis0.data.copy() - result_axis_0 = ma.array(mock_return, mask=[True, True, False]) - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis, mdtol=0.45) - self.assertMaskedArrayAlmostEqual(result, result_axis_0) - mock_method.assert_called_once_with(self.array, axis=axis) - - axis = 1 - mock_return = self.expected_result_axis1.data.copy() - with mock.patch.object( - self.TEST, "call_func", return_value=mock_return - ) as mock_method: - result = self.TEST.aggregate(self.array, axis, mdtol=0.45) - self.assertMaskedArrayEqual(result, self.expected_result_axis1) - mock_method.assert_called_once_with(self.array, axis=axis) - - def test_kwarg_pass_through_no_kwargs(self): - call_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - aggregator = Aggregator("", call_func) - aggregator.aggregate(data, axis) - call_func.assert_called_once_with(data, axis=axis) - - def test_kwarg_pass_through_call_kwargs(self): - call_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - kwargs = dict(wibble="wobble", foo="bar") - aggregator = Aggregator("", call_func) - aggregator.aggregate(data, axis, **kwargs) - call_func.assert_called_once_with(data, axis=axis, **kwargs) - - def test_kwarg_pass_through_init_kwargs(self): - call_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - kwargs = dict(wibble="wobble", foo="bar") - aggregator = Aggregator("", call_func, **kwargs) - aggregator.aggregate(data, axis) - call_func.assert_called_once_with(data, axis=axis, **kwargs) - - def test_kwarg_pass_through_combined_kwargs(self): - call_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - init_kwargs = dict(wibble="wobble", var=1.0) - call_kwargs = dict(foo="foo", var=0.5) - aggregator = Aggregator("", call_func, **init_kwargs) - aggregator.aggregate(data, axis, 
**call_kwargs) - expected_kwargs = init_kwargs.copy() - expected_kwargs.update(call_kwargs) - call_func.assert_called_once_with(data, axis=axis, **expected_kwargs) - - def test_mdtol_intercept(self): - call_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - aggregator = Aggregator("", call_func) - aggregator.aggregate(data, axis, wibble="wobble", mdtol=0.8) - call_func.assert_called_once_with(data, axis=axis, wibble="wobble") - - def test_no_lazy_func(self): - dummy_agg = Aggregator("custom_op", lambda x: 1) - expected = "custom_op aggregator does not support lazy operation" - with self.assertRaisesRegex(LazyAggregatorError, expected): - dummy_agg.lazy_aggregate(np.arange(10), axis=0) - - -class Test_update_metadata(tests.IrisTest): - def test_no_units_change(self): - # If the Aggregator has no units_func then the units should be - # left unchanged. - aggregator = Aggregator("", None) - cube = mock.Mock(units=mock.sentinel.units) - aggregator.update_metadata(cube, []) - self.assertIs(cube.units, mock.sentinel.units) - - def test_units_change(self): - # If the Aggregator has a units_func then the new units should - # be defined by its return value. 
- units_func = mock.Mock(return_value=mock.sentinel.new_units) - aggregator = Aggregator("", None, units_func) - cube = mock.Mock(units=mock.sentinel.units) - aggregator.update_metadata(cube, []) - units_func.assert_called_once_with(mock.sentinel.units) - self.assertEqual(cube.units, mock.sentinel.new_units) - - -class Test_lazy_aggregate(tests.IrisTest): - def test_kwarg_pass_through_no_kwargs(self): - lazy_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - aggregator = Aggregator("", None, lazy_func=lazy_func) - aggregator.lazy_aggregate(data, axis) - lazy_func.assert_called_once_with(data, axis=axis) - - def test_kwarg_pass_through_call_kwargs(self): - lazy_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - kwargs = dict(wibble="wobble", foo="bar") - aggregator = Aggregator("", None, lazy_func=lazy_func) - aggregator.lazy_aggregate(data, axis, **kwargs) - lazy_func.assert_called_once_with(data, axis=axis, **kwargs) - - def test_kwarg_pass_through_init_kwargs(self): - lazy_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - kwargs = dict(wibble="wobble", foo="bar") - aggregator = Aggregator("", None, lazy_func=lazy_func, **kwargs) - aggregator.lazy_aggregate(data, axis) - lazy_func.assert_called_once_with(data, axis=axis, **kwargs) - - def test_kwarg_pass_through_combined_kwargs(self): - lazy_func = mock.Mock() - data = mock.sentinel.data - axis = mock.sentinel.axis - init_kwargs = dict(wibble="wobble", var=1.0) - call_kwargs = dict(foo="foo", var=0.5) - aggregator = Aggregator("", None, lazy_func=lazy_func, **init_kwargs) - aggregator.lazy_aggregate(data, axis, **call_kwargs) - expected_kwargs = init_kwargs.copy() - expected_kwargs.update(call_kwargs) - lazy_func.assert_called_once_with(data, axis=axis, **expected_kwargs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_AreaWeighted.py b/lib/iris/tests/unit/analysis/test_AreaWeighted.py 
deleted file mode 100644 index 2454e0817c..0000000000 --- a/lib/iris/tests/unit/analysis/test_AreaWeighted.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.analysis.AreaWeighted`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.analysis import AreaWeighted - - -class Test(tests.IrisTest): - def check_call(self, mdtol=None): - # Check that `iris.analysis.AreaWeighted` correctly calls an - # `iris.analysis._area_weighted.AreaWeightedRegridder` object. - if mdtol is None: - area_weighted = AreaWeighted() - mdtol = 1 - else: - area_weighted = AreaWeighted(mdtol=mdtol) - self.assertEqual(area_weighted.mdtol, mdtol) - - with mock.patch( - "iris.analysis.AreaWeightedRegridder", - return_value=mock.sentinel.regridder, - ) as awr: - regridder = area_weighted.regridder( - mock.sentinel.src, mock.sentinel.target - ) - - awr.assert_called_once_with( - mock.sentinel.src, mock.sentinel.target, mdtol=mdtol - ) - self.assertIs(regridder, mock.sentinel.regridder) - - def test_default(self): - self.check_call() - - def test_specified_mdtol(self): - self.check_call(0.5) - - def test_invalid_high_mdtol(self): - msg = "mdtol must be in range 0 - 1" - with self.assertRaisesRegex(ValueError, msg): - AreaWeighted(mdtol=1.2) - - def test_invalid_low_mdtol(self): - msg = "mdtol must be in range 0 - 1" - with self.assertRaisesRegex(ValueError, msg): - AreaWeighted(mdtol=-0.2) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_COUNT.py b/lib/iris/tests/unit/analysis/test_COUNT.py deleted file mode 100644 index 96274f7cd0..0000000000 --- a/lib/iris/tests/unit/analysis/test_COUNT.py +++ /dev/null @@ 
-1,111 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.COUNT` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.analysis import COUNT -from iris.coords import DimCoord -from iris.cube import Cube - - -class Test_basics(tests.IrisTest): - def setUp(self): - data = np.array([1, 2, 3, 4, 5]) - coord = DimCoord([6, 7, 8, 9, 10], long_name="foo") - self.cube = Cube(data) - self.cube.add_dim_coord(coord, 0) - self.lazy_cube = Cube(as_lazy_data(data)) - self.lazy_cube.add_dim_coord(coord, 0) - self.func = lambda x: x >= 3 - - def test_name(self): - self.assertEqual(COUNT.name(), "count") - - def test_no_function(self): - exp_emsg = r"function must be a callable. 
Got <.* 'NoneType'>" - with self.assertRaisesRegex(TypeError, exp_emsg): - COUNT.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - - def test_not_callable(self): - with self.assertRaisesRegex(TypeError, "function must be a callable"): - COUNT.aggregate(self.cube.data, axis=0, function="wibble") - - def test_lazy_not_callable(self): - with self.assertRaisesRegex(TypeError, "function must be a callable"): - COUNT.lazy_aggregate( - self.lazy_cube.lazy_data(), axis=0, function="wibble" - ) - - def test_collapse(self): - data = COUNT.aggregate(self.cube.data, axis=0, function=self.func) - self.assertArrayEqual(data, [3]) - - def test_lazy(self): - lazy_data = COUNT.lazy_aggregate( - self.lazy_cube.lazy_data(), axis=0, function=self.func - ) - self.assertTrue(is_lazy_data(lazy_data)) - - def test_lazy_collapse(self): - lazy_data = COUNT.lazy_aggregate( - self.lazy_cube.lazy_data(), axis=0, function=self.func - ) - self.assertArrayEqual(lazy_data.compute(), [3]) - - -class Test_units_func(tests.IrisTest): - def test(self): - self.assertIsNotNone(COUNT.units_func) - new_units = COUNT.units_func(None) - self.assertEqual(new_units, 1) - - -class Test_masked(tests.IrisTest): - def setUp(self): - self.cube = Cube(ma.masked_equal([1, 2, 3, 4, 5], 3)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - self.func = lambda x: x >= 3 - - def test_ma(self): - data = COUNT.aggregate(self.cube.data, axis=0, function=self.func) - self.assertArrayEqual(data, [2]) - - -class Test_lazy_masked(tests.IrisTest): - def setUp(self): - lazy_data = as_lazy_data(ma.masked_equal([1, 2, 3, 4, 5], 3)) - self.lazy_cube = Cube(lazy_data) - self.lazy_cube.add_dim_coord( - DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0 - ) - self.func = lambda x: x >= 3 - - def test_ma(self): - lazy_data = COUNT.lazy_aggregate( - self.lazy_cube.lazy_data(), axis=0, function=self.func - ) - self.assertTrue(is_lazy_data(lazy_data)) - self.assertArrayEqual(lazy_data.compute(), [2]) - - -class 
Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(COUNT.aggregate_shape(**kwargs), shape) - kwargs = dict(wibble="wobble") - self.assertTupleEqual(COUNT.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_Linear.py b/lib/iris/tests/unit/analysis/test_Linear.py deleted file mode 100644 index 27565f8c51..0000000000 --- a/lib/iris/tests/unit/analysis/test_Linear.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.analysis.Linear`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.analysis import Linear - - -def create_scheme(mode=None): - kwargs = {} - if mode is not None: - kwargs["extrapolation_mode"] = mode - return Linear(**kwargs) - - -class Test_extrapolation_mode(tests.IrisTest): - def check_mode(self, mode): - linear = create_scheme(mode) - self.assertEqual(linear.extrapolation_mode, mode) - - def test_default(self): - linear = Linear() - self.assertEqual(linear.extrapolation_mode, "linear") - - def test_extrapolate(self): - self.check_mode("extrapolate") - - def test_linear(self): - self.check_mode("linear") - - def test_nan(self): - self.check_mode("nan") - - def test_error(self): - self.check_mode("error") - - def test_mask(self): - self.check_mode("mask") - - def test_nanmask(self): - self.check_mode("nanmask") - - def test_invalid(self): - with self.assertRaisesRegex(ValueError, "Extrapolation mode"): - Linear("bogus") - - -class Test_interpolator(tests.IrisTest): - def check_mode(self, mode=None): - linear = create_scheme(mode) - - # Check that calling 
`linear.interpolator(...)` returns an - # instance of RectilinearInterpolator which has been created - # using the correct arguments. - with mock.patch( - "iris.analysis.RectilinearInterpolator", - return_value=mock.sentinel.interpolator, - ) as ri: - interpolator = linear.interpolator( - mock.sentinel.cube, mock.sentinel.coords - ) - if mode is None or mode == "linear": - expected_mode = "extrapolate" - else: - expected_mode = mode - ri.assert_called_once_with( - mock.sentinel.cube, mock.sentinel.coords, "linear", expected_mode - ) - self.assertIs(interpolator, mock.sentinel.interpolator) - - def test_default(self): - self.check_mode() - - def test_extrapolate(self): - self.check_mode("extrapolate") - - def test_linear(self): - self.check_mode("linear") - - def test_nan(self): - self.check_mode("nan") - - def test_error(self): - self.check_mode("error") - - def test_mask(self): - self.check_mode("mask") - - def test_nanmask(self): - self.check_mode("nanmask") - - -class Test_regridder(tests.IrisTest): - def check_mode(self, mode=None): - linear = create_scheme(mode) - - # Check that calling `linear.regridder(...)` returns an instance - # of RectilinearRegridder which has been created using the correct - # arguments. 
- with mock.patch( - "iris.analysis.RectilinearRegridder", - return_value=mock.sentinel.regridder, - ) as lr: - regridder = linear.regridder( - mock.sentinel.src, mock.sentinel.target - ) - if mode is None or mode == "linear": - expected_mode = "extrapolate" - else: - expected_mode = mode - lr.assert_called_once_with( - mock.sentinel.src, mock.sentinel.target, "linear", expected_mode - ) - self.assertIs(regridder, mock.sentinel.regridder) - - def test_default(self): - self.check_mode() - - def test_extrapolate(self): - self.check_mode("extrapolate") - - def test_linear(self): - self.check_mode("linear") - - def test_nan(self): - self.check_mode("nan") - - def test_error(self): - self.check_mode("error") - - def test_mask(self): - self.check_mode("mask") - - def test_nanmask(self): - self.check_mode("nanmask") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_MAX.py b/lib/iris/tests/unit/analysis/test_MAX.py deleted file mode 100644 index 91d4daf1f0..0000000000 --- a/lib/iris/tests/unit/analysis/test_MAX.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.MAX` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.analysis import MAX -from iris.coords import DimCoord -from iris.cube import Cube - - -class Test_basics(tests.IrisTest): - def setUp(self): - data = np.array([1, 2, 3, 4, 5]) - coord = DimCoord([6, 7, 8, 9, 10], long_name="foo") - self.cube = Cube(data) - self.cube.add_dim_coord(coord, 0) - self.lazy_cube = Cube(as_lazy_data(data)) - self.lazy_cube.add_dim_coord(coord, 0) - - def test_name(self): - self.assertEqual(MAX.name(), "maximum") - - def test_collapse(self): - data = MAX.aggregate(self.cube.data, axis=0) - self.assertArrayEqual(data, [5]) - - def test_lazy(self): - lazy_data = MAX.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertTrue(is_lazy_data(lazy_data)) - - def test_lazy_collapse(self): - lazy_data = MAX.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertArrayEqual(lazy_data.compute(), [5]) - - -class Test_masked(tests.IrisTest): - def setUp(self): - self.cube = Cube(ma.masked_greater([1, 2, 3, 4, 5], 3)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - - def test_ma(self): - data = MAX.aggregate(self.cube.data, axis=0) - self.assertArrayEqual(data, [3]) - - -class Test_lazy_masked(tests.IrisTest): - def setUp(self): - masked_data = ma.masked_greater([1, 2, 3, 4, 5], 3) - self.cube = Cube(as_lazy_data(masked_data)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - - def test_lazy_ma(self): - lazy_data = MAX.lazy_aggregate(self.cube.lazy_data(), axis=0) - self.assertTrue(is_lazy_data(lazy_data)) - self.assertArrayEqual(lazy_data.compute(), [3]) - - -class Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(MAX.aggregate_shape(**kwargs), shape) - kwargs = dict(wibble="wobble") - self.assertTupleEqual(MAX.aggregate_shape(**kwargs), shape) - - -if __name__ == 
"__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_MEAN.py b/lib/iris/tests/unit/analysis/test_MEAN.py deleted file mode 100644 index 18e2b4ca6c..0000000000 --- a/lib/iris/tests/unit/analysis/test_MEAN.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.MEAN` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_concrete_data, as_lazy_data -from iris.analysis import MEAN - - -class Test_lazy_aggregate(tests.IrisTest): - def setUp(self): - self.data = ma.arange(12).reshape(3, 4) - self.data.mask = [[0, 0, 0, 1], [0, 0, 1, 1], [0, 1, 1, 1]] - # --> fractions of masked-points in columns = [0, 1/3, 2/3, 1] - self.array = as_lazy_data(self.data) - self.axis = 0 - self.expected_masked = ma.mean(self.data, axis=self.axis) - - def test_mdtol_default(self): - # Default operation is "mdtol=1" --> unmasked if *any* valid points. 
- # --> output column masks = [0, 0, 0, 1] - agg = MEAN.lazy_aggregate(self.array, axis=self.axis) - masked_result = as_concrete_data(agg) - self.assertMaskedArrayAlmostEqual(masked_result, self.expected_masked) - - def test_mdtol_belowall(self): - # Mdtol=0.25 --> masked columns = [0, 1, 1, 1] - agg = MEAN.lazy_aggregate(self.array, axis=self.axis, mdtol=0.25) - masked_result = as_concrete_data(agg) - expected_masked = self.expected_masked - expected_masked.mask = [False, True, True, True] - self.assertMaskedArrayAlmostEqual(masked_result, expected_masked) - - def test_mdtol_intermediate(self): - # mdtol=0.5 --> masked columns = [0, 0, 1, 1] - agg = MEAN.lazy_aggregate(self.array, axis=self.axis, mdtol=0.5) - masked_result = as_concrete_data(agg) - expected_masked = self.expected_masked - expected_masked.mask = [False, False, True, True] - self.assertMaskedArrayAlmostEqual(masked_result, expected_masked) - - def test_mdtol_aboveall(self): - # mdtol=0.75 --> masked columns = [0, 0, 0, 1] - # In this case, effectively the same as mdtol=None. 
- agg = MEAN.lazy_aggregate(self.array, axis=self.axis, mdtol=0.75) - masked_result = as_concrete_data(agg) - self.assertMaskedArrayAlmostEqual(masked_result, self.expected_masked) - - def test_multi_axis(self): - data = np.arange(24.0).reshape((2, 3, 4)) - collapse_axes = (0, 2) - lazy_data = as_lazy_data(data) - agg = MEAN.lazy_aggregate(lazy_data, axis=collapse_axes) - result = as_concrete_data(agg) - expected = np.mean(data, axis=collapse_axes) - self.assertArrayAllClose(result, expected) - - def test_last_axis(self): - # From setUp: - # self.data.mask = [[0, 0, 0, 1], - # [0, 0, 1, 1], - # [0, 1, 1, 1]] - # --> fractions of masked-points in ROWS = [1/4, 1/2, 3/4] - axis = -1 - agg = MEAN.lazy_aggregate(self.array, axis=axis, mdtol=0.51) - expected_masked = ma.mean(self.data, axis=-1) - expected_masked = np.ma.masked_array(expected_masked, [0, 0, 1]) - masked_result = as_concrete_data(agg) - self.assertMaskedArrayAlmostEqual(masked_result, expected_masked) - - def test_all_axes_belowtol(self): - agg = MEAN.lazy_aggregate(self.array, axis=None, mdtol=0.75) - expected_masked = ma.mean(self.data) - masked_result = as_concrete_data(agg) - self.assertMaskedArrayAlmostEqual(masked_result, expected_masked) - - def test_all_axes_abovetol(self): - agg = MEAN.lazy_aggregate(self.array, axis=None, mdtol=0.45) - expected_masked = ma.masked_less([0.0], 1) - masked_result = as_concrete_data(agg) - self.assertMaskedArrayAlmostEqual(masked_result, expected_masked) - - -class Test_name(tests.IrisTest): - def test(self): - self.assertEqual(MEAN.name(), "mean") - - -class Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(MEAN.aggregate_shape(**kwargs), shape) - kwargs = dict(one=1, two=2) - self.assertTupleEqual(MEAN.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_MIN.py b/lib/iris/tests/unit/analysis/test_MIN.py deleted file mode 100644 
index f12790f0f1..0000000000 --- a/lib/iris/tests/unit/analysis/test_MIN.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.MIN` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.analysis import MIN -from iris.coords import DimCoord -from iris.cube import Cube - - -class Test_basics(tests.IrisTest): - def setUp(self): - data = np.array([1, 2, 3, 4, 5]) - coord = DimCoord([6, 7, 8, 9, 10], long_name="foo") - self.cube = Cube(data) - self.cube.add_dim_coord(coord, 0) - self.lazy_cube = Cube(as_lazy_data(data)) - self.lazy_cube.add_dim_coord(coord, 0) - - def test_name(self): - self.assertEqual(MIN.name(), "minimum") - - def test_collapse(self): - data = MIN.aggregate(self.cube.data, axis=0) - self.assertArrayEqual(data, [1]) - - def test_lazy(self): - lazy_data = MIN.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertTrue(is_lazy_data(lazy_data)) - - def test_lazy_collapse(self): - lazy_data = MIN.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertArrayEqual(lazy_data.compute(), [1]) - - -class Test_masked(tests.IrisTest): - def setUp(self): - self.cube = Cube(ma.masked_less([1, 2, 3, 4, 5], 3)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - - def test_ma(self): - data = MIN.aggregate(self.cube.data, axis=0) - self.assertArrayEqual(data, [3]) - - -class Test_lazy_masked(tests.IrisTest): - def setUp(self): - masked_data = ma.masked_less([1, 2, 3, 4, 5], 3) - self.cube = Cube(as_lazy_data(masked_data)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) 
- - def test_lazy_ma(self): - lazy_data = MIN.lazy_aggregate(self.cube.lazy_data(), axis=0) - self.assertTrue(is_lazy_data(lazy_data)) - self.assertArrayEqual(lazy_data.compute(), [3]) - - -class Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(MIN.aggregate_shape(**kwargs), shape) - kwargs = dict(wibble="wobble") - self.assertTupleEqual(MIN.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_Nearest.py b/lib/iris/tests/unit/analysis/test_Nearest.py deleted file mode 100644 index f3736d2cf3..0000000000 --- a/lib/iris/tests/unit/analysis/test_Nearest.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.analysis.Nearest`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.analysis import Nearest - - -def create_scheme(mode=None): - kwargs = {} - if mode is not None: - kwargs["extrapolation_mode"] = mode - return Nearest(**kwargs) - - -class Test___init__(tests.IrisTest): - def test_invalid(self): - with self.assertRaisesRegex(ValueError, "Extrapolation mode"): - Nearest("bogus") - - -class Test_extrapolation_mode(tests.IrisTest): - def check_mode(self, mode): - scheme = create_scheme(mode) - self.assertEqual(scheme.extrapolation_mode, mode) - - def test_default(self): - scheme = Nearest() - self.assertEqual(scheme.extrapolation_mode, "extrapolate") - - def test_extrapolate(self): - self.check_mode("extrapolate") - - def test_nan(self): - self.check_mode("nan") - - def test_error(self): - self.check_mode("error") - - def test_mask(self): - self.check_mode("mask") - - def test_nanmask(self): - self.check_mode("nanmask") - - -class Test_interpolator(tests.IrisTest): - def check_mode(self, mode=None): - scheme = create_scheme(mode) - - # Check that calling `scheme.interpolator(...)` returns an - # instance of RectilinearInterpolator which has been created - # using the correct arguments. 
- with mock.patch( - "iris.analysis.RectilinearInterpolator", - return_value=mock.sentinel.interpolator, - ) as ri: - interpolator = scheme.interpolator( - mock.sentinel.cube, mock.sentinel.coords - ) - if mode is None: - expected_mode = "extrapolate" - else: - expected_mode = mode - ri.assert_called_once_with( - mock.sentinel.cube, mock.sentinel.coords, "nearest", expected_mode - ) - self.assertIs(interpolator, mock.sentinel.interpolator) - - def test_default(self): - self.check_mode() - - def test_extrapolate(self): - self.check_mode("extrapolate") - - def test_nan(self): - self.check_mode("nan") - - def test_error(self): - self.check_mode("error") - - def test_mask(self): - self.check_mode("mask") - - def test_nanmask(self): - self.check_mode("nanmask") - - -class Test_regridder(tests.IrisTest): - def check_mode(self, mode=None): - scheme = create_scheme(mode) - - # Ensure that calling the regridder results in an instance of - # RectilinearRegridder being returned, which has been created with - # the expected arguments. 
- with mock.patch( - "iris.analysis.RectilinearRegridder", - return_value=mock.sentinel.regridder, - ) as rr: - regridder = scheme.regridder( - mock.sentinel.src_grid, mock.sentinel.tgt_grid - ) - - expected_mode = "extrapolate" if mode is None else mode - rr.assert_called_once_with( - mock.sentinel.src_grid, - mock.sentinel.tgt_grid, - "nearest", - expected_mode, - ) - self.assertIs(regridder, mock.sentinel.regridder) - - def test_default(self): - self.check_mode() - - def test_extrapolate(self): - self.check_mode("extrapolate") - - def test_nan(self): - self.check_mode("nan") - - def test_error(self): - self.check_mode("error") - - def test_mask(self): - self.check_mode("mask") - - def test_nanmask(self): - self.check_mode("nanmask") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_PERCENTILE.py b/lib/iris/tests/unit/analysis/test_PERCENTILE.py deleted file mode 100644 index 52648f6fb8..0000000000 --- a/lib/iris/tests/unit/analysis/test_PERCENTILE.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.PERCENTILE` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.analysis import PERCENTILE - - -class Test_aggregate(tests.IrisTest): - def test_missing_mandatory_kwarg(self): - emsg = "percentile aggregator requires .* keyword argument 'percent'" - with self.assertRaisesRegex(ValueError, emsg): - PERCENTILE.aggregate("dummy", axis=0) - - def test_1d_single(self): - data = np.arange(11) - actual = PERCENTILE.aggregate(data, axis=0, percent=50) - expected = 5 - self.assertTupleEqual(actual.shape, ()) - self.assertEqual(actual, expected) - - def test_masked_1d_single(self): - data = ma.arange(11) - data[3:7] = ma.masked - actual = PERCENTILE.aggregate(data, axis=0, percent=50) - expected = 7 - self.assertTupleEqual(actual.shape, ()) - self.assertEqual(actual, expected) - - def test_1d_multi(self): - data = np.arange(11) - percent = np.array([20, 50, 90]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) - expected = [2, 5, 9] - self.assertTupleEqual(actual.shape, percent.shape) - self.assertArrayEqual(actual, expected) - - def test_masked_1d_multi(self): - data = ma.arange(11) - data[3:9] = ma.masked - percent = np.array([25, 50, 75]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) - expected = [1, 2, 9] - self.assertTupleEqual(actual.shape, percent.shape) - self.assertArrayEqual(actual, expected) - - def test_2d_single(self): - shape = (2, 11) - data = np.arange(np.prod(shape)).reshape(shape) - actual = PERCENTILE.aggregate(data, axis=0, percent=50) - self.assertTupleEqual(actual.shape, shape[-1:]) - expected = np.arange(shape[-1]) + 5.5 - self.assertArrayEqual(actual, expected) - - def test_masked_2d_single(self): - shape = (2, 11) - data = ma.arange(np.prod(shape)).reshape(shape) - data[0, ::2] = ma.masked - data[1, 1::2] = ma.masked - actual = PERCENTILE.aggregate(data, axis=0, percent=50) - self.assertTupleEqual(actual.shape, shape[-1:]) - expected = np.empty(shape[-1:]) - expected[1::2] = data[0, 
1::2] - expected[::2] = data[1, ::2] - self.assertArrayEqual(actual, expected) - - def test_2d_multi(self): - shape = (2, 10) - data = np.arange(np.prod(shape)).reshape(shape) - percent = np.array([10, 50, 90, 100]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) - self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) - expected = np.tile(np.arange(shape[-1]), percent.size) - expected = expected.reshape(percent.size, shape[-1]).T + 1 - expected = expected + (percent / 10 - 1) - self.assertArrayAlmostEqual(actual, expected) - - def test_masked_2d_multi(self): - shape = (3, 10) - data = ma.arange(np.prod(shape)).reshape(shape) - data[1] = ma.masked - percent = np.array([10, 50, 70, 80]) - actual = PERCENTILE.aggregate(data, axis=0, percent=percent) - self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) - expected = np.tile(np.arange(shape[-1]), percent.size) - expected = expected.reshape(percent.size, shape[-1]).T - expected = expected + (percent / 10 * 2) - self.assertArrayAlmostEqual(actual, expected) - - -class Test_name(tests.IrisTest): - def test(self): - self.assertEqual(PERCENTILE.name(), "percentile") - - -class Test_aggregate_shape(tests.IrisTest): - def test_missing_mandatory_kwarg(self): - emsg = "percentile aggregator requires .* keyword argument 'percent'" - with self.assertRaisesRegex(ValueError, emsg): - PERCENTILE.aggregate_shape() - with self.assertRaisesRegex(ValueError, emsg): - kwargs = dict() - PERCENTILE.aggregate_shape(**kwargs) - with self.assertRaisesRegex(ValueError, emsg): - kwargs = dict(point=10) - PERCENTILE.aggregate_shape(**kwargs) - - def test_mandatory_kwarg_no_shape(self): - kwargs = dict(percent=50) - self.assertTupleEqual(PERCENTILE.aggregate_shape(**kwargs), ()) - kwargs = dict(percent=[50]) - self.assertTupleEqual(PERCENTILE.aggregate_shape(**kwargs), ()) - - def test_mandatory_kwarg_shape(self): - kwargs = dict(percent=(10, 20)) - self.assertTupleEqual(PERCENTILE.aggregate_shape(**kwargs), 
(2,)) - kwargs = dict(percent=list(range(13))) - self.assertTupleEqual(PERCENTILE.aggregate_shape(**kwargs), (13,)) - - -class Test_cell_method(tests.IrisTest): - def test(self): - self.assertIsNone(PERCENTILE.cell_method) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_PROPORTION.py b/lib/iris/tests/unit/analysis/test_PROPORTION.py deleted file mode 100644 index b7118241af..0000000000 --- a/lib/iris/tests/unit/analysis/test_PROPORTION.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.PROPORTION` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy.ma as ma - -from iris.analysis import PROPORTION -from iris.coords import DimCoord -import iris.cube - - -class Test_units_func(tests.IrisTest): - def test(self): - self.assertIsNotNone(PROPORTION.units_func) - new_units = PROPORTION.units_func(None) - self.assertEqual(new_units, 1) - - -class Test_masked(tests.IrisTest): - def setUp(self): - self.cube = iris.cube.Cube(ma.masked_equal([1, 2, 3, 4, 5], 3)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - self.func = lambda x: x >= 3 - - def test_ma(self): - cube = self.cube.collapsed("foo", PROPORTION, function=self.func) - self.assertArrayEqual(cube.data, [0.5]) - - def test_false_mask(self): - # Test corner case where mask is returned as boolean value rather - # than boolean array when the mask is unspecified on construction. 
- masked_cube = iris.cube.Cube(ma.array([1, 2, 3, 4, 5])) - masked_cube.add_dim_coord( - DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0 - ) - cube = masked_cube.collapsed("foo", PROPORTION, function=self.func) - self.assertArrayEqual(cube.data, ma.array([0.6])) - - -class Test_name(tests.IrisTest): - def test(self): - self.assertEqual(PROPORTION.name(), "proportion") - - -class Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(PROPORTION.aggregate_shape(**kwargs), shape) - kwargs = dict(captain="caveman", penelope="pitstop") - self.assertTupleEqual(PROPORTION.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py b/lib/iris/tests/unit/analysis/test_PercentileAggregator.py deleted file mode 100644 index a8e6ed28ed..0000000000 --- a/lib/iris/tests/unit/analysis/test_PercentileAggregator.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.analysis import PercentileAggregator, _percentile -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube - - -class Test(tests.IrisTest): - def test_init(self): - name = "percentile" - call_func = _percentile - units_func = mock.sentinel.units_func - lazy_func = mock.sentinel.lazy_func - aggregator = PercentileAggregator( - units_func=units_func, lazy_func=lazy_func - ) - self.assertEqual(aggregator.name(), name) - self.assertIs(aggregator.call_func, call_func) - self.assertIs(aggregator.units_func, units_func) - self.assertIs(aggregator.lazy_func, lazy_func) - self.assertIsNone(aggregator.cell_method) - - -class Test_post_process(tests.IrisTest): - def setUp(self): - shape = (2, 5) - data = np.arange(np.prod(shape)) - - self.coord_simple = DimCoord(data, "time") - self.cube_simple = Cube(data) - self.cube_simple.add_dim_coord(self.coord_simple, 0) - - self.coord_multi_0 = DimCoord(np.arange(shape[0]), "time") - self.coord_multi_1 = DimCoord(np.arange(shape[1]), "height") - self.cube_multi = Cube(data.reshape(shape)) - self.cube_multi.add_dim_coord(self.coord_multi_0, 0) - self.cube_multi.add_dim_coord(self.coord_multi_1, 1) - - def test_missing_mandatory_kwarg(self): - aggregator = PercentileAggregator() - emsg = "percentile aggregator requires .* keyword argument 'percent'" - with self.assertRaisesRegex(ValueError, emsg): - aggregator.aggregate("dummy", axis=0) - - def test_simple_single_point(self): - aggregator = PercentileAggregator() - percent = 50 - kwargs = dict(percent=percent) - data = np.empty(self.cube_simple.shape) - coords = [self.coord_simple] - actual = aggregator.post_process( - self.cube_simple, data, coords, **kwargs - ) - self.assertEqual(actual.shape, self.cube_simple.shape) - self.assertIs(actual.data, data) - name = "percentile_over_time" - coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name, 
units="percent") - self.assertEqual(coord, expected) - - def test_simple_multiple_points(self): - aggregator = PercentileAggregator() - percent = np.array([10, 20, 50, 90]) - kwargs = dict(percent=percent) - shape = self.cube_simple.shape + percent.shape - data = np.empty(shape) - coords = [self.coord_simple] - actual = aggregator.post_process( - self.cube_simple, data, coords, **kwargs - ) - self.assertEqual(actual.shape, percent.shape + self.cube_simple.shape) - expected = np.rollaxis(data, -1) - self.assertArrayEqual(actual.data, expected) - name = "percentile_over_time" - coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name, units="percent") - self.assertEqual(coord, expected) - - def test_multi_single_point(self): - aggregator = PercentileAggregator() - percent = 70 - kwargs = dict(percent=percent) - data = np.empty(self.cube_multi.shape) - coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) - self.assertEqual(actual.shape, self.cube_multi.shape) - self.assertIs(actual.data, data) - name = "percentile_over_time" - coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name, units="percent") - self.assertEqual(coord, expected) - - def test_multi_multiple_points(self): - aggregator = PercentileAggregator() - percent = np.array([17, 29, 81]) - kwargs = dict(percent=percent) - shape = self.cube_multi.shape + percent.shape - data = np.empty(shape) - coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) - self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape) - expected = np.rollaxis(data, -1) - self.assertArrayEqual(actual.data, expected) - name = "percentile_over_time" - coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name, units="percent") - self.assertEqual(coord, expected) - - -if __name__ == "__main__": - tests.main() diff --git 
a/lib/iris/tests/unit/analysis/test_PointInCell.py b/lib/iris/tests/unit/analysis/test_PointInCell.py deleted file mode 100644 index 2570465245..0000000000 --- a/lib/iris/tests/unit/analysis/test_PointInCell.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.analysis.PointInCell`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.analysis import PointInCell - - -class Test_regridder(tests.IrisTest): - def test(self): - point_in_cell = PointInCell(mock.sentinel.weights) - - with mock.patch( - "iris.analysis.CurvilinearRegridder", - return_value=mock.sentinel.regridder, - ) as ecr: - regridder = point_in_cell.regridder( - mock.sentinel.src, mock.sentinel.target - ) - - ecr.assert_called_once_with( - mock.sentinel.src, mock.sentinel.target, mock.sentinel.weights - ) - self.assertIs(regridder, mock.sentinel.regridder) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_RMS.py b/lib/iris/tests/unit/analysis/test_RMS.py deleted file mode 100644 index 141b3e262b..0000000000 --- a/lib/iris/tests/unit/analysis/test_RMS.py +++ /dev/null @@ -1,187 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.RMS` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data -from iris.analysis import RMS - - -class Test_aggregate(tests.IrisTest): - def test_1d(self): - # 1-dimensional input - data = np.array([5, 2, 6, 4], dtype=np.float64) - rms = RMS.aggregate(data, 0) - expected_rms = 4.5 - self.assertAlmostEqual(rms, expected_rms) - - def test_2d(self): - # 2-dimensional input - data = np.array([[5, 2, 6, 4], [12, 4, 10, 8]], dtype=np.float64) - expected_rms = np.array([4.5, 9.0], dtype=np.float64) - rms = RMS.aggregate(data, 1) - self.assertArrayAlmostEqual(rms, expected_rms) - - def test_1d_weighted(self): - # 1-dimensional input with weights - data = np.array([4, 7, 10, 8], dtype=np.float64) - weights = np.array([1, 4, 3, 2], dtype=np.float64) - expected_rms = 8.0 - rms = RMS.aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) - - def test_2d_weighted(self): - # 2-dimensional input with weights - data = np.array([[4, 7, 10, 8], [14, 16, 20, 8]], dtype=np.float64) - weights = np.array([[1, 4, 3, 2], [2, 1, 1.5, 0.5]], dtype=np.float64) - expected_rms = np.array([8.0, 16.0], dtype=np.float64) - rms = RMS.aggregate(data, 1, weights=weights) - self.assertArrayAlmostEqual(rms, expected_rms) - - def test_unit_weighted(self): - # unit weights should be the same as no weights - data = np.array([5, 2, 6, 4], dtype=np.float64) - weights = np.ones_like(data) - rms = RMS.aggregate(data, 0, weights=weights) - expected_rms = 4.5 - self.assertAlmostEqual(rms, expected_rms) - - def test_masked(self): - # masked entries should be completely ignored - data = ma.array( - [5, 10, 2, 11, 6, 4], - mask=[False, True, False, True, False, False], - dtype=np.float64, - ) - expected_rms = 4.5 - rms = RMS.aggregate(data, 0) - self.assertAlmostEqual(rms, expected_rms) - - def test_masked_weighted(self): - # weights should work properly with masked arrays - data = ma.array( - [4, 7, 18, 10, 11, 8], - 
mask=[False, False, True, False, True, False], - dtype=np.float64, - ) - weights = np.array([1, 4, 5, 3, 8, 2], dtype=np.float64) - expected_rms = 8.0 - rms = RMS.aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) - - -class Test_lazy_aggregate(tests.IrisTest): - def test_1d(self): - # 1-dimensional input. - data = as_lazy_data(np.array([5, 2, 6, 4], dtype=np.float64)) - rms = RMS.lazy_aggregate(data, 0) - expected_rms = 4.5 - self.assertAlmostEqual(rms, expected_rms) - - def test_2d(self): - # 2-dimensional input. - data = as_lazy_data( - np.array([[5, 2, 6, 4], [12, 4, 10, 8]], dtype=np.float64) - ) - expected_rms = np.array([4.5, 9.0], dtype=np.float64) - rms = RMS.lazy_aggregate(data, 1) - self.assertArrayAlmostEqual(rms, expected_rms) - - def test_1d_weighted(self): - # 1-dimensional input with weights. - data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64)) - weights = np.array([1, 4, 3, 2], dtype=np.float64) - expected_rms = 8.0 - # https://github.com/dask/dask/issues/3846. - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) - - def test_1d_lazy_weighted(self): - # 1-dimensional input with lazy weights. - data = as_lazy_data(np.array([4, 7, 10, 8], dtype=np.float64)) - weights = as_lazy_data(np.array([1, 4, 3, 2], dtype=np.float64)) - expected_rms = 8.0 - # https://github.com/dask/dask/issues/3846. - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) - - def test_2d_weighted(self): - # 2-dimensional input with weights. - data = as_lazy_data( - np.array([[4, 7, 10, 8], [14, 16, 20, 8]], dtype=np.float64) - ) - weights = np.array([[1, 4, 3, 2], [2, 1, 1.5, 0.5]], dtype=np.float64) - expected_rms = np.array([8.0, 16.0], dtype=np.float64) - # https://github.com/dask/dask/issues/3846. 
- with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 1, weights=weights) - self.assertArrayAlmostEqual(rms, expected_rms) - - def test_unit_weighted(self): - # Unit weights should be the same as no weights. - data = as_lazy_data(np.array([5, 2, 6, 4], dtype=np.float64)) - weights = np.ones_like(data) - expected_rms = 4.5 - # https://github.com/dask/dask/issues/3846. - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) - - def test_masked(self): - # Masked entries should be completely ignored. - data = as_lazy_data( - ma.array( - [5, 10, 2, 11, 6, 4], - mask=[False, True, False, True, False, False], - dtype=np.float64, - ) - ) - expected_rms = 4.5 - rms = RMS.lazy_aggregate(data, 0) - self.assertAlmostEqual(rms, expected_rms) - - def test_masked_weighted(self): - # Weights should work properly with masked arrays, but currently don't - # (see https://github.com/dask/dask/issues/3846). - # For now, masked weights are simply not supported. 
- data = as_lazy_data( - ma.array( - [4, 7, 18, 10, 11, 8], - mask=[False, False, True, False, True, False], - dtype=np.float64, - ) - ) - weights = np.array([1, 4, 5, 3, 8, 2]) - expected_rms = 8.0 - with self.assertRaisesRegex(TypeError, "unexpected keyword argument"): - rms = RMS.lazy_aggregate(data, 0, weights=weights) - self.assertAlmostEqual(rms, expected_rms) - - -class Test_name(tests.IrisTest): - def test(self): - self.assertEqual(RMS.name(), "root_mean_square") - - -class Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(RMS.aggregate_shape(**kwargs), shape) - kwargs = dict(tom="jerry", calvin="hobbes") - self.assertTupleEqual(RMS.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_STD_DEV.py b/lib/iris/tests/unit/analysis/test_STD_DEV.py deleted file mode 100644 index 978bdb4ddf..0000000000 --- a/lib/iris/tests/unit/analysis/test_STD_DEV.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.STD_DEV` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data -from iris.analysis import STD_DEV -from iris.coords import DimCoord -from iris.cube import Cube - - -class Test_basics(tests.IrisTest): - def setUp(self): - data = np.array([1, 2, 3, 4, 5]) - coord = DimCoord([6, 7, 8, 9, 10], long_name="foo") - self.cube = Cube(data) - self.cube.add_dim_coord(coord, 0) - self.lazy_cube = Cube(as_lazy_data(data)) - self.lazy_cube.add_dim_coord(coord, 0) - - def test_name(self): - self.assertEqual(STD_DEV.name(), "standard_deviation") - - def test_collapse(self): - data = STD_DEV.aggregate(self.cube.data, axis=0) - self.assertArrayAlmostEqual(data, [1.58113883]) - - def test_lazy(self): - lazy_data = STD_DEV.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertTrue(is_lazy_data(lazy_data)) - - def test_lazy_collapse(self): - lazy_data = STD_DEV.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertArrayAlmostEqual(lazy_data.compute(), [1.58113883]) - - -class Test_lazy_aggregate(tests.IrisTest): - def test_mdtol(self): - na = -999.888 - array = np.ma.masked_equal( - [[1.0, 2.0, 1.0, 2.0], [1.0, 2.0, 3.0, na], [1.0, 2.0, na, na]], na - ) - array = as_lazy_data(array) - var = STD_DEV.lazy_aggregate(array, axis=1, mdtol=0.3) - masked_result = as_concrete_data(var) - masked_expected = np.ma.masked_array( - [0.57735, 1.0, 0.707107], mask=[0, 0, 1] - ) - self.assertMaskedArrayAlmostEqual(masked_result, masked_expected) - - def test_ddof_one(self): - array = as_lazy_data(np.arange(8)) - var = STD_DEV.lazy_aggregate(array, axis=0, ddof=1) - result = as_concrete_data(var) - self.assertArrayAlmostEqual(result, np.array(2.449489)) - - def test_ddof_zero(self): - array = as_lazy_data(np.arange(8)) - var = STD_DEV.lazy_aggregate(array, axis=0, ddof=0) - result = as_concrete_data(var) - self.assertArrayAlmostEqual(result, np.array(2.291287)) - - -class 
Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(STD_DEV.aggregate_shape(**kwargs), shape) - kwargs = dict(forfar=5, fife=4) - self.assertTupleEqual(STD_DEV.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_SUM.py b/lib/iris/tests/unit/analysis/test_SUM.py deleted file mode 100644 index dd2dcf9f9c..0000000000 --- a/lib/iris/tests/unit/analysis/test_SUM.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.SUM` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.analysis import SUM -from iris.coords import DimCoord -from iris.cube import Cube - - -class Test_basics(tests.IrisTest): - def setUp(self): - data = np.array([1, 2, 3, 4, 5]) - coord = DimCoord([6, 7, 8, 9, 10], long_name="foo") - self.cube = Cube(data) - self.cube.add_dim_coord(coord, 0) - self.lazy_cube = Cube(as_lazy_data(data)) - self.lazy_cube.add_dim_coord(coord, 0) - - def test_name(self): - self.assertEqual(SUM.name(), "sum") - - def test_collapse(self): - data = SUM.aggregate(self.cube.data, axis=0) - self.assertArrayEqual(data, [15]) - - def test_lazy(self): - lazy_data = SUM.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertTrue(is_lazy_data(lazy_data)) - - def test_lazy_collapse(self): - lazy_data = SUM.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0) - self.assertArrayEqual(lazy_data.compute(), [15]) - - -class Test_masked(tests.IrisTest): - def setUp(self): - self.cube = Cube(ma.masked_equal([1, 2, 3, 4, 
5], 3)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - - def test_ma(self): - data = SUM.aggregate(self.cube.data, axis=0) - self.assertArrayEqual(data, [12]) - - -class Test_lazy_masked(tests.IrisTest): - def setUp(self): - masked_data = ma.masked_equal([1, 2, 3, 4, 5], 3) - self.cube = Cube(as_lazy_data(masked_data)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - - def test_lazy_ma(self): - lazy_data = SUM.lazy_aggregate(self.cube.lazy_data(), axis=0) - self.assertTrue(is_lazy_data(lazy_data)) - self.assertArrayEqual(lazy_data.compute(), [12]) - - -class Test_weights_and_returned(tests.IrisTest): - def setUp(self): - data_2d = np.arange(1, 11).reshape(2, 5) - coord_0 = DimCoord([11, 12], long_name="bar") - coord_1 = DimCoord([6, 7, 8, 9, 10], long_name="foo") - self.cube_2d = Cube(data_2d) - self.cube_2d.add_dim_coord(coord_0, 0) - self.cube_2d.add_dim_coord(coord_1, 1) - self.weights = np.array([2, 1, 1, 1, 1] * 2).reshape(2, 5) - - def test_weights(self): - data = SUM.aggregate(self.cube_2d.data, axis=0, weights=self.weights) - self.assertArrayEqual(data, [14, 9, 11, 13, 15]) - - def test_returned(self): - data, weights = SUM.aggregate(self.cube_2d.data, axis=0, returned=True) - self.assertArrayEqual(data, [7, 9, 11, 13, 15]) - self.assertArrayEqual(weights, [2, 2, 2, 2, 2]) - - def test_weights_and_returned(self): - data, weights = SUM.aggregate( - self.cube_2d.data, axis=0, weights=self.weights, returned=True - ) - self.assertArrayEqual(data, [14, 9, 11, 13, 15]) - self.assertArrayEqual(weights, [4, 2, 2, 2, 2]) - - -class Test_lazy_weights_and_returned(tests.IrisTest): - def setUp(self): - data_2d = np.arange(1, 11).reshape(2, 5) - coord_0 = DimCoord([11, 12], long_name="bar") - coord_1 = DimCoord([6, 7, 8, 9, 10], long_name="foo") - self.cube_2d = Cube(as_lazy_data(data_2d)) - self.cube_2d.add_dim_coord(coord_0, 0) - self.cube_2d.add_dim_coord(coord_1, 1) - self.weights = np.array([2, 1, 1, 1, 1] * 
2).reshape(2, 5) - - def test_weights(self): - lazy_data = SUM.lazy_aggregate( - self.cube_2d.lazy_data(), axis=0, weights=self.weights - ) - self.assertTrue(is_lazy_data(lazy_data)) - self.assertArrayEqual(lazy_data.compute(), [14, 9, 11, 13, 15]) - - def test_returned(self): - lazy_data, weights = SUM.lazy_aggregate( - self.cube_2d.lazy_data(), axis=0, returned=True - ) - self.assertTrue(is_lazy_data(lazy_data)) - self.assertArrayEqual(lazy_data.compute(), [7, 9, 11, 13, 15]) - self.assertArrayEqual(weights, [2, 2, 2, 2, 2]) - - def test_weights_and_returned(self): - lazy_data, weights = SUM.lazy_aggregate( - self.cube_2d.lazy_data(), - axis=0, - weights=self.weights, - returned=True, - ) - self.assertTrue(is_lazy_data(lazy_data)) - self.assertArrayEqual(lazy_data.compute(), [14, 9, 11, 13, 15]) - self.assertArrayEqual(weights, [4, 2, 2, 2, 2]) - - -class Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(SUM.aggregate_shape(**kwargs), shape) - kwargs = dict(wibble="wobble") - self.assertTupleEqual(SUM.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_VARIANCE.py b/lib/iris/tests/unit/analysis/test_VARIANCE.py deleted file mode 100644 index 857bc7e1d2..0000000000 --- a/lib/iris/tests/unit/analysis/test_VARIANCE.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.VARIANCE` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_concrete_data, as_lazy_data -from iris.analysis import VARIANCE -from iris.coords import DimCoord -import iris.cube - - -class Test_units_func(tests.IrisTest): - def test(self): - self.assertIsNotNone(VARIANCE.units_func) - mul = mock.Mock(return_value=mock.sentinel.new_unit) - units = mock.Mock(__mul__=mul) - new_units = VARIANCE.units_func(units) - # Make sure the VARIANCE units_func tries to square the units. - mul.assert_called_once_with(units) - self.assertEqual(new_units, mock.sentinel.new_unit) - - -class Test_masked(tests.IrisTest): - def setUp(self): - self.cube = iris.cube.Cube(ma.masked_equal([1, 2, 3, 4, 5], 3)) - self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name="foo"), 0) - - def test_ma_ddof0(self): - cube = self.cube.collapsed("foo", VARIANCE, ddof=0) - expected = 10 / 4.0 - self.assertArrayEqual(np.var(self.cube.data, ddof=0), expected) - self.assertArrayAlmostEqual(cube.data, expected) - - def test_ma_ddof1(self): - cube = self.cube.collapsed("foo", VARIANCE, ddof=1) - expected = 10 / 3.0 - self.assertArrayEqual(np.var(self.cube.data, ddof=1), expected) - self.assertArrayEqual(cube.data, expected) - - # test that the default ddof is 1 - default_cube = self.cube.collapsed("foo", VARIANCE) - self.assertArrayEqual(cube.data, default_cube.data) - - -class Test_lazy_aggregate(tests.IrisTest): - def test_ddof_one(self): - array = as_lazy_data(np.arange(8)) - var = VARIANCE.lazy_aggregate(array, axis=0, ddof=1) - result = as_concrete_data(var) - self.assertArrayAlmostEqual(result, np.array(6.0)) - - def test_ddof_zero(self): - array = as_lazy_data(np.arange(8)) - var = VARIANCE.lazy_aggregate(array, axis=0, ddof=0) - result = as_concrete_data(var) - self.assertArrayAlmostEqual(result, np.array(5.25)) - - -class Test_name(tests.IrisTest): - def test(self): - self.assertEqual(VARIANCE.name(), 
"variance") - - -class Test_aggregate_shape(tests.IrisTest): - def test(self): - shape = () - kwargs = dict() - self.assertTupleEqual(VARIANCE.aggregate_shape(**kwargs), shape) - kwargs = dict(bat="man", wonder="woman") - self.assertTupleEqual(VARIANCE.aggregate_shape(**kwargs), shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py b/lib/iris/tests/unit/analysis/test_WPERCENTILE.py deleted file mode 100644 index a59bf4ce9c..0000000000 --- a/lib/iris/tests/unit/analysis/test_WPERCENTILE.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :data:`iris.analysis.PERCENTILE` aggregator.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.analysis import WPERCENTILE - - -class Test_aggregate(tests.IrisTest): - def test_missing_mandatory_kwargs(self): - emsg = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'percent'" - ) - with self.assertRaisesRegex(ValueError, emsg): - WPERCENTILE.aggregate("dummy", axis=0, weights=None) - emsg = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'weights'" - ) - with self.assertRaisesRegex(ValueError, emsg): - WPERCENTILE.aggregate("dummy", axis=0, percent=50) - - def test_wrong_weights_shape(self): - data = np.arange(11) - weights = np.ones(10) - emsg = "_weighted_percentile: weights wrong shape." 
- with self.assertRaisesRegex(ValueError, emsg): - WPERCENTILE.aggregate(data, axis=0, percent=50, weights=weights) - - def test_1d_single(self): - data = np.arange(11) - weights = np.ones(data.shape) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) - expected = 5 - self.assertTupleEqual(actual.shape, ()) - self.assertEqual(actual, expected) - - def test_1d_single_unequal(self): - data = np.arange(12) - weights = np.ones(data.shape) - weights[0:3] = 3 - actual, weight_total = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights, returned=True - ) - expected = 2.75 - self.assertTupleEqual(actual.shape, ()) - self.assertEqual(actual, expected) - self.assertEqual(weight_total, 18) - - def test_masked_1d_single(self): - data = ma.arange(11) - weights = np.ones(data.shape) - data[3:7] = ma.masked - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) - expected = 7 - self.assertTupleEqual(actual.shape, ()) - self.assertEqual(actual, expected) - - def test_1d_multi(self): - data = np.arange(11) - weights = np.ones(data.shape) - percent = np.array([20, 50, 90]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) - expected = [1.7, 5, 9.4] - self.assertTupleEqual(actual.shape, percent.shape) - self.assertArrayAlmostEqual(actual, expected) - - def test_1d_multi_unequal(self): - data = np.arange(13) - weights = np.ones(data.shape) - weights[1::2] = 3 - percent = np.array([20, 50, 96]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) - expected = [2.25, 6, 11.75] - self.assertTupleEqual(actual.shape, percent.shape) - self.assertArrayAlmostEqual(actual, expected) - - def test_masked_1d_multi(self): - data = ma.arange(11) - weights = np.ones(data.shape) - data[3:9] = ma.masked - percent = np.array([25, 50, 75]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) - expected = [0.75, 2, 9.25] - 
self.assertTupleEqual(actual.shape, percent.shape) - self.assertArrayAlmostEqual(actual, expected) - - def test_2d_single(self): - shape = (2, 11) - data = np.arange(np.prod(shape)).reshape(shape) - weights = np.ones(shape) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) - self.assertTupleEqual(actual.shape, shape[-1:]) - expected = np.arange(shape[-1]) + 5.5 - self.assertArrayEqual(actual, expected) - - def test_masked_2d_single(self): - shape = (2, 11) - data = ma.arange(np.prod(shape)).reshape(shape) - data[0, ::2] = ma.masked - data[1, 1::2] = ma.masked - weights = np.ones(shape) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=50, weights=weights - ) - self.assertTupleEqual(actual.shape, shape[-1:]) - expected = np.empty(shape[-1:]) - expected[1::2] = data[0, 1::2] - expected[::2] = data[1, ::2] - self.assertArrayEqual(actual, expected) - - def test_2d_multi(self): - shape = (2, 10) - data = np.arange(np.prod(shape)).reshape(shape) - weights = np.ones(shape) - percent = np.array([10, 50, 70, 100]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) - self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) - expected = np.tile(np.arange(shape[-1]), percent.size).astype("f8") - expected = expected.reshape(percent.size, shape[-1]).T - expected[:, 1:-1] += (percent[1:-1] - 25) * 0.2 - expected[:, -1] += 10.0 - self.assertArrayAlmostEqual(actual, expected) - - def test_masked_2d_multi(self): - shape = (3, 10) - data = ma.arange(np.prod(shape)).reshape(shape) - weights = np.ones(shape) - data[1] = ma.masked - percent = np.array([10, 50, 70, 80]) - actual = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights - ) - self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) - expected = np.tile(np.arange(shape[-1]), percent.size).astype("f8") - expected = expected.reshape(percent.size, shape[-1]).T - expected[:, 1:-1] += (percent[1:-1] - 25) * 0.4 - expected[:, 
-1] += 20.0 - self.assertArrayAlmostEqual(actual, expected) - - def test_masked_2d_multi_unequal(self): - shape = (3, 10) - data = ma.arange(np.prod(shape)).reshape(shape) - weights = np.ones(shape) - weights[0] = 3 - data[1] = ma.masked - percent = np.array([30, 50, 75, 80]) - actual, weight_total = WPERCENTILE.aggregate( - data, axis=0, percent=percent, weights=weights, returned=True - ) - self.assertTupleEqual(actual.shape, (shape[-1], percent.size)) - expected = np.tile(np.arange(shape[-1]), percent.size) - expected = expected.reshape(percent.size, shape[-1]).T - expected[:, 1:] = 2.0 * ( - (0.875 - percent[1:] / 100.0) * data[0, np.newaxis].T - + (percent[1:] / 100.0 - 0.375) * data[-1, np.newaxis].T - ) - self.assertArrayAlmostEqual(actual, expected) - self.assertTupleEqual(weight_total.shape, (shape[-1],)) - self.assertArrayEqual(weight_total, np.repeat(4, shape[-1])) - - -class Test_name(tests.IrisTest): - def test(self): - self.assertEqual(WPERCENTILE.name(), "weighted_percentile") - - -class Test_aggregate_shape(tests.IrisTest): - def test_missing_mandatory_kwarg(self): - emsg_pc = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'percent'" - ) - emsg_wt = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'weights'" - ) - with self.assertRaisesRegex(ValueError, emsg_pc): - WPERCENTILE.aggregate_shape(weights=None) - with self.assertRaisesRegex(ValueError, emsg_pc): - kwargs = dict(weights=None) - WPERCENTILE.aggregate_shape(**kwargs) - with self.assertRaisesRegex(ValueError, emsg_pc): - kwargs = dict(point=10) - WPERCENTILE.aggregate_shape(**kwargs) - with self.assertRaisesRegex(ValueError, emsg_wt): - WPERCENTILE.aggregate_shape(percent=50) - with self.assertRaisesRegex(ValueError, emsg_wt): - kwargs = dict(percent=50) - WPERCENTILE.aggregate_shape(**kwargs) - with self.assertRaisesRegex(ValueError, emsg_wt): - kwargs = dict(percent=50, weight=None) - WPERCENTILE.aggregate_shape(**kwargs) - - def 
test_mandatory_kwarg_no_shape(self): - kwargs = dict(percent=50, weights=None) - self.assertTupleEqual(WPERCENTILE.aggregate_shape(**kwargs), ()) - kwargs = dict(percent=[50], weights=None) - self.assertTupleEqual(WPERCENTILE.aggregate_shape(**kwargs), ()) - - def test_mandatory_kwarg_shape(self): - kwargs = dict(percent=(10, 20), weights=None) - self.assertTupleEqual(WPERCENTILE.aggregate_shape(**kwargs), (2,)) - kwargs = dict(percent=range(13), weights=None) - self.assertTupleEqual(WPERCENTILE.aggregate_shape(**kwargs), (13,)) - - -class Test_cell_method(tests.IrisTest): - def test(self): - self.assertIsNone(WPERCENTILE.cell_method) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py b/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py deleted file mode 100644 index 0cd808d1c7..0000000000 --- a/lib/iris/tests/unit/analysis/test_WeightedPercentileAggregator.py +++ /dev/null @@ -1,149 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.analysis.PercentileAggregator` class instance. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.analysis import WeightedPercentileAggregator, _weighted_percentile -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube - - -class Test(tests.IrisTest): - def test_init(self): - name = "weighted_percentile" - call_func = _weighted_percentile - units_func = mock.sentinel.units_func - lazy_func = mock.sentinel.lazy_func - aggregator = WeightedPercentileAggregator( - units_func=units_func, lazy_func=lazy_func - ) - self.assertEqual(aggregator.name(), name) - self.assertIs(aggregator.call_func, call_func) - self.assertIs(aggregator.units_func, units_func) - self.assertIs(aggregator.lazy_func, lazy_func) - self.assertIsNone(aggregator.cell_method) - - -class Test_post_process(tests.IrisTest): - def setUp(self): - shape = (2, 5) - data = np.arange(np.prod(shape)) - - self.coord_simple = DimCoord(data, "time") - self.cube_simple = Cube(data) - self.cube_simple.add_dim_coord(self.coord_simple, 0) - self.weights_simple = np.ones_like(data, dtype=float) - - self.coord_multi_0 = DimCoord(np.arange(shape[0]), "time") - self.coord_multi_1 = DimCoord(np.arange(shape[1]), "height") - self.cube_multi = Cube(data.reshape(shape)) - self.cube_multi.add_dim_coord(self.coord_multi_0, 0) - self.cube_multi.add_dim_coord(self.coord_multi_1, 1) - self.weights_multi = np.ones(shape, dtype=float) - - def test_missing_mandatory_kwarg(self): - aggregator = WeightedPercentileAggregator() - emsg = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'percent'" - ) - with self.assertRaisesRegex(ValueError, emsg): - aggregator.aggregate("dummy", axis=0, weights=None) - emsg = ( - "weighted_percentile aggregator requires " - ".* keyword argument 'weights'" - ) - with self.assertRaisesRegex(ValueError, emsg): - aggregator.aggregate("dummy", axis=0, percent=50) - - def test_simple_single_point(self): - aggregator = WeightedPercentileAggregator() - percent = 50 - 
kwargs = dict(percent=percent, weights=self.weights_simple) - data = np.empty(self.cube_simple.shape) - coords = [self.coord_simple] - actual = aggregator.post_process( - self.cube_simple, data, coords, **kwargs - ) - self.assertEqual(actual.shape, self.cube_simple.shape) - self.assertIs(actual.data, data) - name = "weighted_percentile_over_time" - coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name, units="percent") - self.assertEqual(coord, expected) - - def test_simple_multiple_points(self): - aggregator = WeightedPercentileAggregator() - percent = np.array([10, 20, 50, 90]) - kwargs = dict( - percent=percent, weights=self.weights_simple, returned=True - ) - shape = self.cube_simple.shape + percent.shape - data = np.empty(shape) - total_weights = 1.0 - coords = [self.coord_simple] - actual = aggregator.post_process( - self.cube_simple, (data, total_weights), coords, **kwargs - ) - self.assertEqual(len(actual), 2) - self.assertEqual( - actual[0].shape, percent.shape + self.cube_simple.shape - ) - expected = np.rollaxis(data, -1) - self.assertArrayEqual(actual[0].data, expected) - self.assertIs(actual[1], total_weights) - name = "weighted_percentile_over_time" - coord = actual[0].coord(name) - expected = AuxCoord(percent, long_name=name, units="percent") - self.assertEqual(coord, expected) - - def test_multi_single_point(self): - aggregator = WeightedPercentileAggregator() - percent = 70 - kwargs = dict(percent=percent, weights=self.weights_multi) - data = np.empty(self.cube_multi.shape) - coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) - self.assertEqual(actual.shape, self.cube_multi.shape) - self.assertIs(actual.data, data) - name = "weighted_percentile_over_time" - coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name, units="percent") - self.assertEqual(coord, expected) - - def test_multi_multiple_points(self): - aggregator = WeightedPercentileAggregator() - 
percent = np.array([17, 29, 81]) - kwargs = dict(percent=percent, weights=self.weights_multi) - shape = self.cube_multi.shape + percent.shape - data = np.empty(shape) - coords = [self.coord_multi_0] - actual = aggregator.post_process( - self.cube_multi, data, coords, **kwargs - ) - self.assertEqual(actual.shape, percent.shape + self.cube_multi.shape) - expected = np.rollaxis(data, -1) - self.assertArrayEqual(actual.data, expected) - name = "weighted_percentile_over_time" - coord = actual.coord(name) - expected = AuxCoord(percent, long_name=name, units="percent") - self.assertEqual(coord, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/trajectory/__init__.py b/lib/iris/tests/unit/analysis/trajectory/__init__.py deleted file mode 100644 index 55d3ebd8bc..0000000000 --- a/lib/iris/tests/unit/analysis/trajectory/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.analysis.trajectory` module.""" diff --git a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py b/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py deleted file mode 100644 index 32c41b78db..0000000000 --- a/lib/iris/tests/unit/analysis/trajectory/test_Trajectory.py +++ /dev/null @@ -1,197 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :class:`iris.analysis.trajectory.Trajectory`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.analysis.trajectory import Trajectory -from iris.tests.stock import simple_3d, simple_4d_with_hybrid_height - - -class Test___init__(tests.IrisTest): - def test_2_points(self): - # basic 2-seg line along x - waypoints = [{"lat": 0, "lon": 0}, {"lat": 1, "lon": 2}] - trajectory = Trajectory(waypoints, sample_count=5) - - self.assertEqual(trajectory.length, np.sqrt(5)) - self.assertEqual(trajectory.sample_count, 5) - self.assertEqual( - trajectory.sampled_points, - [ - {"lat": 0.0, "lon": 0.0}, - {"lat": 0.25, "lon": 0.5}, - {"lat": 0.5, "lon": 1.0}, - {"lat": 0.75, "lon": 1.5}, - {"lat": 1.0, "lon": 2.0}, - ], - ) - - def test_3_points(self): - # basic 2-seg line along x - waypoints = [ - {"lat": 0, "lon": 0}, - {"lat": 0, "lon": 1}, - {"lat": 0, "lon": 2}, - ] - trajectory = Trajectory(waypoints, sample_count=21) - - self.assertEqual(trajectory.length, 2.0) - self.assertEqual(trajectory.sample_count, 21) - self.assertEqual( - trajectory.sampled_points[19], - {"lat": 0.0, "lon": 1.9000000000000001}, - ) - - def test_zigzag(self): - # 4-seg m-shape - waypoints = [ - {"lat": 0, "lon": 0}, - {"lat": 1, "lon": 1}, - {"lat": 0, "lon": 2}, - {"lat": 1, "lon": 3}, - {"lat": 0, "lon": 4}, - ] - trajectory = Trajectory(waypoints, sample_count=33) - - self.assertEqual(trajectory.length, 5.6568542494923806) - self.assertEqual(trajectory.sample_count, 33) - self.assertEqual( - trajectory.sampled_points[31], - {"lat": 0.12499999999999989, "lon": 3.875}, - ) - - -class Test__get_interp_points(tests.IrisTest): - def test_basic(self): - dim_names = "lat" - waypoints = [{dim_names: 0}, {dim_names: 1}] - sample_count = 5 - trajectory = Trajectory(waypoints, sample_count=sample_count) - result = trajectory._get_interp_points() - expected_points = list(np.linspace(0, 1, sample_count)) - - self.assertEqual(len(result), len(waypoints[0])) - self.assertEqual(len(result[0][1]), 
sample_count) - self.assertEqual(result[0][1], expected_points) - self.assertEqual(result[0][0], dim_names) - - def test_2d(self): - dim_names = ["lat", "lon"] - waypoints = [ - {dim_names[0]: 0, dim_names[1]: 0}, - {dim_names[0]: 1, dim_names[1]: 2}, - ] - sample_count = 5 - trajectory = Trajectory(waypoints, sample_count=sample_count) - result = trajectory._get_interp_points() - - self.assertEqual(len(result), len(waypoints[0])) - self.assertEqual(len(result[0][1]), sample_count) - self.assertEqual(len(result[1][1]), sample_count) - self.assertIn(result[0][0], dim_names) - self.assertIn(result[1][0], dim_names) - - def test_3d(self): - dim_names = ["y", "x", "z"] - waypoints = [ - {dim_names[0]: 0, dim_names[1]: 0, dim_names[2]: 2}, - {dim_names[0]: 1, dim_names[1]: 2, dim_names[2]: 10}, - ] - sample_count = 5 - trajectory = Trajectory(waypoints, sample_count=sample_count) - result = trajectory._get_interp_points() - - self.assertEqual(len(result), len(waypoints[0])) - self.assertEqual(len(result[0][1]), sample_count) - self.assertEqual(len(result[1][1]), sample_count) - self.assertEqual(len(result[2][1]), sample_count) - self.assertIn(result[0][0], dim_names) - self.assertIn(result[1][0], dim_names) - self.assertIn(result[2][0], dim_names) - - -class Test_interpolate(tests.IrisTest): - def _result_cube_metadata(self, res_cube): - dim_names = [c.name() for c in res_cube.dim_coords] - named_dims = [res_cube.coord_dims(c)[0] for c in res_cube.dim_coords] - anon_dims = list(set(range(res_cube.ndim)) - set(named_dims)) - anon_dims = None if not len(anon_dims) else anon_dims - return dim_names, named_dims, anon_dims - - def test_cube__simple_3d(self): - # Test that an 'index' coord is added to the resultant cube. 
- cube = simple_3d() - waypoints = [ - {"latitude": 40, "longitude": 40}, - {"latitude": 0, "longitude": 0}, - ] - sample_count = 3 - new_coord_name = "index" - trajectory = Trajectory(waypoints, sample_count=sample_count) - result = trajectory.interpolate(cube) - - dim_names, named_dims, anon_dims = self._result_cube_metadata(result) - new_coord = result.coord(new_coord_name) - exp_named_dims = [0, 1] - - self.assertEqual(result.ndim, cube.ndim - 1) - self.assertIn(new_coord_name, dim_names) - self.assertEqual(named_dims, exp_named_dims) - self.assertIsNone(anon_dims) - self.assertEqual(len(new_coord.points), sample_count) - - def test_cube__anon_dim(self): - cube = simple_4d_with_hybrid_height() - cube.remove_coord("model_level_number") # Make cube dim 1 anonymous. - waypoints = [ - {"grid_latitude": 21, "grid_longitude": 31}, - {"grid_latitude": 23, "grid_longitude": 33}, - ] - sample_count = 4 - new_coord_name = "index" - trajectory = Trajectory(waypoints, sample_count=sample_count) - result = trajectory.interpolate(cube) - - dim_names, named_dims, anon_dims = self._result_cube_metadata(result) - new_coord = result.coord(new_coord_name) - exp_named_dims = [0, 2] - exp_anon_dims = [1] - - self.assertEqual(result.ndim, cube.ndim - 1) - self.assertIn(new_coord_name, dim_names) - self.assertEqual(named_dims, exp_named_dims) - self.assertEqual(anon_dims, exp_anon_dims) - self.assertEqual(len(new_coord.points), sample_count) - - def test_call(self): - # Test that :func:`iris.analysis.trajectory.interpolate` is called by - # `Trajectory.interpolate`. 
- cube = simple_3d() - to_patch = "iris.analysis.trajectory.interpolate" - waypoints = [ - {"latitude": 40, "longitude": 40}, - {"latitude": 0, "longitude": 0}, - ] - sample_count = 3 - trajectory = Trajectory(waypoints, sample_count=sample_count) - - with mock.patch(to_patch, return_value=cube) as mock_interpolate: - trajectory.interpolate(cube) - mock_interpolate.assert_called_once() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py b/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py deleted file mode 100644 index a652ceb72e..0000000000 --- a/lib/iris/tests/unit/analysis/trajectory/test_UnstructuredNearestNeighbourRegridder.py +++ /dev/null @@ -1,328 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:class:`iris.analysis.trajectory.UnstructuredNearestNeigbourRegridder`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.analysis.trajectory import ( - UnstructuredNearestNeigbourRegridder as unn_gridder, -) -from iris.coord_systems import GeogCS, RotatedGeogCS -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube, CubeList - - -class MixinExampleSetup: - # Common code for regridder test classes. - - def setUp(self): - # Basic test values. - src_x_y_value = np.array( - [ - [20.12, 11.73, 0.01], - [120.23, -20.73, 1.12], - [290.34, 33.88, 2.23], - [-310.45, 57.8, 3.34], - ] - ) - tgt_grid_x = np.array([-173.2, -100.3, -32.5, 1.4, 46.6, 150.7]) - tgt_grid_y = np.array([-80.1, -30.2, 0.3, 47.4, 75.5]) - - # Make sample 1-D source cube. 
- src = Cube(src_x_y_value[:, 2]) - src.add_aux_coord( - AuxCoord( - src_x_y_value[:, 0], standard_name="longitude", units="degrees" - ), - 0, - ) - src.add_aux_coord( - AuxCoord( - src_x_y_value[:, 1], standard_name="latitude", units="degrees" - ), - 0, - ) - self.src_cube = src - - # Make sample grid cube. - grid = Cube(np.zeros(tgt_grid_y.shape + tgt_grid_x.shape)) - grid.add_dim_coord( - DimCoord(tgt_grid_y, standard_name="latitude", units="degrees"), 0 - ) - grid.add_dim_coord( - DimCoord(tgt_grid_x, standard_name="longitude", units="degrees"), 1 - ) - self.grid_cube = grid - - # Make expected-result, from the expected source-index at each point. - expected_result_indices = np.array( - [ - [1, 1, 1, 1, 1, 1], - [1, 2, 0, 0, 0, 1], - [1, 2, 2, 0, 0, 1], - [3, 2, 2, 3, 3, 3], - [3, 2, 3, 3, 3, 3], - ] - ) - self.expected_data = self.src_cube.data[expected_result_indices] - - # Make a 3D source cube, based on the existing 2d test data. - z_cubes = [src.copy() for _ in range(3)] - for i_z, z_cube in enumerate(z_cubes): - z_cube.add_aux_coord(DimCoord([i_z], long_name="z")) - z_cube.data = z_cube.data + 100.0 * i_z - self.src_z_cube = CubeList(z_cubes).merge_cube() - - # Make a corresponding 3d expected result. - self.expected_data_zxy = self.src_z_cube.data[ - :, expected_result_indices - ] - - def _check_expected( - self, - src_cube=None, - grid_cube=None, - expected_data=None, - expected_coord_names=None, - ): - # Test regridder creation + operation against expected results. - if src_cube is None: - src_cube = self.src_cube - if grid_cube is None: - grid_cube = self.grid_cube - gridder = unn_gridder(src_cube, grid_cube) - result = gridder(src_cube) - if expected_coord_names is not None: - # Check result coordinate identities. - self.assertEqual( - [coord.name() for coord in result.coords()], - expected_coord_names, - ) - if expected_data is None: - # By default, check against the 'standard' data result. 
- expected_data = self.expected_data - self.assertArrayEqual(result.data, expected_data) - return result - - -class Test__init__(MixinExampleSetup, tests.IrisTest): - # Exercise all the constructor argument checks. - - def test_fail_no_src_x(self): - self.src_cube.remove_coord("longitude") - msg_re = "Source cube must have X- and Y-axis coordinates" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_no_src_y(self): - self.src_cube.remove_coord("latitude") - msg_re = "Source cube must have X- and Y-axis coordinates" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_bad_src_dims(self): - self.src_cube = self.grid_cube - msg_re = "Source.*same cube dimensions" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_mixed_latlons(self): - self.src_cube.coord("longitude").rename("projection_x_coordinate") - msg_re = "any.*latitudes/longitudes.*all must be" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_bad_latlon_units(self): - self.grid_cube.coord("longitude").units = "m" - msg_re = 'does not convert to "degrees"' - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_non_latlon_units_mismatch(self): - # Convert all to non-latlon system (does work: see in "Test__call__"). - for cube in (self.src_cube, self.grid_cube): - for axis_name in ("x", "y"): - coord = cube.coord(axis=axis_name) - coord_name = "projection_{}_coordinate".format(axis_name) - coord.rename(coord_name) - coord.units = "m" - # Change one of the output units. 
- self.grid_cube.coord(axis="x").units = "1" - msg_re = "Source and target.*must have the same units" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_no_tgt_x(self): - self.grid_cube.remove_coord("longitude") - msg_re = "must contain a single 1D x coordinate" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_no_tgt_y(self): - self.grid_cube.remove_coord("latitude") - msg_re = "must contain a single 1D y coordinate" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_src_cs_mismatch(self): - cs = GeogCS(1000.0) - self.src_cube.coord("latitude").coord_system = cs - msg_re = "must all have the same coordinate system" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_tgt_cs_mismatch(self): - cs = GeogCS(1000.0) - self.grid_cube.coord("latitude").coord_system = cs - msg_re = "x.*and y.*must have the same coordinate system" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - def test_fail_src_tgt_cs_mismatch(self): - cs = GeogCS(1000.0) - self.src_cube.coord("latitude").coord_system = cs - self.src_cube.coord("longitude").coord_system = cs - msg_re = "Source and target.*same coordinate system" - with self.assertRaisesRegex(ValueError, msg_re): - unn_gridder(self.src_cube, self.grid_cube) - - -class Test__call__(MixinExampleSetup, tests.IrisTest): - # Test regridder operation and results. - - def test_basic_latlon(self): - # Check a test operation. - self._check_expected( - expected_coord_names=["latitude", "longitude"], - expected_data=self.expected_data, - ) - - def test_non_latlon(self): - # Check different answer in cartesian coordinates (no wrapping, etc). - # Convert to non-latlon system, with the same coord values. 
- for cube in (self.src_cube, self.grid_cube): - for axis_name in ("x", "y"): - coord = cube.coord(axis=axis_name) - coord_name = "projection_{}_coordinate".format(axis_name) - coord.rename(coord_name) - coord.units = "m" - # Check for a somewhat different result. - non_latlon_indices = np.array( - [ - [3, 0, 0, 0, 1, 1], - [3, 0, 0, 0, 0, 1], - [3, 0, 0, 0, 0, 1], - [3, 0, 0, 0, 0, 1], - [3, 0, 0, 0, 0, 1], - ] - ) - expected_data = self.src_cube.data[non_latlon_indices] - self._check_expected(expected_data=expected_data) - - def test_multidimensional_xy(self): - # Recast the 4-point source cube as 2*2 : should yield the same result. - co_x = self.src_cube.coord(axis="x") - co_y = self.src_cube.coord(axis="y") - new_src = Cube(self.src_cube.data.reshape((2, 2))) - new_x_co = AuxCoord( - co_x.points.reshape((2, 2)), - standard_name="longitude", - units="degrees", - ) - new_y_co = AuxCoord( - co_y.points.reshape((2, 2)), - standard_name="latitude", - units="degrees", - ) - new_src.add_aux_coord(new_x_co, (0, 1)) - new_src.add_aux_coord(new_y_co, (0, 1)) - self._check_expected(src_cube=new_src) - - def test_transposed_grid(self): - # Show that changing the order of the grid X and Y has no effect. - new_grid_cube = self.grid_cube.copy() - new_grid_cube.transpose((1, 0)) - # Check that the new grid is in (X, Y) order. - self.assertEqual( - [coord.name() for coord in new_grid_cube.coords()], - ["longitude", "latitude"], - ) - # Check that the result is the same, dimension order is still Y,X. - self._check_expected( - grid_cube=new_grid_cube, - expected_coord_names=["latitude", "longitude"], - ) - - def test_compatible_source(self): - # Check operation on data with different dimensions to the original - # source cube for the regridder creation. 
- gridder = unn_gridder(self.src_cube, self.grid_cube) - result = gridder(self.src_z_cube) - self.assertEqual( - [coord.name() for coord in result.coords()], - ["z", "latitude", "longitude"], - ) - self.assertArrayEqual(result.data, self.expected_data_zxy) - - def test_fail_incompatible_source(self): - # Check that a slightly modified source cube is *not* acceptable. - modified_src_cube = self.src_cube.copy() - points = modified_src_cube.coord(axis="x").points - points[0] += 0.01 - modified_src_cube.coord(axis="x").points = points - gridder = unn_gridder(self.src_cube, self.grid_cube) - msg = "not defined on the same source grid" - with self.assertRaisesRegex(ValueError, msg): - gridder(modified_src_cube) - - def test_transposed_source(self): - # Check operation on data where the 'trajectory' dimension is not the - # last one. - src_z_cube = self.src_z_cube - src_z_cube.transpose((1, 0)) - self._check_expected( - src_cube=src_z_cube, expected_data=self.expected_data_zxy - ) - - def test_radians_degrees(self): - # Check source + target unit conversions, grid and result in degrees. - for axis_name in ("x", "y"): - self.src_cube.coord(axis=axis_name).convert_units("radians") - self.grid_cube.coord(axis=axis_name).convert_units("degrees") - result = self._check_expected() - self.assertEqual(result.coord(axis="x").units, "degrees") - - def test_degrees_radians(self): - # Check source + target unit conversions, grid and result in radians. - for axis_name in ("x", "y"): - self.src_cube.coord(axis=axis_name).convert_units("degrees") - self.grid_cube.coord(axis=axis_name).convert_units("radians") - result = self._check_expected() - self.assertEqual(result.coord(axis="x").units, "radians") - - def test_alternative_cs(self): - # Check the result is just the same in a different coordinate system. 
- cs = RotatedGeogCS( - grid_north_pole_latitude=75.3, - grid_north_pole_longitude=102.5, - ellipsoid=GeogCS(100.0), - ) - for cube in (self.src_cube, self.grid_cube): - for coord_name in ("longitude", "latitude"): - cube.coord(coord_name).coord_system = cs - self._check_expected() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py b/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py deleted file mode 100644 index 8b9e4cafa4..0000000000 --- a/lib/iris/tests/unit/analysis/trajectory/test__nearest_neighbour_indices_ndcoords.py +++ /dev/null @@ -1,228 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:meth:`iris.analysis.trajectory._nearest_neighbour_indices_ndcoords`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.analysis.trajectory import ( - _nearest_neighbour_indices_ndcoords as nn_ndinds, -) -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube - - -class Test2d(tests.IrisTest): - def test_nonlatlon_simple_2d(self): - co_y = DimCoord([10.0, 20.0], long_name="y") - co_x = DimCoord([1.0, 2.0, 3.0], long_name="x") - cube = Cube(np.zeros((2, 3))) - cube.add_dim_coord(co_y, 0) - cube.add_dim_coord(co_x, 1) - sample_point = [("x", 2.8), ("y", 18.5)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(1, 2)]) - - def test_nonlatlon_multiple_2d(self): - co_y = DimCoord([10.0, 20.0], long_name="y") - co_x = DimCoord([1.0, 2.0, 3.0], long_name="x") - cube = Cube(np.zeros((2, 3))) - cube.add_dim_coord(co_y, 0) - cube.add_dim_coord(co_x, 1) - sample_points = [("x", [2.8, -350.0, 1.7]), ("y", [18.5, 8.7, 12.2])] - result = nn_ndinds(cube, sample_points) - self.assertEqual(result, [(1, 2), (0, 0), (0, 1)]) - - def test_latlon_simple_2d(self): - co_y = DimCoord( - [10.0, 20.0], standard_name="latitude", units="degrees" - ) - co_x = DimCoord( - [1.0, 2.0, 3.0], standard_name="longitude", units="degrees" - ) - cube = Cube(np.zeros((2, 3))) - cube.add_dim_coord(co_y, 0) - cube.add_dim_coord(co_x, 1) - sample_point = [("longitude", 2.8), ("latitude", 18.5)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(1, 2)]) - - def test_latlon_multiple_2d(self): - co_y = DimCoord( - [10.0, 20.0], standard_name="latitude", units="degrees" - ) - co_x = DimCoord( - [1.0, 2.0, 3.0], standard_name="longitude", units="degrees" - ) - cube = Cube(np.zeros((2, 3))) - cube.add_dim_coord(co_y, 0) - cube.add_dim_coord(co_x, 1) - sample_points = [ - ("longitude", [2.8, -350.0, 1.7]), - ("latitude", [18.5, 8.7, 12.2]), - ] - result = nn_ndinds(cube, sample_points) - # Note slight difference from non-latlon version. 
- self.assertEqual(result, [(1, 2), (0, 2), (0, 1)]) - - -class Test1d(tests.IrisTest): - def test_nonlatlon_simple_1d(self): - co_x = AuxCoord([1.0, 2.0, 3.0, 1.0, 2.0, 3.0], long_name="x") - co_y = AuxCoord([10.0, 10.0, 10.0, 20.0, 20.0, 20.0], long_name="y") - cube = Cube(np.zeros(6)) - cube.add_aux_coord(co_y, 0) - cube.add_aux_coord(co_x, 0) - sample_point = [("x", 2.8), ("y", 18.5)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(5,)]) - - def test_latlon_simple_1d(self): - cube = Cube([11.0, 12.0, 13.0, 21.0, 22.0, 23.0]) - co_x = AuxCoord( - [1.0, 2.0, 3.0, 1.0, 2.0, 3.0], - standard_name="longitude", - units="degrees", - ) - co_y = AuxCoord( - [10.0, 10.0, 10.0, 20.0, 20.0, 20.0], - standard_name="latitude", - units="degrees", - ) - cube.add_aux_coord(co_y, 0) - cube.add_aux_coord(co_x, 0) - sample_point = [("longitude", 2.8), ("latitude", 18.5)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(5,)]) - - -class TestApiExtras(tests.IrisTest): - # Check operation with alternative calling setups. - def test_no_y_dim(self): - # Operate in X only, returned slice should be [:, ix]. - co_x = DimCoord([1.0, 2.0, 3.0], long_name="x") - co_y = DimCoord([10.0, 20.0], long_name="y") - cube = Cube(np.zeros((2, 3))) - cube.add_dim_coord(co_y, 0) - cube.add_dim_coord(co_x, 1) - sample_point = [("x", 2.8)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(slice(None), 2)]) - - def test_no_x_dim(self): - # Operate in Y only, returned slice should be [iy, :]. - co_x = DimCoord([1.0, 2.0, 3.0], long_name="x") - co_y = DimCoord([10.0, 20.0], long_name="y") - cube = Cube(np.zeros((2, 3))) - cube.add_dim_coord(co_y, 0) - cube.add_dim_coord(co_x, 1) - sample_point = [("y", 18.5)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(1, slice(None))]) - - def test_sample_dictionary(self): - # Pass sample_point arg as a dictionary: this usage mode is deprecated. 
- co_x = AuxCoord([1.0, 2.0, 3.0], long_name="x") - co_y = AuxCoord([10.0, 20.0], long_name="y") - cube = Cube(np.zeros((2, 3))) - cube.add_aux_coord(co_y, 0) - cube.add_aux_coord(co_x, 1) - sample_point = {"x": 2.8, "y": 18.5} - exp_emsg = r"must be a list of \(coordinate, value\) pairs" - with self.assertRaisesRegex(TypeError, exp_emsg): - nn_ndinds(cube, sample_point) - - -class TestLatlon(tests.IrisTest): - # Check correct calculations on lat-lon points. - def _testcube_latlon_1d(self, lats, lons): - cube = Cube(np.zeros(len(lons))) - co_x = AuxCoord(lons, standard_name="longitude", units="degrees") - co_y = AuxCoord(lats, standard_name="latitude", units="degrees") - cube.add_aux_coord(co_y, 0) - cube.add_aux_coord(co_x, 0) - return cube - - def _check_latlon_1d(self, lats, lons, sample_point, expect): - cube = self._testcube_latlon_1d(lats, lons) - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(expect,)]) - - def test_lat_scaling(self): - # Check that (88, 25) is closer to (88, 0) than to (87, 25) - self._check_latlon_1d( - lats=[88, 87], - lons=[0, 25], - sample_point=[("latitude", 88), ("longitude", 25)], - expect=0, - ) - - def test_alternate_latlon_names_okay(self): - # Check that (88, 25) is **STILL** closer to (88, 0) than to (87, 25) - # ... when coords have odd, but still recognisable, latlon names. - cube = self._testcube_latlon_1d(lats=[88, 87], lons=[0, 25]) - cube.coord("latitude").rename("y_latitude_y") - cube.coord("longitude").rename("x_longitude_x") - sample_point = [("y_latitude_y", 88), ("x_longitude_x", 25)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(0,)]) - - def test_alternate_nonlatlon_names_different(self): - # Check that (88, 25) is **NOT** closer to (88, 0) than to (87, 25) - # ... by plain XY euclidean-distance, if coords have non-latlon names. 
- cube = self._testcube_latlon_1d(lats=[88, 87], lons=[0, 25]) - cube.coord("latitude").rename("y") - cube.coord("longitude").rename("x") - sample_point = [("y", 88), ("x", 25)] - result = nn_ndinds(cube, sample_point) - self.assertEqual(result, [(1,)]) - - def test_lons_wrap_359_0(self): - # Check that (0, 359) is closer to (0, 0) than to (0, 350) - self._check_latlon_1d( - lats=[0, 0], - lons=[0, 350], - sample_point=[("latitude", 0), ("longitude", 359)], - expect=0, - ) - - def test_lons_wrap_359_neg1(self): - # Check that (0, 359) is closer to (0, -1) than to (0, 350) - self._check_latlon_1d( - lats=[0, 0], - lons=[350, -1], - sample_point=[("latitude", 0), ("longitude", 359)], - expect=1, - ) - - def test_lons_wrap_neg179_plus179(self): - # Check that (0, -179) is closer to (0, 179) than to (0, -170) - self._check_latlon_1d( - lats=[0, 0], - lons=[-170, 179], - sample_point=[("latitude", 0), ("longitude", -179)], - expect=1, - ) - - def test_lons_over_pole(self): - # Check that (89, 0) is closer to (89, 180) than to (85, 0) - self._check_latlon_1d( - lats=[85, 89], - lons=[0, 180], - sample_point=[("latitude", 89), ("longitude", 0)], - expect=1, - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py b/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py deleted file mode 100644 index 038019611c..0000000000 --- a/lib/iris/tests/unit/analysis/trajectory/test_interpolate.py +++ /dev/null @@ -1,185 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :meth:`iris.analysis.trajectory.interpolate`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.analysis.trajectory import interpolate -from iris.coords import AuxCoord, DimCoord -import iris.tests.stock - - -class TestFailCases(tests.IrisTest): - @tests.skip_data - def test_derived_coord(self): - cube = iris.tests.stock.realistic_4d() - sample_pts = [("altitude", [0, 10, 50])] - msg = "'altitude'.*derived coordinates are not allowed" - with self.assertRaisesRegex(ValueError, msg): - interpolate(cube, sample_pts, "nearest") - - # Try to request unknown interpolation method. - - def test_unknown_method(self): - cube = iris.tests.stock.simple_2d() - sample_point = [("x", 2.8)] - msg = "Unhandled interpolation.*linekar" - with self.assertRaisesRegex(ValueError, msg): - interpolate(cube, sample_point, method="linekar") - - -class TestNearest(tests.IrisTest): - # Test interpolation with 'nearest' method. - # This is basically a wrapper to the routine: - # 'analysis._interpolate_private._nearest_neighbour_indices_ndcoords'. - # That has its own test, so we don't test the basic calculation - # exhaustively here. Instead we check the way it handles the source and - # result cubes (especially coordinates). - - def setUp(self): - cube = iris.tests.stock.simple_3d() - # Actually, this cube *isn't* terribly realistic, as the lat+lon coords - # have integer type, which in this case produces some peculiar results. - # Let's fix that (and not bother to test the peculiar behaviour). - for coord_name in ("longitude", "latitude"): - coord = cube.coord(coord_name) - coord.points = coord.points.astype(float) - self.test_cube = cube - # Define coordinates for a single-point testcase. - y_val, x_val = 0, -90 - # Work out cube indices of the testpoint. - self.single_point_iy = np.where( - cube.coord("latitude").points == y_val - )[0][0] - self.single_point_ix = np.where( - cube.coord("longitude").points == x_val - )[0][0] - # Use slightly-different values to test nearest-neighbour operation. 
- self.single_sample_point = [ - ("latitude", [y_val + 19.23]), - ("longitude", [x_val - 17.54]), - ] - - def test_single_point_same_cube(self): - # Check exact result matching for a single point. - cube = self.test_cube - result = interpolate(cube, self.single_sample_point, method="nearest") - # Check that the result is a single trajectory point, exactly equal to - # the expected part of the original data. - self.assertEqual(result.shape[-1], 1) - result = result[..., 0] - expected = cube[:, self.single_point_iy, self.single_point_ix] - self.assertEqual(result, expected) - - def test_multi_point_same_cube(self): - # Check an exact result for multiple points. - cube = self.test_cube - # Use latitude selection to recreate a whole row of the original cube. - sample_points = [ - ("longitude", [-180, -90, 0, 90]), - ("latitude", [0, 0, 0, 0]), - ] - result = interpolate(cube, sample_points, method="nearest") - - # The result should be identical to a single latitude section of the - # original, but with modified coords (latitude has 4 repeated zeros). - expected = cube[:, 1, :] - # Result 'longitude' is now an aux coord. - co_x = expected.coord("longitude") - expected.remove_coord(co_x) - expected.add_aux_coord(co_x, 1) - # Result 'latitude' is now an aux coord containing 4*[0]. - expected.remove_coord("latitude") - co_y = AuxCoord( - [0, 0, 0, 0], standard_name="latitude", units="degrees" - ) - expected.add_aux_coord(co_y, 1) - self.assertEqual(result, expected) - - def test_aux_coord_noninterpolation_dim(self): - # Check exact result with an aux-coord mapped to an uninterpolated dim. - cube = self.test_cube - cube.add_aux_coord(DimCoord([17, 19], long_name="aux0"), 0) - - # The result cube should exactly equal a single source point. 
- result = interpolate(cube, self.single_sample_point, method="nearest") - self.assertEqual(result.shape[-1], 1) - result = result[..., 0] - expected = cube[:, self.single_point_iy, self.single_point_ix] - self.assertEqual(result, expected) - - def test_aux_coord_one_interp_dim(self): - # Check exact result with an aux-coord over one interpolation dims. - cube = self.test_cube - cube.add_aux_coord(AuxCoord([11, 12, 13, 14], long_name="aux_x"), 2) - - # The result cube should exactly equal a single source point. - result = interpolate(cube, self.single_sample_point, method="nearest") - self.assertEqual(result.shape[-1], 1) - result = result[..., 0] - expected = cube[:, self.single_point_iy, self.single_point_ix] - self.assertEqual(result, expected) - - def test_aux_coord_both_interp_dims(self): - # Check exact result with an aux-coord over both interpolation dims. - cube = self.test_cube - cube.add_aux_coord( - AuxCoord( - [[11, 12, 13, 14], [21, 22, 23, 24], [31, 32, 33, 34]], - long_name="aux_xy", - ), - (1, 2), - ) - - # The result cube should exactly equal a single source point. - result = interpolate(cube, self.single_sample_point, method="nearest") - self.assertEqual(result.shape[-1], 1) - result = result[..., 0] - expected = cube[:, self.single_point_iy, self.single_point_ix] - self.assertEqual(result, expected) - - def test_aux_coord_fail_mixed_dims(self): - # Check behaviour with an aux-coord mapped over both interpolation and - # non-interpolation dims : not supported. - cube = self.test_cube - cube.add_aux_coord( - AuxCoord( - [[111, 112, 113, 114], [211, 212, 213, 214]], - long_name="aux_0x", - ), - (0, 2), - ) - msg = ( - "Coord aux_0x at one x-y position has the shape.*" - "instead of being a single point" - ) - with self.assertRaisesRegex(ValueError, msg): - interpolate(cube, self.single_sample_point, method="nearest") - - def test_metadata(self): - # Check exact result matching for a single point, with additional - # attributes and cell-methods. 
- cube = self.test_cube - cube.attributes["ODD_ATTR"] = "string-value-example" - cube.add_cell_method(iris.coords.CellMethod("mean", "area")) - result = interpolate(cube, self.single_sample_point, method="nearest") - # Check that the result is a single trajectory point, exactly equal to - # the expected part of the original data. - self.assertEqual(result.shape[-1], 1) - result = result[..., 0] - expected = cube[:, self.single_point_iy, self.single_point_ix] - self.assertEqual(result, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/__init__.py b/lib/iris/tests/unit/aux_factory/__init__.py deleted file mode 100644 index 00b9f1a3bd..0000000000 --- a/lib/iris/tests/unit/aux_factory/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.aux_factory` module.""" diff --git a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py deleted file mode 100644 index 6e417a3b38..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_AtmosphereSigmaFactory.py +++ /dev/null @@ -1,296 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -`iris.aux_factory.AtmosphereSigmaFactory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris.aux_factory import AtmosphereSigmaFactory -from iris.coords import AuxCoord, DimCoord - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) - self.kwargs = dict( - pressure_at_top=self.pressure_at_top, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_insufficient_coordinates_no_args(self): - with self.assertRaises(ValueError): - AtmosphereSigmaFactory() - - def test_insufficient_coordinates_no_ptop(self): - with self.assertRaises(ValueError): - AtmosphereSigmaFactory( - pressure_at_top=None, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_insufficient_coordinates_no_sigma(self): - with self.assertRaises(ValueError): - AtmosphereSigmaFactory( - pressure_at_top=self.pressure_at_top, - sigma=None, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_insufficient_coordinates_no_ps(self): - with self.assertRaises(ValueError): - AtmosphereSigmaFactory( - pressure_at_top=self.pressure_at_top, - sigma=self.sigma, - surface_air_pressure=None, - ) - - def test_ptop_shapes(self): - for shape in [(), (1,)]: - self.pressure_at_top.shape = shape - AtmosphereSigmaFactory(**self.kwargs) - - def test_ptop_invalid_shapes(self): - for shape in [(2,), (1, 1)]: - self.pressure_at_top.shape = shape - with self.assertRaises(ValueError): - AtmosphereSigmaFactory(**self.kwargs) - - def test_sigma_bounds(self): - for n_bounds in [0, 2]: - self.sigma.nbounds = n_bounds - AtmosphereSigmaFactory(**self.kwargs) - - def test_sigma_invalid_bounds(self): - for n_bounds in [-1, 1, 3]: - self.sigma.nbounds = n_bounds - with self.assertRaises(ValueError): - 
AtmosphereSigmaFactory(**self.kwargs) - - def test_sigma_units(self): - for units in ["1", "unknown", None]: - self.sigma.units = Unit(units) - AtmosphereSigmaFactory(**self.kwargs) - - def test_sigma_invalid_units(self): - for units in ["Pa", "m"]: - self.sigma.units = Unit(units) - with self.assertRaises(ValueError): - AtmosphereSigmaFactory(**self.kwargs) - - def test_ptop_ps_units(self): - for units in [("Pa", "Pa")]: - self.pressure_at_top.units = Unit(units[0]) - self.surface_air_pressure.units = Unit(units[1]) - AtmosphereSigmaFactory(**self.kwargs) - - def test_ptop_ps_invalid_units(self): - for units in [("Pa", "1"), ("1", "Pa"), ("bar", "Pa"), ("Pa", "hPa")]: - self.pressure_at_top.units = Unit(units[0]) - self.surface_air_pressure.units = Unit(units[1]) - with self.assertRaises(ValueError): - AtmosphereSigmaFactory(**self.kwargs) - - def test_ptop_units(self): - for units in ["Pa", "bar", "mbar", "hPa"]: - self.pressure_at_top.units = Unit(units) - self.surface_air_pressure.units = Unit(units) - AtmosphereSigmaFactory(**self.kwargs) - - def test_ptop_invalid_units(self): - for units in ["1", "m", "kg", None]: - self.pressure_at_top.units = Unit(units) - self.surface_air_pressure.units = Unit(units) - with self.assertRaises(ValueError): - AtmosphereSigmaFactory(**self.kwargs) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) - self.kwargs = dict( - pressure_at_top=self.pressure_at_top, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_values(self): - factory = AtmosphereSigmaFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) - - -class Test__derive(tests.IrisTest): - def test_function_scalar(self): - assert AtmosphereSigmaFactory._derive(0, 0, 0) == 0 - assert 
AtmosphereSigmaFactory._derive(3, 0, 0) == 3 - assert AtmosphereSigmaFactory._derive(0, 5, 0) == 0 - assert AtmosphereSigmaFactory._derive(0, 0, 7) == 0 - assert AtmosphereSigmaFactory._derive(3, 5, 0) == -12 - assert AtmosphereSigmaFactory._derive(3, 0, 7) == 3 - assert AtmosphereSigmaFactory._derive(0, 5, 7) == 35 - assert AtmosphereSigmaFactory._derive(3, 5, 7) == 23 - - def test_function_array(self): - ptop = 3 - sigma = np.array([2, 4]) - ps = np.arange(4).reshape(2, 2) - np.testing.assert_equal( - AtmosphereSigmaFactory._derive(ptop, sigma, ps), - [[-3, -5], [1, 3]], - ) - - -class Test_make_coord(tests.IrisTest): - @staticmethod - def coord_dims(coord): - mapping = dict( - pressure_at_top=(), - sigma=(0,), - surface_air_pressure=(1, 2), - ) - return mapping[coord.name()] - - @staticmethod - def derive(pressure_at_top, sigma, surface_air_pressure, coord=True): - result = pressure_at_top + sigma * ( - surface_air_pressure - pressure_at_top - ) - if coord: - name = "air_pressure" - result = AuxCoord( - result, - standard_name=name, - units="Pa", - ) - return result - - def setUp(self): - self.pressure_at_top = AuxCoord( - [3.0], - long_name="pressure_at_top", - units="Pa", - ) - self.sigma = DimCoord( - [1.0, 0.4, 0.1], - bounds=[[1.0, 0.6], [0.6, 0.2], [0.2, 0.0]], - long_name="sigma", - units="1", - ) - self.surface_air_pressure = AuxCoord( - [[-1.0, 2.0], [1.0, 3.0]], - long_name="surface_air_pressure", - units="Pa", - ) - self.kwargs = dict( - pressure_at_top=self.pressure_at_top, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_derived_coord(self): - # Broadcast expected points given the known dimensional mapping - pressure_at_top = self.pressure_at_top.points[0] - sigma = self.sigma.points[..., np.newaxis, np.newaxis] - surface_air_pressure = self.surface_air_pressure.points[ - np.newaxis, ... 
- ] - - # Calculate the expected result - - expected_coord = self.derive( - pressure_at_top, sigma, surface_air_pressure - ) - - # Calculate the actual result - factory = AtmosphereSigmaFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - - # Check bounds - expected_bounds = [ - [[[-1.0, 0.6], [2.0, 2.4]], [[1.0, 1.8], [3.0, 3.0]]], - [[[0.6, 2.2], [2.4, 2.8]], [[1.8, 2.6], [3.0, 3.0]]], - [[[2.2, 3.0], [2.8, 3.0]], [[2.6, 3.0], [3.0, 3.0]]], - ] - np.testing.assert_allclose(coord.bounds, expected_bounds) - coord.bounds = None - - # Check points and metadata - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) - self.kwargs = dict( - pressure_at_top=self.pressure_at_top, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - self.factory = AtmosphereSigmaFactory(**self.kwargs) - - def test_pressure_at_top(self): - new_pressure_at_top = mock.Mock(units=Unit("Pa"), nbounds=0, shape=()) - self.factory.update(self.pressure_at_top, new_pressure_at_top) - self.assertIs(self.factory.pressure_at_top, new_pressure_at_top) - - def test_pressure_at_top_wrong_shape(self): - new_pressure_at_top = mock.Mock( - units=Unit("Pa"), nbounds=0, shape=(2,) - ) - with self.assertRaises(ValueError): - self.factory.update(self.pressure_at_top, new_pressure_at_top) - - def test_sigma(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.sigma, new_sigma) - self.assertIs(self.factory.sigma, new_sigma) - - def test_sigma_too_many_bounds(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.sigma, new_sigma) - - def test_sigma_incompatible_units(self): - new_sigma = mock.Mock(units=Unit("Pa"), nbounds=0) - with 
self.assertRaises(ValueError): - self.factory.update(self.sigma, new_sigma) - - def test_surface_air_pressure(self): - new_surface_air_pressure = mock.Mock(units=Unit("Pa"), nbounds=0) - self.factory.update( - self.surface_air_pressure, new_surface_air_pressure - ) - self.assertIs( - self.factory.surface_air_pressure, new_surface_air_pressure - ) - - def test_surface_air_pressure_incompatible_units(self): - new_surface_air_pressure = mock.Mock(units=Unit("mbar"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update( - self.surface_air_pressure, new_surface_air_pressure - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py b/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py deleted file mode 100644 index 3375f63bf2..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py +++ /dev/null @@ -1,166 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for `iris.aux_factory.AuxCoordFactory`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -import iris -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.aux_factory import AuxCoordFactory -from iris.coords import AuxCoord - - -class Test__nd_points(tests.IrisTest): - def test_numpy_scalar_coord__zero_ndim(self): - points = np.array(1) - coord = AuxCoord(points) - result = AuxCoordFactory._nd_points(coord, (), 0) - expected = np.array([1]) - self.assertArrayEqual(result, expected) - - def test_numpy_scalar_coord(self): - value = 1 - points = np.array(value) - coord = AuxCoord(points) - result = AuxCoordFactory._nd_points(coord, (), 2) - expected = np.array(value).reshape(1, 1) - self.assertArrayEqual(result, expected) - - def test_numpy_simple(self): - points = np.arange(12).reshape(4, 3) - coord = AuxCoord(points) - result = AuxCoordFactory._nd_points(coord, (0, 1), 2) - expected = points - self.assertArrayEqual(result, expected) - - def test_numpy_complex(self): - points = np.arange(12).reshape(4, 3) - coord = AuxCoord(points) - result = AuxCoordFactory._nd_points(coord, (3, 2), 5) - expected = points.T[np.newaxis, np.newaxis, ..., np.newaxis] - self.assertArrayEqual(result, expected) - - def test_lazy_simple(self): - raw_points = np.arange(12).reshape(4, 3) - points = as_lazy_data(raw_points, raw_points.shape) - coord = AuxCoord(points) - self.assertTrue(is_lazy_data(coord.core_points())) - result = AuxCoordFactory._nd_points(coord, (0, 1), 2) - # Check we haven't triggered the loading of the coordinate values. 
- self.assertTrue(is_lazy_data(coord.core_points())) - self.assertTrue(is_lazy_data(result)) - expected = raw_points - self.assertArrayEqual(result, expected) - - def test_lazy_complex(self): - raw_points = np.arange(12).reshape(4, 3) - points = as_lazy_data(raw_points, raw_points.shape) - coord = AuxCoord(points) - self.assertTrue(is_lazy_data(coord.core_points())) - result = AuxCoordFactory._nd_points(coord, (3, 2), 5) - # Check we haven't triggered the loading of the coordinate values. - self.assertTrue(is_lazy_data(coord.core_points())) - self.assertTrue(is_lazy_data(result)) - expected = raw_points.T[np.newaxis, np.newaxis, ..., np.newaxis] - self.assertArrayEqual(result, expected) - - -class Test__nd_bounds(tests.IrisTest): - def test_numpy_scalar_coord__zero_ndim(self): - points = np.array(0.5) - bounds = np.arange(2) - coord = AuxCoord(points, bounds=bounds) - result = AuxCoordFactory._nd_bounds(coord, (), 0) - expected = bounds - self.assertArrayEqual(result, expected) - - def test_numpy_scalar_coord(self): - points = np.array(0.5) - bounds = np.arange(2).reshape(1, 2) - coord = AuxCoord(points, bounds=bounds) - result = AuxCoordFactory._nd_bounds(coord, (), 2) - expected = bounds[np.newaxis] - self.assertArrayEqual(result, expected) - - def test_numpy_simple(self): - points = np.arange(12).reshape(4, 3) - bounds = np.arange(24).reshape(4, 3, 2) - coord = AuxCoord(points, bounds=bounds) - result = AuxCoordFactory._nd_bounds(coord, (0, 1), 2) - expected = bounds - self.assertArrayEqual(result, expected) - - def test_numpy_complex(self): - points = np.arange(12).reshape(4, 3) - bounds = np.arange(24).reshape(4, 3, 2) - coord = AuxCoord(points, bounds=bounds) - result = AuxCoordFactory._nd_bounds(coord, (3, 2), 5) - expected = bounds.transpose((1, 0, 2)).reshape(1, 1, 3, 4, 1, 2) - self.assertArrayEqual(result, expected) - - def test_lazy_simple(self): - raw_points = np.arange(12).reshape(4, 3) - points = as_lazy_data(raw_points, raw_points.shape) - 
raw_bounds = np.arange(24).reshape(4, 3, 2) - bounds = as_lazy_data(raw_bounds, raw_bounds.shape) - coord = AuxCoord(points, bounds=bounds) - self.assertTrue(is_lazy_data(coord.core_bounds())) - result = AuxCoordFactory._nd_bounds(coord, (0, 1), 2) - # Check we haven't triggered the loading of the coordinate values. - self.assertTrue(is_lazy_data(coord.core_bounds())) - self.assertTrue(is_lazy_data(result)) - expected = raw_bounds - self.assertArrayEqual(result, expected) - - def test_lazy_complex(self): - raw_points = np.arange(12).reshape(4, 3) - points = as_lazy_data(raw_points, raw_points.shape) - raw_bounds = np.arange(24).reshape(4, 3, 2) - bounds = as_lazy_data(raw_bounds, raw_bounds.shape) - coord = AuxCoord(points, bounds=bounds) - self.assertTrue(is_lazy_data(coord.core_bounds())) - result = AuxCoordFactory._nd_bounds(coord, (3, 2), 5) - # Check we haven't triggered the loading of the coordinate values. - self.assertTrue(is_lazy_data(coord.core_bounds())) - self.assertTrue(is_lazy_data(result)) - expected = raw_bounds.transpose((1, 0, 2)).reshape(1, 1, 3, 4, 1, 2) - self.assertArrayEqual(result, expected) - - -@tests.skip_data -class Test_lazy_aux_coords(tests.IrisTest): - def setUp(self): - path = tests.get_data_path( - ["NetCDF", "testing", "small_theta_colpex.nc"] - ) - self.cube = iris.load_cube(path, "air_potential_temperature") - - def _check_lazy(self): - coords = self.cube.aux_coords + self.cube.derived_coords - for coord in coords: - self.assertTrue(coord.has_lazy_points()) - if coord.has_bounds(): - self.assertTrue(coord.has_lazy_bounds()) - - def test_lazy_coord_loading(self): - # Test that points and bounds arrays stay lazy upon cube loading. - self._check_lazy() - - def test_lazy_coord_printing(self): - # Test that points and bounds arrays stay lazy after cube printing. 
- _ = str(self.cube) - self._check_lazy() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py b/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py deleted file mode 100644 index 48fead3aa5..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_HybridPressureFactory.py +++ /dev/null @@ -1,309 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -`iris.aux_factory.HybridPressureFactory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import cf_units -import numpy as np - -import iris -from iris.aux_factory import HybridPressureFactory - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock( - units=cf_units.Unit("Pa"), nbounds=0 - ) - - def test_insufficient_coords(self): - with self.assertRaises(ValueError): - HybridPressureFactory() - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=None, sigma=self.sigma, surface_air_pressure=None - ) - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=None, - sigma=None, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_incompatible_delta_units(self): - self.delta.units = cf_units.Unit("m") - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_incompatible_sigma_units(self): - self.sigma.units = cf_units.Unit("Pa") - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=self.delta, - 
sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_incompatible_surface_air_pressure_units(self): - self.surface_air_pressure.units = cf_units.Unit("unknown") - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_different_pressure_units(self): - self.delta.units = cf_units.Unit("hPa") - self.surface_air_pressure.units = cf_units.Unit("Pa") - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_too_many_delta_bounds(self): - self.delta.nbounds = 4 - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_too_many_sigma_bounds(self): - self.sigma.nbounds = 4 - with self.assertRaises(ValueError): - HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_factory_metadata(self): - factory = HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - self.assertEqual(factory.standard_name, "air_pressure") - self.assertIsNone(factory.long_name) - self.assertIsNone(factory.var_name) - self.assertEqual(factory.units, self.delta.units) - self.assertEqual(factory.units, self.surface_air_pressure.units) - self.assertIsNone(factory.coord_system) - self.assertEqual(factory.attributes, {}) - - def test_promote_sigma_units_unknown_to_dimensionless(self): - sigma = mock.Mock(units=cf_units.Unit("unknown"), nbounds=0) - factory = HybridPressureFactory( - delta=self.delta, - sigma=sigma, - surface_air_pressure=self.surface_air_pressure, - ) - self.assertEqual("1", factory.dependencies["sigma"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.delta = 
mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock( - units=cf_units.Unit("Pa"), nbounds=0 - ) - - def test_value(self): - kwargs = dict( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - factory = HybridPressureFactory(**kwargs) - self.assertEqual(factory.dependencies, kwargs) - - -class Test_make_coord(tests.IrisTest): - @staticmethod - def coords_dims_func(coord): - mapping = dict( - level_pressure=(0,), sigma=(0,), surface_air_pressure=(1, 2) - ) - return mapping[coord.name()] - - def setUp(self): - self.delta = iris.coords.DimCoord( - [0.0, 1.0, 2.0], long_name="level_pressure", units="Pa" - ) - self.sigma = iris.coords.DimCoord( - [1.0, 0.9, 0.8], long_name="sigma", units="1" - ) - self.surface_air_pressure = iris.coords.AuxCoord( - np.arange(4).reshape(2, 2), "surface_air_pressure", units="Pa" - ) - - def test_points_only(self): - # Determine expected coord by manually broadcasting coord points - # knowing the dimension mapping. - delta_pts = self.delta.points[..., np.newaxis, np.newaxis] - sigma_pts = self.sigma.points[..., np.newaxis, np.newaxis] - surf_pts = self.surface_air_pressure.points[np.newaxis, ...] - expected_points = delta_pts + sigma_pts * surf_pts - expected_coord = iris.coords.AuxCoord( - expected_points, standard_name="air_pressure", units="Pa" - ) - factory = HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) - - def test_none_delta(self): - delta_pts = 0 - sigma_pts = self.sigma.points[..., np.newaxis, np.newaxis] - surf_pts = self.surface_air_pressure.points[np.newaxis, ...] 
- expected_points = delta_pts + sigma_pts * surf_pts - expected_coord = iris.coords.AuxCoord( - expected_points, standard_name="air_pressure", units="Pa" - ) - factory = HybridPressureFactory( - sigma=self.sigma, surface_air_pressure=self.surface_air_pressure - ) - derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) - - def test_none_sigma(self): - delta_pts = self.delta.points[..., np.newaxis, np.newaxis] - sigma_pts = 0 - surf_pts = self.surface_air_pressure.points[np.newaxis, ...] - expected_points = delta_pts + sigma_pts * surf_pts - expected_coord = iris.coords.AuxCoord( - expected_points, standard_name="air_pressure", units="Pa" - ) - factory = HybridPressureFactory( - delta=self.delta, surface_air_pressure=self.surface_air_pressure - ) - derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) - - def test_none_surface_air_pressure(self): - # Note absence of broadcasting as multidimensional coord - # is not present. - expected_points = self.delta.points - expected_coord = iris.coords.AuxCoord( - expected_points, standard_name="air_pressure", units="Pa" - ) - factory = HybridPressureFactory(delta=self.delta, sigma=self.sigma) - derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) - - def test_with_bounds(self): - self.delta.guess_bounds(0) - self.sigma.guess_bounds(0.5) - # Determine expected coord by manually broadcasting coord points - # and bounds based on the dimension mapping. - delta_pts = self.delta.points[..., np.newaxis, np.newaxis] - sigma_pts = self.sigma.points[..., np.newaxis, np.newaxis] - surf_pts = self.surface_air_pressure.points[np.newaxis, ...] 
- expected_points = delta_pts + sigma_pts * surf_pts - delta_vals = self.delta.bounds.reshape(3, 1, 1, 2) - sigma_vals = self.sigma.bounds.reshape(3, 1, 1, 2) - surf_vals = self.surface_air_pressure.points.reshape(1, 2, 2, 1) - expected_bounds = delta_vals + sigma_vals * surf_vals - expected_coord = iris.coords.AuxCoord( - expected_points, - standard_name="air_pressure", - units="Pa", - bounds=expected_bounds, - ) - factory = HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - derived_coord = factory.make_coord(self.coords_dims_func) - self.assertEqual(expected_coord, derived_coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.delta = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.sigma = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - self.surface_air_pressure = mock.Mock( - units=cf_units.Unit("Pa"), nbounds=0 - ) - - self.factory = HybridPressureFactory( - delta=self.delta, - sigma=self.sigma, - surface_air_pressure=self.surface_air_pressure, - ) - - def test_good_delta(self): - new_delta_coord = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.factory.update(self.delta, new_delta_coord) - self.assertIs(self.factory.delta, new_delta_coord) - - def test_bad_delta(self): - new_delta_coord = mock.Mock(units=cf_units.Unit("1"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.delta, new_delta_coord) - - def test_alternative_bad_delta(self): - new_delta_coord = mock.Mock(units=cf_units.Unit("Pa"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.delta, new_delta_coord) - - def test_good_surface_air_pressure(self): - new_surface_p_coord = mock.Mock(units=cf_units.Unit("Pa"), nbounds=0) - self.factory.update(self.surface_air_pressure, new_surface_p_coord) - self.assertIs(self.factory.surface_air_pressure, new_surface_p_coord) - - def test_bad_surface_air_pressure(self): - new_surface_p_coord = 
mock.Mock(units=cf_units.Unit("km"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.surface_air_pressure, new_surface_p_coord) - - def test_non_dependency(self): - old_coord = mock.Mock() - new_coord = mock.Mock() - orig_dependencies = self.factory.dependencies - self.factory.update(old_coord, new_coord) - self.assertEqual(orig_dependencies, self.factory.dependencies) - - def test_none_delta(self): - self.factory.update(self.delta, None) - self.assertIsNone(self.factory.delta) - - def test_none_sigma(self): - self.factory.update(self.sigma, None) - self.assertIsNone(self.factory.sigma) - - def test_insufficient_coords(self): - self.factory.update(self.delta, None) - with self.assertRaises(ValueError): - self.factory.update(self.surface_air_pressure, None) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py deleted file mode 100644 index f588c9f001..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_OceanSFactory.py +++ /dev/null @@ -1,325 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -`iris.aux_factory.OceanSFactory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris.aux_factory import OceanSFactory -from iris.coords import AuxCoord, DimCoord - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - eta=self.eta, - depth=self.depth, - a=self.a, - b=self.b, - depth_c=self.depth_c, - ) - - def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): - OceanSFactory() - with self.assertRaises(ValueError): - OceanSFactory( - s=None, - eta=self.eta, - depth=self.depth, - a=self.a, - b=self.b, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSFactory( - s=self.s, - eta=None, - depth=self.depth, - a=self.a, - b=self.b, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSFactory( - s=self.s, - eta=self.eta, - depth=None, - a=self.a, - b=self.b, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSFactory( - s=self.s, - eta=self.eta, - depth=self.depth, - a=None, - b=self.b, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSFactory( - s=self.s, - eta=self.eta, - depth=self.depth, - a=self.a, - b=None, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSFactory( - s=self.s, - eta=self.eta, - depth=self.depth, - a=self.a, - b=self.b, - depth_c=None, - ) - - def test_s_too_many_bounds(self): - self.s.nbounds = 4 - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - def test_a_non_scalar(self): - self.a.shape = (2,) - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - 
def test_b_non_scalar(self): - self.b.shape = (2,) - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - def test_depth_c_non_scalar(self): - self.depth_c.shape = (2,) - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - def test_s_incompatible_units(self): - self.s.units = Unit("km") - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - def test_eta_incompatible_units(self): - self.eta.units = Unit("km") - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - def test_depth_c_incompatible_units(self): - self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - def test_depth_incompatible_units(self): - self.depth.units = Unit("km") - with self.assertRaises(ValueError): - OceanSFactory(**self.kwargs) - - def test_promote_s_units_unknown_to_dimensionless(self): - s = mock.Mock(units=Unit("unknown"), nbounds=0) - self.kwargs["s"] = s - factory = OceanSFactory(**self.kwargs) - self.assertEqual("1", factory.dependencies["s"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - eta=self.eta, - depth=self.depth, - a=self.a, - b=self.b, - depth_c=self.depth_c, - ) - - def test_values(self): - factory = OceanSFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) - - -class Test_make_coord(tests.IrisTest): - @staticmethod - def coord_dims(coord): - mapping = dict( - s=(0,), eta=(1, 2), depth=(1, 2), a=(), b=(), depth_c=() - ) - return mapping[coord.name()] - - @staticmethod - def derive(s, eta, depth, a, b, depth_c, coord=True): - c = (1 - b) 
* np.sinh(a * s) / np.sinh(a) + b * ( - np.tanh(a * (s + 0.5)) / (2 * np.tanh(0.5 * a)) - 0.5 - ) - result = eta * (1 + s) + depth_c * s + (depth - depth_c) * c - if coord: - name = "sea_surface_height_above_reference_ellipsoid" - result = AuxCoord( - result, - standard_name=name, - units="m", - attributes=dict(positive="up"), - ) - return result - - def setUp(self): - self.s = DimCoord( - np.arange(-0.975, 0, 0.05, dtype=float), units="1", long_name="s" - ) - self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float64).reshape(2, 2), - long_name="eta", - units="m", - ) - self.depth = AuxCoord( - np.arange(4, dtype=np.float64).reshape(2, 2) * 1e3, - long_name="depth", - units="m", - ) - self.a = AuxCoord([4], units="1", long_name="a") - self.b = AuxCoord([0.9], units="1", long_name="b") - self.depth_c = AuxCoord([4], long_name="depth_c", units="m") - self.kwargs = dict( - s=self.s, - eta=self.eta, - depth=self.depth, - a=self.a, - b=self.b, - depth_c=self.depth_c, - ) - - def test_derived_points(self): - # Broadcast expected points given the known dimensional mapping. - s = self.s.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - a = self.a.points - b = self.b.points - depth_c = self.depth_c.points - # Calculate the expected result. - expected_coord = self.derive(s, eta, depth, a, b, depth_c) - # Calculate the actual result. 
- factory = OceanSFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - eta=self.eta, - depth=self.depth, - a=self.a, - b=self.b, - depth_c=self.depth_c, - ) - self.factory = OceanSFactory(**self.kwargs) - - def test_s(self): - new_s = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.s, new_s) - self.assertIs(self.factory.s, new_s) - - def test_s_too_many_bounds(self): - new_s = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.s, new_s) - - def test_s_incompatible_units(self): - new_s = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.s, new_s) - - def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) - - def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.eta, new_eta) - - def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) - - def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.depth, new_depth) - - def test_a(self): - new_a = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.factory.update(self.a, new_a) - 
self.assertIs(self.factory.a, new_a) - - def test_a_non_scalar(self): - new_a = mock.Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): - self.factory.update(self.a, new_a) - - def test_b(self): - new_b = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.factory.update(self.b, new_b) - self.assertIs(self.factory.b, new_b) - - def test_b_non_scalar(self): - new_b = mock.Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): - self.factory.update(self.b, new_b) - - def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, new_depth_c) - - def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py deleted file mode 100644 index 7a2f4c631c..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg1Factory.py +++ /dev/null @@ -1,298 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -`iris.aux_factory.OceanSg1Factory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris.aux_factory import OceanSg1Factory -from iris.coords import AuxCoord, DimCoord - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - - def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): - OceanSg1Factory() - with self.assertRaises(ValueError): - OceanSg1Factory( - s=None, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg1Factory( - s=self.s, - c=None, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg1Factory( - s=self.s, - c=self.c, - eta=None, - depth=self.depth, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg1Factory( - s=self.s, - c=self.c, - eta=self.eta, - depth=None, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg1Factory( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=None, - ) - - def test_s_too_many_bounds(self): - self.s.nbounds = 4 - with self.assertRaises(ValueError): - OceanSg1Factory(**self.kwargs) - - def test_c_too_many_bounds(self): - self.c.nbounds = 4 - with self.assertRaises(ValueError): - OceanSg1Factory(**self.kwargs) - - def test_depth_c_non_scalar(self): - self.depth_c.shape = (2,) - with self.assertRaises(ValueError): - OceanSg1Factory(**self.kwargs) - - def test_s_incompatible_units(self): - self.s.units = Unit("km") - with self.assertRaises(ValueError): - 
OceanSg1Factory(**self.kwargs) - - def test_c_incompatible_units(self): - self.c.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg1Factory(**self.kwargs) - - def test_eta_incompatible_units(self): - self.eta.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg1Factory(**self.kwargs) - - def test_depth_c_incompatible_units(self): - self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg1Factory(**self.kwargs) - - def test_depth_incompatible_units(self): - self.depth.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg1Factory(**self.kwargs) - - def test_promote_c_and_s_units_unknown_to_dimensionless(self): - c = mock.Mock(units=Unit("unknown"), nbounds=0) - s = mock.Mock(units=Unit("unknown"), nbounds=0) - self.kwargs["c"] = c - self.kwargs["s"] = s - factory = OceanSg1Factory(**self.kwargs) - self.assertEqual("1", factory.dependencies["c"].units) - self.assertEqual("1", factory.dependencies["s"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - - def test_values(self): - factory = OceanSg1Factory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) - - -class Test_make_coord(tests.IrisTest): - @staticmethod - def coord_dims(coord): - mapping = dict(s=(0,), c=(0,), eta=(1, 2), depth=(1, 2), depth_c=()) - return mapping[coord.name()] - - @staticmethod - def derive(s, c, eta, depth, depth_c, coord=True): - S = depth_c * s + (depth - depth_c) * c - result = S + eta * (1 + S / depth) - if coord: - name = "sea_surface_height_above_reference_ellipsoid" - result = AuxCoord( - 
result, - standard_name=name, - units="m", - attributes=dict(positive="up"), - ) - return result - - def setUp(self): - self.s = DimCoord( - np.linspace(-0.985, -0.014, 36), units="1", long_name="s" - ) - self.c = DimCoord( - np.linspace(-0.959, -0.001, 36), units="1", long_name="c" - ) - self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float64).reshape(2, 2), - long_name="eta", - units="m", - ) - self.depth = AuxCoord( - np.array([[5, 200], [1000, 4000]], dtype=np.float64), - long_name="depth", - units="m", - ) - self.depth_c = AuxCoord([5], long_name="depth_c", units="m") - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - - def test_derived_points(self): - # Broadcast expected points given the known dimensional mapping. - s = self.s.points[..., np.newaxis, np.newaxis] - c = self.c.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - depth_c = self.depth_c.points - # Calculate the expected result. - expected_coord = self.derive(s, c, eta, depth, depth_c) - # Calculate the actual result. 
- factory = OceanSg1Factory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - self.factory = OceanSg1Factory(**self.kwargs) - - def test_s(self): - new_s = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.s, new_s) - self.assertIs(self.factory.s, new_s) - - def test_c(self): - new_c = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.c, new_c) - self.assertIs(self.factory.c, new_c) - - def test_s_too_many_bounds(self): - new_s = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.s, new_s) - - def test_c_too_many_bounds(self): - new_c = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.c, new_c) - - def test_s_incompatible_units(self): - new_s = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.s, new_s) - - def test_c_incompatible_units(self): - new_c = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.c, new_c) - - def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) - - def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.eta, new_eta) - - def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) - 
self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) - - def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.depth, new_depth) - - def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, new_depth_c) - - def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py b/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py deleted file mode 100644 index 4d1f268a1e..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_OceanSg2Factory.py +++ /dev/null @@ -1,298 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -`iris.aux_factory.OceanSg2Factory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris.aux_factory import OceanSg2Factory -from iris.coords import AuxCoord, DimCoord - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - - def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): - OceanSg2Factory() - with self.assertRaises(ValueError): - OceanSg2Factory( - s=None, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg2Factory( - s=self.s, - c=None, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg2Factory( - s=self.s, - c=self.c, - eta=None, - depth=self.depth, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg2Factory( - s=self.s, - c=self.c, - eta=self.eta, - depth=None, - depth_c=self.depth_c, - ) - with self.assertRaises(ValueError): - OceanSg2Factory( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=None, - ) - - def test_s_too_many_bounds(self): - self.s.nbounds = 4 - with self.assertRaises(ValueError): - OceanSg2Factory(**self.kwargs) - - def test_c_too_many_bounds(self): - self.c.nbounds = 4 - with self.assertRaises(ValueError): - OceanSg2Factory(**self.kwargs) - - def test_depth_c_non_scalar(self): - self.depth_c.shape = (2,) - with self.assertRaises(ValueError): - OceanSg2Factory(**self.kwargs) - - def test_s_incompatible_units(self): - self.s.units = Unit("km") - with self.assertRaises(ValueError): - 
OceanSg2Factory(**self.kwargs) - - def test_c_incompatible_units(self): - self.c.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg2Factory(**self.kwargs) - - def test_eta_incompatible_units(self): - self.eta.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg2Factory(**self.kwargs) - - def test_depth_c_incompatible_units(self): - self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg2Factory(**self.kwargs) - - def test_depth_incompatible_units(self): - self.depth.units = Unit("km") - with self.assertRaises(ValueError): - OceanSg2Factory(**self.kwargs) - - def test_promote_c_and_s_units_unknown_to_dimensionless(self): - c = mock.Mock(units=Unit("unknown"), nbounds=0) - s = mock.Mock(units=Unit("unknown"), nbounds=0) - self.kwargs["c"] = c - self.kwargs["s"] = s - factory = OceanSg2Factory(**self.kwargs) - self.assertEqual("1", factory.dependencies["c"].units) - self.assertEqual("1", factory.dependencies["s"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - - def test_values(self): - factory = OceanSg2Factory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) - - -class Test_make_coord(tests.IrisTest): - @staticmethod - def coord_dims(coord): - mapping = dict(s=(0,), c=(0,), eta=(1, 2), depth=(1, 2), depth_c=()) - return mapping[coord.name()] - - @staticmethod - def derive(s, c, eta, depth, depth_c, coord=True): - S = (depth_c * s + depth * c) / (depth_c + depth) - result = eta + (eta + depth) * S - if coord: - name = "sea_surface_height_above_reference_ellipsoid" - result = AuxCoord( 
- result, - standard_name=name, - units="m", - attributes=dict(positive="up"), - ) - return result - - def setUp(self): - self.s = DimCoord( - np.linspace(-0.985, -0.014, 36), units="1", long_name="s" - ) - self.c = DimCoord( - np.linspace(-0.959, -0.001, 36), units="1", long_name="c" - ) - self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float64).reshape(2, 2), - long_name="eta", - units="m", - ) - self.depth = AuxCoord( - np.array([[5, 200], [1000, 4000]], dtype=np.float64), - long_name="depth", - units="m", - ) - self.depth_c = AuxCoord([1], long_name="depth_c", units="m") - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - - def test_derived_points(self): - # Broadcast expected points given the known dimensional mapping. - s = self.s.points[..., np.newaxis, np.newaxis] - c = self.c.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - depth_c = self.depth_c.points - # Calculate the expected result. - expected_coord = self.derive(s, c, eta, depth, depth_c) - # Calculate the actual result. 
- factory = OceanSg2Factory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.s = mock.Mock(units=Unit("1"), nbounds=0) - self.c = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.kwargs = dict( - s=self.s, - c=self.c, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - ) - self.factory = OceanSg2Factory(**self.kwargs) - - def test_s(self): - new_s = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.s, new_s) - self.assertIs(self.factory.s, new_s) - - def test_c(self): - new_c = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.c, new_c) - self.assertIs(self.factory.c, new_c) - - def test_s_too_many_bounds(self): - new_s = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.s, new_s) - - def test_c_too_many_bounds(self): - new_c = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.c, new_c) - - def test_s_incompatible_units(self): - new_s = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.s, new_s) - - def test_c_incompatible_units(self): - new_c = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.c, new_c) - - def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) - - def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.eta, new_eta) - - def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) - 
self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) - - def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.depth, new_depth) - - def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, new_depth_c) - - def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py deleted file mode 100644 index 30d9647952..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaFactory.py +++ /dev/null @@ -1,174 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -`iris.aux_factory.OceanSigmaFactory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris.aux_factory import OceanSigmaFactory -from iris.coords import AuxCoord, DimCoord - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) - - def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): - OceanSigmaFactory() - with self.assertRaises(ValueError): - OceanSigmaFactory(sigma=None, eta=self.eta, depth=self.depth) - with self.assertRaises(ValueError): - OceanSigmaFactory(sigma=self.sigma, eta=None, depth=self.depth) - with self.assertRaises(ValueError): - OceanSigmaFactory(sigma=self.sigma, eta=self.eta, depth=None) - - def test_sigma_too_many_bounds(self): - self.sigma.nbounds = 4 - with self.assertRaises(ValueError): - OceanSigmaFactory(**self.kwargs) - - def test_sigma_incompatible_units(self): - self.sigma.units = Unit("km") - with self.assertRaises(ValueError): - OceanSigmaFactory(**self.kwargs) - - def test_eta_incompatible_units(self): - self.eta.units = Unit("km") - with self.assertRaises(ValueError): - OceanSigmaFactory(**self.kwargs) - - def test_depth_incompatible_units(self): - self.depth.units = Unit("km") - with self.assertRaises(ValueError): - OceanSigmaFactory(**self.kwargs) - - def test_promote_sigma_units_unknown_to_dimensionless(self): - sigma = mock.Mock(units=Unit("unknown"), nbounds=0) - self.kwargs["sigma"] = sigma - factory = OceanSigmaFactory(**self.kwargs) - self.assertEqual("1", factory.dependencies["sigma"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.kwargs = 
dict(sigma=self.sigma, eta=self.eta, depth=self.depth) - - def test_values(self): - factory = OceanSigmaFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) - - -class Test_make_coord(tests.IrisTest): - @staticmethod - def coord_dims(coord): - mapping = dict(sigma=(0,), eta=(1, 2), depth=(1, 2)) - return mapping[coord.name()] - - @staticmethod - def derive(sigma, eta, depth, coord=True): - result = eta + sigma * (depth + eta) - if coord: - name = "sea_surface_height_above_reference_ellipsoid" - result = AuxCoord( - result, - standard_name=name, - units="m", - attributes=dict(positive="up"), - ) - return result - - def setUp(self): - self.sigma = DimCoord( - np.linspace(-0.05, -1, 5), long_name="sigma", units="1" - ) - self.eta = AuxCoord( - np.arange(-1, 3, dtype=np.float64).reshape(2, 2), - long_name="eta", - units="m", - ) - self.depth = AuxCoord( - np.arange(4, dtype=np.float64).reshape(2, 2) * 1e3, - long_name="depth", - units="m", - ) - self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) - - def test_derived_points(self): - # Broadcast expected points given the known dimensional mapping. - sigma = self.sigma.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - # Calculate the expected result. - expected_coord = self.derive(sigma, eta, depth) - # Calculate the actual result. 
- factory = OceanSigmaFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.kwargs = dict(sigma=self.sigma, eta=self.eta, depth=self.depth) - self.factory = OceanSigmaFactory(**self.kwargs) - - def test_sigma(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.sigma, new_sigma) - self.assertIs(self.factory.sigma, new_sigma) - - def test_sigma_too_many_bounds(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.sigma, new_sigma) - - def test_sigma_incompatible_units(self): - new_sigma = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.sigma, new_sigma) - - def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) - - def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.eta, new_eta) - - def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) - - def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.depth, new_depth) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py b/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py deleted file mode 100644 index 736a883846..0000000000 --- a/lib/iris/tests/unit/aux_factory/test_OceanSigmaZFactory.py +++ /dev/null @@ 
-1,450 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -`iris.aux_factory.OceanSigmaZFactory` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris.aux_factory import OceanSigmaZFactory -from iris.coords import AuxCoord, DimCoord - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.zlev = mock.Mock(units=Unit("m"), nbounds=0) - self.kwargs = dict( - sigma=self.sigma, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - nsigma=self.nsigma, - zlev=self.zlev, - ) - - def test_insufficient_coordinates(self): - with self.assertRaises(ValueError): - OceanSigmaZFactory() - with self.assertRaises(ValueError): - OceanSigmaZFactory( - sigma=self.sigma, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - nsigma=self.nsigma, - zlev=None, - ) - with self.assertRaises(ValueError): - OceanSigmaZFactory( - sigma=None, - eta=None, - depth=self.depth, - depth_c=self.depth_c, - nsigma=self.nsigma, - zlev=self.zlev, - ) - with self.assertRaises(ValueError): - OceanSigmaZFactory( - sigma=self.sigma, - eta=None, - depth=None, - depth_c=self.depth_c, - nsigma=self.nsigma, - zlev=self.zlev, - ) - with self.assertRaises(ValueError): - OceanSigmaZFactory( - sigma=self.sigma, - eta=None, - depth=self.depth, - depth_c=None, - nsigma=self.nsigma, - zlev=self.zlev, - ) - with self.assertRaises(ValueError): 
- OceanSigmaZFactory( - sigma=self.sigma, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - nsigma=None, - zlev=self.zlev, - ) - - def test_sigma_too_many_bounds(self): - self.sigma.nbounds = 4 - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_zlev_too_many_bounds(self): - self.zlev.nbounds = 4 - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_sigma_zlev_same_boundedness(self): - self.zlev.nbounds = 2 - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_depth_c_non_scalar(self): - self.depth_c.shape = (2,) - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_nsigma_non_scalar(self): - self.nsigma.shape = (4,) - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_zlev_incompatible_units(self): - self.zlev.units = Unit("Pa") - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_sigma_incompatible_units(self): - self.sigma.units = Unit("km") - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_eta_incompatible_units(self): - self.eta.units = Unit("km") - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_depth_c_incompatible_units(self): - self.depth_c.units = Unit("km") - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_depth_incompatible_units(self): - self.depth.units = Unit("km") - with self.assertRaises(ValueError): - OceanSigmaZFactory(**self.kwargs) - - def test_promote_sigma_units_unknown_to_dimensionless(self): - sigma = mock.Mock(units=Unit("unknown"), nbounds=0) - self.kwargs["sigma"] = sigma - factory = OceanSigmaZFactory(**self.kwargs) - self.assertEqual("1", factory.dependencies["sigma"].units) - - -class Test_dependencies(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = 
mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.zlev = mock.Mock(units=Unit("m"), nbounds=0) - self.kwargs = dict( - sigma=self.sigma, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - nsigma=self.nsigma, - zlev=self.zlev, - ) - - def test_values(self): - factory = OceanSigmaZFactory(**self.kwargs) - self.assertEqual(factory.dependencies, self.kwargs) - - -class Test_make_coord(tests.IrisTest): - @staticmethod - def coord_dims(coord): - mapping = dict( - sigma=(0,), - eta=(1, 2), - depth=(1, 2), - depth_c=(), - nsigma=(), - zlev=(0,), - ) - return mapping[coord.name()] - - @staticmethod - def derive(sigma, eta, depth, depth_c, nsigma, zlev, coord=True): - nsigma_slice = slice(0, int(nsigma)) - temp = eta + sigma * (np.minimum(depth_c, depth) + eta) - shape = temp.shape - result = np.ones(shape, dtype=temp.dtype) * zlev - result[nsigma_slice] = temp[nsigma_slice] - if coord: - name = "sea_surface_height_above_reference_ellipsoid" - result = AuxCoord( - result, - standard_name=name, - units="m", - attributes=dict(positive="up"), - ) - return result - - def setUp(self): - self.sigma = DimCoord( - np.arange(5, dtype=np.float64) * 10, long_name="sigma", units="1" - ) - self.eta = AuxCoord( - np.arange(4, dtype=np.float64).reshape(2, 2), - long_name="eta", - units="m", - ) - self.depth = AuxCoord( - np.arange(4, dtype=np.float64).reshape(2, 2) * 10, - long_name="depth", - units="m", - ) - self.depth_c = AuxCoord([15], long_name="depth_c", units="m") - self.nsigma = AuxCoord([3], long_name="nsigma") - self.zlev = DimCoord( - np.arange(5, dtype=np.float64) * 10, long_name="zlev", units="m" - ) - self.kwargs = dict( - sigma=self.sigma, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - nsigma=self.nsigma, - zlev=self.zlev, - ) - - def test_derived_points(self): - # 
Broadcast expected points given the known dimensional mapping. - sigma = self.sigma.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - depth_c = self.depth_c.points - nsigma = self.nsigma.points - zlev = self.zlev.points[..., np.newaxis, np.newaxis] - # Calculate the expected result. - expected_coord = self.derive(sigma, eta, depth, depth_c, nsigma, zlev) - # Calculate the actual result. - factory = OceanSigmaZFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - def test_derived_points_with_bounds(self): - self.sigma.guess_bounds() - self.zlev.guess_bounds() - # Broadcast expected points given the known dimensional mapping. - sigma = self.sigma.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - depth_c = self.depth_c.points - nsigma = self.nsigma.points - zlev = self.zlev.points[..., np.newaxis, np.newaxis] - # Calculate the expected coordinate with points. - expected_coord = self.derive(sigma, eta, depth, depth_c, nsigma, zlev) - # Broadcast expected bounds given the known dimensional mapping. - sigma = self.sigma.bounds.reshape(sigma.shape + (2,)) - eta = self.eta.points.reshape(eta.shape + (1,)) - depth = self.depth.points.reshape(depth.shape + (1,)) - depth_c = self.depth_c.points.reshape(depth_c.shape + (1,)) - nsigma = self.nsigma.points.reshape(nsigma.shape + (1,)) - zlev = self.zlev.bounds.reshape(zlev.shape + (2,)) - # Calculate the expected bounds. - bounds = self.derive( - sigma, eta, depth, depth_c, nsigma, zlev, coord=False - ) - expected_coord.bounds = bounds - # Calculate the actual result. - factory = OceanSigmaZFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - def test_no_eta(self): - # Broadcast expected points given the known dimensional mapping. 
- sigma = self.sigma.points[..., np.newaxis, np.newaxis] - eta = 0 - depth = self.depth.points[np.newaxis, ...] - depth_c = self.depth_c.points - nsigma = self.nsigma.points - zlev = self.zlev.points[..., np.newaxis, np.newaxis] - # Calculate the expected result. - expected_coord = self.derive(sigma, eta, depth, depth_c, nsigma, zlev) - # Calculate the actual result. - self.kwargs["eta"] = None - factory = OceanSigmaZFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - def test_no_sigma(self): - # Broadcast expected points given the known dimensional mapping. - sigma = 0 - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - depth_c = self.depth_c.points - nsigma = self.nsigma.points - zlev = self.zlev.points[..., np.newaxis, np.newaxis] - # Calculate the expected result. - expected_coord = self.derive(sigma, eta, depth, depth_c, nsigma, zlev) - # Calculate the actual result. - self.kwargs["sigma"] = None - factory = OceanSigmaZFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - def test_no_depth_c(self): - # Broadcast expected points given the known dimensional mapping. - sigma = self.sigma.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] - depth = self.depth.points[np.newaxis, ...] - depth_c = 0 - nsigma = self.nsigma.points - zlev = self.zlev.points[..., np.newaxis, np.newaxis] - # Calculate the expected result. - expected_coord = self.derive(sigma, eta, depth, depth_c, nsigma, zlev) - # Calculate the actual result. - self.kwargs["depth_c"] = None - factory = OceanSigmaZFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - def test_no_depth(self): - # Broadcast expected points given the known dimensional mapping. - sigma = self.sigma.points[..., np.newaxis, np.newaxis] - eta = self.eta.points[np.newaxis, ...] 
- depth = 0 - depth_c = self.depth_c.points - nsigma = self.nsigma.points - zlev = self.zlev.points[..., np.newaxis, np.newaxis] - # Calculate the expected result. - expected_coord = self.derive(sigma, eta, depth, depth_c, nsigma, zlev) - # Calculate the actual result. - self.kwargs["depth"] = None - factory = OceanSigmaZFactory(**self.kwargs) - coord = factory.make_coord(self.coord_dims) - self.assertEqual(expected_coord, coord) - - -class Test_update(tests.IrisTest): - def setUp(self): - self.sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.eta = mock.Mock(units=Unit("m"), nbounds=0) - self.depth = mock.Mock(units=Unit("m"), nbounds=0) - self.depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.zlev = mock.Mock(units=Unit("m"), nbounds=0) - self.kwargs = dict( - sigma=self.sigma, - eta=self.eta, - depth=self.depth, - depth_c=self.depth_c, - nsigma=self.nsigma, - zlev=self.zlev, - ) - self.factory = OceanSigmaZFactory(**self.kwargs) - - def test_sigma(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=0) - self.factory.update(self.sigma, new_sigma) - self.assertIs(self.factory.sigma, new_sigma) - - def test_sigma_too_many_bounds(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.sigma, new_sigma) - - def test_sigma_zlev_same_boundedness(self): - new_sigma = mock.Mock(units=Unit("1"), nbounds=2) - with self.assertRaises(ValueError): - self.factory.update(self.sigma, new_sigma) - - def test_sigma_incompatible_units(self): - new_sigma = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.sigma, new_sigma) - - def test_eta(self): - new_eta = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.eta, new_eta) - self.assertIs(self.factory.eta, new_eta) - - def test_eta_incompatible_units(self): - new_eta = mock.Mock(units=Unit("Pa"), nbounds=0) - 
with self.assertRaises(ValueError): - self.factory.update(self.eta, new_eta) - - def test_depth(self): - new_depth = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.depth, new_depth) - self.assertIs(self.factory.depth, new_depth) - - def test_depth_incompatible_units(self): - new_depth = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.depth, new_depth) - - def test_depth_c(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(1,)) - self.factory.update(self.depth_c, new_depth_c) - self.assertIs(self.factory.depth_c, new_depth_c) - - def test_depth_c_non_scalar(self): - new_depth_c = mock.Mock(units=Unit("m"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - def test_depth_c_incompatible_units(self): - new_depth_c = mock.Mock(units=Unit("Pa"), nbounds=0, shape=(1,)) - with self.assertRaises(ValueError): - self.factory.update(self.depth_c, new_depth_c) - - def test_nsigma(self): - new_nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(1,)) - self.factory.update(self.nsigma, new_nsigma) - self.assertIs(self.factory.nsigma, new_nsigma) - - def test_nsigma_missing(self): - with self.assertRaises(ValueError): - self.factory.update(self.nsigma, None) - - def test_nsigma_non_scalar(self): - new_nsigma = mock.Mock(units=Unit("1"), nbounds=0, shape=(10,)) - with self.assertRaises(ValueError): - self.factory.update(self.nsigma, new_nsigma) - - def test_zlev(self): - new_zlev = mock.Mock(units=Unit("m"), nbounds=0) - self.factory.update(self.zlev, new_zlev) - self.assertIs(self.factory.zlev, new_zlev) - - def test_zlev_missing(self): - with self.assertRaises(ValueError): - self.factory.update(self.zlev, None) - - def test_zlev_too_many_bounds(self): - new_zlev = mock.Mock(units=Unit("m"), nbounds=4) - with self.assertRaises(ValueError): - self.factory.update(self.zlev, new_zlev) - - def test_zlev_same_boundedness(self): - 
new_zlev = mock.Mock(units=Unit("m"), nbounds=2) - with self.assertRaises(ValueError): - self.factory.update(self.zlev, new_zlev) - - def test_zlev_incompatible_units(self): - new_zlev = new_zlev = mock.Mock(units=Unit("Pa"), nbounds=0) - with self.assertRaises(ValueError): - self.factory.update(self.zlev, new_zlev) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/__init__.py b/lib/iris/tests/unit/common/__init__.py deleted file mode 100644 index 5380785042..0000000000 --- a/lib/iris/tests/unit/common/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.common` module.""" diff --git a/lib/iris/tests/unit/common/lenient/__init__.py b/lib/iris/tests/unit/common/lenient/__init__.py deleted file mode 100644 index 2a99e7a4c2..0000000000 --- a/lib/iris/tests/unit/common/lenient/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.common.lenient` package.""" diff --git a/lib/iris/tests/unit/common/lenient/test_Lenient.py b/lib/iris/tests/unit/common/lenient/test_Lenient.py deleted file mode 100644 index 62e2b24891..0000000000 --- a/lib/iris/tests/unit/common/lenient/test_Lenient.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.lenient.Lenient`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, Lenient - - -class Test___init__(tests.IrisTest): - def test_default(self): - lenient = Lenient() - expected = dict(maths=True) - self.assertEqual(expected, lenient.__dict__) - - def test_kwargs(self): - lenient = Lenient(maths=False) - expected = dict(maths=False) - self.assertEqual(expected, lenient.__dict__) - - def test_kwargs_invalid(self): - emsg = "Invalid .* option, got 'merge'." - with self.assertRaisesRegex(KeyError, emsg): - _ = Lenient(merge=True) - - -class Test___contains__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - - def test_in(self): - self.assertIn("maths", self.lenient) - - def test_not_in(self): - self.assertNotIn("concatenate", self.lenient) - - -class Test___getitem__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - - def test_in(self): - self.assertTrue(self.lenient["maths"]) - - def test_not_in(self): - emsg = "Invalid .* option, got 'MATHS'." - with self.assertRaisesRegex(KeyError, emsg): - _ = self.lenient["MATHS"] - - -class Test___repr__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - - def test(self): - expected = "Lenient(maths=True)" - self.assertEqual(expected, repr(self.lenient)) - - -class Test___setitem__(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - - def test_key_invalid(self): - emsg = "Invalid .* option, got 'MATHS." - with self.assertRaisesRegex(KeyError, emsg): - self.lenient["MATHS"] = False - - def test_maths_value_invalid(self): - value = sentinel.value - emsg = f"Invalid .* option 'maths' value, got {value!r}." 
- with self.assertRaisesRegex(ValueError, emsg): - self.lenient["maths"] = value - - def test_maths_disable__lenient_enable_true(self): - self.assertTrue(_LENIENT.enable) - self.lenient["maths"] = False - self.assertFalse(self.lenient.__dict__["maths"]) - self.assertFalse(_LENIENT.enable) - - def test_maths_disable__lenient_enable_false(self): - _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.lenient["maths"] = False - self.assertFalse(self.lenient.__dict__["maths"]) - self.assertFalse(_LENIENT.enable) - - def test_maths_enable__lenient_enable_true(self): - self.assertTrue(_LENIENT.enable) - self.lenient["maths"] = True - self.assertTrue(self.lenient.__dict__["maths"]) - self.assertTrue(_LENIENT.enable) - - def test_maths_enable__lenient_enable_false(self): - _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.lenient["maths"] = True - self.assertTrue(self.lenient.__dict__["maths"]) - self.assertTrue(_LENIENT.enable) - - -class Test_context(tests.IrisTest): - def setUp(self): - self.lenient = Lenient() - - def test_nop(self): - self.assertTrue(self.lenient["maths"]) - - with self.lenient.context(): - self.assertTrue(self.lenient["maths"]) - - self.assertTrue(self.lenient["maths"]) - - def test_maths_disable__lenient_true(self): - # synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) - - with self.lenient.context(maths=False): - # still synchronised - self.assertFalse(_LENIENT.enable) - self.assertFalse(self.lenient["maths"]) - - # still synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) - - def test_maths_disable__lenient_false(self): - # not synchronised - _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) - - with self.lenient.context(maths=False): - # now synchronised - self.assertFalse(_LENIENT.enable) - self.assertFalse(self.lenient["maths"]) - - # still synchronised - 
self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) - - def test_maths_enable__lenient_true(self): - # not synchronised - self.assertTrue(_LENIENT.enable) - self.lenient.__dict__["maths"] = False - self.assertFalse(self.lenient["maths"]) - - with self.lenient.context(maths=True): - # now synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) - - # still synchronised - self.assertFalse(_LENIENT.enable) - self.assertFalse(self.lenient["maths"]) - - def test_maths_enable__lenient_false(self): - # synchronised - _LENIENT.__dict__["enable"] = False - self.assertFalse(_LENIENT.enable) - self.lenient.__dict__["maths"] = False - self.assertFalse(self.lenient["maths"]) - - with self.lenient.context(maths=True): - # still synchronised - self.assertTrue(_LENIENT.enable) - self.assertTrue(self.lenient["maths"]) - - # still synchronised - self.assertFalse(_LENIENT.enable) - self.assertFalse(self.lenient["maths"]) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py deleted file mode 100644 index 44f38d9c5a..0000000000 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ /dev/null @@ -1,835 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.lenient._Lenient`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from collections.abc import Iterable - -from iris.common.lenient import ( - _LENIENT_ENABLE_DEFAULT, - _LENIENT_PROTECTED, - _Lenient, - _qualname, -) - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.expected = dict(active=None, enable=_LENIENT_ENABLE_DEFAULT) - - def test_default(self): - lenient = _Lenient() - self.assertEqual(self.expected, lenient.__dict__) - - def test_args_service_str(self): - service = "service1" - lenient = _Lenient(service) - self.expected.update(dict(service1=True)) - self.assertEqual(self.expected, lenient.__dict__) - - def test_args_services_str(self): - services = ("service1", "service2") - lenient = _Lenient(*services) - self.expected.update(dict(service1=True, service2=True)) - self.assertEqual(self.expected, lenient.__dict__) - - def test_args_services_callable(self): - def service1(): - pass - - def service2(): - pass - - services = (service1, service2) - lenient = _Lenient(*services) - self.expected.update( - {_qualname(service1): True, _qualname(service2): True} - ) - self.assertEqual(self.expected, lenient.__dict__) - - def test_kwargs_client_str(self): - client = dict(client1="service1") - lenient = _Lenient(**client) - self.expected.update(dict(client1=("service1",))) - self.assertEqual(self.expected, lenient.__dict__) - - def test_kwargs_clients_str(self): - clients = dict(client1="service1", client2="service2") - lenient = _Lenient(**clients) - self.expected.update( - dict(client1=("service1",), client2=("service2",)) - ) - self.assertEqual(self.expected, lenient.__dict__) - - def test_kwargs_clients_callable(self): - def client1(): - pass - - def client2(): - pass - - def service1(): - pass - - def service2(): - pass - - qualname_client1 = _qualname(client1) - qualname_client2 = _qualname(client2) - clients = { - qualname_client1: service1, - qualname_client2: (service1, service2), - } - lenient = _Lenient(**clients) - self.expected.update( - { - 
_qualname(client1): (_qualname(service1),), - _qualname(client2): (_qualname(service1), _qualname(service2)), - } - ) - self.assertEqual(self.expected, lenient.__dict__) - - -class Test___call__(tests.IrisTest): - def setUp(self): - self.client = "myclient" - self.lenient = _Lenient() - - def test_missing_service_str(self): - self.assertFalse(self.lenient("myservice")) - - def test_missing_service_callable(self): - def myservice(): - pass - - self.assertFalse(self.lenient(myservice)) - - def test_disabled_service_str(self): - service = "myservice" - self.lenient.__dict__[service] = False - self.assertFalse(self.lenient(service)) - - def test_disable_service_callable(self): - def myservice(): - pass - - qualname_service = _qualname(myservice) - self.lenient.__dict__[qualname_service] = False - self.assertFalse(self.lenient(myservice)) - - def test_service_str_with_no_active_client(self): - service = "myservice" - self.lenient.__dict__[service] = True - self.assertFalse(self.lenient(service)) - - def test_service_callable_with_no_active_client(self): - def myservice(): - pass - - qualname_service = _qualname(myservice) - self.lenient.__dict__[qualname_service] = True - self.assertFalse(self.lenient(myservice)) - - def test_service_str_with_active_client_with_no_registered_services(self): - service = "myservice" - self.lenient.__dict__[service] = True - self.lenient.__dict__["active"] = self.client - self.assertFalse(self.lenient(service)) - - def test_service_callable_with_active_client_with_no_registered_services( - self, - ): - def myservice(): - pass - - def myclient(): - pass - - qualname_service = _qualname(myservice) - self.lenient.__dict__[qualname_service] = True - self.lenient.__dict__["active"] = _qualname(myclient) - self.assertFalse(self.lenient(myservice)) - - def test_service_str_with_active_client_with_unmatched_registered_services( - self, - ): - service = "myservice" - self.lenient.__dict__[service] = True - self.lenient.__dict__["active"] = 
self.client - self.lenient.__dict__[self.client] = ("service1", "service2") - self.assertFalse(self.lenient(service)) - - def test_service_callable_with_active_client_with_unmatched_registered_services( - self, - ): - def myservice(): - pass - - def myclient(): - pass - - qualname_service = _qualname(myservice) - qualname_client = _qualname(myclient) - self.lenient.__dict__[qualname_service] = True - self.lenient.__dict__["active"] = qualname_client - self.lenient.__dict__[qualname_client] = ("service1", "service2") - self.assertFalse(self.lenient(myservice)) - - def test_service_str_with_active_client_with_registered_services(self): - service = "myservice" - self.lenient.__dict__[service] = True - self.lenient.__dict__["active"] = self.client - self.lenient.__dict__[self.client] = ("service1", "service2", service) - self.assertTrue(self.lenient(service)) - - def test_service_callable_with_active_client_with_registered_services( - self, - ): - def myservice(): - pass - - def myclient(): - pass - - qualname_service = _qualname(myservice) - qualname_client = _qualname(myclient) - self.lenient.__dict__[qualname_service] = True - self.lenient.__dict__["active"] = qualname_client - self.lenient.__dict__[qualname_client] = ( - "service1", - "service2", - qualname_service, - ) - self.assertTrue(self.lenient(myservice)) - - def test_service_str_with_active_client_with_unmatched_registered_service_str( - self, - ): - service = "myservice" - self.lenient.__dict__[service] = True - self.lenient.__dict__["active"] = self.client - self.lenient.__dict__[self.client] = "serviceXXX" - self.assertFalse(self.lenient(service)) - - def test_service_callable_with_active_client_with_unmatched_registered_service_str( - self, - ): - def myservice(): - pass - - def myclient(): - pass - - qualname_service = _qualname(myservice) - qualname_client = _qualname(myclient) - self.lenient.__dict__[qualname_service] = True - self.lenient.__dict__["active"] = qualname_client - 
self.lenient.__dict__[qualname_client] = f"{qualname_service}XXX" - self.assertFalse(self.lenient(myservice)) - - def test_service_str_with_active_client_with_registered_service_str(self): - service = "myservice" - self.lenient.__dict__[service] = True - self.lenient.__dict__["active"] = self.client - self.lenient.__dict__[self.client] = service - self.assertTrue(self.lenient(service)) - - def test_service_callable_with_active_client_with_registered_service_str( - self, - ): - def myservice(): - pass - - def myclient(): - pass - - qualname_service = _qualname(myservice) - qualname_client = _qualname(myclient) - self.lenient.__dict__[qualname_service] = True - self.lenient.__dict__["active"] = qualname_client - self.lenient.__dict__[qualname_client] = qualname_service - self.assertTrue(self.lenient(myservice)) - - def test_enable(self): - service = "myservice" - self.lenient.__dict__[service] = True - self.lenient.__dict__["active"] = self.client - self.lenient.__dict__[self.client] = service - self.assertTrue(self.lenient(service)) - self.lenient.__dict__["enable"] = False - self.assertFalse(self.lenient(service)) - - -class Test___contains__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_in(self): - self.assertIn("active", self.lenient) - - def test_not_in(self): - self.assertNotIn("ACTIVATE", self.lenient) - - def test_in_qualname(self): - def func(): - pass - - qualname_func = _qualname(func) - lenient = _Lenient() - lenient.__dict__[qualname_func] = None - self.assertIn(func, lenient) - self.assertIn(qualname_func, lenient) - - -class Test___getattr__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_in(self): - self.assertIsNone(self.lenient.active) - - def test_not_in(self): - emsg = "Invalid .* option, got 'wibble'." 
- with self.assertRaisesRegex(AttributeError, emsg): - _ = self.lenient.wibble - - -class Test__getitem__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_in(self): - self.assertIsNone(self.lenient["active"]) - - def test_in_callable(self): - def service(): - pass - - qualname_service = _qualname(service) - self.lenient.__dict__[qualname_service] = True - self.assertTrue(self.lenient[service]) - - def test_not_in(self): - emsg = "Invalid .* option, got 'wibble'." - with self.assertRaisesRegex(KeyError, emsg): - _ = self.lenient["wibble"] - - def test_not_in_callable(self): - def service(): - pass - - qualname_service = _qualname(service) - emsg = f"Invalid .* option, got '{qualname_service}'." - with self.assertRaisesRegex(KeyError, emsg): - _ = self.lenient[service] - - -class Test___setitem__(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_in(self): - emsg = "Invalid .* option, got 'wibble'." - with self.assertRaisesRegex(KeyError, emsg): - self.lenient["wibble"] = None - - def test_in_value_str(self): - client = "client" - service = "service" - self.lenient.__dict__[client] = None - self.lenient[client] = service - self.assertEqual(self.lenient.__dict__[client], (service,)) - - def test_callable_in_value_str(self): - def client(): - pass - - service = "service" - qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = None - self.lenient[client] = service - self.assertEqual(self.lenient.__dict__[qualname_client], (service,)) - - def test_in_value_callable(self): - def service(): - pass - - client = "client" - qualname_service = _qualname(service) - self.lenient.__dict__[client] = None - self.lenient[client] = service - self.assertEqual(self.lenient.__dict__[client], (qualname_service,)) - - def test_callable_in_value_callable(self): - def client(): - pass - - def service(): - pass - - qualname_client = _qualname(client) - qualname_service = _qualname(service) - 
self.lenient.__dict__[qualname_client] = None - self.lenient[client] = service - self.assertEqual( - self.lenient.__dict__[qualname_client], (qualname_service,) - ) - - def test_in_value_bool(self): - client = "client" - self.lenient.__dict__[client] = None - self.lenient[client] = True - self.assertTrue(self.lenient.__dict__[client]) - self.assertFalse(isinstance(self.lenient.__dict__[client], Iterable)) - - def test_callable_in_value_bool(self): - def client(): - pass - - qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = None - self.lenient[client] = True - self.assertTrue(self.lenient.__dict__[qualname_client]) - self.assertFalse( - isinstance(self.lenient.__dict__[qualname_client], Iterable) - ) - - def test_in_value_iterable(self): - client = "client" - services = ("service1", "service2") - self.lenient.__dict__[client] = None - self.lenient[client] = services - self.assertEqual(self.lenient.__dict__[client], services) - - def test_callable_in_value_iterable(self): - def client(): - pass - - qualname_client = _qualname(client) - services = ("service1", "service2") - self.lenient.__dict__[qualname_client] = None - self.lenient[client] = services - self.assertEqual(self.lenient.__dict__[qualname_client], services) - - def test_in_value_iterable_callable(self): - def service1(): - pass - - def service2(): - pass - - client = "client" - self.lenient.__dict__[client] = None - qualname_services = (_qualname(service1), _qualname(service2)) - self.lenient[client] = (service1, service2) - self.assertEqual(self.lenient.__dict__[client], qualname_services) - - def test_callable_in_value_iterable_callable(self): - def client(): - pass - - def service1(): - pass - - def service2(): - pass - - qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = None - qualname_services = (_qualname(service1), _qualname(service2)) - self.lenient[client] = (service1, service2) - self.assertEqual( - self.lenient.__dict__[qualname_client], 
qualname_services - ) - - def test_active_iterable(self): - active = "active" - self.assertIsNone(self.lenient.__dict__[active]) - emsg = "Invalid .* option 'active'" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient[active] = (None,) - - def test_active_str(self): - active = "active" - client = "client1" - self.assertIsNone(self.lenient.__dict__[active]) - self.lenient[active] = client - self.assertEqual(self.lenient.__dict__[active], client) - - def test_active_callable(self): - def client(): - pass - - active = "active" - qualname_client = _qualname(client) - self.assertIsNone(self.lenient.__dict__[active]) - self.lenient[active] = client - self.assertEqual(self.lenient.__dict__[active], qualname_client) - - def test_enable(self): - enable = "enable" - self.assertEqual( - self.lenient.__dict__[enable], _LENIENT_ENABLE_DEFAULT - ) - self.lenient[enable] = True - self.assertTrue(self.lenient.__dict__[enable]) - self.lenient[enable] = False - self.assertFalse(self.lenient.__dict__[enable]) - - def test_enable_invalid(self): - emsg = "Invalid .* option 'enable'" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient["enable"] = None - - -class Test_context(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - self.default = dict(active=None, enable=_LENIENT_ENABLE_DEFAULT) - - def copy(self): - return self.lenient.__dict__.copy() - - def test_nop(self): - pre = self.copy() - with self.lenient.context(): - context = self.copy() - post = self.copy() - self.assertEqual(pre, self.default) - self.assertEqual(context, self.default) - self.assertEqual(post, self.default) - - def test_active_str(self): - client = "client" - pre = self.copy() - with self.lenient.context(active=client): - context = self.copy() - post = self.copy() - self.assertEqual(pre, self.default) - expected = self.default.copy() - expected.update(dict(active=client)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) - - def 
test_active_callable(self): - def client(): - pass - - pre = self.copy() - with self.lenient.context(active=client): - context = self.copy() - post = self.copy() - qualname_client = _qualname(client) - self.assertEqual(pre, self.default) - expected = self.default.copy() - expected.update(dict(active=qualname_client)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) - - def test_kwargs(self): - client = "client" - self.lenient.__dict__["service1"] = False - self.lenient.__dict__["service2"] = False - pre = self.copy() - with self.lenient.context(active=client, service1=True, service2=True): - context = self.copy() - post = self.copy() - self.default.update(dict(service1=False, service2=False)) - self.assertEqual(pre, self.default) - expected = self.default.copy() - expected.update(dict(active=client, service1=True, service2=True)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) - - def test_args_str(self): - client = "client" - services = ("service1", "service2") - pre = self.copy() - with self.lenient.context(*services, active=client): - context = self.copy() - post = self.copy() - self.assertEqual(pre, self.default) - expected = self.default.copy() - expected.update(dict(active=client, client=services)) - self.assertEqual(context["active"], expected["active"]) - self.assertEqual(set(context["client"]), set(expected["client"])) - self.assertEqual(post, self.default) - - def test_args_callable(self): - def service1(): - pass - - def service2(): - pass - - client = "client" - services = (service1, service2) - pre = self.copy() - with self.lenient.context(*services, active=client): - context = self.copy() - post = self.copy() - qualname_services = tuple([_qualname(service) for service in services]) - self.assertEqual(pre, self.default) - expected = self.default.copy() - expected.update(dict(active=client, client=qualname_services)) - self.assertEqual(context["active"], expected["active"]) - 
self.assertEqual(set(context["client"]), set(expected["client"])) - self.assertEqual(post, self.default) - - def test_context_runtime(self): - services = ("service1", "service2") - pre = self.copy() - with self.lenient.context(*services): - context = self.copy() - post = self.copy() - self.assertEqual(pre, self.default) - expected = self.default.copy() - expected.update(dict(active="__context", __context=services)) - self.assertEqual(context, expected) - self.assertEqual(post, self.default) - - -class Test_enable(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_getter(self): - self.assertEqual(self.lenient.enable, _LENIENT_ENABLE_DEFAULT) - - def test_setter_invalid(self): - emsg = "Invalid .* option 'enable'" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.enable = 0 - - def test_setter(self): - self.assertEqual(self.lenient.enable, _LENIENT_ENABLE_DEFAULT) - self.lenient.enable = False - self.assertFalse(self.lenient.enable) - - -class Test_register_client(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_protected(self): - emsg = "Cannot register .* client" - for protected in _LENIENT_PROTECTED: - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.register_client(protected, "service") - - def test_str_service_str(self): - client = "client" - services = "service" - self.lenient.register_client(client, services) - self.assertIn(client, self.lenient.__dict__) - self.assertEqual(self.lenient.__dict__[client], (services,)) - - def test_str_services_str(self): - client = "client" - services = ("service1", "service2") - self.lenient.register_client(client, services) - self.assertIn(client, self.lenient.__dict__) - self.assertEqual(self.lenient.__dict__[client], services) - - def test_callable_service_callable(self): - def client(): - pass - - def service(): - pass - - qualname_client = _qualname(client) - qualname_service = _qualname(service) - self.lenient.register_client(client, 
service) - self.assertIn(qualname_client, self.lenient.__dict__) - self.assertEqual( - self.lenient.__dict__[qualname_client], (qualname_service,) - ) - - def test_callable_services_callable(self): - def client(): - pass - - def service1(): - pass - - def service2(): - pass - - qualname_client = _qualname(client) - qualname_services = (_qualname(service1), _qualname(service2)) - self.lenient.register_client(client, (service1, service2)) - self.assertIn(qualname_client, self.lenient.__dict__) - self.assertEqual( - self.lenient.__dict__[qualname_client], qualname_services - ) - - def test_services_empty(self): - emsg = "Require at least one .* client service." - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.register_client("client", ()) - - def test_services_overwrite(self): - client = "client" - services = ("service1", "service2") - self.lenient.__dict__[client] = services - self.assertEqual(self.lenient[client], services) - new_services = ("service3", "service4") - self.lenient.register_client(client, services=new_services) - self.assertEqual(self.lenient[client], new_services) - - def test_services_append(self): - client = "client" - services = ("service1", "service2") - self.lenient.__dict__[client] = services - self.assertEqual(self.lenient[client], services) - new_services = ("service3", "service4") - self.lenient.register_client( - client, services=new_services, append=True - ) - expected = set(services + new_services) - self.assertEqual(set(self.lenient[client]), expected) - - -class Test_register_service(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_str(self): - service = "service" - self.assertNotIn(service, self.lenient.__dict__) - self.lenient.register_service(service) - self.assertIn(service, self.lenient.__dict__) - self.assertFalse(isinstance(self.lenient.__dict__[service], Iterable)) - self.assertTrue(self.lenient.__dict__[service]) - - def test_callable(self): - def service(): - pass - - qualname_service 
= _qualname(service) - self.assertNotIn(qualname_service, self.lenient.__dict__) - self.lenient.register_service(service) - self.assertIn(qualname_service, self.lenient.__dict__) - self.assertFalse( - isinstance(self.lenient.__dict__[qualname_service], Iterable) - ) - self.assertTrue(self.lenient.__dict__[qualname_service]) - - def test_not_protected(self): - emsg = "Cannot register .* service" - for protected in _LENIENT_PROTECTED: - self.lenient.__dict__[protected] = None - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.register_service("active") - - -class Test_unregister_client(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_protected(self): - emsg = "Cannot unregister .* client, as .* is a protected .* option." - for protected in _LENIENT_PROTECTED: - self.lenient.__dict__[protected] = None - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client(protected) - - def test_not_in(self): - emsg = "Cannot unregister unknown .* client" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client("client") - - def test_not_client(self): - client = "client" - self.lenient.__dict__[client] = True - emsg = "Cannot unregister .* client, as .* is not a valid .* client." - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client(client) - - def test_not_client_callable(self): - def client(): - pass - - qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = True - emsg = "Cannot unregister .* client, as .* is not a valid .* client." 
- with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_client(client) - - def test_str(self): - client = "client" - self.lenient.__dict__[client] = (None,) - self.lenient.unregister_client(client) - self.assertNotIn(client, self.lenient.__dict__) - - def test_callable(self): - def client(): - pass - - qualname_client = _qualname(client) - self.lenient.__dict__[qualname_client] = (None,) - self.lenient.unregister_client(client) - self.assertNotIn(qualname_client, self.lenient.__dict__) - - -class Test_unregister_service(tests.IrisTest): - def setUp(self): - self.lenient = _Lenient() - - def test_not_protected(self): - emsg = "Cannot unregister .* service, as .* is a protected .* option." - for protected in _LENIENT_PROTECTED: - self.lenient.__dict__[protected] = None - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service(protected) - - def test_not_in(self): - emsg = "Cannot unregister unknown .* service" - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service("service") - - def test_not_service(self): - service = "service" - self.lenient.__dict__[service] = (None,) - emsg = "Cannot unregister .* service, as .* is not a valid .* service." - with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service(service) - - def test_not_service_callable(self): - def service(): - pass - - qualname_service = _qualname(service) - self.lenient.__dict__[qualname_service] = (None,) - emsg = "Cannot unregister .* service, as .* is not a valid .* service." 
- with self.assertRaisesRegex(ValueError, emsg): - self.lenient.unregister_service(service) - - def test_str(self): - service = "service" - self.lenient.__dict__[service] = True - self.lenient.unregister_service(service) - self.assertNotIn(service, self.lenient.__dict__) - - def test_callable(self): - def service(): - pass - - qualname_service = _qualname(service) - self.lenient.__dict__[qualname_service] = True - self.lenient.unregister_service(service) - self.assertNotIn(qualname_service, self.lenient.__dict__) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_client.py b/lib/iris/tests/unit/common/lenient/test__lenient_client.py deleted file mode 100644 index 3a19563efc..0000000000 --- a/lib/iris/tests/unit/common/lenient/test__lenient_client.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.common.lenient._lenient_client`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from inspect import getmodule -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _lenient_client - - -class Test(tests.IrisTest): - def setUp(self): - module_name = getmodule(self).__name__ - self.client = f"{module_name}" + ".Test.{}..myclient" - self.service = f"{module_name}" + ".Test.{}..myservice" - self.active = "active" - self.args_in = sentinel.arg1, sentinel.arg2 - self.kwargs_in = dict(kwarg1=sentinel.kwarg1, kwarg2=sentinel.kwarg2) - - def test_args_too_many(self): - emsg = "Invalid lenient client arguments, expecting 1" - with self.assertRaisesRegex(AssertionError, emsg): - _lenient_client(None, None) - - def test_args_not_callable(self): - emsg = "Invalid lenient client argument, expecting a callable" - with self.assertRaisesRegex(AssertionError, emsg): - _lenient_client(None) - - def test_args_and_kwargs(self): - def func(): - pass - - emsg = ( - "Invalid lenient client, got both arguments and keyword arguments" - ) - with self.assertRaisesRegex(AssertionError, emsg): - _lenient_client(func, services=func) - - def test_call_naked(self): - @_lenient_client - def myclient(): - return _LENIENT.__dict__.copy() - - result = myclient() - self.assertIn(self.active, result) - qualname_client = self.client.format("test_call_naked") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) - - def test_call_naked_alternative(self): - def myclient(): - return _LENIENT.__dict__.copy() - - result = _lenient_client(myclient)() - self.assertIn(self.active, result) - qualname_client = self.client.format("test_call_naked_alternative") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) - - def test_call_naked_client_args_kwargs(self): - @_lenient_client - def myclient(*args, **kwargs): - return args, kwargs - - args_out, kwargs_out = myclient(*self.args_in, **self.kwargs_in) - self.assertEqual(args_out, 
self.args_in) - self.assertEqual(kwargs_out, self.kwargs_in) - - def test_call_naked_doc(self): - @_lenient_client - def myclient(): - """myclient doc-string""" - - self.assertEqual(myclient.__doc__, "myclient doc-string") - - def test_call_no_kwargs(self): - @_lenient_client() - def myclient(): - return _LENIENT.__dict__.copy() - - result = myclient() - self.assertIn(self.active, result) - qualname_client = self.client.format("test_call_no_kwargs") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) - - def test_call_no_kwargs_alternative(self): - def myclient(): - return _LENIENT.__dict__.copy() - - result = (_lenient_client())(myclient)() - self.assertIn(self.active, result) - qualname_client = self.client.format("test_call_no_kwargs_alternative") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) - - def test_call_kwargs_none(self): - @_lenient_client(services=None) - def myclient(): - return _LENIENT.__dict__.copy() - - result = myclient() - self.assertIn(self.active, result) - qualname_client = self.client.format("test_call_kwargs_none") - self.assertEqual(result[self.active], qualname_client) - self.assertNotIn(qualname_client, result) - - def test_call_kwargs_single(self): - service = sentinel.service - - @_lenient_client(services=service) - def myclient(): - return _LENIENT.__dict__.copy() - - result = myclient() - self.assertIn(self.active, result) - qualname_client = self.client.format("test_call_kwargs_single") - self.assertEqual(result[self.active], qualname_client) - self.assertIn(qualname_client, result) - self.assertEqual(result[qualname_client], (service,)) - - def test_call_kwargs_single_callable(self): - def myservice(): - pass - - @_lenient_client(services=myservice) - def myclient(): - return _LENIENT.__dict__.copy() - - test_name = "test_call_kwargs_single_callable" - result = myclient() - self.assertIn(self.active, result) - qualname_client = 
self.client.format(test_name) - self.assertEqual(result[self.active], qualname_client) - self.assertIn(qualname_client, result) - qualname_services = (self.service.format(test_name),) - self.assertEqual(result[qualname_client], qualname_services) - - def test_call_kwargs_iterable(self): - services = (sentinel.service1, sentinel.service2) - - @_lenient_client(services=services) - def myclient(): - return _LENIENT.__dict__.copy() - - result = myclient() - self.assertIn(self.active, result) - qualname_client = self.client.format("test_call_kwargs_iterable") - self.assertEqual(result[self.active], qualname_client) - self.assertIn(qualname_client, result) - self.assertEqual(set(result[qualname_client]), set(services)) - - def test_call_client_args_kwargs(self): - @_lenient_client() - def myclient(*args, **kwargs): - return args, kwargs - - args_out, kwargs_out = myclient(*self.args_in, **self.kwargs_in) - self.assertEqual(args_out, self.args_in) - self.assertEqual(kwargs_out, self.kwargs_in) - - def test_call_doc(self): - @_lenient_client() - def myclient(): - """myclient doc-string""" - - self.assertEqual(myclient.__doc__, "myclient doc-string") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/lenient/test__lenient_service.py b/lib/iris/tests/unit/common/lenient/test__lenient_service.py deleted file mode 100644 index 9545b137ea..0000000000 --- a/lib/iris/tests/unit/common/lenient/test__lenient_service.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.common.lenient._lenient_service`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from inspect import getmodule -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _lenient_service - - -class Test(tests.IrisTest): - def setUp(self): - module_name = getmodule(self).__name__ - self.service = f"{module_name}" + ".Test.{}..myservice" - self.args_in = sentinel.arg1, sentinel.arg2 - self.kwargs_in = dict(kwarg1=sentinel.kwarg1, kwarg2=sentinel.kwarg2) - - def test_args_too_many(self): - emsg = "Invalid lenient service arguments, expecting 1" - with self.assertRaisesRegex(AssertionError, emsg): - _lenient_service(None, None) - - def test_args_not_callable(self): - emsg = "Invalid lenient service argument, expecting a callable" - with self.assertRaisesRegex(AssertionError, emsg): - _lenient_service(None) - - def test_call_naked(self): - @_lenient_service - def myservice(): - return _LENIENT.__dict__.copy() - - qualname_service = self.service.format("test_call_naked") - state = _LENIENT.__dict__ - self.assertIn(qualname_service, state) - self.assertTrue(state[qualname_service]) - result = myservice() - self.assertIn(qualname_service, result) - self.assertTrue(result[qualname_service]) - - def test_call_naked_alternative(self): - def myservice(): - return _LENIENT.__dict__.copy() - - qualname_service = self.service.format("test_call_naked_alternative") - result = _lenient_service(myservice)() - self.assertIn(qualname_service, result) - self.assertTrue(result[qualname_service]) - - def test_call_naked_service_args_kwargs(self): - @_lenient_service - def myservice(*args, **kwargs): - return args, kwargs - - args_out, kwargs_out = myservice(*self.args_in, **self.kwargs_in) - self.assertEqual(args_out, self.args_in) - self.assertEqual(kwargs_out, self.kwargs_in) - - def test_call_naked_doc(self): - @_lenient_service - def myservice(): - """myservice doc-string""" - - self.assertEqual(myservice.__doc__, "myservice doc-string") - - def test_call(self): - @_lenient_service() - def 
myservice(): - return _LENIENT.__dict__.copy() - - qualname_service = self.service.format("test_call") - state = _LENIENT.__dict__ - self.assertIn(qualname_service, state) - self.assertTrue(state[qualname_service]) - result = myservice() - self.assertIn(qualname_service, result) - self.assertTrue(result[qualname_service]) - - def test_call_alternative(self): - def myservice(): - return _LENIENT.__dict__.copy() - - qualname_service = self.service.format("test_call_alternative") - result = (_lenient_service())(myservice)() - self.assertIn(qualname_service, result) - self.assertTrue(result[qualname_service]) - - def test_call_service_args_kwargs(self): - @_lenient_service() - def myservice(*args, **kwargs): - return args, kwargs - - args_out, kwargs_out = myservice(*self.args_in, **self.kwargs_in) - self.assertEqual(args_out, self.args_in) - self.assertEqual(kwargs_out, self.kwargs_in) - - def test_call_doc(self): - @_lenient_service() - def myservice(): - """myservice doc-string""" - - self.assertEqual(myservice.__doc__, "myservice doc-string") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/lenient/test__qualname.py b/lib/iris/tests/unit/common/lenient/test__qualname.py deleted file mode 100644 index 3deefbf30d..0000000000 --- a/lib/iris/tests/unit/common/lenient/test__qualname.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.common.lenient._qualname`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from inspect import getmodule -from unittest.mock import sentinel - -from iris.common.lenient import _qualname - - -class Test(tests.IrisTest): - def setUp(self): - module_name = getmodule(self).__name__ - self.locals = f"{module_name}" + ".Test.{}..{}" - - def test_pass_thru_non_callable(self): - func = sentinel.func - result = _qualname(func) - self.assertEqual(result, func) - - def test_callable_function_local(self): - def myfunc(): - pass - - qualname_func = self.locals.format( - "test_callable_function_local", "myfunc" - ) - result = _qualname(myfunc) - self.assertEqual(result, qualname_func) - - def test_callable_function(self): - import iris - - result = _qualname(iris.load) - self.assertEqual(result, "iris.load") - - def test_callable_method_local(self): - class MyClass: - def mymethod(self): - pass - - qualname_method = self.locals.format( - "test_callable_method_local", "MyClass.mymethod" - ) - result = _qualname(MyClass.mymethod) - self.assertEqual(result, qualname_method) - - def test_callable_method(self): - import iris - - result = _qualname(iris.cube.Cube.add_ancillary_variable) - self.assertEqual(result, "iris.cube.Cube.add_ancillary_variable") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/__init__.py b/lib/iris/tests/unit/common/metadata/__init__.py deleted file mode 100644 index aba33c8312..0000000000 --- a/lib/iris/tests/unit/common/metadata/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.common.metadata` package.""" diff --git a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py b/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py deleted file mode 100644 index 9efb43ec42..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_AncillaryVariableMetadata.py +++ /dev/null @@ -1,492 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.metadata.AncillaryVariableMetadata`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import AncillaryVariableMetadata, BaseMetadata - - -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.cls = AncillaryVariableMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - ) - fmt = ( - "AncillaryVariableMetadata(standard_name={!r}, long_name={!r}, " - "var_name={!r}, units={!r}, attributes={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - ) - self.assertEqual(expected, repr(metadata)) - - def test__fields(self): - expected = ( - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - ) - self.assertEqual(self.cls._fields, expected) - - 
def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) - - -class Test___eq__(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - ) - self.dummy = sentinel.dummy - self.cls = AncillaryVariableMetadata - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value - metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - - with 
mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = AncillaryVariableMetadata - self.one = self.cls(1, 1, 1, 1, 1) - self.two = self.cls(1, 1, 1, 2, 1) - self.none = self.cls(1, 1, 1, None, 1) - self.attributes = self.cls(1, 1, 1, 1, 10) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes(self): - result = self.one < self.attributes - self.assertFalse(result) - result = self.attributes < self.one - self.assertFalse(result) - - -class Test_combine(tests.IrisTest): - def setUp(self): - 
self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - ) - self.dummy = sentinel.dummy - self.cls = AncillaryVariableMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) - - def test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = 
self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["units"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, 
rmetadata.combine(lmetadata)._asdict()) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - ) - self.dummy = sentinel.dummy - self.cls = AncillaryVariableMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) - - def test_lenient_service(self): - qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def 
test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["units"] = (left["units"], right["units"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["units"] = lexpected["units"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = 
self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - -class Test_equal(tests.IrisTest): - def setUp(self): - self.cls = AncillaryVariableMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py 
b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py deleted file mode 100644 index f4760b3051..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ /dev/null @@ -1,1653 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.metadata.BaseMetadata`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from collections import OrderedDict -import unittest.mock as mock -from unittest.mock import sentinel - -import numpy as np -import numpy.ma as ma - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import BaseMetadata, CubeMetadata - - -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.cls = BaseMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - ) - fmt = ( - "BaseMetadata(standard_name={!r}, long_name={!r}, " - "var_name={!r}, units={!r}, attributes={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - ) - self.assertEqual(expected, repr(metadata)) - - def test_str(self): - metadata = self.cls( - standard_name="", - long_name=None, - var_name=self.var_name, - units=self.units, - attributes={}, - ) - expected = ( - f"BaseMetadata(var_name={self.var_name!r}, units={self.units!r})" - ) - self.assertEqual(expected, str(metadata)) - - def test__fields(self): - expected = ( - 
"standard_name", - "long_name", - "var_name", - "units", - "attributes", - ) - self.assertEqual(expected, self.cls._fields) - - -class Test___eq__(tests.IrisTest): - def setUp(self): - self.kwargs = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - ) - self.cls = BaseMetadata - self.metadata = self.cls(**self.kwargs) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_cannot_compare_non_class(self): - result = self.metadata.__eq__(None) - self.assertIs(NotImplemented, result) - - def test_cannot_compare_different_class(self): - other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) - result = self.metadata.__eq__(other) - self.assertIs(NotImplemented, result) - - def test_lenient(self): - return_value = sentinel.return_value - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ) as mlenient: - with mock.patch.object( - self.cls, "_compare_lenient", return_value=return_value - ) as mcompare: - result = self.metadata.__eq__(self.metadata) - - self.assertEqual(return_value, result) - self.assertEqual(1, mcompare.call_count) - (arg,), kwargs = mcompare.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - self.assertEqual(1, mlenient.call_count) - (arg,), kwargs = mlenient.call_args - self.assertEqual(_qualname(self.cls.__eq__), _qualname(arg)) - self.assertEqual(dict(), kwargs) - - def test_strict_same(self): - self.assertTrue(self.metadata.__eq__(self.metadata)) - other = self.cls(**self.kwargs) - self.assertTrue(self.metadata.__eq__(other)) - self.assertTrue(other.__eq__(self.metadata)) - - def test_strict_different(self): - self.kwargs["var_name"] = None - other = self.cls(**self.kwargs) - 
self.assertFalse(self.metadata.__eq__(other)) - self.assertFalse(other.__eq__(self.metadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - self.one = self.cls(1, 1, 1, 1, 1) - self.two = self.cls(1, 1, 1, 2, 1) - self.none = self.cls(1, 1, 1, None, 1) - self.attributes = self.cls(1, 1, 1, 1, 10) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes(self): - result = self.one < self.attributes - self.assertFalse(result) - result = self.attributes < self.one - self.assertFalse(result) - - -class Test___ne__(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.other = sentinel.other - - def test_notimplemented(self): - return_value = NotImplemented - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: - result = self.metadata.__ne__(self.other) - - self.assertIs(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(self.other, arg) - self.assertEqual(dict(), kwargs) - - def test_negate_true(self): - return_value = True - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: - result = self.metadata.__ne__(self.other) - - self.assertFalse(result) - (arg,), kwargs = mocker.call_args - self.assertEqual(self.other, arg) - self.assertEqual(dict(), kwargs) - - def test_negate_false(self): - return_value = False - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: - result = self.metadata.__ne__(self.other) - - self.assertTrue(result) - (arg,), 
kwargs = mocker.call_args - self.assertEqual(self.other, arg) - self.assertEqual(dict(), kwargs) - - -class Test__combine(tests.IrisTest): - def setUp(self): - self.kwargs = dict( - standard_name="standard_name", - long_name="long_name", - var_name="var_name", - units="units", - attributes=dict(one=sentinel.one, two=sentinel.two), - ) - self.cls = BaseMetadata - self.metadata = self.cls(**self.kwargs) - - def test_lenient(self): - return_value = sentinel._combine_lenient - other = sentinel.other - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ) as mlenient: - with mock.patch.object( - self.cls, "_combine_lenient", return_value=return_value - ) as mcombine: - result = self.metadata._combine(other) - - self.assertEqual(1, mlenient.call_count) - (arg,), kwargs = mlenient.call_args - self.assertEqual(self.metadata.combine, arg) - self.assertEqual(dict(), kwargs) - - self.assertEqual(return_value, result) - self.assertEqual(1, mcombine.call_count) - (arg,), kwargs = mcombine.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_strict(self): - dummy = sentinel.dummy - values = self.kwargs.copy() - values["standard_name"] = dummy - values["var_name"] = dummy - values["attributes"] = dummy - other = self.cls(**values) - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - result = self.metadata._combine(other) - - expected = [ - None if values[field] == dummy else values[field] - for field in self.cls._fields - ] - self.assertEqual(expected, result) - - -class Test__combine_lenient(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() - self.names = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - ) - - def test_strict_units(self): - left = self.none.copy() - left["units"] = "K" - right = left.copy() - lmetadata = self.cls(**left) - rmetadata = 
self.cls(**right) - - expected = list(left.values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) - - def test_strict_units_different(self): - left = self.none.copy() - right = self.none.copy() - left["units"] = "K" - right["units"] = "km" - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._combine_lenient(rmetadata) - expected = list(self.none.values()) - self.assertEqual(expected, result) - result = rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) - - def test_strict_units_different_none(self): - left = self.none.copy() - right = self.none.copy() - left["units"] = "K" - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._combine_lenient(rmetadata) - expected = list(self.none.values()) - self.assertEqual(expected, result) - - result = rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) - - def test_attributes(self): - left = self.none.copy() - right = self.none.copy() - ldict = dict(item=sentinel.left) - rdict = dict(item=sentinel.right) - left["attributes"] = ldict - right["attributes"] = rdict - rmetadata = self.cls(**right) - return_value = sentinel.return_value - with mock.patch.object( - self.cls, - "_combine_lenient_attributes", - return_value=return_value, - ) as mocker: - lmetadata = self.cls(**left) - result = lmetadata._combine_lenient(rmetadata) - - expected = self.none.copy() - expected["attributes"] = return_value - expected = list(expected.values()) - self.assertEqual(expected, result) - - self.assertEqual(1, mocker.call_count) - args, kwargs = mocker.call_args - expected = (ldict, rdict) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) - - def test_attributes_non_mapping_different(self): - left = self.none.copy() - right = self.none.copy() - ldict = dict(item=sentinel.left) - rdict = sentinel.right - left["attributes"] = 
ldict - right["attributes"] = rdict - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - expected = list(self.none.copy().values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) - - def test_attributes_non_mapping_different_none(self): - left = self.none.copy() - right = self.none.copy() - ldict = dict(item=sentinel.left) - left["attributes"] = ldict - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._combine_lenient(rmetadata) - expected = self.none.copy() - expected["attributes"] = ldict - expected = list(expected.values()) - self.assertEqual(expected, result) - - result = rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) - - def test_names(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - expected = list(left.values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) - - def test_names_different(self): - dummy = sentinel.dummy - left = self.none.copy() - right = self.none.copy() - left.update(self.names) - right["standard_name"] = dummy - right["long_name"] = dummy - right["var_name"] = dummy - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - expected = list(self.none.copy().values()) - self.assertEqual(expected, lmetadata._combine_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._combine_lenient(lmetadata)) - - def test_names_different_none(self): - left = self.none.copy() - right = self.none.copy() - left.update(self.names) - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._combine_lenient(rmetadata) - expected = list(left.values()) - self.assertEqual(expected, result) - - result = rmetadata._combine_lenient(lmetadata) - self.assertEqual(expected, result) - 
- -class Test__combine_lenient_attributes(tests.IrisTest): - def setUp(self): - self.values = OrderedDict( - one="one", - two="two", - three=np.int16(123), - four=np.arange(10), - five=ma.arange(10), - ) - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy - - def test_same(self): - left = self.values.copy() - right = self.values.copy() - - result = self.metadata._combine_lenient_attributes(left, right) - expected = left - self.assertDictEqual(expected, result) - - result = self.metadata._combine_lenient_attributes(right, left) - self.assertDictEqual(expected, result) - - def test_different(self): - left = self.values.copy() - right = self.values.copy() - left["two"] = left["four"] = self.dummy - - result = self.metadata._combine_lenient_attributes(left, right) - expected = self.values.copy() - for key in ["two", "four"]: - del expected[key] - self.assertDictEqual(expected, result) - - result = self.metadata._combine_lenient_attributes(right, left) - self.assertDictEqual(expected, result) - - def test_different_none(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = left["five"] = None - - result = self.metadata._combine_lenient_attributes(left, right) - expected = self.values.copy() - for key in ["one", "three", "five"]: - del expected[key] - self.assertDictEqual(expected, result) - - result = self.metadata._combine_lenient_attributes(right, left) - self.assertDictEqual(expected, result) - - def test_extra(self): - left = self.values.copy() - right = self.values.copy() - left["extra_left"] = "extra_left" - right["extra_right"] = "extra_right" - - result = self.metadata._combine_lenient_attributes(left, right) - expected = self.values.copy() - expected["extra_left"] = left["extra_left"] - expected["extra_right"] = right["extra_right"] - self.assertDictEqual(expected, result) - - result = self.metadata._combine_lenient_attributes(right, left) - 
self.assertDictEqual(expected, result) - - -class Test__combine_strict_attributes(tests.IrisTest): - def setUp(self): - self.values = OrderedDict( - one="one", - two="two", - three=np.int32(123), - four=np.arange(10), - five=ma.arange(10), - ) - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy - - def test_same(self): - left = self.values.copy() - right = self.values.copy() - - result = self.metadata._combine_strict_attributes(left, right) - expected = left - self.assertDictEqual(expected, result) - - result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) - - def test_different(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = self.dummy - - result = self.metadata._combine_strict_attributes(left, right) - expected = self.values.copy() - for key in ["one", "three"]: - del expected[key] - self.assertDictEqual(expected, result) - - result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) - - def test_different_none(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = left["five"] = None - - result = self.metadata._combine_strict_attributes(left, right) - expected = self.values.copy() - for key in ["one", "three", "five"]: - del expected[key] - self.assertDictEqual(expected, result) - - result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) - - def test_extra(self): - left = self.values.copy() - right = self.values.copy() - left["extra_left"] = "extra_left" - right["extra_right"] = "extra_right" - - result = self.metadata._combine_strict_attributes(left, right) - expected = self.values.copy() - self.assertDictEqual(expected, result) - - result = self.metadata._combine_strict_attributes(right, left) - self.assertDictEqual(expected, result) - - -class 
Test__compare_lenient(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() - self.names = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - ) - - def test_name_same(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) - - # mocker not called for "units" nor "var_name" members. - expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) - - def test_name_same_lenient_false__long_name_different(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - right["long_name"] = sentinel.dummy - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) - - # mocker not called for "units" nor "var_name" members. - expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) - - def test_name_same_lenient_true__var_name_different(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - right["var_name"] = sentinel.dummy - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) - - # mocker not called for "units" nor "var_name" members. 
- expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) - - def test_name_different(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - right["standard_name"] = None - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - with mock.patch.object(self.cls, "_is_attributes") as mocker: - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) - - self.assertEqual(0, mocker.call_count) - - def test_strict_units(self): - left = self.none.copy() - left.update(self.names) - left["units"] = "K" - right = left.copy() - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) - - # mocker not called for "units" nor "var_name" members. - expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) - - def test_strict_units_different(self): - left = self.none.copy() - left.update(self.names) - left["units"] = "K" - right = left.copy() - right["units"] = "m" - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - with mock.patch.object( - self.cls, "_is_attributes", return_value=False - ) as mocker: - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) - - # mocker not called for "units" nor "var_name" members. 
- expected = (len(self.cls._fields) - 2) * 2 - self.assertEqual(expected, mocker.call_count) - - def test_attributes(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - ldict = dict(item=sentinel.left) - rdict = dict(item=sentinel.right) - left["attributes"] = ldict - right["attributes"] = rdict - rmetadata = self.cls(**right) - with mock.patch.object( - self.cls, - "_compare_lenient_attributes", - return_value=True, - ) as mocker: - lmetadata = self.cls(**left) - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) - - self.assertEqual(2, mocker.call_count) - expected = [((ldict, rdict),), ((rdict, ldict),)] - self.assertEqual(expected, mocker.call_args_list) - - def test_attributes_non_mapping_different(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - ldict = dict(item=sentinel.left) - rdict = sentinel.right - left["attributes"] = ldict - right["attributes"] = rdict - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - self.assertFalse(lmetadata._compare_lenient(rmetadata)) - self.assertFalse(rmetadata._compare_lenient(lmetadata)) - - def test_attributes_non_mapping_different_none(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - ldict = dict(item=sentinel.left) - left["attributes"] = ldict - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._compare_lenient(lmetadata)) - - def test_names(self): - left = self.none.copy() - left.update(self.names) - left["long_name"] = None - right = self.none.copy() - right["long_name"] = left["standard_name"] - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - self.assertTrue(lmetadata._compare_lenient(rmetadata)) - self.assertTrue(rmetadata._combine_lenient(lmetadata)) - - -class Test__compare_lenient_attributes(tests.IrisTest): - def setUp(self): - 
self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, - three=np.int16(123), - four=np.arange(10), - five=ma.arange(5), - ) - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy - - def test_same(self): - left = self.values.copy() - right = self.values.copy() - - self.assertTrue(self.metadata._compare_lenient_attributes(left, right)) - self.assertTrue(self.metadata._compare_lenient_attributes(right, left)) - - def test_different(self): - left = self.values.copy() - right = self.values.copy() - left["two"] = left["four"] = self.dummy - - self.assertFalse( - self.metadata._compare_lenient_attributes(left, right) - ) - self.assertFalse( - self.metadata._compare_lenient_attributes(right, left) - ) - - def test_different_none(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = left["five"] = None - - self.assertFalse( - self.metadata._compare_lenient_attributes(left, right) - ) - self.assertFalse( - self.metadata._compare_lenient_attributes(right, left) - ) - - def test_extra(self): - left = self.values.copy() - right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right - - self.assertTrue(self.metadata._compare_lenient_attributes(left, right)) - self.assertTrue(self.metadata._compare_lenient_attributes(right, left)) - - -class Test__compare_strict_attributes(tests.IrisTest): - def setUp(self): - self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, - three=np.int16(123), - four=np.arange(10), - five=ma.arange(5), - ) - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy - - def test_same(self): - left = self.values.copy() - right = self.values.copy() - - self.assertTrue(self.metadata._compare_strict_attributes(left, right)) - self.assertTrue(self.metadata._compare_strict_attributes(right, left)) - - def 
test_different(self): - left = self.values.copy() - right = self.values.copy() - left["two"] = left["four"] = self.dummy - - self.assertFalse(self.metadata._compare_strict_attributes(left, right)) - self.assertFalse(self.metadata._compare_strict_attributes(right, left)) - - def test_different_none(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = left["five"] = None - - self.assertFalse(self.metadata._compare_strict_attributes(left, right)) - self.assertFalse(self.metadata._compare_strict_attributes(right, left)) - - def test_extra(self): - left = self.values.copy() - right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right - - self.assertFalse(self.metadata._compare_strict_attributes(left, right)) - self.assertFalse(self.metadata._compare_strict_attributes(right, left)) - - -class Test__difference(tests.IrisTest): - def setUp(self): - self.kwargs = dict( - standard_name="standard_name", - long_name="long_name", - var_name="var_name", - units="units", - attributes=dict(one=sentinel.one, two=sentinel.two), - ) - self.cls = BaseMetadata - self.metadata = self.cls(**self.kwargs) - - def test_lenient(self): - return_value = sentinel._difference_lenient - other = sentinel.other - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ) as mlenient: - with mock.patch.object( - self.cls, "_difference_lenient", return_value=return_value - ) as mdifference: - result = self.metadata._difference(other) - - self.assertEqual(1, mlenient.call_count) - (arg,), kwargs = mlenient.call_args - self.assertEqual(self.metadata.difference, arg) - self.assertEqual(dict(), kwargs) - - self.assertEqual(return_value, result) - self.assertEqual(1, mdifference.call_count) - (arg,), kwargs = mdifference.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_strict(self): - dummy = sentinel.dummy - values = self.kwargs.copy() - 
values["long_name"] = dummy - values["units"] = dummy - other = self.cls(**values) - method = "_difference_strict_attributes" - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - with mock.patch.object( - self.cls, method, return_value=None - ) as mdifference: - result = self.metadata._difference(other) - - expected = [ - (self.kwargs[field], dummy) if values[field] == dummy else None - for field in self.cls._fields - ] - self.assertEqual(expected, result) - self.assertEqual(1, mdifference.call_count) - args, kwargs = mdifference.call_args - expected = (self.kwargs["attributes"], values["attributes"]) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) - - with mock.patch.object( - self.cls, method, return_value=None - ) as mdifference: - result = other._difference(self.metadata) - - expected = [ - (dummy, self.kwargs[field]) if values[field] == dummy else None - for field in self.cls._fields - ] - self.assertEqual(expected, result) - self.assertEqual(1, mdifference.call_count) - args, kwargs = mdifference.call_args - expected = (self.kwargs["attributes"], values["attributes"]) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) - - -class Test__difference_lenient(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - self.none = self.cls(*(None,) * len(self.cls._fields))._asdict() - self.names = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - ) - - def test_strict_units(self): - left = self.none.copy() - left["units"] = "km" - right = left.copy() - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - expected = list(self.none.values()) - self.assertEqual(expected, lmetadata._difference_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._difference_lenient(lmetadata)) - - def test_strict_units_different(self): - left = self.none.copy() - right = self.none.copy() - lunits, runits = "m", "km" - left["units"] = lunits - 
right["units"] = runits - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._difference_lenient(rmetadata) - expected = self.none.copy() - expected["units"] = (lunits, runits) - expected = list(expected.values()) - self.assertEqual(expected, result) - - result = rmetadata._difference_lenient(lmetadata) - expected = self.none.copy() - expected["units"] = (runits, lunits) - expected = list(expected.values()) - self.assertEqual(expected, result) - - def test_strict_units_different_none(self): - left = self.none.copy() - right = self.none.copy() - lunits, runits = "m", None - left["units"] = lunits - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._difference_lenient(rmetadata) - expected = self.none.copy() - expected["units"] = (lunits, runits) - expected = list(expected.values()) - - self.assertEqual(expected, result) - result = rmetadata._difference_lenient(lmetadata) - expected = self.none.copy() - expected["units"] = (runits, lunits) - expected = list(expected.values()) - self.assertEqual(expected, result) - - def test_attributes(self): - left = self.none.copy() - right = self.none.copy() - ldict = dict(item=sentinel.left) - rdict = dict(item=sentinel.right) - left["attributes"] = ldict - right["attributes"] = rdict - rmetadata = self.cls(**right) - return_value = sentinel.return_value - with mock.patch.object( - self.cls, - "_difference_lenient_attributes", - return_value=return_value, - ) as mocker: - lmetadata = self.cls(**left) - result = lmetadata._difference_lenient(rmetadata) - - expected = self.none.copy() - expected["attributes"] = return_value - expected = list(expected.values()) - self.assertEqual(expected, result) - - self.assertEqual(1, mocker.call_count) - args, kwargs = mocker.call_args - expected = (ldict, rdict) - self.assertEqual(expected, args) - self.assertEqual(dict(), kwargs) - - def test_attributes_non_mapping_different(self): - left = self.none.copy() - right = 
self.none.copy() - ldict = dict(item=sentinel.left) - rdict = sentinel.right - left["attributes"] = ldict - right["attributes"] = rdict - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._difference_lenient(rmetadata) - expected = self.none.copy() - expected["attributes"] = (ldict, rdict) - expected = list(expected.values()) - self.assertEqual(expected, result) - - result = rmetadata._difference_lenient(lmetadata) - expected = self.none.copy() - expected["attributes"] = (rdict, ldict) - expected = list(expected.values()) - self.assertEqual(expected, result) - - def test_attributes_non_mapping_different_none(self): - left = self.none.copy() - right = self.none.copy() - ldict = dict(item=sentinel.left) - left["attributes"] = ldict - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._difference_lenient(rmetadata) - expected = list(self.none.copy().values()) - self.assertEqual(expected, result) - - result = rmetadata._difference_lenient(lmetadata) - self.assertEqual(expected, result) - - def test_names(self): - left = self.none.copy() - left.update(self.names) - right = left.copy() - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - expected = list(self.none.values()) - self.assertEqual(expected, lmetadata._difference_lenient(rmetadata)) - self.assertEqual(expected, rmetadata._difference_lenient(lmetadata)) - - def test_names_different(self): - dummy = sentinel.dummy - left = self.none.copy() - right = self.none.copy() - left.update(self.names) - right["standard_name"] = dummy - right["long_name"] = dummy - right["var_name"] = dummy - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._difference_lenient(rmetadata) - expected = self.none.copy() - expected["standard_name"] = ( - left["standard_name"], - right["standard_name"], - ) - expected["long_name"] = (left["long_name"], right["long_name"]) - expected["var_name"] = (left["var_name"], 
right["var_name"]) - expected = list(expected.values()) - self.assertEqual(expected, result) - - result = rmetadata._difference_lenient(lmetadata) - expected = self.none.copy() - expected["standard_name"] = ( - right["standard_name"], - left["standard_name"], - ) - expected["long_name"] = (right["long_name"], left["long_name"]) - expected["var_name"] = (right["var_name"], left["var_name"]) - expected = list(expected.values()) - self.assertEqual(expected, result) - - def test_names_different_none(self): - left = self.none.copy() - right = self.none.copy() - left.update(self.names) - lmetadata = self.cls(**left) - rmetadata = self.cls(**right) - - result = lmetadata._difference_lenient(rmetadata) - expected = list(self.none.values()) - self.assertEqual(expected, result) - - result = rmetadata._difference_lenient(lmetadata) - self.assertEqual(expected, result) - - -class Test__difference_lenient_attributes(tests.IrisTest): - def setUp(self): - self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, - three=np.float64(3.14), - four=np.arange(10, dtype=np.float64), - five=ma.arange(10, dtype=np.int16), - ) - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy - - def test_same(self): - left = self.values.copy() - right = self.values.copy() - - result = self.metadata._difference_lenient_attributes(left, right) - self.assertIsNone(result) - - result = self.metadata._difference_lenient_attributes(right, left) - self.assertIsNone(result) - - def test_different(self): - left = self.values.copy() - right = self.values.copy() - left["two"] = left["four"] = self.dummy - - result = self.metadata._difference_lenient_attributes(left, right) - for key in ["one", "three", "five"]: - del left[key] - del right[key] - expected_left, expected_right = (left, right) - result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) - - result 
= self.metadata._difference_lenient_attributes(right, left) - result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) - - def test_different_none(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = left["five"] = None - - result = self.metadata._difference_lenient_attributes(left, right) - for key in ["two", "four"]: - del left[key] - del right[key] - expected_left, expected_right = (left, right) - result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) - - result = self.metadata._difference_lenient_attributes(right, left) - result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) - - def test_extra(self): - left = self.values.copy() - right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right - result = self.metadata._difference_lenient_attributes(left, right) - self.assertIsNone(result) - - result = self.metadata._difference_lenient_attributes(right, left) - self.assertIsNone(result) - - -class Test__difference_strict_attributes(tests.IrisTest): - def setUp(self): - self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, - three=np.int32(123), - four=np.arange(10), - five=ma.arange(10), - ) - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.dummy = sentinel.dummy - - def test_same(self): - left = self.values.copy() - right = self.values.copy() - - result = self.metadata._difference_strict_attributes(left, right) - self.assertIsNone(result) - result = self.metadata._difference_strict_attributes(right, left) - self.assertIsNone(result) - - def test_different(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = left["five"] = 
self.dummy - - result = self.metadata._difference_strict_attributes(left, right) - expected_left = left.copy() - expected_right = right.copy() - for key in ["two", "four"]: - del expected_left[key] - del expected_right[key] - result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) - - result = self.metadata._difference_strict_attributes(right, left) - result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) - - def test_different_none(self): - left = self.values.copy() - right = self.values.copy() - left["one"] = left["three"] = left["five"] = None - - result = self.metadata._difference_strict_attributes(left, right) - expected_left = left.copy() - expected_right = right.copy() - for key in ["two", "four"]: - del expected_left[key] - del expected_right[key] - result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) - - result = self.metadata._difference_strict_attributes(right, left) - result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) - - def test_extra(self): - left = self.values.copy() - right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right - - result = self.metadata._difference_strict_attributes(left, right) - expected_left = dict(extra_left=left["extra_left"]) - expected_right = dict(extra_right=right["extra_right"]) - result_left, result_right = result - self.assertDictEqual(expected_left, result_left) - self.assertDictEqual(expected_right, result_right) - - result = self.metadata._difference_strict_attributes(right, left) - result_left, result_right = result - self.assertDictEqual(expected_right, result_left) - self.assertDictEqual(expected_left, result_right) - - 
-class Test__is_attributes(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - self.metadata = self.cls(*(None,) * len(self.cls._fields)) - self.field = "attributes" - - def test_field(self): - self.assertTrue(self.metadata._is_attributes(self.field, {}, {})) - - def test_field_not_attributes(self): - self.assertFalse(self.metadata._is_attributes(None, {}, {})) - - def test_left_not_mapping(self): - self.assertFalse(self.metadata._is_attributes(self.field, None, {})) - - def test_right_not_mapping(self): - self.assertFalse(self.metadata._is_attributes(self.field, {}, None)) - - -class Test_combine(tests.IrisTest): - def setUp(self): - kwargs = dict( - standard_name="standard_name", - long_name="long_name", - var_name="var_name", - units="units", - attributes="attributes", - ) - self.cls = BaseMetadata - self.metadata = self.cls(**kwargs) - self.mock_kwargs = OrderedDict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - ) - - def test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_cannot_combine_non_class(self): - emsg = "Cannot combine" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.combine(None) - - def test_cannot_combine_different_class(self): - other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) - emsg = "Cannot combine" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.combine(other) - - def test_lenient_default(self): - return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_combine", return_value=return_value - ) as mocker: - result = self.metadata.combine(self.metadata) - - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args 
- self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_true(self): - return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_combine", return_value=return_value - ) as mcombine: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.combine(self.metadata, lenient=True) - - self.assertEqual(1, mcontext.call_count) - (arg,), kwargs = mcontext.call_args - self.assertEqual(_qualname(self.cls.combine), arg) - self.assertEqual(dict(), kwargs) - - self.assertEqual(result._asdict(), self.mock_kwargs) - self.assertEqual(1, mcombine.call_count) - (arg,), kwargs = mcombine.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_false(self): - return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_combine", return_value=return_value - ) as mcombine: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.combine(self.metadata, lenient=False) - - self.assertEqual(1, mcontext.call_count) - args, kwargs = mcontext.call_args - self.assertEqual((), args) - self.assertEqual({_qualname(self.cls.combine): False}, kwargs) - - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mcombine.call_count) - (arg,), kwargs = mcombine.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - -class Test_difference(tests.IrisTest): - def setUp(self): - kwargs = dict( - standard_name="standard_name", - long_name="long_name", - var_name="var_name", - units="units", - attributes="attributes", - ) - self.cls = BaseMetadata - self.metadata = self.cls(**kwargs) - self.mock_kwargs = OrderedDict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - ) - - def test_lenient_service(self): - qualname_difference = 
_qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_cannot_differ_non_class(self): - emsg = "Cannot differ" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.difference(None) - - def test_cannot_differ_different_class(self): - other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) - emsg = "Cannot differ" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.difference(other) - - def test_lenient_default(self): - return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_difference", return_value=return_value - ) as mocker: - result = self.metadata.difference(self.metadata) - - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_true(self): - return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_difference", return_value=return_value - ) as mdifference: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.difference(self.metadata, lenient=True) - - self.assertEqual(1, mcontext.call_count) - (arg,), kwargs = mcontext.call_args - self.assertEqual(_qualname(self.cls.difference), arg) - self.assertEqual(dict(), kwargs) - - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mdifference.call_count) - (arg,), kwargs = mdifference.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_false(self): - return_value = self.mock_kwargs.values() - with mock.patch.object( - self.cls, "_difference", return_value=return_value - ) as mdifference: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.difference(self.metadata, lenient=False) - - 
self.assertEqual(mcontext.call_count, 1) - args, kwargs = mcontext.call_args - self.assertEqual((), args) - self.assertEqual({_qualname(self.cls.difference): False}, kwargs) - - self.assertEqual(self.mock_kwargs, result._asdict()) - self.assertEqual(1, mdifference.call_count) - (arg,), kwargs = mdifference.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - -class Test_equal(tests.IrisTest): - def setUp(self): - kwargs = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - ) - self.cls = BaseMetadata - self.metadata = self.cls(**kwargs) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue((_LENIENT[self.cls.equal])) - - def test_cannot_compare_non_class(self): - emsg = "Cannot compare" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.equal(None) - - def test_cannot_compare_different_class(self): - other = CubeMetadata(*(None,) * len(CubeMetadata._fields)) - emsg = "Cannot compare" - with self.assertRaisesRegex(TypeError, emsg): - self.metadata.equal(other) - - def test_lenient_default(self): - return_value = sentinel.return_value - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as mocker: - result = self.metadata.equal(self.metadata) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_true(self): - return_value = sentinel.return_value - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as m__eq__: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.equal(self.metadata, lenient=True) - - 
self.assertEqual(return_value, result) - self.assertEqual(1, mcontext.call_count) - (arg,), kwargs = mcontext.call_args - self.assertEqual(_qualname(self.cls.equal), arg) - self.assertEqual(dict(), kwargs) - - self.assertEqual(1, m__eq__.call_count) - (arg,), kwargs = m__eq__.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - def test_lenient_false(self): - return_value = sentinel.return_value - with mock.patch.object( - self.cls, "__eq__", return_value=return_value - ) as m__eq__: - with mock.patch.object(_LENIENT, "context") as mcontext: - result = self.metadata.equal(self.metadata, lenient=False) - - self.assertEqual(1, mcontext.call_count) - args, kwargs = mcontext.call_args - self.assertEqual((), args) - self.assertEqual({_qualname(self.cls.equal): False}, kwargs) - - self.assertEqual(return_value, result) - self.assertEqual(1, m__eq__.call_count) - (arg,), kwargs = m__eq__.call_args - self.assertEqual(id(self.metadata), id(arg)) - self.assertEqual(dict(), kwargs) - - -class Test_name(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - self.default = self.cls.DEFAULT_NAME - - @staticmethod - def _make(standard_name=None, long_name=None, var_name=None): - return BaseMetadata( - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=None, - attributes=None, - ) - - def test_standard_name(self): - token = "standard_name" - metadata = self._make(standard_name=token) - - result = metadata.name() - self.assertEqual(token, result) - result = metadata.name(token=True) - self.assertEqual(token, result) - - def test_standard_name__invalid_token(self): - token = "nope nope" - metadata = self._make(standard_name=token) - - result = metadata.name() - self.assertEqual(token, result) - result = metadata.name(token=True) - self.assertEqual(self.default, result) - - def test_long_name(self): - token = "long_name" - metadata = self._make(long_name=token) - - result = metadata.name() - 
self.assertEqual(token, result) - result = metadata.name(token=True) - self.assertEqual(token, result) - - def test_long_name__invalid_token(self): - token = "nope nope" - metadata = self._make(long_name=token) - - result = metadata.name() - self.assertEqual(token, result) - result = metadata.name(token=True) - self.assertEqual(self.default, result) - - def test_var_name(self): - token = "var_name" - metadata = self._make(var_name=token) - - result = metadata.name() - self.assertEqual(token, result) - result = metadata.name(token=True) - self.assertEqual(token, result) - - def test_var_name__invalid_token(self): - token = "nope nope" - metadata = self._make(var_name=token) - - result = metadata.name() - self.assertEqual(token, result) - result = metadata.name(token=True) - self.assertEqual(self.default, result) - - def test_default(self): - metadata = self._make() - - result = metadata.name() - self.assertEqual(self.default, result) - result = metadata.name(token=True) - self.assertEqual(self.default, result) - - def test_default__invalid_token(self): - token = "nope nope" - metadata = self._make() - - result = metadata.name(default=token) - self.assertEqual(token, result) - - emsg = "Cannot retrieve a valid name token" - with self.assertRaisesRegex(ValueError, emsg): - metadata.name(default=token, token=True) - - -class Test_token(tests.IrisTest): - def setUp(self): - self.cls = BaseMetadata - - def test_passthru_None(self): - result = self.cls.token(None) - self.assertIsNone(result) - - def test_fail_leading_underscore(self): - result = self.cls.token("_nope") - self.assertIsNone(result) - - def test_fail_leading_dot(self): - result = self.cls.token(".nope") - self.assertIsNone(result) - - def test_fail_leading_plus(self): - result = self.cls.token("+nope") - self.assertIsNone(result) - - def test_fail_leading_at(self): - result = self.cls.token("@nope") - self.assertIsNone(result) - - def test_fail_space(self): - result = self.cls.token("nope nope") - 
self.assertIsNone(result) - - def test_fail_colon(self): - result = self.cls.token("nope:") - self.assertIsNone(result) - - def test_pass_simple(self): - token = "simple" - result = self.cls.token(token) - self.assertEqual(token, result) - - def test_pass_leading_digit(self): - token = "123simple" - result = self.cls.token(token) - self.assertEqual(token, result) - - def test_pass_mixture(self): - token = "S.imple@one+two_3" - result = self.cls.token(token) - self.assertEqual(token, result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py b/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py deleted file mode 100644 index a434651206..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_CellMeasureMetadata.py +++ /dev/null @@ -1,661 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.metadata.CellMeasureMetadata`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import BaseMetadata, CellMeasureMetadata - - -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.measure = mock.sentinel.measure - self.cls = CellMeasureMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - measure=self.measure, - ) - fmt = ( - "CellMeasureMetadata(standard_name={!r}, long_name={!r}, " - "var_name={!r}, units={!r}, attributes={!r}, measure={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.measure, - ) - self.assertEqual(expected, repr(metadata)) - - def test__fields(self): - expected = ( - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "measure", - ) - self.assertEqual(self.cls._fields, expected) - - def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) - - -class Test___eq__(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - measure=sentinel.measure, - ) - self.dummy = sentinel.dummy - self.cls = CellMeasureMetadata - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - 
self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value - metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_measure_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_measure(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = self.dummy - 
rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_measure(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_measure_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = CellMeasureMetadata - self.one = self.cls(1, 1, 1, 1, 1, 1) - 
self.two = self.cls(1, 1, 1, 2, 1, 1) - self.none = self.cls(1, 1, 1, None, 1, 1) - self.attributes = self.cls(1, 1, 1, 1, 10, 1) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes(self): - result = self.one < self.attributes - self.assertFalse(result) - result = self.attributes < self.one - self.assertFalse(result) - - -class Test_combine(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - measure=sentinel.measure, - ) - self.dummy = sentinel.dummy - self.cls = CellMeasureMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) - - def test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, 
"combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_measure_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = None - rmetadata = self.cls(**right) - expected = right.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["units"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different_measure(self): - lmetadata = self.cls(**self.values) - 
right = self.values.copy() - right["measure"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["measure"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_measure(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["measure"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - 
self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_measure_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["measure"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - measure=sentinel.measure, - ) - self.dummy = sentinel.dummy - self.cls = CellMeasureMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) - - def test_lenient_service(self): - qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) - - 
self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_measure_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["measure"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["measure"] = (sentinel.measure, None) - rexpected = deepcopy(self.none)._asdict() - rexpected["measure"] = (None, sentinel.measure) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["units"] = (left["units"], right["units"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["units"] = lexpected["units"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, 
rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different_measure(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["measure"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["measure"] = (left["measure"], right["measure"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["measure"] = lexpected["measure"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_measure(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["measure"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["measure"] = (left["measure"], right["measure"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["measure"] = 
lexpected["measure"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_measure_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["measure"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["measure"] = (left["measure"], right["measure"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["measure"] = lexpected["measure"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - -class Test_equal(tests.IrisTest): - def setUp(self): - self.cls = CellMeasureMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - 
def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py b/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py deleted file mode 100644 index e3b7486012..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_CoordMetadata.py +++ /dev/null @@ -1,722 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.metadata.CoordMetadata`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import BaseMetadata, CoordMetadata - - -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.coord_system = mock.sentinel.coord_system - self.climatological = mock.sentinel.climatological - self.cls = CoordMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - coord_system=self.coord_system, - climatological=self.climatological, - ) - fmt = ( - "CoordMetadata(standard_name={!r}, long_name={!r}, " - "var_name={!r}, units={!r}, attributes={!r}, coord_system={!r}, " - "climatological={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.coord_system, - self.climatological, - ) - self.assertEqual(expected, repr(metadata)) - - def test__fields(self): - expected = ( - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "coord_system", - "climatological", - ) - self.assertEqual(self.cls._fields, expected) - - def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) - - -class Test___eq__(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - coord_system=sentinel.coord_system, - climatological=sentinel.climatological, - ) - self.dummy = sentinel.dummy - self.cls = CoordMetadata - - def test_wraps_docstring(self): - 
self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value - metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - 
self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - 
right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = CoordMetadata - self.one = self.cls(1, 1, 1, 1, 1, 1, 1) - self.two = self.cls(1, 1, 1, 2, 1, 1, 1) - self.none = self.cls(1, 1, 1, None, 1, 1, 1) - self.attributes_cs = self.cls(1, 1, 1, 1, 10, 10, 1) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes_coord_system(self): - result = self.one < self.attributes_cs - self.assertFalse(result) - result = self.attributes_cs < self.one - self.assertFalse(result) - - -class Test_combine(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - coord_system=sentinel.coord_system, - climatological=sentinel.climatological, - ) - self.dummy = sentinel.dummy - self.cls = CoordMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) - - def test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", 
return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = right.copy() - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertTrue( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertTrue( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - 
lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["units"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - 
"iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - coord_system=sentinel.coord_system, - climatological=sentinel.climatological, - ) - self.dummy = sentinel.dummy - self.cls = CoordMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) - - def test_lenient_service(self): - qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = 
sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - member_value = getattr(lmetadata, member) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (member_value, None) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = (None, member_value) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - 
): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["units"] = (left["units"], right["units"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["units"] = lexpected["units"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = 
(left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - 
rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - -class Test_equal(tests.IrisTest): - def setUp(self): - self.cls = CoordMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py b/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py deleted file mode 100644 index 848431565b..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_CubeMetadata.py +++ /dev/null @@ -1,829 +0,0 @@ 
-# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.metadata.CubeMetadata`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import BaseMetadata, CubeMetadata - - -def _make_metadata( - standard_name=None, - long_name=None, - var_name=None, - attributes=None, - force_mapping=True, -): - if force_mapping: - if attributes is None: - attributes = {} - else: - attributes = dict(STASH=attributes) - - return CubeMetadata( - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=None, - attributes=attributes, - cell_methods=None, - ) - - -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.cell_methods = mock.sentinel.cell_methods - self.cls = CubeMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - cell_methods=self.cell_methods, - ) - fmt = ( - "CubeMetadata(standard_name={!r}, long_name={!r}, var_name={!r}, " - "units={!r}, attributes={!r}, cell_methods={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.cell_methods, - ) - self.assertEqual(expected, repr(metadata)) - - def test__fields(self): - expected = ( - "standard_name", - "long_name", - 
"var_name", - "units", - "attributes", - "cell_methods", - ) - self.assertEqual(self.cls._fields, expected) - - def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) - - -class Test___eq__(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - # Must be a mapping. - attributes=dict(), - cell_methods=sentinel.cell_methods, - ) - self.dummy = sentinel.dummy - self.cls = CubeMetadata - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value - metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_cell_methods_none(self): - lmetadata 
= self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def 
test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_measure_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = CubeMetadata - self.one = self.cls(1, 1, 1, 1, 1, 1) - self.two = self.cls(1, 1, 1, 2, 1, 1) - self.none = self.cls(1, 1, 1, None, 1, 1) - self.attributes_cm = self.cls(1, 1, 1, 1, 10, 10) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes_cell_methods(self): - result = self.one < self.attributes_cm - self.assertFalse(result) - result = self.attributes_cm < self.one - self.assertFalse(result) - - -class Test_combine(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - cell_methods=sentinel.cell_methods, - ) - self.dummy = sentinel.dummy - self.cls = CubeMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, 
self.cls.combine.__doc__ - ) - - def test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_cell_methods_none(self): - lmetadata = self.cls(**self.values) - right = 
self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - expected = right.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["units"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["cell_methods"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, 
rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_cell_methods(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["cell_methods"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_cell_methods_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["cell_methods"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - cell_methods=sentinel.cell_methods, - ) - self.dummy = sentinel.dummy - self.cls = CubeMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) - - def test_lenient_service(self): - 
qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_cell_methods_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] 
= (sentinel.cell_methods, None) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = (None, sentinel.cell_methods) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["units"] = (left["units"], right["units"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["units"] = lexpected["units"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different_cell_methods(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] = ( - left["cell_methods"], - right["cell_methods"], - ) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = lexpected["cell_methods"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = 
self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_cell_methods(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["cell_methods"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] = ( - left["cell_methods"], - right["cell_methods"], - ) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = lexpected["cell_methods"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_measure_none(self): - left = self.values.copy() - lmetadata = 
self.cls(**left) - right = self.values.copy() - right["cell_methods"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["cell_methods"] = ( - left["cell_methods"], - right["cell_methods"], - ) - rexpected = deepcopy(self.none)._asdict() - rexpected["cell_methods"] = lexpected["cell_methods"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - -class Test_equal(tests.IrisTest): - def setUp(self): - self.cls = CubeMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -class Test_name(tests.IrisTest): - def setUp(self): - self.default = 
CubeMetadata.DEFAULT_NAME - - def test_standard_name(self): - token = "standard_name" - metadata = _make_metadata(standard_name=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, token) - - def test_standard_name__invalid_token(self): - token = "nope nope" - metadata = _make_metadata(standard_name=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, self.default) - - def test_long_name(self): - token = "long_name" - metadata = _make_metadata(long_name=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, token) - - def test_long_name__invalid_token(self): - token = "nope nope" - metadata = _make_metadata(long_name=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, self.default) - - def test_var_name(self): - token = "var_name" - metadata = _make_metadata(var_name=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, token) - - def test_var_name__invalid_token(self): - token = "nope nope" - metadata = _make_metadata(var_name=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, self.default) - - def test_attributes(self): - token = "stash" - metadata = _make_metadata(attributes=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, token) - - def test_attributes__invalid_token(self): - token = "nope nope" - metadata = _make_metadata(attributes=token) - result = metadata.name() - self.assertEqual(result, token) - result = metadata.name(token=True) - self.assertEqual(result, self.default) - - def 
test_attributes__non_mapping(self): - metadata = _make_metadata(force_mapping=False) - self.assertIsNone(metadata.attributes) - emsg = "Invalid 'CubeMetadata.attributes' member, must be a mapping." - with self.assertRaisesRegex(AttributeError, emsg): - _ = metadata.name() - - def test_default(self): - metadata = _make_metadata() - result = metadata.name() - self.assertEqual(result, self.default) - result = metadata.name(token=True) - self.assertEqual(result, self.default) - - def test_default__invalid_token(self): - token = "nope nope" - metadata = _make_metadata() - result = metadata.name(default=token) - self.assertEqual(result, token) - emsg = "Cannot retrieve a valid name token" - with self.assertRaisesRegex(ValueError, emsg): - _ = metadata.name(default=token, token=True) - - -class Test__names(tests.IrisTest): - def test_standard_name(self): - token = "standard_name" - metadata = _make_metadata(standard_name=token) - expected = (token, None, None, None) - result = metadata._names - self.assertEqual(expected, result) - - def test_long_name(self): - token = "long_name" - metadata = _make_metadata(long_name=token) - expected = (None, token, None, None) - result = metadata._names - self.assertEqual(expected, result) - - def test_var_name(self): - token = "var_name" - metadata = _make_metadata(var_name=token) - expected = (None, None, token, None) - result = metadata._names - self.assertEqual(expected, result) - - def test_attributes(self): - token = "stash" - metadata = _make_metadata(attributes=token) - expected = (None, None, None, token) - result = metadata._names - self.assertEqual(expected, result) - - def test_attributes__non_mapping(self): - metadata = _make_metadata(force_mapping=False) - self.assertIsNone(metadata.attributes) - emsg = "Invalid 'CubeMetadata.attributes' member, must be a mapping." 
- with self.assertRaisesRegex(AttributeError, emsg): - _ = metadata._names - - def test_None(self): - metadata = _make_metadata() - expected = (None, None, None, None) - result = metadata._names - self.assertEqual(expected, result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py b/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py deleted file mode 100644 index efcbde8965..0000000000 --- a/lib/iris/tests/unit/common/metadata/test__NamedTupleMeta.py +++ /dev/null @@ -1,148 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.metadata._NamedTupleMeta`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from abc import abstractmethod - -from iris.common.metadata import _NamedTupleMeta - - -class Test(tests.IrisTest): - @staticmethod - def names(classes): - return [cls.__name__ for cls in classes] - - @staticmethod - def emsg_generate(members): - if isinstance(members, str): - members = (members,) - emsg = ".* missing {} required positional argument{}: {}" - args = ", ".join([f"{member!r}" for member in members[:-1]]) - count = len(members) - if count == 1: - args += f"{members[-1]!r}" - elif count == 2: - args += f" and {members[-1]!r}" - else: - args += f", and {members[-1]!r}" - plural = "s" if count > 1 else "" - return emsg.format(len(members), plural, args) - - def test__no_bases_with_abstract_members_property(self): - class Metadata(metaclass=_NamedTupleMeta): - @property - @abstractmethod - def _members(self): - pass - - expected = ["object"] - self.assertEqual(self.names(Metadata.__bases__), expected) - expected = ["Metadata", "object"] - self.assertEqual(self.names(Metadata.__mro__), 
expected) - emsg = ( - "Can't instantiate abstract class .* with abstract " - "methods _members" - ) - with self.assertRaisesRegex(TypeError, emsg): - _ = Metadata() - - def test__no_bases_single_member(self): - member = "arg_one" - - class Metadata(metaclass=_NamedTupleMeta): - _members = member - - expected = ["MetadataNamedtuple"] - self.assertEqual(self.names(Metadata.__bases__), expected) - expected = ["Metadata", "MetadataNamedtuple", "tuple", "object"] - self.assertEqual(self.names(Metadata.__mro__), expected) - emsg = self.emsg_generate(member) - with self.assertRaisesRegex(TypeError, emsg): - _ = Metadata() - metadata = Metadata(1) - self.assertEqual(metadata._fields, (member,)) - self.assertEqual(metadata.arg_one, 1) - - def test__no_bases_multiple_members(self): - members = ("arg_one", "arg_two") - - class Metadata(metaclass=_NamedTupleMeta): - _members = members - - expected = ["MetadataNamedtuple"] - self.assertEqual(self.names(Metadata.__bases__), expected) - expected = ["Metadata", "MetadataNamedtuple", "tuple", "object"] - self.assertEqual(self.names(Metadata.__mro__), expected) - emsg = self.emsg_generate(members) - with self.assertRaisesRegex(TypeError, emsg): - _ = Metadata() - values = range(len(members)) - metadata = Metadata(*values) - self.assertEqual(metadata._fields, members) - expected = dict(zip(members, values)) - self.assertEqual(metadata._asdict(), expected) - - def test__multiple_bases_multiple_members(self): - members_parent = ("arg_one", "arg_two") - members_child = ("arg_three", "arg_four") - - class MetadataParent(metaclass=_NamedTupleMeta): - _members = members_parent - - class MetadataChild(MetadataParent): - _members = members_child - - # Check the parent class... 
- expected = ["MetadataParentNamedtuple"] - self.assertEqual(self.names(MetadataParent.__bases__), expected) - expected = [ - "MetadataParent", - "MetadataParentNamedtuple", - "tuple", - "object", - ] - self.assertEqual(self.names(MetadataParent.__mro__), expected) - emsg = self.emsg_generate(members_parent) - with self.assertRaisesRegex(TypeError, emsg): - _ = MetadataParent() - values_parent = range(len(members_parent)) - metadata_parent = MetadataParent(*values_parent) - self.assertEqual(metadata_parent._fields, members_parent) - expected = dict(zip(members_parent, values_parent)) - self.assertEqual(metadata_parent._asdict(), expected) - - # Check the dependant child class... - expected = ["MetadataChildNamedtuple", "MetadataParent"] - self.assertEqual(self.names(MetadataChild.__bases__), expected) - expected = [ - "MetadataChild", - "MetadataChildNamedtuple", - "MetadataParent", - "MetadataParentNamedtuple", - "tuple", - "object", - ] - self.assertEqual(self.names(MetadataChild.__mro__), expected) - emsg = self.emsg_generate((*members_parent, *members_child)) - with self.assertRaisesRegex(TypeError, emsg): - _ = MetadataChild() - fields_child = (*members_parent, *members_child) - values_child = range(len(fields_child)) - metadata_child = MetadataChild(*values_child) - self.assertEqual(metadata_child._fields, fields_child) - expected = dict(zip(fields_child, values_child)) - self.assertEqual(metadata_child._asdict(), expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_hexdigest.py b/lib/iris/tests/unit/common/metadata/test_hexdigest.py deleted file mode 100644 index 949002af89..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_hexdigest.py +++ /dev/null @@ -1,179 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Unit tests for the :func:`iris.common.metadata.hexdigest`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np -import numpy.ma as ma -from xxhash import xxh64, xxh64_hexdigest - -from iris.common.metadata import hexdigest - - -class TestBytesLikeObject(tests.IrisTest): - def setUp(self): - self.hasher = xxh64() - self.hasher.reset() - - @staticmethod - def _ndarray(value): - parts = str((value.shape, xxh64_hexdigest(value))) - return xxh64_hexdigest(parts) - - @staticmethod - def _masked(value): - parts = str( - ( - value.shape, - xxh64_hexdigest(value.data), - xxh64_hexdigest(value.mask), - ) - ) - return xxh64_hexdigest(parts) - - def test_string(self): - value = "hello world" - self.hasher.update(value) - expected = self.hasher.hexdigest() - self.assertEqual(expected, hexdigest(value)) - - def test_numpy_array_int(self): - value = np.arange(10, dtype=np.int_) - expected = self._ndarray(value) - self.assertEqual(expected, hexdigest(value)) - - def test_numpy_array_float(self): - value = np.arange(10, dtype=np.float64) - expected = self._ndarray(value) - self.assertEqual(expected, hexdigest(value)) - - def test_numpy_array_float_not_int(self): - ivalue = np.arange(10, dtype=np.int_) - fvalue = np.arange(10, dtype=np.float64) - expected = self._ndarray(ivalue) - self.assertNotEqual(expected, hexdigest(fvalue)) - - def test_numpy_array_reshape(self): - value = np.arange(10).reshape(2, 5) - expected = self._ndarray(value) - self.assertEqual(expected, hexdigest(value)) - - def test_numpy_array_reshape_not_flat(self): - value = np.arange(10).reshape(2, 5) - expected = self._ndarray(value) - self.assertNotEqual(expected, hexdigest(value.flatten())) - - def test_masked_array_int(self): - value = ma.arange(10, dtype=np.int_) - expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) - - value[0] = 
ma.masked - self.assertNotEqual(expected, hexdigest(value)) - expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) - - def test_masked_array_float(self): - value = ma.arange(10, dtype=np.float64) - expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) - - value[0] = ma.masked - self.assertNotEqual(expected, hexdigest(value)) - expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) - - def test_masked_array_float_not_int(self): - ivalue = ma.arange(10, dtype=np.int_) - fvalue = ma.arange(10, dtype=np.float64) - expected = self._masked(ivalue) - self.assertNotEqual(expected, hexdigest(fvalue)) - - def test_masked_array_not_array(self): - value = ma.arange(10) - expected = self._masked(value) - self.assertNotEqual(expected, hexdigest(value.data)) - - def test_masked_array_reshape(self): - value = ma.arange(10).reshape(2, 5) - expected = self._masked(value) - self.assertEqual(expected, hexdigest(value)) - - def test_masked_array_reshape_not_flat(self): - value = ma.arange(10).reshape(2, 5) - expected = self._masked(value) - self.assertNotEqual(expected, hexdigest(value.flatten())) - - -class TestNotBytesLikeObject(tests.IrisTest): - def _expected(self, value): - parts = str((type(value), value)) - return xxh64_hexdigest(parts) - - def test_int(self): - value = 123 - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_numpy_int(self): - value = int(123) - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_float(self): - value = 123.4 - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_numpy_float(self): - value = float(123.4) - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_list(self): - value = [1, 2, 3] - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_tuple(self): - value = (1, 2, 3) 
- expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_dict(self): - value = dict(one=1, two=2, three=3) - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_sentinel(self): - value = mock.sentinel.value - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_instance(self): - class Dummy: - pass - - value = Dummy() - expected = self._expected(value) - self.assertEqual(expected, hexdigest(value)) - - def test_int_not_str(self): - value = 123 - expected = self._expected(value) - self.assertNotEqual(expected, hexdigest(str(value))) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py b/lib/iris/tests/unit/common/metadata/test_metadata_filter.py deleted file mode 100644 index 9c5987f235..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_metadata_filter.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.common.metadata_filter`. - -""" - -import numpy as np - -from iris.common.metadata import ( - CoordMetadata, - DimCoordMetadata, - metadata_filter, -) -from iris.coords import AuxCoord - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests - -Mock = tests.mock.Mock - - -class Test_standard(tests.IrisTest): - def test_instances_non_iterable(self): - item = Mock() - item.name.return_value = "one" - result = metadata_filter(item, item="one") - self.assertEqual(1, len(result)) - self.assertIn(item, result) - - def test_name(self): - name_one = Mock() - name_one.name.return_value = "one" - name_two = Mock() - name_two.name.return_value = "two" - input_list = [name_one, name_two] - result = metadata_filter(input_list, item="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) - - def test_item(self): - coord = Mock(__class__=AuxCoord) - mock = Mock() - input_list = [coord, mock] - result = metadata_filter(input_list, item=coord) - self.assertIn(coord, result) - self.assertNotIn(mock, result) - - def test_item_metadata(self): - coord = Mock(metadata=CoordMetadata) - dim_coord = Mock(metadata=DimCoordMetadata) - input_list = [coord, dim_coord] - result = metadata_filter(input_list, item=coord) - self.assertIn(coord, result) - self.assertNotIn(dim_coord, result) - - def test_standard_name(self): - name_one = Mock(standard_name="one") - name_two = Mock(standard_name="two") - input_list = [name_one, name_two] - result = metadata_filter(input_list, standard_name="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) - - def test_long_name(self): - name_one = Mock(long_name="one") - name_two = Mock(long_name="two") - input_list = [name_one, name_two] - result = metadata_filter(input_list, long_name="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) - - def test_var_name(self): - name_one = Mock(var_name="one") - name_two = Mock(var_name="two") - input_list = [name_one, name_two] - result = metadata_filter(input_list, var_name="one") - self.assertIn(name_one, result) - self.assertNotIn(name_two, result) - - def test_attributes(self): - # Confirm that this can handle attrib dicts including np arrays. 
- attrib_one_two = Mock( - attributes={"one": np.arange(1), "two": np.arange(2)} - ) - attrib_three_four = Mock( - attributes={"three": np.arange(3), "four": np.arange(4)} - ) - input_list = [attrib_one_two, attrib_three_four] - result = metadata_filter( - input_list, attributes=attrib_one_two.attributes - ) - self.assertIn(attrib_one_two, result) - self.assertNotIn(attrib_three_four, result) - - def test_invalid_attributes(self): - attrib_one = Mock(attributes={"one": 1}) - input_list = [attrib_one] - self.assertRaisesRegex( - ValueError, - ".*expecting a dictionary.*", - metadata_filter, - input_list, - attributes="one", - ) - - def test_axis__by_guess(self): - # see https://docs.python.org/3/library/unittest.mock.html#deleting-attributes - axis_lon = Mock(standard_name="longitude") - del axis_lon.axis - axis_lat = Mock(standard_name="latitude") - del axis_lat.axis - input_list = [axis_lon, axis_lat] - result = metadata_filter(input_list, axis="x") - self.assertIn(axis_lon, result) - self.assertNotIn(axis_lat, result) - - def test_axis__by_member(self): - axis_x = Mock(axis="x") - axis_y = Mock(axis="y") - input_list = [axis_x, axis_y] - result = metadata_filter(input_list, axis="x") - self.assertEqual(1, len(result)) - self.assertIn(axis_x, result) - - def test_multiple_args(self): - coord_one = Mock(__class__=AuxCoord, long_name="one") - coord_two = Mock(__class__=AuxCoord, long_name="two") - input_list = [coord_one, coord_two] - result = metadata_filter(input_list, item=coord_one, long_name="one") - self.assertIn(coord_one, result) - self.assertNotIn(coord_two, result) diff --git a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py b/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py deleted file mode 100644 index 5ecf0b90d5..0000000000 --- a/lib/iris/tests/unit/common/metadata/test_metadata_manager_factory.py +++ /dev/null @@ -1,203 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under 
the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.common.metadata.metadata_manager_factory`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import pickle -import unittest.mock as mock - -from cf_units import Unit - -from iris.common.metadata import ( - AncillaryVariableMetadata, - BaseMetadata, - CellMeasureMetadata, - CoordMetadata, - CubeMetadata, - metadata_manager_factory, -) -from iris.experimental.ugrid.metadata import ConnectivityMetadata - -BASES = [ - AncillaryVariableMetadata, - BaseMetadata, - CellMeasureMetadata, - ConnectivityMetadata, - CoordMetadata, - CubeMetadata, -] - - -class Test_factory(tests.IrisTest): - def test__kwargs_invalid(self): - emsg = "Invalid 'BaseMetadata' field parameters, got 'wibble'." - with self.assertRaisesRegex(ValueError, emsg): - metadata_manager_factory(BaseMetadata, wibble="nope") - - -class Test_instance(tests.IrisTest): - def setUp(self): - self.bases = BASES - - def test__namespace(self): - namespace = [ - "DEFAULT_NAME", - "__init__", - "__eq__", - "__getstate__", - "__ne__", - "__reduce__", - "__repr__", - "__setstate__", - "fields", - "name", - "token", - "values", - ] - for base in self.bases: - metadata = metadata_manager_factory(base) - for name in namespace: - self.assertTrue(hasattr(metadata, name)) - if base is CubeMetadata: - self.assertTrue(hasattr(metadata, "_names")) - self.assertIs(metadata.cls, base) - - def test__kwargs_default(self): - for base in self.bases: - kwargs = dict(zip(base._fields, [None] * len(base._fields))) - metadata = metadata_manager_factory(base) - self.assertEqual(metadata.values._asdict(), kwargs) - - def test__kwargs(self): - for base in self.bases: - kwargs = dict(zip(base._fields, range(len(base._fields)))) - metadata = metadata_manager_factory(base, **kwargs) - 
self.assertEqual(metadata.values._asdict(), kwargs) - - -class Test_instance___eq__(tests.IrisTest): - def setUp(self): - self.metadata = metadata_manager_factory(BaseMetadata) - - def test__not_implemented(self): - self.assertNotEqual(self.metadata, 1) - - def test__not_is_cls(self): - base = BaseMetadata - other = metadata_manager_factory(base) - self.assertIs(other.cls, base) - other.cls = CoordMetadata - self.assertNotEqual(self.metadata, other) - - def test__not_values(self): - standard_name = mock.sentinel.standard_name - other = metadata_manager_factory( - BaseMetadata, standard_name=standard_name - ) - self.assertEqual(other.standard_name, standard_name) - self.assertIsNone(other.long_name) - self.assertIsNone(other.var_name) - self.assertIsNone(other.units) - self.assertIsNone(other.attributes) - self.assertNotEqual(self.metadata, other) - - def test__same_default(self): - other = metadata_manager_factory(BaseMetadata) - self.assertEqual(self.metadata, other) - - def test__same(self): - kwargs = dict( - standard_name=1, long_name=2, var_name=3, units=4, attributes=5 - ) - metadata = metadata_manager_factory(BaseMetadata, **kwargs) - other = metadata_manager_factory(BaseMetadata, **kwargs) - self.assertEqual(metadata.values._asdict(), kwargs) - self.assertEqual(metadata, other) - - -class Test_instance____repr__(tests.IrisTest): - def setUp(self): - self.metadata = metadata_manager_factory(BaseMetadata) - - def test(self): - standard_name = mock.sentinel.standard_name - long_name = mock.sentinel.long_name - var_name = mock.sentinel.var_name - units = mock.sentinel.units - attributes = mock.sentinel.attributes - values = (standard_name, long_name, var_name, units, attributes) - - for field, value in zip(self.metadata.fields, values): - setattr(self.metadata, field, value) - - result = repr(self.metadata) - expected = ( - "MetadataManager(standard_name={!r}, long_name={!r}, var_name={!r}, " - "units={!r}, attributes={!r})" - ) - self.assertEqual(result, 
expected.format(*values)) - - -class Test_instance__pickle(tests.IrisTest): - def setUp(self): - self.standard_name = "standard_name" - self.long_name = "long_name" - self.var_name = "var_name" - self.units = Unit("1") - self.attributes = dict(hello="world") - values = ( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - ) - kwargs = dict(zip(BaseMetadata._fields, values)) - self.metadata = metadata_manager_factory(BaseMetadata, **kwargs) - - def test_pickle(self): - for protocol in range(pickle.HIGHEST_PROTOCOL + 1): - with self.temp_filename(suffix=".pkl") as fname: - with open(fname, "wb") as fo: - pickle.dump(self.metadata, fo, protocol=protocol) - with open(fname, "rb") as fi: - metadata = pickle.load(fi) - self.assertEqual(metadata, self.metadata) - - -class Test_instance__fields(tests.IrisTest): - def setUp(self): - self.bases = BASES - - def test(self): - for base in self.bases: - fields = base._fields - metadata = metadata_manager_factory(base) - self.assertEqual(metadata.fields, fields) - for field in fields: - hasattr(metadata, field) - - -class Test_instance__values(tests.IrisTest): - def setUp(self): - self.bases = BASES - - def test(self): - for base in self.bases: - metadata = metadata_manager_factory(base) - result = metadata.values - self.assertIsInstance(result, base) - self.assertEqual(result._fields, base._fields) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/mixin/__init__.py b/lib/iris/tests/unit/common/mixin/__init__.py deleted file mode 100644 index 493e140626..0000000000 --- a/lib/iris/tests/unit/common/mixin/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.common.mixin` package.""" diff --git a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py b/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py deleted file mode 100644 index 88a88be567..0000000000 --- a/lib/iris/tests/unit/common/mixin/test_CFVariableMixin.py +++ /dev/null @@ -1,380 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.mixin.CFVariableMixin`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from collections import OrderedDict, namedtuple -from unittest import mock - -from cf_units import Unit - -from iris.common.metadata import ( - AncillaryVariableMetadata, - BaseMetadata, - CellMeasureMetadata, - CoordMetadata, - CubeMetadata, -) -from iris.common.mixin import CFVariableMixin, LimitedAttributeDict -from iris.experimental.ugrid.metadata import ConnectivityMetadata - - -class Test__getter(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.metadata = mock.sentinel.metadata - - metadata = mock.MagicMock( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - values=self.metadata, - ) - - self.item = CFVariableMixin() - self.item._metadata_manager = metadata - - def test_standard_name(self): - self.assertEqual(self.item.standard_name, self.standard_name) - - def test_long_name(self): - self.assertEqual(self.item.long_name, self.long_name) - - def test_var_name(self): - self.assertEqual(self.item.var_name, self.var_name) - 
- def test_units(self): - self.assertEqual(self.item.units, self.units) - - def test_attributes(self): - self.assertEqual(self.item.attributes, self.attributes) - - def test_metadata(self): - self.assertEqual(self.item.metadata, self.metadata) - - -class Test__setter(tests.IrisTest): - def setUp(self): - metadata = mock.MagicMock( - standard_name=mock.sentinel.standard_name, - long_name=mock.sentinel.long_name, - var_name=mock.sentinel.var_name, - units=mock.sentinel.units, - attributes=mock.sentinel.attributes, - token=lambda name: name, - ) - - self.item = CFVariableMixin() - self.item._metadata_manager = metadata - - def test_standard_name__valid(self): - standard_name = "air_temperature" - self.item.standard_name = standard_name - self.assertEqual( - self.item._metadata_manager.standard_name, standard_name - ) - - def test_standard_name__none(self): - self.item.standard_name = None - self.assertIsNone(self.item._metadata_manager.standard_name) - - def test_standard_name__invalid(self): - standard_name = "nope nope" - emsg = f"{standard_name!r} is not a valid standard_name" - with self.assertRaisesRegex(ValueError, emsg): - self.item.standard_name = standard_name - - def test_long_name(self): - long_name = "long_name" - self.item.long_name = long_name - self.assertEqual(self.item._metadata_manager.long_name, long_name) - - def test_long_name__none(self): - self.item.long_name = None - self.assertIsNone(self.item._metadata_manager.long_name) - - def test_var_name(self): - var_name = "var_name" - self.item.var_name = var_name - self.assertEqual(self.item._metadata_manager.var_name, var_name) - - def test_var_name__none(self): - self.item.var_name = None - self.assertIsNone(self.item._metadata_manager.var_name) - - def test_var_name__invalid_token(self): - var_name = "nope nope" - self.item._metadata_manager.token = lambda name: None - emsg = f"{var_name!r} is not a valid NetCDF variable name." 
- with self.assertRaisesRegex(ValueError, emsg): - self.item.var_name = var_name - - def test_attributes(self): - attributes = dict(hello="world") - self.item.attributes = attributes - self.assertEqual(self.item._metadata_manager.attributes, attributes) - self.assertIsNot(self.item._metadata_manager.attributes, attributes) - self.assertIsInstance( - self.item._metadata_manager.attributes, LimitedAttributeDict - ) - - def test_attributes__none(self): - self.item.attributes = None - self.assertEqual(self.item._metadata_manager.attributes, {}) - - -class Test__metadata_setter(tests.IrisTest): - def setUp(self): - class Metadata: - def __init__(self): - self.cls = BaseMetadata - self.fields = BaseMetadata._fields - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.token = lambda name: name - - @property - def values(self): - return dict( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - ) - - metadata = Metadata() - self.item = CFVariableMixin() - self.item._metadata_manager = metadata - self.attributes = dict(one=1, two=2, three=3) - self.args = OrderedDict( - standard_name="air_temperature", - long_name="long_name", - var_name="var_name", - units=Unit("1"), - attributes=self.attributes, - ) - - def test_dict(self): - metadata = dict(**self.args) - self.item.metadata = metadata - self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot( - self.item._metadata_manager.attributes, self.attributes - ) - - def test_dict__partial(self): - metadata = dict(**self.args) - del metadata["standard_name"] - self.item.metadata = metadata - metadata["standard_name"] = mock.sentinel.standard_name - self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot( - 
self.item._metadata_manager.attributes, self.attributes - ) - - def test_ordereddict(self): - metadata = self.args - self.item.metadata = metadata - self.assertEqual(self.item._metadata_manager.values, metadata) - self.assertIsNot( - self.item._metadata_manager.attributes, self.attributes - ) - - def test_ordereddict__partial(self): - metadata = self.args - del metadata["long_name"] - del metadata["units"] - self.item.metadata = metadata - metadata["long_name"] = mock.sentinel.long_name - metadata["units"] = mock.sentinel.units - self.assertEqual(self.item._metadata_manager.values, metadata) - - def test_tuple(self): - metadata = tuple(self.args.values()) - self.item.metadata = metadata - result = tuple( - [ - getattr(self.item._metadata_manager, field) - for field in self.item._metadata_manager.fields - ] - ) - self.assertEqual(result, metadata) - self.assertIsNot( - self.item._metadata_manager.attributes, self.attributes - ) - - def test_tuple__missing(self): - metadata = list(self.args.values()) - del metadata[2] - emsg = "Invalid .* metadata, require .* to be specified." 
- with self.assertRaisesRegex(TypeError, emsg): - self.item.metadata = tuple(metadata) - - def test_namedtuple(self): - Metadata = namedtuple( - "Metadata", - ("standard_name", "long_name", "var_name", "units", "attributes"), - ) - metadata = Metadata(**self.args) - self.item.metadata = metadata - self.assertEqual( - self.item._metadata_manager.values, metadata._asdict() - ) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) - - def test_namedtuple__partial(self): - Metadata = namedtuple( - "Metadata", ("standard_name", "long_name", "var_name", "units") - ) - del self.args["attributes"] - metadata = Metadata(**self.args) - self.item.metadata = metadata - expected = metadata._asdict() - expected.update(dict(attributes=mock.sentinel.attributes)) - self.assertEqual(self.item._metadata_manager.values, expected) - - def test_class_ancillaryvariablemetadata(self): - metadata = AncillaryVariableMetadata(**self.args) - self.item.metadata = metadata - self.assertEqual( - self.item._metadata_manager.values, metadata._asdict() - ) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) - - def test_class_basemetadata(self): - metadata = BaseMetadata(**self.args) - self.item.metadata = metadata - self.assertEqual( - self.item._metadata_manager.values, metadata._asdict() - ) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) - - def test_class_cellmeasuremetadata(self): - self.args["measure"] = None - metadata = CellMeasureMetadata(**self.args) - self.item.metadata = metadata - expected = metadata._asdict() - del expected["measure"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) - - def test_class_connectivitymetadata(self): - self.args.update( - dict(cf_role=None, start_index=None, location_axis=None) - ) - metadata = ConnectivityMetadata(**self.args) - self.item.metadata = 
metadata - expected = metadata._asdict() - del expected["cf_role"] - del expected["start_index"] - del expected["location_axis"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) - - def test_class_coordmetadata(self): - self.args.update(dict(coord_system=None, climatological=False)) - metadata = CoordMetadata(**self.args) - self.item.metadata = metadata - expected = metadata._asdict() - del expected["coord_system"] - del expected["climatological"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) - - def test_class_cubemetadata(self): - self.args["cell_methods"] = None - metadata = CubeMetadata(**self.args) - self.item.metadata = metadata - expected = metadata._asdict() - del expected["cell_methods"] - self.assertEqual(self.item._metadata_manager.values, expected) - self.assertIsNot( - self.item._metadata_manager.attributes, metadata.attributes - ) - - -class Test_rename(tests.IrisTest): - def setUp(self): - metadata = mock.MagicMock( - standard_name=mock.sentinel.standard_name, - long_name=mock.sentinel.long_name, - var_name=mock.sentinel.var_name, - units=mock.sentinel.units, - attributes=mock.sentinel.attributes, - values=mock.sentinel.metadata, - token=lambda name: name, - ) - - self.item = CFVariableMixin() - self.item._metadata_manager = metadata - - def test__valid_standard_name(self): - name = "air_temperature" - self.item.rename(name) - self.assertEqual(self.item._metadata_manager.standard_name, name) - self.assertIsNone(self.item._metadata_manager.long_name) - self.assertIsNone(self.item._metadata_manager.var_name) - - def test__invalid_standard_name(self): - name = "nope nope" - self.item.rename(name) - self.assertIsNone(self.item._metadata_manager.standard_name) - self.assertEqual(self.item._metadata_manager.long_name, name) - 
self.assertIsNone(self.item._metadata_manager.var_name) - - -class Test_name(tests.IrisTest): - def setUp(self): - class Metadata: - def __init__(self, name): - self.name = mock.MagicMock(return_value=name) - - self.name = mock.sentinel.name - metadata = Metadata(self.name) - - self.item = CFVariableMixin() - self.item._metadata_manager = metadata - - def test(self): - default = mock.sentinel.default - token = mock.sentinel.token - result = self.item.name(default=default, token=token) - self.assertEqual(result, self.name) - self.item._metadata_manager.name.assert_called_with( - default=default, token=token - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py b/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py deleted file mode 100644 index 32c78b6697..0000000000 --- a/lib/iris/tests/unit/common/mixin/test_LimitedAttributeDict.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.mixin.LimitedAttributeDict`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.common.mixin import LimitedAttributeDict - - -class Test(tests.IrisTest): - def setUp(self): - self.forbidden_keys = LimitedAttributeDict._forbidden_keys - self.emsg = "{!r} is not a permitted attribute" - - def test__invalid_keys(self): - for key in self.forbidden_keys: - with self.assertRaisesRegex(ValueError, self.emsg.format(key)): - _ = LimitedAttributeDict(**{key: None}) - - def test___eq__(self): - values = dict( - one=mock.sentinel.one, - two=mock.sentinel.two, - three=mock.sentinel.three, - ) - left = LimitedAttributeDict(**values) - right = LimitedAttributeDict(**values) - self.assertEqual(left, right) - self.assertEqual(left, values) - - def test___eq___numpy(self): - values = dict(one=np.arange(1), two=np.arange(2), three=np.arange(3)) - left = LimitedAttributeDict(**values) - right = LimitedAttributeDict(**values) - self.assertEqual(left, right) - self.assertEqual(left, values) - values = dict(one=np.arange(1), two=np.arange(1), three=np.arange(1)) - left = LimitedAttributeDict(dict(one=0, two=0, three=0)) - right = LimitedAttributeDict(**values) - self.assertEqual(left, right) - self.assertEqual(left, values) - - def test___setitem__(self): - for key in self.forbidden_keys: - item = LimitedAttributeDict() - with self.assertRaisesRegex(ValueError, self.emsg.format(key)): - item[key] = None - - def test_update(self): - for key in self.forbidden_keys: - item = LimitedAttributeDict() - with self.assertRaisesRegex(ValueError, self.emsg.format(key)): - other = {key: None} - item.update(other) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py b/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py deleted file mode 100644 index 8fc21f2965..0000000000 --- a/lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright Iris 
contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.common.mixin import _get_valid_standard_name - - -class Test(tests.IrisTest): - def setUp(self): - self.emsg = "'{}' is not a valid standard_name" - - def test_pass_thru_none(self): - name = None - self.assertEqual(_get_valid_standard_name(name), name) - - def test_pass_thru_empty(self): - name = "" - self.assertEqual(_get_valid_standard_name(name), name) - - def test_pass_thru_whitespace(self): - name = " " - self.assertEqual(_get_valid_standard_name(name), name) - - def test_valid_standard_name(self): - name = "air_temperature" - self.assertEqual(_get_valid_standard_name(name), name) - - def test_standard_name_alias(self): - name = "atmosphere_optical_thickness_due_to_pm1_ambient_aerosol" - self.assertEqual(_get_valid_standard_name(name), name) - - def test_invalid_standard_name(self): - name = "not_a_standard_name" - with self.assertRaisesRegex(ValueError, self.emsg.format(name)): - _get_valid_standard_name(name) - - def test_valid_standard_name_valid_modifier(self): - name = "air_temperature standard_error" - self.assertEqual(_get_valid_standard_name(name), name) - - def test_valid_standard_name_valid_modifier_extra_spaces(self): - name = "air_temperature standard_error" - self.assertEqual(_get_valid_standard_name(name), name) - - def test_invalid_standard_name_valid_modifier(self): - name = "not_a_standard_name standard_error" - with self.assertRaisesRegex(ValueError, self.emsg.format(name)): - _get_valid_standard_name(name) - - def test_valid_standard_invalid_name_modifier(self): - name = "air_temperature extra_names standard_error" - with 
self.assertRaisesRegex(ValueError, self.emsg.format(name)): - _get_valid_standard_name(name) - - def test_valid_standard_valid_name_modifier_extra_names(self): - name = "air_temperature standard_error extra words" - with self.assertRaisesRegex(ValueError, self.emsg.format(name)): - _get_valid_standard_name(name) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/common/resolve/__init__.py b/lib/iris/tests/unit/common/resolve/__init__.py deleted file mode 100644 index d0b189e59d..0000000000 --- a/lib/iris/tests/unit/common/resolve/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.common.resolve` package.""" diff --git a/lib/iris/tests/unit/common/resolve/test_Resolve.py b/lib/iris/tests/unit/common/resolve/test_Resolve.py deleted file mode 100644 index 98643c8f10..0000000000 --- a/lib/iris/tests/unit/common/resolve/test_Resolve.py +++ /dev/null @@ -1,4795 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.common.resolve.Resolve`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from collections import namedtuple -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from cf_units import Unit -import numpy as np - -from iris.common.lenient import LENIENT -from iris.common.metadata import CubeMetadata -from iris.common.resolve import ( - Resolve, - _AuxCoverage, - _CategoryItems, - _DimCoverage, - _Item, - _PreparedFactory, - _PreparedItem, - _PreparedMetadata, -) -from iris.coords import DimCoord -from iris.cube import Cube - - -class Test___init__(tests.IrisTest): - def setUp(self): - target = "iris.common.resolve.Resolve.__call__" - self.m_call = mock.MagicMock(return_value=sentinel.return_value) - _ = self.patch(target, new=self.m_call) - - def _assert_members_none(self, resolve): - self.assertIsNone(resolve.lhs_cube_resolved) - self.assertIsNone(resolve.rhs_cube_resolved) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) - self.assertIsNone(resolve.lhs_cube_dim_coverage) - self.assertIsNone(resolve.lhs_cube_aux_coverage) - self.assertIsNone(resolve.rhs_cube_dim_coverage) - self.assertIsNone(resolve.rhs_cube_aux_coverage) - self.assertIsNone(resolve.map_rhs_to_lhs) - self.assertIsNone(resolve.mapping) - self.assertIsNone(resolve.prepared_category) - self.assertIsNone(resolve.prepared_factories) - self.assertIsNone(resolve._broadcast_shape) - - def test_lhs_rhs_default(self): - resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self._assert_members_none(resolve) - self.assertEqual(0, self.m_call.call_count) - - def test_lhs_rhs_provided(self): - m_lhs = sentinel.lhs - m_rhs = sentinel.rhs - resolve = Resolve(lhs=m_lhs, rhs=m_rhs) - # The lhs_cube and rhs_cube are only None due - # to __call__ being mocked. 
See Test___call__ - # for appropriate test coverage. - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self._assert_members_none(resolve) - self.assertEqual(1, self.m_call.call_count) - call_args = mock.call(m_lhs, m_rhs) - self.assertEqual(call_args, self.m_call.call_args) - - -class Test___call__(tests.IrisTest): - def setUp(self): - self.m_lhs = mock.MagicMock(spec=Cube) - self.m_rhs = mock.MagicMock(spec=Cube) - target = "iris.common.resolve.Resolve.{method}" - method = target.format(method="_metadata_resolve") - self.m_metadata_resolve = self.patch(method) - method = target.format(method="_metadata_coverage") - self.m_metadata_coverage = self.patch(method) - method = target.format(method="_metadata_mapping") - self.m_metadata_mapping = self.patch(method) - method = target.format(method="_metadata_prepare") - self.m_metadata_prepare = self.patch(method) - - def test_lhs_not_cube(self): - emsg = "'LHS' argument to be a 'Cube'" - with self.assertRaisesRegex(TypeError, emsg): - _ = Resolve(rhs=self.m_rhs) - - def test_rhs_not_cube(self): - emsg = "'RHS' argument to be a 'Cube'" - with self.assertRaisesRegex(TypeError, emsg): - _ = Resolve(lhs=self.m_lhs) - - def _assert_called_metadata_methods(self): - call_args = mock.call() - self.assertEqual(1, self.m_metadata_resolve.call_count) - self.assertEqual(call_args, self.m_metadata_resolve.call_args) - self.assertEqual(1, self.m_metadata_coverage.call_count) - self.assertEqual(call_args, self.m_metadata_coverage.call_args) - self.assertEqual(1, self.m_metadata_mapping.call_count) - self.assertEqual(call_args, self.m_metadata_mapping.call_args) - self.assertEqual(1, self.m_metadata_prepare.call_count) - self.assertEqual(call_args, self.m_metadata_prepare.call_args) - - def test_map_rhs_to_lhs__less_than(self): - self.m_lhs.ndim = 2 - self.m_rhs.ndim = 1 - resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) - self.assertEqual(self.m_lhs, resolve.lhs_cube) - self.assertEqual(self.m_rhs, 
resolve.rhs_cube) - self.assertTrue(resolve.map_rhs_to_lhs) - self._assert_called_metadata_methods() - - def test_map_rhs_to_lhs__equal(self): - self.m_lhs.ndim = 2 - self.m_rhs.ndim = 2 - resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) - self.assertEqual(self.m_lhs, resolve.lhs_cube) - self.assertEqual(self.m_rhs, resolve.rhs_cube) - self.assertTrue(resolve.map_rhs_to_lhs) - self._assert_called_metadata_methods() - - def test_map_lhs_to_rhs(self): - self.m_lhs.ndim = 2 - self.m_rhs.ndim = 3 - resolve = Resolve(lhs=self.m_lhs, rhs=self.m_rhs) - self.assertEqual(self.m_lhs, resolve.lhs_cube) - self.assertEqual(self.m_rhs, resolve.rhs_cube) - self.assertFalse(resolve.map_rhs_to_lhs) - self._assert_called_metadata_methods() - - -class Test__categorise_items(tests.IrisTest): - def setUp(self): - self.coord_dims = {} - # configure dim coords - coord = mock.Mock(metadata=sentinel.dim_metadata1) - self.dim_coords = [coord] - self.coord_dims[coord] = sentinel.dims1 - # configure aux and scalar coords - self.aux_coords = [] - pairs = [ - (sentinel.aux_metadata2, sentinel.dims2), - (sentinel.aux_metadata3, sentinel.dims3), - (sentinel.scalar_metadata4, None), - (sentinel.scalar_metadata5, None), - (sentinel.scalar_metadata6, None), - ] - for metadata, dims in pairs: - coord = mock.Mock(metadata=metadata) - self.aux_coords.append(coord) - self.coord_dims[coord] = dims - func = lambda coord: self.coord_dims[coord] - self.cube = mock.Mock( - aux_coords=self.aux_coords, - dim_coords=self.dim_coords, - coord_dims=func, - ) - - def test(self): - result = Resolve._categorise_items(self.cube) - self.assertIsInstance(result, _CategoryItems) - self.assertEqual(1, len(result.items_dim)) - # check dim coords - for item in result.items_dim: - self.assertIsInstance(item, _Item) - (coord,) = self.dim_coords - dims = self.coord_dims[coord] - expected = [_Item(metadata=coord.metadata, coord=coord, dims=dims)] - self.assertEqual(expected, result.items_dim) - # check aux coords - 
self.assertEqual(2, len(result.items_aux)) - for item in result.items_aux: - self.assertIsInstance(item, _Item) - expected_aux, expected_scalar = [], [] - for coord in self.aux_coords: - dims = self.coord_dims[coord] - item = _Item(metadata=coord.metadata, coord=coord, dims=dims) - if dims: - expected_aux.append(item) - else: - expected_scalar.append(item) - self.assertEqual(expected_aux, result.items_aux) - # check scalar coords - self.assertEqual(3, len(result.items_scalar)) - for item in result.items_scalar: - self.assertIsInstance(item, _Item) - self.assertEqual(expected_scalar, result.items_scalar) - - -class Test__metadata_resolve(tests.IrisTest): - def setUp(self): - self.target = "iris.common.resolve.Resolve._categorise_items" - self.m_lhs_cube = sentinel.lhs_cube - self.m_rhs_cube = sentinel.rhs_cube - - @staticmethod - def _create_items(pairs): - # this wrapper (hack) is necessary in order to support mocking - # the "name" method (callable) of the metadata, as "name" is already - # part of the mock API - this is always troublesome in mock-world. 
- Wrapper = namedtuple("Wrapper", ("name", "value")) - result = [] - for name, dims in pairs: - metadata = Wrapper(name=lambda: str(name), value=name) - coord = mock.Mock(metadata=metadata) - item = _Item(metadata=metadata, coord=coord, dims=dims) - result.append(item) - return result - - def test_metadata_same(self): - category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) - # configure dim coords - pairs = [(sentinel.dim_metadata1, sentinel.dims1)] - category.items_dim.extend(self._create_items(pairs)) - # configure aux coords - pairs = [ - (sentinel.aux_metadata1, sentinel.dims2), - (sentinel.aux_metadata2, sentinel.dims3), - ] - category.items_aux.extend(self._create_items(pairs)) - # configure scalar coords - pairs = [ - (sentinel.scalar_metadata1, None), - (sentinel.scalar_metadata2, None), - (sentinel.scalar_metadata3, None), - ] - category.items_scalar.extend(self._create_items(pairs)) - - side_effect = (category, category) - mocker = self.patch(self.target, side_effect=side_effect) - - resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) - - # require to explicitly configure cubes - resolve.lhs_cube = self.m_lhs_cube - resolve.rhs_cube = self.m_rhs_cube - resolve._metadata_resolve() - - self.assertEqual(mocker.call_count, 2) - calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] - self.assertEqual(calls, mocker.call_args_list) - - self.assertEqual(category, resolve.lhs_cube_category) - self.assertEqual(category, resolve.rhs_cube_category) - expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) - self.assertEqual(expected, resolve.lhs_cube_category_local) - self.assertEqual(expected, resolve.rhs_cube_category_local) - 
self.assertEqual(category, resolve.category_common) - - def test_metadata_overlap(self): - # configure the lhs cube category - category_lhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - # configure dim coords - pairs = [ - (sentinel.dim_metadata1, sentinel.dims1), - (sentinel.dim_metadata2, sentinel.dims2), - ] - category_lhs.items_dim.extend(self._create_items(pairs)) - # configure aux coords - pairs = [ - (sentinel.aux_metadata1, sentinel.dims3), - (sentinel.aux_metadata2, sentinel.dims4), - ] - category_lhs.items_aux.extend(self._create_items(pairs)) - # configure scalar coords - pairs = [ - (sentinel.scalar_metadata1, None), - (sentinel.scalar_metadata2, None), - ] - category_lhs.items_scalar.extend(self._create_items(pairs)) - - # configure the rhs cube category - category_rhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - # configure dim coords - category_rhs.items_dim.append(category_lhs.items_dim[0]) - pairs = [(sentinel.dim_metadata200, sentinel.dims2)] - category_rhs.items_dim.extend(self._create_items(pairs)) - # configure aux coords - category_rhs.items_aux.append(category_lhs.items_aux[0]) - pairs = [(sentinel.aux_metadata200, sentinel.dims4)] - category_rhs.items_aux.extend(self._create_items(pairs)) - # configure scalar coords - category_rhs.items_scalar.append(category_lhs.items_scalar[0]) - pairs = [(sentinel.scalar_metadata200, None)] - category_rhs.items_scalar.extend(self._create_items(pairs)) - - side_effect = (category_lhs, category_rhs) - mocker = self.patch(self.target, side_effect=side_effect) - - resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) - - # require to explicitly configure cubes - resolve.lhs_cube 
= self.m_lhs_cube - resolve.rhs_cube = self.m_rhs_cube - resolve._metadata_resolve() - - self.assertEqual(2, mocker.call_count) - calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] - self.assertEqual(calls, mocker.call_args_list) - - self.assertEqual(category_lhs, resolve.lhs_cube_category) - self.assertEqual(category_rhs, resolve.rhs_cube_category) - - items_dim = [category_lhs.items_dim[1]] - items_aux = [category_lhs.items_aux[1]] - items_scalar = [category_lhs.items_scalar[1]] - expected = _CategoryItems( - items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar - ) - self.assertEqual(expected, resolve.lhs_cube_category_local) - - items_dim = [category_rhs.items_dim[1]] - items_aux = [category_rhs.items_aux[1]] - items_scalar = [category_rhs.items_scalar[1]] - expected = _CategoryItems( - items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar - ) - self.assertEqual(expected, resolve.rhs_cube_category_local) - - items_dim = [category_lhs.items_dim[0]] - items_aux = [category_lhs.items_aux[0]] - items_scalar = [category_lhs.items_scalar[0]] - expected = _CategoryItems( - items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar - ) - self.assertEqual(expected, resolve.category_common) - - def test_metadata_different(self): - # configure the lhs cube category - category_lhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - # configure dim coords - pairs = [ - (sentinel.dim_metadata1, sentinel.dims1), - (sentinel.dim_metadata2, sentinel.dims2), - ] - category_lhs.items_dim.extend(self._create_items(pairs)) - # configure aux coords - pairs = [ - (sentinel.aux_metadata1, sentinel.dims3), - (sentinel.aux_metadata2, sentinel.dims4), - ] - category_lhs.items_aux.extend(self._create_items(pairs)) - # configure scalar coords - pairs = [ - (sentinel.scalar_metadata1, None), - (sentinel.scalar_metadata2, None), - ] - category_lhs.items_scalar.extend(self._create_items(pairs)) - - # configure the rhs cube 
category - category_rhs = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - # configure dim coords - pairs = [ - (sentinel.dim_metadata100, sentinel.dims1), - (sentinel.dim_metadata200, sentinel.dims2), - ] - category_rhs.items_dim.extend(self._create_items(pairs)) - # configure aux coords - pairs = [ - (sentinel.aux_metadata100, sentinel.dims3), - (sentinel.aux_metadata200, sentinel.dims4), - ] - category_rhs.items_aux.extend(self._create_items(pairs)) - # configure scalar coords - pairs = [ - (sentinel.scalar_metadata100, None), - (sentinel.scalar_metadata200, None), - ] - category_rhs.items_scalar.extend(self._create_items(pairs)) - - side_effect = (category_lhs, category_rhs) - mocker = self.patch(self.target, side_effect=side_effect) - - resolve = Resolve() - self.assertIsNone(resolve.lhs_cube) - self.assertIsNone(resolve.rhs_cube) - self.assertIsNone(resolve.lhs_cube_category) - self.assertIsNone(resolve.rhs_cube_category) - self.assertIsNone(resolve.lhs_cube_category_local) - self.assertIsNone(resolve.rhs_cube_category_local) - self.assertIsNone(resolve.category_common) - - # first require to explicitly lhs/rhs configure cubes - resolve.lhs_cube = self.m_lhs_cube - resolve.rhs_cube = self.m_rhs_cube - resolve._metadata_resolve() - - self.assertEqual(2, mocker.call_count) - calls = [mock.call(self.m_lhs_cube), mock.call(self.m_rhs_cube)] - self.assertEqual(calls, mocker.call_args_list) - - self.assertEqual(category_lhs, resolve.lhs_cube_category) - self.assertEqual(category_rhs, resolve.rhs_cube_category) - self.assertEqual(category_lhs, resolve.lhs_cube_category_local) - self.assertEqual(category_rhs, resolve.rhs_cube_category_local) - expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) - self.assertEqual(expected, resolve.category_common) - - -class Test__dim_coverage(tests.IrisTest): - def setUp(self): - self.ndim = 4 - self.cube = mock.Mock(ndim=self.ndim) - self.items = [] - parts = [ - (sentinel.metadata0, 
sentinel.coord0, (0,)), - (sentinel.metadata1, sentinel.coord1, (1,)), - (sentinel.metadata2, sentinel.coord2, (2,)), - (sentinel.metadata3, sentinel.coord3, (3,)), - ] - column_parts = [x for x in zip(*parts)] - self.metadata, self.coords, self.dims = [list(x) for x in column_parts] - self.dims = [dim for dim, in self.dims] - for metadata, coord, dims in parts: - item = _Item(metadata=metadata, coord=coord, dims=dims) - self.items.append(item) - - def test_coverage_no_local_no_common_all_free(self): - items = [] - common = [] - result = Resolve._dim_coverage(self.cube, items, common) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) - expected = [None] * self.ndim - self.assertEqual(expected, result.metadata) - self.assertEqual(expected, result.coords) - self.assertEqual([], result.dims_common) - self.assertEqual([], result.dims_local) - expected = list(range(self.ndim)) - self.assertEqual(expected, result.dims_free) - - def test_coverage_all_local_no_common_no_free(self): - common = [] - result = Resolve._dim_coverage(self.cube, self.items, common) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual(self.metadata, result.metadata) - self.assertEqual(self.coords, result.coords) - self.assertEqual([], result.dims_common) - self.assertEqual(self.dims, result.dims_local) - self.assertEqual([], result.dims_free) - - def test_coverage_no_local_all_common_no_free(self): - result = Resolve._dim_coverage(self.cube, self.items, self.metadata) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual(self.metadata, result.metadata) - self.assertEqual(self.coords, result.coords) - self.assertEqual(self.dims, result.dims_common) - self.assertEqual([], result.dims_local) - self.assertEqual([], result.dims_free) - - def test_coverage_mixed(self): - common = [self.items[1].metadata, self.items[2].metadata] - self.items.pop(0) - 
self.items.pop(-1) - metadata, coord, dims = sentinel.metadata100, sentinel.coord100, (0,) - self.items.append(_Item(metadata=metadata, coord=coord, dims=dims)) - result = Resolve._dim_coverage(self.cube, self.items, common) - self.assertIsInstance(result, _DimCoverage) - self.assertEqual(self.cube, result.cube) - expected = [ - metadata, - self.items[0].metadata, - self.items[1].metadata, - None, - ] - self.assertEqual(expected, result.metadata) - expected = [coord, self.items[0].coord, self.items[1].coord, None] - self.assertEqual(expected, result.coords) - self.assertEqual([1, 2], result.dims_common) - self.assertEqual([0], result.dims_local) - self.assertEqual([3], result.dims_free) - - -class Test__aux_coverage(tests.IrisTest): - def setUp(self): - self.ndim = 4 - self.cube = mock.Mock(ndim=self.ndim) - # configure aux coords - self.items_aux = [] - aux_parts = [ - (sentinel.aux_metadata0, sentinel.aux_coord0, (0,)), - (sentinel.aux_metadata1, sentinel.aux_coord1, (1,)), - (sentinel.aux_metadata23, sentinel.aux_coord23, (2, 3)), - ] - column_aux_parts = [x for x in zip(*aux_parts)] - self.aux_metadata, self.aux_coords, self.aux_dims = [ - list(x) for x in column_aux_parts - ] - for metadata, coord, dims in aux_parts: - item = _Item(metadata=metadata, coord=coord, dims=dims) - self.items_aux.append(item) - # configure scalar coords - self.items_scalar = [] - scalar_parts = [ - (sentinel.scalar_metadata0, sentinel.scalar_coord0, ()), - (sentinel.scalar_metadata1, sentinel.scalar_coord1, ()), - (sentinel.scalar_metadata2, sentinel.scalar_coord2, ()), - ] - column_scalar_parts = [x for x in zip(*scalar_parts)] - self.scalar_metadata, self.scalar_coords, self.scalar_dims = [ - list(x) for x in column_scalar_parts - ] - for metadata, coord, dims in scalar_parts: - item = _Item(metadata=metadata, coord=coord, dims=dims) - self.items_scalar.append(item) - - def test_coverage_no_local_no_common_all_free(self): - items_aux, items_scalar = [], [] - common_aux, 
common_scalar = [], [] - result = Resolve._aux_coverage( - self.cube, items_aux, items_scalar, common_aux, common_scalar - ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual([], result.common_items_aux) - self.assertEqual([], result.common_items_scalar) - self.assertEqual([], result.local_items_aux) - self.assertEqual([], result.local_items_scalar) - self.assertEqual([], result.dims_common) - self.assertEqual([], result.dims_local) - expected = list(range(self.ndim)) - self.assertEqual(expected, result.dims_free) - - def test_coverage_all_local_no_common_no_free(self): - common_aux, common_scalar = [], [] - result = Resolve._aux_coverage( - self.cube, - self.items_aux, - self.items_scalar, - common_aux, - common_scalar, - ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) - expected = [] - self.assertEqual(expected, result.common_items_aux) - self.assertEqual(expected, result.common_items_scalar) - self.assertEqual(self.items_aux, result.local_items_aux) - self.assertEqual(self.items_scalar, result.local_items_scalar) - self.assertEqual([], result.dims_common) - expected = list(range(self.ndim)) - self.assertEqual(expected, result.dims_local) - self.assertEqual([], result.dims_free) - - def test_coverage_no_local_all_common_no_free(self): - result = Resolve._aux_coverage( - self.cube, - self.items_aux, - self.items_scalar, - self.aux_metadata, - self.scalar_metadata, - ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) - self.assertEqual(self.items_aux, result.common_items_aux) - self.assertEqual(self.items_scalar, result.common_items_scalar) - self.assertEqual([], result.local_items_aux) - self.assertEqual([], result.local_items_scalar) - expected = list(range(self.ndim)) - self.assertEqual(expected, result.dims_common) - self.assertEqual([], result.dims_local) - self.assertEqual([], result.dims_free) - - def 
test_coverage_mixed(self): - common_aux = [self.items_aux[-1].metadata] - common_scalar = [self.items_scalar[1].metadata] - self.items_aux.pop(1) - result = Resolve._aux_coverage( - self.cube, - self.items_aux, - self.items_scalar, - common_aux, - common_scalar, - ) - self.assertIsInstance(result, _AuxCoverage) - self.assertEqual(self.cube, result.cube) - expected = [self.items_aux[-1]] - self.assertEqual(expected, result.common_items_aux) - expected = [self.items_scalar[1]] - self.assertEqual(expected, result.common_items_scalar) - expected = [self.items_aux[0]] - self.assertEqual(expected, result.local_items_aux) - expected = [self.items_scalar[0], self.items_scalar[2]] - self.assertEqual(expected, result.local_items_scalar) - self.assertEqual([2, 3], result.dims_common) - self.assertEqual([0], result.dims_local) - self.assertEqual([1], result.dims_free) - - -class Test__metadata_coverage(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - self.m_lhs_cube = sentinel.lhs_cube - self.resolve.lhs_cube = self.m_lhs_cube - self.m_rhs_cube = sentinel.rhs_cube - self.resolve.rhs_cube = self.m_rhs_cube - self.m_items_dim_metadata = sentinel.items_dim_metadata - self.m_items_aux_metadata = sentinel.items_aux_metadata - self.m_items_scalar_metadata = sentinel.items_scalar_metadata - items_dim = [mock.Mock(metadata=self.m_items_dim_metadata)] - items_aux = [mock.Mock(metadata=self.m_items_aux_metadata)] - items_scalar = [mock.Mock(metadata=self.m_items_scalar_metadata)] - category = _CategoryItems( - items_dim=items_dim, items_aux=items_aux, items_scalar=items_scalar - ) - self.resolve.category_common = category - self.m_items_dim = sentinel.items_dim - self.m_items_aux = sentinel.items_aux - self.m_items_scalar = sentinel.items_scalar - category = _CategoryItems( - items_dim=self.m_items_dim, - items_aux=self.m_items_aux, - items_scalar=self.m_items_scalar, - ) - self.resolve.lhs_cube_category = category - self.resolve.rhs_cube_category = category - target = 
"iris.common.resolve.Resolve._dim_coverage" - self.m_lhs_cube_dim_coverage = sentinel.lhs_cube_dim_coverage - self.m_rhs_cube_dim_coverage = sentinel.rhs_cube_dim_coverage - side_effect = ( - self.m_lhs_cube_dim_coverage, - self.m_rhs_cube_dim_coverage, - ) - self.mocker_dim_coverage = self.patch(target, side_effect=side_effect) - target = "iris.common.resolve.Resolve._aux_coverage" - self.m_lhs_cube_aux_coverage = sentinel.lhs_cube_aux_coverage - self.m_rhs_cube_aux_coverage = sentinel.rhs_cube_aux_coverage - side_effect = ( - self.m_lhs_cube_aux_coverage, - self.m_rhs_cube_aux_coverage, - ) - self.mocker_aux_coverage = self.patch(target, side_effect=side_effect) - - def test(self): - self.resolve._metadata_coverage() - self.assertEqual(2, self.mocker_dim_coverage.call_count) - calls = [ - mock.call( - self.m_lhs_cube, self.m_items_dim, [self.m_items_dim_metadata] - ), - mock.call( - self.m_rhs_cube, self.m_items_dim, [self.m_items_dim_metadata] - ), - ] - self.assertEqual(calls, self.mocker_dim_coverage.call_args_list) - self.assertEqual(2, self.mocker_aux_coverage.call_count) - calls = [ - mock.call( - self.m_lhs_cube, - self.m_items_aux, - self.m_items_scalar, - [self.m_items_aux_metadata], - [self.m_items_scalar_metadata], - ), - mock.call( - self.m_rhs_cube, - self.m_items_aux, - self.m_items_scalar, - [self.m_items_aux_metadata], - [self.m_items_scalar_metadata], - ), - ] - self.assertEqual(calls, self.mocker_aux_coverage.call_args_list) - self.assertEqual( - self.m_lhs_cube_dim_coverage, self.resolve.lhs_cube_dim_coverage - ) - self.assertEqual( - self.m_rhs_cube_dim_coverage, self.resolve.rhs_cube_dim_coverage - ) - self.assertEqual( - self.m_lhs_cube_aux_coverage, self.resolve.lhs_cube_aux_coverage - ) - self.assertEqual( - self.m_rhs_cube_aux_coverage, self.resolve.rhs_cube_aux_coverage - ) - - -class Test__dim_mapping(tests.IrisTest): - def setUp(self): - self.ndim = 3 - Wrapper = namedtuple("Wrapper", ("name",)) - cube = Wrapper(name=lambda: 
sentinel.name) - self.src_coverage = _DimCoverage( - cube=cube, - metadata=[], - coords=None, - dims_common=None, - dims_local=None, - dims_free=None, - ) - self.tgt_coverage = _DimCoverage( - cube=cube, - metadata=[], - coords=None, - dims_common=[], - dims_local=None, - dims_free=None, - ) - self.metadata = [ - sentinel.metadata_0, - sentinel.metadata_1, - sentinel.metadata_2, - ] - self.dummy = [sentinel.dummy_0, sentinel.dummy_1, sentinel.dummy_2] - - def test_no_mapping(self): - self.src_coverage.metadata.extend(self.metadata) - self.tgt_coverage.metadata.extend(self.dummy) - result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) - self.assertEqual(dict(), result) - - def test_full_mapping(self): - self.src_coverage.metadata.extend(self.metadata) - self.tgt_coverage.metadata.extend(self.metadata) - dims_common = list(range(self.ndim)) - self.tgt_coverage.dims_common.extend(dims_common) - result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 0, 1: 1, 2: 2} - self.assertEqual(expected, result) - - def test_transpose_mapping(self): - self.src_coverage.metadata.extend(self.metadata[::-1]) - self.tgt_coverage.metadata.extend(self.metadata) - dims_common = list(range(self.ndim)) - self.tgt_coverage.dims_common.extend(dims_common) - result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 2, 1: 1, 2: 0} - self.assertEqual(expected, result) - - def test_partial_mapping__transposed(self): - self.src_coverage.metadata.extend(self.metadata) - self.metadata[1] = sentinel.nope - self.tgt_coverage.metadata.extend(self.metadata[::-1]) - dims_common = [0, 2] - self.tgt_coverage.dims_common.extend(dims_common) - result = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 2, 2: 0} - self.assertEqual(expected, result) - - def test_bad_metadata_mapping(self): - self.src_coverage.metadata.extend(self.metadata) - self.metadata[0] = sentinel.bad - 
self.tgt_coverage.metadata.extend(self.metadata) - dims_common = [0] - self.tgt_coverage.dims_common.extend(dims_common) - emsg = "Failed to map common dim coordinate metadata" - with self.assertRaisesRegex(ValueError, emsg): - _ = Resolve._dim_mapping(self.src_coverage, self.tgt_coverage) - - -class Test__aux_mapping(tests.IrisTest): - def setUp(self): - self.ndim = 3 - Wrapper = namedtuple("Wrapper", ("name",)) - cube = Wrapper(name=lambda: sentinel.name) - self.src_coverage = _AuxCoverage( - cube=cube, - common_items_aux=[], - common_items_scalar=None, - local_items_aux=None, - local_items_scalar=None, - dims_common=None, - dims_local=None, - dims_free=None, - ) - self.tgt_coverage = _AuxCoverage( - cube=cube, - common_items_aux=[], - common_items_scalar=None, - local_items_aux=None, - local_items_scalar=None, - dims_common=None, - dims_local=None, - dims_free=None, - ) - self.items = [ - _Item( - metadata=sentinel.metadata0, coord=sentinel.coord0, dims=[0] - ), - _Item( - metadata=sentinel.metadata1, coord=sentinel.coord1, dims=[1] - ), - _Item( - metadata=sentinel.metadata2, coord=sentinel.coord2, dims=[2] - ), - ] - - def _copy(self, items): - # Due to a bug in python 3.6.x, performing a deepcopy of a mock.sentinel - # will yield an object that is not equivalent to its parent, so this - # is a work-around until we drop support for python 3.6.x. 
- import sys - - version = sys.version_info - major, minor = version.major, version.minor - result = deepcopy(items) - if major == 3 and minor <= 6: - for i, item in enumerate(items): - result[i] = result[i]._replace(metadata=item.metadata) - return result - - def test_no_mapping(self): - result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - self.assertEqual(dict(), result) - - def test_full_mapping(self): - self.src_coverage.common_items_aux.extend(self.items) - self.tgt_coverage.common_items_aux.extend(self.items) - result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 0, 1: 1, 2: 2} - self.assertEqual(expected, result) - - def test_transpose_mapping(self): - self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) - items[0].dims[0] = 2 - items[2].dims[0] = 0 - self.tgt_coverage.common_items_aux.extend(items) - result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 2, 1: 1, 2: 0} - self.assertEqual(expected, result) - - def test_partial_mapping__transposed(self): - _ = self.items.pop(1) - self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) - items[0].dims[0] = 2 - items[1].dims[0] = 0 - self.tgt_coverage.common_items_aux.extend(items) - result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 2, 2: 0} - self.assertEqual(expected, result) - - def test_mapping__match_multiple_src_metadata(self): - items = self._copy(self.items) - _ = self.items.pop(1) - self.src_coverage.common_items_aux.extend(self.items) - items[1] = items[0] - self.tgt_coverage.common_items_aux.extend(items) - result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 0, 2: 2} - self.assertEqual(expected, result) - - def test_mapping__skip_match_multiple_src_metadata(self): - items = self._copy(self.items) - _ = self.items.pop(1) - self.tgt_coverage.common_items_aux.extend(self.items) - items[1] = 
items[0]._replace(dims=[1]) - self.src_coverage.common_items_aux.extend(items) - result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - expected = {2: 2} - self.assertEqual(expected, result) - - def test_mapping__skip_different_rank(self): - items = self._copy(self.items) - self.src_coverage.common_items_aux.extend(self.items) - items[2] = items[2]._replace(dims=[1, 2]) - self.tgt_coverage.common_items_aux.extend(items) - result = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - expected = {0: 0, 1: 1} - self.assertEqual(expected, result) - - def test_bad_metadata_mapping(self): - self.src_coverage.common_items_aux.extend(self.items) - items = self._copy(self.items) - items[0] = items[0]._replace(metadata=sentinel.bad) - self.tgt_coverage.common_items_aux.extend(items) - emsg = "Failed to map common aux coordinate metadata" - with self.assertRaisesRegex(ValueError, emsg): - _ = Resolve._aux_mapping(self.src_coverage, self.tgt_coverage) - - -class Test_mapped(tests.IrisTest): - def test_mapping_none(self): - resolve = Resolve() - self.assertIsNone(resolve.mapping) - self.assertIsNone(resolve.mapped) - - def test_mapped__src_cube_lhs(self): - resolve = Resolve() - lhs = mock.Mock(ndim=2) - rhs = mock.Mock(ndim=3) - resolve.lhs_cube = lhs - resolve.rhs_cube = rhs - resolve.map_rhs_to_lhs = False - resolve.mapping = {0: 0, 1: 1} - self.assertTrue(resolve.mapped) - - def test_mapped__src_cube_rhs(self): - resolve = Resolve() - lhs = mock.Mock(ndim=3) - rhs = mock.Mock(ndim=2) - resolve.lhs_cube = lhs - resolve.rhs_cube = rhs - resolve.map_rhs_to_lhs = True - resolve.mapping = {0: 0, 1: 1} - self.assertTrue(resolve.mapped) - - def test_partial_mapping(self): - resolve = Resolve() - lhs = mock.Mock(ndim=3) - rhs = mock.Mock(ndim=2) - resolve.lhs_cube = lhs - resolve.rhs_cube = rhs - resolve.map_rhs_to_lhs = True - resolve.mapping = {0: 0} - self.assertFalse(resolve.mapped) - - -class Test__free_mapping(tests.IrisTest): - def setUp(self): - 
self.Cube = namedtuple("Wrapper", ("name", "ndim", "shape")) - self.src_dim_coverage = dict( - cube=None, - metadata=None, - coords=None, - dims_common=None, - dims_local=None, - dims_free=[], - ) - self.tgt_dim_coverage = deepcopy(self.src_dim_coverage) - self.src_aux_coverage = dict( - cube=None, - common_items_aux=None, - common_items_scalar=None, - local_items_aux=None, - local_items_scalar=None, - dims_common=None, - dims_local=None, - dims_free=[], - ) - self.tgt_aux_coverage = deepcopy(self.src_aux_coverage) - self.resolve = Resolve() - self.resolve.map_rhs_to_lhs = True - self.resolve.mapping = {} - - def _make_args(self): - args = dict( - src_dim_coverage=_DimCoverage(**self.src_dim_coverage), - tgt_dim_coverage=_DimCoverage(**self.tgt_dim_coverage), - src_aux_coverage=_AuxCoverage(**self.src_aux_coverage), - tgt_aux_coverage=_AuxCoverage(**self.tgt_aux_coverage), - ) - return args - - def test_mapping_no_dims_free(self): - ndim = 4 - shape = tuple(range(ndim)) - cube = self.Cube(name=lambda: "name", ndim=ndim, shape=shape) - self.src_dim_coverage["cube"] = cube - self.tgt_dim_coverage["cube"] = cube - args = self._make_args() - emsg = "Insufficient matching coordinate metadata" - with self.assertRaisesRegex(ValueError, emsg): - self.resolve._free_mapping(**args) - - def _make_coverage(self, name, shape, dims_free): - if name == "src": - dim_coverage = self.src_dim_coverage - aux_coverage = self.src_aux_coverage - else: - dim_coverage = self.tgt_dim_coverage - aux_coverage = self.tgt_aux_coverage - ndim = len(shape) - cube = self.Cube(name=lambda: name, ndim=ndim, shape=shape) - dim_coverage["cube"] = cube - dim_coverage["dims_free"].extend(dims_free) - aux_coverage["cube"] = cube - aux_coverage["dims_free"].extend(dims_free) - - def test_mapping_src_free_to_tgt_local(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 2 3 4 - # state f l c l state f c f - # 
coord d d d a coord a d d - # - # src-to-tgt mapping: - # before 1->2 - # after 0->3 1->2 2->1 - src_shape = (2, 3, 4) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt_local__broadcast_src_first(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 1 3 4 - # state f l c l state f c f - # coord d d d a coord a d d - # bcast ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->3 1->2 2->1 - src_shape = (1, 3, 4) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt_local__broadcast_src_last(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 2 3 1 - # state f l c l state f c f - # coord d d d a coord a d d - # bcast ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->3 1->2 2->1 - src_shape = (2, 3, 1) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt_local__broadcast_src_both(self): - # key: (state) 
c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 1 3 1 - # state f l c l state f c f - # coord d d d a coord a d d - # bcast ^ ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->1 1->2 2->3 - src_shape = (1, 3, 1) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 1, 1: 2, 2: 3} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt_free(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 2 3 4 - # state f f c f state f c f - # coord d d d a coord a d d - # - # src-to-tgt mapping: - # before 1->2 - # after 0->0 1->2 2->1 - src_shape = (2, 3, 4) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0, 1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt_free__broadcast_src_first(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 1 3 4 - # state f f c f state f c f - # coord d d d a coord a d d - # bcast ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->0 1->2 2->1 - src_shape = (1, 3, 4) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0, 1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = 
{0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt_free__broadcast_src_last(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 2 3 1 - # state f f c f state f c f - # coord d d d a coord a d d - # bcast ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->0 1->2 2->1 - src_shape = (2, 3, 1) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0, 1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt_free__broadcast_src_both(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 1 3 1 - # state f f c f state f c f - # coord d d d a coord a d d - # bcast ^ ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->0 1->2 2->1 - src_shape = (1, 3, 1) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0, 1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 0, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_src_free_to_tgt__fail(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 2 3 5 - # state f f c f state f c f - # coord d d d a coord a d d - # fail ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->0 1->2 2->? 
- src_shape = (2, 3, 5) - src_free = [0, 2] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [0, 1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - emsg = "Insufficient matching coordinate metadata to resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): - self.resolve._free_mapping(**args) - - def test_mapping_tgt_free_to_src_local(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: -> src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 2 3 4 - # state l f c f state l c l - # coord d d d a coord a d d - # - # src-to-tgt mapping: - # before 1->2 - # after 0->3 1->2 2->1 - src_shape = (2, 3, 4) - src_free = [] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 2) - tgt_free = [1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_tgt_free_to_src_local__broadcast_tgt_first(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: -> src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 1 3 2 shape 2 3 4 - # state l f c f state l c l - # coord d d d a coord a d d - # bcast ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->3 1->2 2->1 - src_shape = (2, 3, 4) - src_free = [] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 1, 3, 2) - tgt_free = [1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_tgt_free_to_src_local__broadcast_tgt_last(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: -> src: - # dims 0 1 2 
3 dims 0 1 2 - # shape 2 4 3 1 shape 2 3 4 - # state l f c f state l c l - # coord d d d a coord a d d - # bcast ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->3 1->2 2->1 - src_shape = (2, 3, 4) - src_free = [] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 1) - tgt_free = [1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 3, 1: 2, 2: 1} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_tgt_free_to_src_local__broadcast_tgt_both(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: -> src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 1 3 1 shape 2 3 4 - # state l f c f state l c l - # coord d d d a coord a d d - # bcast ^ ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->1 1->2 2->3 - src_shape = (2, 3, 4) - src_free = [] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 1, 3, 1) - tgt_free = [1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - self.resolve._free_mapping(**args) - expected = {0: 1, 1: 2, 2: 3} - self.assertEqual(expected, self.resolve.mapping) - - def test_mapping_tgt_free_to_src_no_free__fail(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: -> src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 5 shape 2 3 4 - # state l f c f state l c l - # coord d d d a coord a d d - # fail ^ - # - # src-to-tgt mapping: - # before 1->2 - # after 0->0 1->2 2->? 
- src_shape = (2, 3, 4) - src_free = [] - self._make_coverage("src", src_shape, src_free) - tgt_shape = (2, 4, 3, 5) - tgt_free = [1, 3] - self._make_coverage("tgt", tgt_shape, tgt_free) - self.resolve.mapping = {1: 2} - args = self._make_args() - emsg = "Insufficient matching coordinate metadata to resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): - self.resolve._free_mapping(**args) - - -class Test__src_cube(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - self.expected = sentinel.cube - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.resolve.rhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._src_cube) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.resolve.lhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._src_cube) - - def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): - self.resolve._src_cube - - -class Test__src_cube_position(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.assertEqual("RHS", self.resolve._src_cube_position) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.assertEqual("LHS", self.resolve._src_cube_position) - - def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): - self.resolve._src_cube_position - - -class Test__src_cube_resolved__getter(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - self.expected = sentinel.cube - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.resolve.rhs_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve._src_cube_resolved) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.resolve.lhs_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve._src_cube_resolved) - - def test_fail__no_map_rhs_to_lhs(self): - with 
self.assertRaises(AssertionError): - self.resolve._src_cube_resolved - - -class Test__src_cube_resolved__setter(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - self.expected = sentinel.cube - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.resolve._src_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.rhs_cube_resolved) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.resolve._src_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.lhs_cube_resolved) - - def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): - self.resolve._src_cube_resolved = self.expected - - -class Test__tgt_cube(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - self.expected = sentinel.cube - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.resolve.rhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._tgt_cube) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.resolve.lhs_cube = self.expected - self.assertEqual(self.expected, self.resolve._tgt_cube) - - def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): - self.resolve._tgt_cube - - -class Test__tgt_cube_position(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.assertEqual("RHS", self.resolve._tgt_cube_position) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.assertEqual("LHS", self.resolve._tgt_cube_position) - - def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): - self.resolve._tgt_cube_position - - -class Test__tgt_cube_resolved__getter(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - self.expected = sentinel.cube - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.resolve.rhs_cube_resolved = self.expected - 
self.assertEqual(self.expected, self.resolve._tgt_cube_resolved) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.resolve.lhs_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve._tgt_cube_resolved) - - def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): - self.resolve._tgt_cube_resolved - - -class Test__tgt_cube_resolved__setter(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - self.expected = sentinel.cube - - def test_rhs_cube(self): - self.resolve.map_rhs_to_lhs = False - self.resolve._tgt_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.rhs_cube_resolved) - - def test_lhs_cube(self): - self.resolve.map_rhs_to_lhs = True - self.resolve._tgt_cube_resolved = self.expected - self.assertEqual(self.expected, self.resolve.lhs_cube_resolved) - - def test_fail__no_map_rhs_to_lhs(self): - with self.assertRaises(AssertionError): - self.resolve._tgt_cube_resolved = self.expected - - -class Test_shape(tests.IrisTest): - def setUp(self): - self.resolve = Resolve() - - def test_no_shape(self): - self.assertIsNone(self.resolve.shape) - - def test_shape(self): - expected = sentinel.shape - self.resolve._broadcast_shape = expected - self.assertEqual(expected, self.resolve.shape) - - -class Test__as_compatible_cubes(tests.IrisTest): - def setUp(self): - self.Cube = namedtuple( - "Wrapper", - ( - "name", - "ndim", - "shape", - "metadata", - "core_data", - "coord_dims", - "dim_coords", - "aux_coords", - "aux_factories", - ), - ) - self.resolve = Resolve() - self.resolve.map_rhs_to_lhs = True - self.resolve.mapping = {} - self.mocker = self.patch("iris.cube.Cube") - self.args = dict( - name=None, - ndim=None, - shape=None, - metadata=None, - core_data=None, - coord_dims=None, - dim_coords=None, - aux_coords=None, - aux_factories=None, - ) - - def _make_cube(self, name, shape, transpose_shape=None): - self.args["name"] = lambda: name - ndim = len(shape) - 
self.args["ndim"] = ndim - self.args["shape"] = shape - if name == "src": - self.args["metadata"] = sentinel.metadata - self.reshape = sentinel.reshape - m_reshape = mock.Mock(return_value=self.reshape) - self.transpose = mock.Mock( - shape=transpose_shape, reshape=m_reshape - ) - m_transpose = mock.Mock(return_value=self.transpose) - self.data = mock.Mock( - shape=shape, transpose=m_transpose, reshape=m_reshape - ) - m_copy = mock.Mock(return_value=self.data) - m_core_data = mock.Mock(copy=m_copy) - self.args["core_data"] = mock.Mock(return_value=m_core_data) - self.args["coord_dims"] = mock.Mock(side_effect=([0], [ndim - 1])) - self.dim_coord = sentinel.dim_coord - self.aux_coord = sentinel.aux_coord - self.aux_factory = sentinel.aux_factory - self.args["dim_coords"] = [self.dim_coord] - self.args["aux_coords"] = [self.aux_coord] - self.args["aux_factories"] = [self.aux_factory] - cube = self.Cube(**self.args) - self.resolve.rhs_cube = cube - self.cube = mock.Mock() - self.mocker.return_value = self.cube - else: - cube = self.Cube(**self.args) - self.resolve.lhs_cube = cube - - def test_incomplete_src_to_tgt_mapping__fail(self): - src_shape = (1, 2) - self._make_cube("src", src_shape) - tgt_shape = (3, 4) - self._make_cube("tgt", tgt_shape) - with self.assertRaises(AssertionError): - self.resolve._as_compatible_cubes() - - def test_incompatible_shapes__fail(self): - # key: (state) c=common, f=free - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 2 2 3 4 shape 2 3 5 - # state f c c c state c c c - # fail ^ fail ^ - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - src_shape = (2, 3, 5) - self._make_cube("src", src_shape) - tgt_shape = (2, 2, 3, 4) - self._make_cube("tgt", tgt_shape) - self.resolve.mapping = {0: 1, 1: 2, 2: 3} - emsg = "Cannot resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): - self.resolve._as_compatible_cubes() - - def test_incompatible_shapes__fail_broadcast(self): - # key: (state) c=common, f=free - # - # tgt: <- src: - # 
dims 0 1 2 3 dims 0 1 2 - # shape 2 4 3 2 shape 2 3 5 - # state f c c c state c c c - # fail ^ fail ^ - # - # src-to-tgt mapping: - # 0->3, 1->2, 2->1 - src_shape = (2, 3, 5) - self._make_cube("src", src_shape) - tgt_shape = (2, 4, 3, 2) - self._make_cube("tgt", tgt_shape) - self.resolve.mapping = {0: 3, 1: 2, 2: 1} - emsg = "Cannot resolve cubes" - with self.assertRaisesRegex(ValueError, emsg): - self.resolve._as_compatible_cubes() - - def _check_compatible(self, broadcast_shape): - self.assertEqual( - self.resolve.lhs_cube, self.resolve._tgt_cube_resolved - ) - self.assertEqual(self.cube, self.resolve._src_cube_resolved) - self.assertEqual(broadcast_shape, self.resolve._broadcast_shape) - self.assertEqual(1, self.mocker.call_count) - self.assertEqual(self.args["metadata"], self.cube.metadata) - self.assertEqual(2, self.resolve.rhs_cube.coord_dims.call_count) - self.assertEqual( - [mock.call(self.dim_coord), mock.call(self.aux_coord)], - self.resolve.rhs_cube.coord_dims.call_args_list, - ) - self.assertEqual(1, self.cube.add_dim_coord.call_count) - self.assertEqual( - [mock.call(self.dim_coord, [self.resolve.mapping[0]])], - self.cube.add_dim_coord.call_args_list, - ) - self.assertEqual(1, self.cube.add_aux_coord.call_count) - self.assertEqual( - [mock.call(self.aux_coord, [self.resolve.mapping[2]])], - self.cube.add_aux_coord.call_args_list, - ) - self.assertEqual(1, self.cube.add_aux_factory.call_count) - self.assertEqual( - [mock.call(self.aux_factory)], - self.cube.add_aux_factory.call_args_list, - ) - - def test_compatible(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 2 - # shape 4 3 2 shape 4 3 2 - # state c c c state c c c - # coord d a - # - # src-to-tgt mapping: - # 0->0, 1->1, 2->2 - src_shape = (4, 3, 2) - self._make_cube("src", src_shape) - tgt_shape = (4, 3, 2) - self._make_cube("tgt", tgt_shape) - mapping = {0: 0, 1: 1, 2: 2} - self.resolve.mapping = mapping - 
self.resolve._as_compatible_cubes() - self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual([mock.call(self.data)], self.mocker.call_args_list) - - def test_compatible__transpose(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 2 - # shape 4 3 2 shape 2 3 4 - # state c c c state c c c - # coord d a - # - # src-to-tgt mapping: - # 0->2, 1->1, 2->0 - src_shape = (2, 3, 4) - self._make_cube("src", src_shape, transpose_shape=(4, 3, 2)) - tgt_shape = (4, 3, 2) - self._make_cube("tgt", tgt_shape) - mapping = {0: 2, 1: 1, 2: 0} - self.resolve.mapping = mapping - self.resolve._as_compatible_cubes() - self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual( - [mock.call([2, 1, 0])], self.data.transpose.call_args_list - ) - self.assertEqual( - [mock.call(self.transpose)], self.mocker.call_args_list - ) - - def test_compatible__reshape(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 4 3 2 - # state f c c c state c c c - # coord d a - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - src_shape = (4, 3, 2) - self._make_cube("src", src_shape) - tgt_shape = (5, 4, 3, 2) - self._make_cube("tgt", tgt_shape) - mapping = {0: 1, 1: 2, 2: 3} - self.resolve.mapping = mapping - self.resolve._as_compatible_cubes() - self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual(1, self.data.reshape.call_count) - self.assertEqual( - [mock.call((1,) + src_shape)], self.data.reshape.call_args_list - ) - self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) - - def test_compatible__transpose_reshape(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 2 3 4 - # state f c c c state c c c - # coord d a - # - # src-to-tgt mapping: - # 0->3, 1->2, 2->1 - 
src_shape = (2, 3, 4) - transpose_shape = (4, 3, 2) - self._make_cube("src", src_shape, transpose_shape=transpose_shape) - tgt_shape = (5, 4, 3, 2) - self._make_cube("tgt", tgt_shape) - mapping = {0: 3, 1: 2, 2: 1} - self.resolve.mapping = mapping - self.resolve._as_compatible_cubes() - self._check_compatible(broadcast_shape=tgt_shape) - self.assertEqual(1, self.data.transpose.call_count) - self.assertEqual( - [mock.call([2, 1, 0])], self.data.transpose.call_args_list - ) - self.assertEqual(1, self.data.reshape.call_count) - self.assertEqual( - [mock.call((1,) + transpose_shape)], - self.data.reshape.call_args_list, - ) - self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) - - def test_compatible__broadcast(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 2 - # shape 1 3 2 shape 4 1 2 - # state c c c state c c c - # coord d a - # bcast ^ bcast ^ - # - # src-to-tgt mapping: - # 0->0, 1->1, 2->2 - src_shape = (4, 1, 2) - self._make_cube("src", src_shape) - tgt_shape = (1, 3, 2) - self._make_cube("tgt", tgt_shape) - mapping = {0: 0, 1: 1, 2: 2} - self.resolve.mapping = mapping - self.resolve._as_compatible_cubes() - self._check_compatible(broadcast_shape=(4, 3, 2)) - self.assertEqual([mock.call(self.data)], self.mocker.call_args_list) - - def test_compatible__broadcast_transpose_reshape(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 1 3 2 shape 2 1 4 - # state f c c c state c c c - # coord d a - # bcast ^ bcast ^ - # - # src-to-tgt mapping: - # 0->3, 1->2, 2->1 - src_shape = (2, 1, 4) - transpose_shape = (4, 1, 2) - self._make_cube("src", src_shape) - tgt_shape = (5, 1, 3, 2) - self._make_cube("tgt", tgt_shape) - mapping = {0: 3, 1: 2, 2: 1} - self.resolve.mapping = mapping - self.resolve._as_compatible_cubes() - self._check_compatible(broadcast_shape=(5, 4, 3, 2)) - self.assertEqual(1, 
self.data.transpose.call_count) - self.assertEqual( - [mock.call([2, 1, 0])], self.data.transpose.call_args_list - ) - self.assertEqual(1, self.data.reshape.call_count) - self.assertEqual( - [mock.call((1,) + transpose_shape)], - self.data.reshape.call_args_list, - ) - self.assertEqual([mock.call(self.reshape)], self.mocker.call_args_list) - - -class Test__metadata_mapping(tests.IrisTest): - def setUp(self): - self.ndim = sentinel.ndim - self.src_cube = mock.Mock(ndim=self.ndim) - self.src_dim_coverage = mock.Mock(dims_free=[]) - self.src_aux_coverage = mock.Mock(dims_free=[]) - self.tgt_cube = mock.Mock(ndim=self.ndim) - self.tgt_dim_coverage = mock.Mock(dims_free=[]) - self.tgt_aux_coverage = mock.Mock(dims_free=[]) - self.resolve = Resolve() - self.map_rhs_to_lhs = True - self.resolve.map_rhs_to_lhs = self.map_rhs_to_lhs - self.resolve.rhs_cube = self.src_cube - self.resolve.rhs_cube_dim_coverage = self.src_dim_coverage - self.resolve.rhs_cube_aux_coverage = self.src_aux_coverage - self.resolve.lhs_cube = self.tgt_cube - self.resolve.lhs_cube_dim_coverage = self.tgt_dim_coverage - self.resolve.lhs_cube_aux_coverage = self.tgt_aux_coverage - self.resolve.mapping = {} - self.shape = sentinel.shape - self.resolve._broadcast_shape = self.shape - self.resolve._src_cube_resolved = mock.Mock(shape=self.shape) - self.resolve._tgt_cube_resolved = mock.Mock(shape=self.shape) - self.m_dim_mapping = self.patch( - "iris.common.resolve.Resolve._dim_mapping", return_value={} - ) - self.m_aux_mapping = self.patch( - "iris.common.resolve.Resolve._aux_mapping", return_value={} - ) - self.m_free_mapping = self.patch( - "iris.common.resolve.Resolve._free_mapping" - ) - self.m_as_compatible_cubes = self.patch( - "iris.common.resolve.Resolve._as_compatible_cubes" - ) - self.mapping = {0: 1, 1: 2, 2: 3} - - def test_mapped__dim_coords(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 4 3 2 - # 
state f c c c state c c c - # coord d d d coord d d d - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - self.src_cube.ndim = 3 - self.m_dim_mapping.return_value = self.mapping - self.resolve._metadata_mapping() - self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(0, self.m_aux_mapping.call_count) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) - - def test_mapped__aux_coords(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 4 3 2 - # state f c c c state c c c - # coord a a a coord a a a - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - self.src_cube.ndim = 3 - self.m_aux_mapping.return_value = self.mapping - self.resolve._metadata_mapping() - self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) - - def test_mapped__dim_and_aux_coords(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 4 3 2 - # state f c c c state c c c - # coord d a d coord d a d - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - dim_mapping = {0: 1, 2: 3} - aux_mapping = {1: 2} - self.src_cube.ndim = 3 - self.m_dim_mapping.return_value = dim_mapping - 
self.m_aux_mapping.return_value = aux_mapping - self.resolve._metadata_mapping() - self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) - - def test_mapped__dim_coords_and_free_dims(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 4 3 2 - # state l f c c state f c c - # coord d d d coord d d - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - dim_mapping = {1: 2, 2: 3} - free_mapping = {0: 1} - self.src_cube.ndim = 3 - self.m_dim_mapping.return_value = dim_mapping - side_effect = lambda a, b, c, d: self.resolve.mapping.update( - free_mapping - ) - self.m_free_mapping.side_effect = side_effect - self.resolve._metadata_mapping() - self.assertEqual(self.mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(1, self.m_free_mapping.call_count) - expected = [ - mock.call( - self.src_dim_coverage, - self.tgt_dim_coverage, - self.src_aux_coverage, - self.tgt_aux_coverage, - ) - ] - self.assertEqual(expected, self.m_free_mapping.call_args_list) - self.assertEqual(1, self.m_as_compatible_cubes.call_count) - - def 
test_mapped__dim_coords_with_broadcast_flip(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 4 dims 0 1 2 4 - # shape 1 4 3 2 shape 5 4 3 2 - # state c c c c state c c c c - # coord d d d d coord d d d d - # - # src-to-tgt mapping: - # 0->0, 1->1, 2->2, 3->3 - mapping = {0: 0, 1: 1, 2: 2, 3: 3} - self.src_cube.ndim = 4 - self.tgt_cube.ndim = 4 - self.m_dim_mapping.return_value = mapping - broadcast_shape = (5, 4, 3, 2) - self.resolve._broadcast_shape = broadcast_shape - self.resolve._src_cube_resolved.shape = broadcast_shape - self.resolve._tgt_cube_resolved.shape = (1, 4, 3, 2) - self.resolve._metadata_mapping() - self.assertEqual(mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(0, self.m_aux_mapping.call_count) - self.assertEqual(0, self.m_free_mapping.call_count) - self.assertEqual(2, self.m_as_compatible_cubes.call_count) - self.assertEqual(not self.map_rhs_to_lhs, self.resolve.map_rhs_to_lhs) - - def test_mapped__dim_coords_free_flip_with_free_flip(self): - # key: (state) c=common, f=free, l=local - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 2 - # shape 4 3 2 shape 4 3 2 - # state f f c state l l c - # coord d coord d d d - # - # src-to-tgt mapping: - # 0->0, 1->1, 2->2 - dim_mapping = {2: 2} - free_mapping = {0: 0, 1: 1} - mapping = {0: 0, 1: 1, 2: 2} - self.src_cube.ndim = 3 - self.tgt_cube.ndim = 3 - self.m_dim_mapping.return_value = dim_mapping - side_effect = lambda a, b, c, d: self.resolve.mapping.update( - free_mapping - ) - self.m_free_mapping.side_effect = side_effect - self.tgt_dim_coverage.dims_free = [0, 1] - self.tgt_aux_coverage.dims_free = [0, 1] - self.resolve._metadata_mapping() - self.assertEqual(mapping, self.resolve.mapping) - self.assertEqual(1, self.m_dim_mapping.call_count) - 
expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual(expected, self.m_dim_mapping.call_args_list) - self.assertEqual(1, self.m_aux_mapping.call_count) - expected = [mock.call(self.src_aux_coverage, self.tgt_aux_coverage)] - self.assertEqual(expected, self.m_aux_mapping.call_args_list) - self.assertEqual(1, self.m_free_mapping.call_count) - expected = [ - mock.call( - self.src_dim_coverage, - self.tgt_dim_coverage, - self.src_aux_coverage, - self.tgt_aux_coverage, - ) - ] - self.assertEqual(expected, self.m_free_mapping.call_args_list) - self.assertEqual(2, self.m_as_compatible_cubes.call_count) - - -class Test__prepare_common_dim_payload(tests.IrisTest): - def setUp(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 4 3 2 - # state l c c c state c c c - # coord d d d coord d d d - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2) - self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2) - self.pb_0 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[0])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[0])), - ) - self.pb_1 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[1])), - None, - ) - self.pb_2 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[2])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[2])), - ) - side_effect = (self.pb_0, self.pb_1, self.pb_2) - self.m_prepare_points_and_bounds = self.patch( - "iris.common.resolve.Resolve._prepare_points_and_bounds", - side_effect=side_effect, - ) - self.resolve = Resolve() - self.resolve.prepared_category = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - self.mapping = {0: 1, 1: 2, 2: 3} - self.resolve.mapping = self.mapping - self.metadata_combined = ( - sentinel.combined_0, - sentinel.combined_1, - sentinel.combined_2, - ) - self.src_metadata = mock.Mock( - 
combine=mock.Mock(side_effect=self.metadata_combined) - ) - metadata = [self.src_metadata] * len(self.mapping) - self.src_coords = [ - sentinel.src_coord_0, - sentinel.src_coord_1, - sentinel.src_coord_2, - ] - self.src_dims_common = [0, 1, 2] - self.container = DimCoord - self.src_dim_coverage = _DimCoverage( - cube=None, - metadata=metadata, - coords=self.src_coords, - dims_common=self.src_dims_common, - dims_local=[], - dims_free=[], - ) - self.tgt_metadata = [ - sentinel.tgt_metadata_0, - sentinel.tgt_metadata_1, - sentinel.tgt_metadata_2, - sentinel.tgt_metadata_3, - ] - self.tgt_coords = [ - sentinel.tgt_coord_0, - sentinel.tgt_coord_1, - sentinel.tgt_coord_2, - sentinel.tgt_coord_3, - ] - self.tgt_dims_common = [1, 2, 3] - self.tgt_dim_coverage = _DimCoverage( - cube=None, - metadata=self.tgt_metadata, - coords=self.tgt_coords, - dims_common=self.tgt_dims_common, - dims_local=[], - dims_free=[], - ) - - def _check(self, ignore_mismatch=None, bad_points=None): - if bad_points is None: - bad_points = False - self.resolve._prepare_common_dim_payload( - self.src_dim_coverage, - self.tgt_dim_coverage, - ignore_mismatch=ignore_mismatch, - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - if not bad_points: - self.assertEqual(3, len(self.resolve.prepared_category.items_dim)) - expected = [ - _PreparedItem( - metadata=_PreparedMetadata( - combined=self.metadata_combined[0], - src=self.src_metadata, - tgt=self.tgt_metadata[self.mapping[0]], - ), - points=self.points[0], - bounds=self.bounds[0], - dims=(self.mapping[0],), - container=self.container, - ), - _PreparedItem( - metadata=_PreparedMetadata( - combined=self.metadata_combined[1], - src=self.src_metadata, - tgt=self.tgt_metadata[self.mapping[1]], - ), - points=self.points[1], - bounds=None, - dims=(self.mapping[1],), - container=self.container, - ), - _PreparedItem( - metadata=_PreparedMetadata( - 
combined=self.metadata_combined[2], - src=self.src_metadata, - tgt=self.tgt_metadata[self.mapping[2]], - ), - points=self.points[2], - bounds=self.bounds[2], - dims=(self.mapping[2],), - container=self.container, - ), - ] - self.assertEqual( - expected, self.resolve.prepared_category.items_dim - ) - else: - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - self.assertEqual(3, self.m_prepare_points_and_bounds.call_count) - if ignore_mismatch is None: - ignore_mismatch = False - expected = [ - mock.call( - self.src_coords[0], - self.tgt_coords[self.mapping[0]], - 0, - 1, - ignore_mismatch=ignore_mismatch, - ), - mock.call( - self.src_coords[1], - self.tgt_coords[self.mapping[1]], - 1, - 2, - ignore_mismatch=ignore_mismatch, - ), - mock.call( - self.src_coords[2], - self.tgt_coords[self.mapping[2]], - 2, - 3, - ignore_mismatch=ignore_mismatch, - ), - ] - self.assertEqual( - expected, self.m_prepare_points_and_bounds.call_args_list - ) - if not bad_points: - self.assertEqual(3, self.src_metadata.combine.call_count) - expected = [ - mock.call(metadata) for metadata in self.tgt_metadata[1:] - ] - self.assertEqual( - expected, self.src_metadata.combine.call_args_list - ) - - def test__default_ignore_mismatch(self): - self._check() - - def test__not_ignore_mismatch(self): - self._check(ignore_mismatch=False) - - def test__ignore_mismatch(self): - self._check(ignore_mismatch=True) - - def test__bad_points(self): - side_effect = [(None, None)] * len(self.mapping) - self.m_prepare_points_and_bounds.side_effect = side_effect - self._check(bad_points=True) - - -class Test__prepare_common_aux_payload(tests.IrisTest): - def setUp(self): - # key: (state) c=common, f=free - # (coord) a=aux, d=dim - # - # tgt: <- src: - # dims 0 1 2 3 dims 0 1 2 - # shape 5 4 3 2 shape 4 3 2 - # state l c c c state c c c - # coord a a a coord a a a - # - # src-to-tgt mapping: - # 0->1, 1->2, 2->3 - self.points = (sentinel.points_0, sentinel.points_1, sentinel.points_2) - 
self.bounds = (sentinel.bounds_0, sentinel.bounds_1, sentinel.bounds_2) - self.pb_0 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[0])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[0])), - ) - self.pb_1 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[1])), - None, - ) - self.pb_2 = ( - mock.Mock(copy=mock.Mock(return_value=self.points[2])), - mock.Mock(copy=mock.Mock(return_value=self.bounds[2])), - ) - side_effect = (self.pb_0, self.pb_1, self.pb_2) - self.m_prepare_points_and_bounds = self.patch( - "iris.common.resolve.Resolve._prepare_points_and_bounds", - side_effect=side_effect, - ) - self.resolve = Resolve() - self.resolve.prepared_category = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - self.mapping = {0: 1, 1: 2, 2: 3} - self.resolve.mapping = self.mapping - self.resolve.map_rhs_to_lhs = True - self.metadata_combined = ( - sentinel.combined_0, - sentinel.combined_1, - sentinel.combined_2, - ) - self.src_metadata = [ - mock.Mock( - combine=mock.Mock(return_value=self.metadata_combined[0]) - ), - mock.Mock( - combine=mock.Mock(return_value=self.metadata_combined[1]) - ), - mock.Mock( - combine=mock.Mock(return_value=self.metadata_combined[2]) - ), - ] - self.src_coords = [ - sentinel.src_coord_0, - sentinel.src_coord_1, - sentinel.src_coord_2, - ] - self.src_dims = [(dim,) for dim in self.mapping.keys()] - self.src_common_items = [ - _Item(*item) - for item in zip(self.src_metadata, self.src_coords, self.src_dims) - ] - self.tgt_metadata = [sentinel.tgt_metadata_0] + self.src_metadata - self.tgt_coords = [ - sentinel.tgt_coord_0, - sentinel.tgt_coord_1, - sentinel.tgt_coord_2, - sentinel.tgt_coord_3, - ] - self.tgt_dims = [None] + [(dim,) for dim in self.mapping.values()] - self.tgt_common_items = [ - _Item(*item) - for item in zip(self.tgt_metadata, self.tgt_coords, self.tgt_dims) - ] - self.container = type(self.src_coords[0]) - - def _check(self, ignore_mismatch=None, bad_points=None): - if bad_points is None: 
- bad_points = False - prepared_items = [] - self.resolve._prepare_common_aux_payload( - self.src_common_items, - self.tgt_common_items, - prepared_items, - ignore_mismatch=ignore_mismatch, - ) - if not bad_points: - self.assertEqual(3, len(prepared_items)) - expected = [ - _PreparedItem( - metadata=_PreparedMetadata( - combined=self.metadata_combined[0], - src=self.src_metadata[0], - tgt=self.tgt_metadata[self.mapping[0]], - ), - points=self.points[0], - bounds=self.bounds[0], - dims=self.tgt_dims[self.mapping[0]], - container=self.container, - ), - _PreparedItem( - metadata=_PreparedMetadata( - combined=self.metadata_combined[1], - src=self.src_metadata[1], - tgt=self.tgt_metadata[self.mapping[1]], - ), - points=self.points[1], - bounds=None, - dims=self.tgt_dims[self.mapping[1]], - container=self.container, - ), - _PreparedItem( - metadata=_PreparedMetadata( - combined=self.metadata_combined[2], - src=self.src_metadata[2], - tgt=self.tgt_metadata[self.mapping[2]], - ), - points=self.points[2], - bounds=self.bounds[2], - dims=self.tgt_dims[self.mapping[2]], - container=self.container, - ), - ] - self.assertEqual(expected, prepared_items) - else: - self.assertEqual(0, len(prepared_items)) - self.assertEqual(3, self.m_prepare_points_and_bounds.call_count) - if ignore_mismatch is None: - ignore_mismatch = False - expected = [ - mock.call( - self.src_coords[0], - self.tgt_coords[self.mapping[0]], - self.src_dims[0], - self.tgt_dims[self.mapping[0]], - ignore_mismatch=ignore_mismatch, - ), - mock.call( - self.src_coords[1], - self.tgt_coords[self.mapping[1]], - self.src_dims[1], - self.tgt_dims[self.mapping[1]], - ignore_mismatch=ignore_mismatch, - ), - mock.call( - self.src_coords[2], - self.tgt_coords[self.mapping[2]], - self.src_dims[2], - self.tgt_dims[self.mapping[2]], - ignore_mismatch=ignore_mismatch, - ), - ] - self.assertEqual( - expected, self.m_prepare_points_and_bounds.call_args_list - ) - if not bad_points: - for src_metadata, tgt_metadata in zip( - 
self.src_metadata, self.tgt_metadata[1:] - ): - self.assertEqual(1, src_metadata.combine.call_count) - expected = [mock.call(tgt_metadata)] - self.assertEqual(expected, src_metadata.combine.call_args_list) - - def test__default_ignore_mismatch(self): - self._check() - - def test__not_ignore_mismatch(self): - self._check(ignore_mismatch=False) - - def test__ignore_mismatch(self): - self._check(ignore_mismatch=True) - - def test__bad_points(self): - side_effect = [(None, None)] * len(self.mapping) - self.m_prepare_points_and_bounds.side_effect = side_effect - self._check(bad_points=True) - - def test__no_tgt_metadata_match(self): - item = self.tgt_common_items[0] - tgt_common_items = [item] * len(self.tgt_common_items) - prepared_items = [] - self.resolve._prepare_common_aux_payload( - self.src_common_items, tgt_common_items, prepared_items - ) - self.assertEqual(0, len(prepared_items)) - - def test__multi_tgt_metadata_match(self): - item = self.tgt_common_items[1] - tgt_common_items = [item] * len(self.tgt_common_items) - prepared_items = [] - self.resolve._prepare_common_aux_payload( - self.src_common_items, tgt_common_items, prepared_items - ) - self.assertEqual(0, len(prepared_items)) - - -class Test__prepare_points_and_bounds(tests.IrisTest): - def setUp(self): - self.Coord = namedtuple( - "Coord", - [ - "name", - "points", - "bounds", - "metadata", - "ndim", - "shape", - "has_bounds", - ], - ) - self.Cube = namedtuple("Cube", ["name", "shape"]) - self.resolve = Resolve() - self.resolve.map_rhs_to_lhs = True - self.src_name = sentinel.src_name - self.src_points = sentinel.src_points - self.src_bounds = sentinel.src_bounds - self.src_metadata = sentinel.src_metadata - self.src_items = dict( - name=lambda: self.src_name, - points=self.src_points, - bounds=self.src_bounds, - metadata=self.src_metadata, - ndim=None, - shape=None, - has_bounds=None, - ) - self.tgt_name = sentinel.tgt_name - self.tgt_points = sentinel.tgt_points - self.tgt_bounds = sentinel.tgt_bounds 
- self.tgt_metadata = sentinel.tgt_metadata - self.tgt_items = dict( - name=lambda: self.tgt_name, - points=self.tgt_points, - bounds=self.tgt_bounds, - metadata=self.tgt_metadata, - ndim=None, - shape=None, - has_bounds=None, - ) - self.m_array_equal = self.patch( - "iris.util.array_equal", side_effect=(True, True) - ) - - def test_coord_ndim_unequal__tgt_ndim_greater(self): - self.src_items["ndim"] = 1 - src_coord = self.Coord(**self.src_items) - self.tgt_items["ndim"] = 10 - tgt_coord = self.Coord(**self.tgt_items) - points, bounds = self.resolve._prepare_points_and_bounds( - src_coord, tgt_coord, src_dims=None, tgt_dims=None - ) - self.assertEqual(self.tgt_points, points) - self.assertEqual(self.tgt_bounds, bounds) - - def test_coord_ndim_unequal__src_ndim_greater(self): - self.src_items["ndim"] = 10 - src_coord = self.Coord(**self.src_items) - self.tgt_items["ndim"] = 1 - tgt_coord = self.Coord(**self.tgt_items) - points, bounds = self.resolve._prepare_points_and_bounds( - src_coord, tgt_coord, src_dims=None, tgt_dims=None - ) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) - - def test_coord_ndim_equal__shape_unequal_with_src_broadcasting(self): - # key: (state) c=common, f=free - # (coord) x=coord - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 9 9 shape 1 9 - # state c c state c c - # coord x-x coord x-x - # bcast ^ - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - broadcast_shape = (9, 9) - ndim = len(broadcast_shape) - self.resolve.mapping = mapping - self.resolve._broadcast_shape = broadcast_shape - src_shape = (1, 9) - src_dims = tuple(mapping.keys()) - self.resolve.rhs_cube = self.Cube(name=None, shape=src_shape) - self.src_items["ndim"] = ndim - self.src_items["shape"] = src_shape - src_coord = self.Coord(**self.src_items) - tgt_shape = broadcast_shape - tgt_dims = tuple(mapping.values()) - self.resolve.lhs_cube = self.Cube(name=None, shape=tgt_shape) - self.tgt_items["ndim"] = ndim - 
self.tgt_items["shape"] = tgt_shape - tgt_coord = self.Coord(**self.tgt_items) - points, bounds = self.resolve._prepare_points_and_bounds( - src_coord, tgt_coord, src_dims, tgt_dims - ) - self.assertEqual(self.tgt_points, points) - self.assertEqual(self.tgt_bounds, bounds) - - def test_coord_ndim_equal__shape_unequal_with_tgt_broadcasting(self): - # key: (state) c=common, f=free - # (coord) x=coord - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 1 9 shape 9 9 - # state c c state c c - # coord x-x coord x-x - # bcast ^ - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - broadcast_shape = (9, 9) - ndim = len(broadcast_shape) - self.resolve.mapping = mapping - self.resolve._broadcast_shape = broadcast_shape - src_shape = broadcast_shape - src_dims = tuple(mapping.keys()) - self.resolve.rhs_cube = self.Cube(name=None, shape=src_shape) - self.src_items["ndim"] = ndim - self.src_items["shape"] = src_shape - src_coord = self.Coord(**self.src_items) - tgt_shape = (1, 9) - tgt_dims = tuple(mapping.values()) - self.resolve.lhs_cube = self.Cube(name=None, shape=tgt_shape) - self.tgt_items["ndim"] = ndim - self.tgt_items["shape"] = tgt_shape - tgt_coord = self.Coord(**self.tgt_items) - points, bounds = self.resolve._prepare_points_and_bounds( - src_coord, tgt_coord, src_dims, tgt_dims - ) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) - - def test_coord_ndim_equal__shape_unequal_with_unsupported_broadcasting( - self, - ): - # key: (state) c=common, f=free - # (coord) x=coord - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 1 9 shape 9 1 - # state c c state c c - # coord x-x coord x-x - # bcast ^ bcast ^ - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - broadcast_shape = (9, 9) - ndim = len(broadcast_shape) - self.resolve.mapping = mapping - self.resolve._broadcast_shape = broadcast_shape - src_shape = (9, 1) - src_dims = tuple(mapping.keys()) - self.resolve.rhs_cube = self.Cube( - 
name=lambda: sentinel.src_cube, shape=src_shape - ) - self.src_items["ndim"] = ndim - self.src_items["shape"] = src_shape - src_coord = self.Coord(**self.src_items) - tgt_shape = (1, 9) - tgt_dims = tuple(mapping.values()) - self.resolve.lhs_cube = self.Cube( - name=lambda: sentinel.tgt_cube, shape=tgt_shape - ) - self.tgt_items["ndim"] = ndim - self.tgt_items["shape"] = tgt_shape - tgt_coord = self.Coord(**self.tgt_items) - emsg = "Cannot broadcast" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve._prepare_points_and_bounds( - src_coord, tgt_coord, src_dims, tgt_dims - ) - - def _populate( - self, src_points, tgt_points, src_bounds=None, tgt_bounds=None - ): - # key: (state) c=common, f=free - # (coord) x=coord - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state f c state f c - # coord x coord x - # - # src-to-tgt mapping: - # 0->0, 1->1 - shape = (2, 3) - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - self.resolve.map_rhs_to_lhs = True - self.resolve.rhs_cube = self.Cube( - name=lambda: sentinel.src_cube, shape=None - ) - self.resolve.lhs_cube = self.Cube( - name=lambda: sentinel.tgt_cube, shape=None - ) - ndim = 1 - src_dims = 1 - self.src_items["ndim"] = ndim - self.src_items["shape"] = (shape[src_dims],) - self.src_items["points"] = src_points - self.src_items["bounds"] = src_bounds - self.src_items["has_bounds"] = lambda: src_bounds is not None - src_coord = self.Coord(**self.src_items) - tgt_dims = 1 - self.tgt_items["ndim"] = ndim - self.tgt_items["shape"] = (shape[mapping[tgt_dims]],) - self.tgt_items["points"] = tgt_points - self.tgt_items["bounds"] = tgt_bounds - self.tgt_items["has_bounds"] = lambda: tgt_bounds is not None - tgt_coord = self.Coord(**self.tgt_items) - args = dict( - src_coord=src_coord, - tgt_coord=tgt_coord, - src_dims=src_dims, - tgt_dims=tgt_dims, - ) - return args - - def test_coord_ndim_and_shape_equal__points_equal_with_no_bounds(self): - args = self._populate(self.src_points, 
self.src_points) - points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertIsNone(bounds) - self.assertEqual(1, self.m_array_equal.call_count) - expected = [mock.call(self.src_points, self.src_points, withnans=True)] - self.assertEqual(expected, self.m_array_equal.call_args_list) - - def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only( - self, - ): - args = self._populate( - self.src_points, self.src_points, src_bounds=self.src_bounds - ) - points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) - self.assertEqual(1, self.m_array_equal.call_count) - expected = [mock.call(self.src_points, self.src_points, withnans=True)] - self.assertEqual(expected, self.m_array_equal.call_args_list) - - def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only( - self, - ): - args = self._populate( - self.src_points, self.src_points, tgt_bounds=self.tgt_bounds - ) - points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertEqual(self.tgt_bounds, bounds) - self.assertEqual(1, self.m_array_equal.call_count) - expected = [mock.call(self.src_points, self.src_points, withnans=True)] - self.assertEqual(expected, self.m_array_equal.call_args_list) - - def test_coord_ndim_and_shape_equal__points_equal_with_src_bounds_only_strict( - self, - ): - args = self._populate( - self.src_points, self.src_points, src_bounds=self.src_bounds - ) - with LENIENT.context(maths=False): - emsg = f"Coordinate {self.src_name} has bounds" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve._prepare_points_and_bounds(**args) - - def test_coord_ndim_and_shape_equal__points_equal_with_tgt_bounds_only_strict( - self, - ): - args = self._populate( - self.src_points, self.src_points, tgt_bounds=self.tgt_bounds - ) - with 
LENIENT.context(maths=False): - emsg = f"Coordinate {self.tgt_name} has bounds" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve._prepare_points_and_bounds(**args) - - def test_coord_ndim_and_shape_equal__points_equal_with_bounds_equal(self): - args = self._populate( - self.src_points, - self.src_points, - src_bounds=self.src_bounds, - tgt_bounds=self.src_bounds, - ) - points, bounds = self.resolve._prepare_points_and_bounds(**args) - self.assertEqual(self.src_points, points) - self.assertEqual(self.src_bounds, bounds) - self.assertEqual(2, self.m_array_equal.call_count) - expected = [ - mock.call(self.src_points, self.src_points, withnans=True), - mock.call(self.src_bounds, self.src_bounds, withnans=True), - ] - self.assertEqual(expected, self.m_array_equal.call_args_list) - - def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different( - self, - ): - self.m_array_equal.side_effect = (True, False) - args = self._populate( - self.src_points, - self.src_points, - src_bounds=self.src_bounds, - tgt_bounds=self.tgt_bounds, - ) - emsg = f"Coordinate {self.src_name} has different bounds" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve._prepare_points_and_bounds(**args) - - def test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_ignore_mismatch( - self, - ): - self.m_array_equal.side_effect = (True, False) - args = self._populate( - self.src_points, - self.src_points, - src_bounds=self.src_bounds, - tgt_bounds=self.tgt_bounds, - ) - points, bounds = self.resolve._prepare_points_and_bounds( - **args, ignore_mismatch=True - ) - self.assertEqual(self.src_points, points) - self.assertIsNone(bounds) - self.assertEqual(2, self.m_array_equal.call_count) - expected = [ - mock.call(self.src_points, self.src_points, withnans=True), - mock.call(self.src_bounds, self.tgt_bounds, withnans=True), - ] - self.assertEqual(expected, self.m_array_equal.call_args_list) - - def 
test_coord_ndim_and_shape_equal__points_equal_with_bounds_different_strict( - self, - ): - self.m_array_equal.side_effect = (True, False) - args = self._populate( - self.src_points, - self.src_points, - src_bounds=self.src_bounds, - tgt_bounds=self.tgt_bounds, - ) - with LENIENT.context(maths=False): - emsg = f"Coordinate {self.src_name} has different bounds" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve._prepare_points_and_bounds(**args) - - def test_coord_ndim_and_shape_equal__points_different(self): - self.m_array_equal.side_effect = (False,) - args = self._populate(self.src_points, self.tgt_points) - emsg = f"Coordinate {self.src_name} has different points" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve._prepare_points_and_bounds(**args) - - def test_coord_ndim_and_shape_equal__points_different_ignore_mismatch( - self, - ): - self.m_array_equal.side_effect = (False,) - args = self._populate(self.src_points, self.tgt_points) - points, bounds = self.resolve._prepare_points_and_bounds( - **args, ignore_mismatch=True - ) - self.assertIsNone(points) - self.assertIsNone(bounds) - - def test_coord_ndim_and_shape_equal__points_different_strict(self): - self.m_array_equal.side_effect = (False,) - args = self._populate(self.src_points, self.tgt_points) - with LENIENT.context(maths=False): - emsg = f"Coordinate {self.src_name} has different points" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve._prepare_points_and_bounds(**args) - - -class Test__create_prepared_item(tests.IrisTest): - def setUp(self): - Coord = namedtuple("Coord", ["points", "bounds"]) - self.points_value = sentinel.points - self.points = mock.Mock(copy=mock.Mock(return_value=self.points_value)) - self.bounds_value = sentinel.bounds - self.bounds = mock.Mock(copy=mock.Mock(return_value=self.bounds_value)) - self.coord = Coord(points=self.points, bounds=self.bounds) - self.container = type(self.coord) - self.combined = sentinel.combined - 
self.src = mock.Mock(combine=mock.Mock(return_value=self.combined)) - self.tgt = sentinel.tgt - - def _check(self, src=None, tgt=None): - dims = 0 - if src is not None and tgt is not None: - combined = self.combined - else: - combined = src or tgt - result = Resolve._create_prepared_item( - self.coord, dims, src_metadata=src, tgt_metadata=tgt - ) - self.assertIsInstance(result, _PreparedItem) - self.assertIsInstance(result.metadata, _PreparedMetadata) - expected = _PreparedMetadata(combined=combined, src=src, tgt=tgt) - self.assertEqual(expected, result.metadata) - self.assertEqual(self.points_value, result.points) - self.assertEqual(1, self.points.copy.call_count) - self.assertEqual([mock.call()], self.points.copy.call_args_list) - self.assertEqual(self.bounds_value, result.bounds) - self.assertEqual(1, self.bounds.copy.call_count) - self.assertEqual([mock.call()], self.bounds.copy.call_args_list) - self.assertEqual((dims,), result.dims) - self.assertEqual(self.container, result.container) - - def test__no_metadata(self): - self._check() - - def test__src_metadata_only(self): - self._check(src=self.src) - - def test__tgt_metadata_only(self): - self._check(tgt=self.tgt) - - def test__combine_metadata(self): - self._check(src=self.src, tgt=self.tgt) - - -class Test__prepare_local_payload_dim(tests.IrisTest): - def setUp(self): - self.Cube = namedtuple("Cube", ["ndim"]) - self.resolve = Resolve() - self.resolve.prepared_category = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - self.resolve.map_rhs_to_lhs = True - self.src_coverage = dict( - cube=None, - metadata=[], - coords=[], - dims_common=None, - dims_local=[], - dims_free=None, - ) - self.tgt_coverage = deepcopy(self.src_coverage) - self.prepared_item = sentinel.prepared_item - self.m_create_prepared_item = self.patch( - "iris.common.resolve.Resolve._create_prepared_item", - return_value=self.prepared_item, - ) - - def test_src_no_local_with_tgt_no_local(self): - # key: (state) c=common, 
f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c c state c c - # coord d d coord d d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _DimCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - - def test_src_no_local_with_tgt_no_local__strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c c state c c - # coord d d coord d d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _DimCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - - def test_src_local_with_tgt_local(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c l state c l - # coord d d coord d d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - self.src_coverage["dims_local"] = (1,) - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["dims_local"] = (1,) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _DimCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - - def test_src_local_with_tgt_local__strict(self): - # key: (state) c=common, f=free, l=local - # 
(coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c l state c l - # coord d d coord d d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - self.src_coverage["dims_local"] = (1,) - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["dims_local"] = (1,) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _DimCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - - def test_src_local_with_tgt_free(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c f state c l - # coord d coord d d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_dim = 1 - self.src_coverage["dims_local"] = (src_dim,) - src_metadata = sentinel.src_metadata - self.src_coverage["metadata"] = [None, src_metadata] - src_coord = sentinel.src_coord - self.src_coverage["coords"] = [None, src_coord] - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _DimCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [ - mock.call(src_coord, mapping[src_dim], src_metadata=src_metadata) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_free__strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c f state c l 
- # coord d coord d d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_dim = 1 - self.src_coverage["dims_local"] = (src_dim,) - src_metadata = sentinel.src_metadata - self.src_coverage["metadata"] = [None, src_metadata] - src_coord = sentinel.src_coord - self.src_coverage["coords"] = [None, src_coord] - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _DimCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - - def test_src_free_with_tgt_local(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c l state c f - # coord d d coord d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_dim = 1 - self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata - self.tgt_coverage["metadata"] = [None, tgt_metadata] - tgt_coord = sentinel.tgt_coord - self.tgt_coverage["coords"] = [None, tgt_coord] - tgt_coverage = _DimCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_free_with_tgt_local__strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 
shape 2 3 - # state c l state c f - # coord d d coord d - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_dim = 1 - self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata - self.tgt_coverage["metadata"] = [None, tgt_metadata] - tgt_coord = sentinel.tgt_coord - self.tgt_coverage["coords"] = [None, tgt_coord] - tgt_coverage = _DimCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_dim)) - - def test_src_no_local_with_tgt_local__extra_dims(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 - # shape 4 2 3 shape 2 3 - # state l c c state c c - # coord d d d coord d d - # - # src-to-tgt mapping: - # 0->1, 1->2 - mapping = {0: 1, 1: 2} - self.resolve.mapping = mapping - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=3) - tgt_dim = 0 - self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata - self.tgt_coverage["metadata"] = [tgt_metadata, None, None] - tgt_coord = sentinel.tgt_coord - self.tgt_coverage["coords"] = [tgt_coord, None, None] - tgt_coverage = _DimCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_no_local_with_tgt_local__extra_dims_strict(self): - # key: (state) c=common, 
f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 - # shape 4 2 3 shape 2 3 - # state l c c state c c - # coord d d d coord d d - # - # src-to-tgt mapping: - # 0->1, 1->2 - mapping = {0: 1, 1: 2} - self.resolve.mapping = mapping - src_coverage = _DimCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=3) - tgt_dim = 0 - self.tgt_coverage["dims_local"] = (tgt_dim,) - tgt_metadata = sentinel.tgt_metadata - self.tgt_coverage["metadata"] = [tgt_metadata, None, None] - tgt_coord = sentinel.tgt_coord - self.tgt_coverage["coords"] = [tgt_coord, None, None] - tgt_coverage = _DimCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_dim(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_dim)) - self.assertEqual( - self.prepared_item, self.resolve.prepared_category.items_dim[0] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - expected = [mock.call(tgt_coord, tgt_dim, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - -class Test__prepare_local_payload_aux(tests.IrisTest): - def setUp(self): - self.Cube = namedtuple("Cube", ["ndim"]) - self.resolve = Resolve() - self.resolve.prepared_category = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - self.resolve.map_rhs_to_lhs = True - self.src_coverage = dict( - cube=None, - common_items_aux=None, - common_items_scalar=None, - local_items_aux=[], - local_items_scalar=None, - dims_common=None, - dims_local=[], - dims_free=None, - ) - self.tgt_coverage = deepcopy(self.src_coverage) - self.src_prepared_item = sentinel.src_prepared_item - self.tgt_prepared_item = sentinel.tgt_prepared_item - self.m_create_prepared_item = self.patch( - "iris.common.resolve.Resolve._create_prepared_item", - side_effect=(self.src_prepared_item, self.tgt_prepared_item), - ) - - def test_src_no_local_with_tgt_no_local(self): - # 
key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c c state c c - # coord a a coord a a - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) - - def test_src_no_local_with_tgt_no_local__strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c c state c c - # coord a a coord a a - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) - - def test_src_local_with_tgt_local(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c l state c l - # coord a a coord a a - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_dims = (1,) - src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) - self.src_coverage["local_items_aux"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_dims = (1,) - tgt_item = _Item(metadata=tgt_metadata, 
coord=tgt_coord, dims=tgt_dims) - self.tgt_coverage["local_items_aux"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) - expected = [self.src_prepared_item, self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [ - mock.call(src_coord, tgt_dims, src_metadata=src_metadata), - mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata), - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_local__strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c l state c l - # coord a a coord a a - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_dims = (1,) - src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) - self.src_coverage["local_items_aux"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_dims = (1,) - tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) - self.tgt_coverage["local_items_aux"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) - - def test_src_local_with_tgt_free(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c f state c l - # coord a coord a a - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 
0, 1: 1} - self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_dims = (1,) - src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) - self.src_coverage["local_items_aux"].append(src_item) - self.src_coverage["dims_local"].extend(src_dims) - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) - expected = [self.src_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(src_coord, src_dims, src_metadata=src_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_free__strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c f state c l - # coord a coord a a - # - # src-to-tgt mapping: - # 0->0, 1->1 - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_dims = (1,) - src_item = _Item(metadata=src_metadata, coord=src_coord, dims=src_dims) - self.src_coverage["local_items_aux"].append(src_item) - self.src_coverage["dims_local"].extend(src_dims) - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) - - def test_src_free_with_tgt_local(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c l state c f - # coord a a coord a 
- # - # src-to-tgt mapping: - # 0->0, 1->1 - self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_dims = (1,) - tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) - self.tgt_coverage["local_items_aux"].append(tgt_item) - self.tgt_coverage["dims_local"].extend(tgt_dims) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) - expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_free_with_tgt_local__strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 dims 0 1 - # shape 2 3 shape 2 3 - # state c l state c f - # coord a a coord a - # - # src-to-tgt mapping: - # 0->0, 1->1 - self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - mapping = {0: 0, 1: 1} - self.resolve.mapping = mapping - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=2) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_dims = (1,) - tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) - self.tgt_coverage["local_items_aux"].append(tgt_item) - self.tgt_coverage["dims_local"].extend(tgt_dims) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_aux)) - - def 
test_src_no_local_with_tgt_local__extra_dims(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 - # shape 4 2 3 shape 2 3 - # state l c c state c c - # coord a a a coord a a - # - # src-to-tgt mapping: - # 0->1, 1->2 - self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - mapping = {0: 1, 1: 2} - self.resolve.mapping = mapping - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=3) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_dims = (0,) - tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) - self.tgt_coverage["local_items_aux"].append(tgt_item) - self.tgt_coverage["dims_local"].extend(tgt_dims) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) - expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_no_local_with_tgt_local__extra_dims_strict(self): - # key: (state) c=common, f=free, l=local - # (coord) d=dim - # - # tgt: <- src: - # dims 0 1 2 dims 0 1 - # shape 4 2 3 shape 2 3 - # state l c c state c c - # coord a a a coord a a - # - # src-to-tgt mapping: - # 0->1, 1->2 - self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - mapping = {0: 1, 1: 2} - self.resolve.mapping = mapping - src_coverage = _AuxCoverage(**self.src_coverage) - self.tgt_coverage["cube"] = self.Cube(ndim=3) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_dims = (0,) - tgt_item = _Item(metadata=tgt_metadata, coord=tgt_coord, dims=tgt_dims) - self.tgt_coverage["local_items_aux"].append(tgt_item) - 
self.tgt_coverage["dims_local"].extend(tgt_dims) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=True): - self.resolve._prepare_local_payload_aux(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_aux)) - expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_aux) - expected = [mock.call(tgt_coord, tgt_dims, tgt_metadata=tgt_metadata)] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - -class Test__prepare_local_payload_scalar(tests.IrisTest): - def setUp(self): - self.Cube = namedtuple("Cube", ["ndim"]) - self.resolve = Resolve() - self.resolve.prepared_category = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - self.src_coverage = dict( - cube=None, - common_items_aux=None, - common_items_scalar=None, - local_items_aux=None, - local_items_scalar=[], - dims_common=None, - dims_local=[], - dims_free=None, - ) - self.tgt_coverage = deepcopy(self.src_coverage) - self.src_prepared_item = sentinel.src_prepared_item - self.tgt_prepared_item = sentinel.tgt_prepared_item - self.m_create_prepared_item = self.patch( - "iris.common.resolve.Resolve._create_prepared_item", - side_effect=(self.src_prepared_item, self.tgt_prepared_item), - ) - self.src_dims = () - self.tgt_dims = () - - def test_src_no_local_with_tgt_no_local(self): - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_no_local_with_tgt_no_local__strict(self): - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - 
self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_no_local_with_tgt_no_local__src_scalar_cube(self): - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_no_local_with_tgt_no_local__src_scalar_cube_strict(self): - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_local_with_tgt_no_local(self): - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) - expected = [self.src_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_no_local__strict(self): - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = 
_Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_local_with_tgt_no_local__src_scalar_cube(self): - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) - expected = [self.src_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_no_local__src_scalar_cube_strict(self): - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_no_local_with_tgt_local(self): - 
self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) - self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) - expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_no_local_with_tgt_local__strict(self): - self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) - self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_no_local_with_tgt_local__src_scalar_cube(self): - self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) - 
self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) - expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_no_local_with_tgt_local__src_scalar_cube_strict(self): - self.m_create_prepared_item.side_effect = (self.tgt_prepared_item,) - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) - self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(1, len(self.resolve.prepared_category.items_scalar)) - expected = [self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_local(self): - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, 
coord=tgt_coord, dims=self.tgt_dims - ) - self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - expected = [self.src_prepared_item, self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata), - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_local__strict(self): - ndim = 2 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) - self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - def test_src_local_with_tgt_local__src_scalar_cube(self): - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, 
coord=tgt_coord, dims=self.tgt_dims - ) - self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - self.resolve._prepare_local_payload_scalar(src_coverage, tgt_coverage) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - expected = [self.src_prepared_item, self.tgt_prepared_item] - self.assertEqual(expected, self.resolve.prepared_category.items_scalar) - expected = [ - mock.call(src_coord, self.src_dims, src_metadata=src_metadata), - mock.call(tgt_coord, self.tgt_dims, tgt_metadata=tgt_metadata), - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_src_local_with_tgt_local__src_scalar_cube_strict(self): - ndim = 0 - self.src_coverage["cube"] = self.Cube(ndim=ndim) - src_metadata = sentinel.src_metadata - src_coord = sentinel.src_coord - src_item = _Item( - metadata=src_metadata, coord=src_coord, dims=self.src_dims - ) - self.src_coverage["local_items_scalar"].append(src_item) - src_coverage = _AuxCoverage(**self.src_coverage) - tgt_metadata = sentinel.tgt_metadata - tgt_coord = sentinel.tgt_coord - tgt_item = _Item( - metadata=tgt_metadata, coord=tgt_coord, dims=self.tgt_dims - ) - self.tgt_coverage["local_items_scalar"].append(tgt_item) - tgt_coverage = _AuxCoverage(**self.tgt_coverage) - with LENIENT.context(maths=False): - self.resolve._prepare_local_payload_scalar( - src_coverage, tgt_coverage - ) - self.assertEqual(0, len(self.resolve.prepared_category.items_scalar)) - - -class Test__prepare_local_payload(tests.IrisTest): - def test(self): - src_dim_coverage = sentinel.src_dim_coverage - src_aux_coverage = sentinel.src_aux_coverage - tgt_dim_coverage = sentinel.tgt_dim_coverage - tgt_aux_coverage = sentinel.tgt_aux_coverage - root = "iris.common.resolve.Resolve" - m_prepare_dim = self.patch(f"{root}._prepare_local_payload_dim") - m_prepare_aux = self.patch(f"{root}._prepare_local_payload_aux") - m_prepare_scalar = 
self.patch(f"{root}._prepare_local_payload_scalar") - resolve = Resolve() - resolve._prepare_local_payload( - src_dim_coverage, - src_aux_coverage, - tgt_dim_coverage, - tgt_aux_coverage, - ) - self.assertEqual(1, m_prepare_dim.call_count) - expected = [mock.call(src_dim_coverage, tgt_dim_coverage)] - self.assertEqual(expected, m_prepare_dim.call_args_list) - self.assertEqual(1, m_prepare_aux.call_count) - expected = [mock.call(src_aux_coverage, tgt_aux_coverage)] - self.assertEqual(expected, m_prepare_aux.call_args_list) - self.assertEqual(1, m_prepare_scalar.call_count) - expected = [mock.call(src_aux_coverage, tgt_aux_coverage)] - self.assertEqual(expected, m_prepare_scalar.call_args_list) - - -class Test__metadata_prepare(tests.IrisTest): - def setUp(self): - self.src_cube = sentinel.src_cube - self.src_category_local = sentinel.src_category_local - self.src_dim_coverage = sentinel.src_dim_coverage - self.src_aux_coverage = mock.Mock( - common_items_aux=sentinel.src_aux_coverage_common_items_aux, - common_items_scalar=sentinel.src_aux_coverage_common_items_scalar, - ) - self.tgt_cube = sentinel.tgt_cube - self.tgt_category_local = sentinel.tgt_category_local - self.tgt_dim_coverage = sentinel.tgt_dim_coverage - self.tgt_aux_coverage = mock.Mock( - common_items_aux=sentinel.tgt_aux_coverage_common_items_aux, - common_items_scalar=sentinel.tgt_aux_coverage_common_items_scalar, - ) - self.resolve = Resolve() - root = "iris.common.resolve.Resolve" - self.m_prepare_common_dim_payload = self.patch( - f"{root}._prepare_common_dim_payload" - ) - self.m_prepare_common_aux_payload = self.patch( - f"{root}._prepare_common_aux_payload" - ) - self.m_prepare_local_payload = self.patch( - f"{root}._prepare_local_payload" - ) - self.m_prepare_factory_payload = self.patch( - f"{root}._prepare_factory_payload" - ) - - def _check(self): - self.assertIsNone(self.resolve.prepared_category) - self.assertIsNone(self.resolve.prepared_factories) - self.resolve._metadata_prepare() - 
expected = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[]) - self.assertEqual(expected, self.resolve.prepared_category) - self.assertEqual([], self.resolve.prepared_factories) - self.assertEqual(1, self.m_prepare_common_dim_payload.call_count) - expected = [mock.call(self.src_dim_coverage, self.tgt_dim_coverage)] - self.assertEqual( - expected, self.m_prepare_common_dim_payload.call_args_list - ) - self.assertEqual(2, self.m_prepare_common_aux_payload.call_count) - expected = [ - mock.call( - self.src_aux_coverage.common_items_aux, - self.tgt_aux_coverage.common_items_aux, - [], - ), - mock.call( - self.src_aux_coverage.common_items_scalar, - self.tgt_aux_coverage.common_items_scalar, - [], - ignore_mismatch=True, - ), - ] - self.assertEqual( - expected, self.m_prepare_common_aux_payload.call_args_list - ) - self.assertEqual(1, self.m_prepare_local_payload.call_count) - expected = [ - mock.call( - self.src_dim_coverage, - self.src_aux_coverage, - self.tgt_dim_coverage, - self.tgt_aux_coverage, - ) - ] - self.assertEqual(expected, self.m_prepare_local_payload.call_args_list) - self.assertEqual(2, self.m_prepare_factory_payload.call_count) - expected = [ - mock.call(self.tgt_cube, self.tgt_category_local, from_src=False), - mock.call(self.src_cube, self.src_category_local), - ] - self.assertEqual( - expected, self.m_prepare_factory_payload.call_args_list - ) - - def test_map_rhs_to_lhs__true(self): - self.resolve.map_rhs_to_lhs = True - self.resolve.rhs_cube = self.src_cube - self.resolve.rhs_cube_category_local = self.src_category_local - self.resolve.rhs_cube_dim_coverage = self.src_dim_coverage - self.resolve.rhs_cube_aux_coverage = self.src_aux_coverage - self.resolve.lhs_cube = self.tgt_cube - self.resolve.lhs_cube_category_local = self.tgt_category_local - self.resolve.lhs_cube_dim_coverage = self.tgt_dim_coverage - self.resolve.lhs_cube_aux_coverage = self.tgt_aux_coverage - self._check() - - def test_map_rhs_to_lhs__false(self): - 
self.resolve.map_rhs_to_lhs = False - self.resolve.lhs_cube = self.src_cube - self.resolve.lhs_cube_category_local = self.src_category_local - self.resolve.lhs_cube_dim_coverage = self.src_dim_coverage - self.resolve.lhs_cube_aux_coverage = self.src_aux_coverage - self.resolve.rhs_cube = self.tgt_cube - self.resolve.rhs_cube_category_local = self.tgt_category_local - self.resolve.rhs_cube_dim_coverage = self.tgt_dim_coverage - self.resolve.rhs_cube_aux_coverage = self.tgt_aux_coverage - self._check() - - -class Test__prepare_factory_payload(tests.IrisTest): - def setUp(self): - self.Cube = namedtuple("Cube", ["aux_factories"]) - self.Coord = namedtuple("Coord", ["metadata"]) - self.Factory_T1 = namedtuple( - "Factory_T1", ["dependencies"] - ) # dummy factory type - self.container_T1 = type(self.Factory_T1(None)) - self.Factory_T2 = namedtuple( - "Factory_T2", ["dependencies"] - ) # dummy factory type - self.container_T2 = type(self.Factory_T2(None)) - self.resolve = Resolve() - self.resolve.map_rhs_to_lhs = True - self.resolve.prepared_factories = [] - self.m_get_prepared_item = self.patch( - "iris.common.resolve.Resolve._get_prepared_item" - ) - self.category_local = sentinel.category_local - self.from_src = sentinel.from_src - - def test_no_factory(self): - cube = self.Cube(aux_factories=[]) - self.resolve._prepare_factory_payload(cube, self.category_local) - self.assertEqual(0, len(self.resolve.prepared_factories)) - - def test_skip_factory__already_prepared(self): - aux_factory = self.Factory_T1(dependencies=None) - aux_factories = [aux_factory] - cube = self.Cube(aux_factories=aux_factories) - prepared_factories = [ - _PreparedFactory(container=self.container_T1, dependencies=None), - _PreparedFactory(container=self.container_T2, dependencies=None), - ] - self.resolve.prepared_factories.extend(prepared_factories) - self.resolve._prepare_factory_payload(cube, self.category_local) - self.assertEqual(prepared_factories, self.resolve.prepared_factories) - - def 
test_factory__dependency_already_prepared(self): - coord_a = self.Coord(metadata=sentinel.coord_a_metadata) - coord_b = self.Coord(metadata=sentinel.coord_b_metadata) - coord_c = self.Coord(metadata=sentinel.coord_c_metadata) - side_effect = (coord_a, coord_b, coord_c) - self.m_get_prepared_item.side_effect = side_effect - dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) - aux_factory = self.Factory_T1(dependencies=dependencies) - aux_factories = [aux_factory] - cube = self.Cube(aux_factories=aux_factories) - self.resolve._prepare_factory_payload( - cube, self.category_local, from_src=self.from_src - ) - self.assertEqual(1, len(self.resolve.prepared_factories)) - prepared_dependencies = { - name: coord.metadata for name, coord in dependencies.items() - } - expected = [ - _PreparedFactory( - container=self.container_T1, dependencies=prepared_dependencies - ) - ] - self.assertEqual(expected, self.resolve.prepared_factories) - self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) - expected = [ - mock.call( - coord_a.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_b.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_c.metadata, self.category_local, from_src=self.from_src - ), - ] - actual = self.m_get_prepared_item.call_args_list - for call in expected: - self.assertIn(call, actual) - - def test_factory__dependency_local_not_prepared(self): - coord_a = self.Coord(metadata=sentinel.coord_a_metadata) - coord_b = self.Coord(metadata=sentinel.coord_b_metadata) - coord_c = self.Coord(metadata=sentinel.coord_c_metadata) - side_effect = (None, coord_a, None, coord_b, None, coord_c) - self.m_get_prepared_item.side_effect = side_effect - dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) - aux_factory = self.Factory_T1(dependencies=dependencies) - aux_factories = [aux_factory] - cube = self.Cube(aux_factories=aux_factories) - self.resolve._prepare_factory_payload( 
- cube, self.category_local, from_src=self.from_src - ) - self.assertEqual(1, len(self.resolve.prepared_factories)) - prepared_dependencies = { - name: coord.metadata for name, coord in dependencies.items() - } - expected = [ - _PreparedFactory( - container=self.container_T1, dependencies=prepared_dependencies - ) - ] - self.assertEqual(expected, self.resolve.prepared_factories) - self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) - expected = [ - mock.call( - coord_a.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_b.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_c.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_a.metadata, - self.category_local, - from_src=self.from_src, - from_local=True, - ), - mock.call( - coord_b.metadata, - self.category_local, - from_src=self.from_src, - from_local=True, - ), - mock.call( - coord_c.metadata, - self.category_local, - from_src=self.from_src, - from_local=True, - ), - ] - actual = self.m_get_prepared_item.call_args_list - for call in expected: - self.assertIn(call, actual) - - def test_factory__dependency_not_found(self): - coord_a = self.Coord(metadata=sentinel.coord_a_metadata) - coord_b = self.Coord(metadata=sentinel.coord_b_metadata) - coord_c = self.Coord(metadata=sentinel.coord_c_metadata) - side_effect = (None, None) - self.m_get_prepared_item.side_effect = side_effect - dependencies = dict(name_a=coord_a, name_b=coord_b, name_c=coord_c) - aux_factory = self.Factory_T1(dependencies=dependencies) - aux_factories = [aux_factory] - cube = self.Cube(aux_factories=aux_factories) - self.resolve._prepare_factory_payload( - cube, self.category_local, from_src=self.from_src - ) - self.assertEqual(0, len(self.resolve.prepared_factories)) - self.assertEqual(len(side_effect), self.m_get_prepared_item.call_count) - expected = [ - mock.call( - coord_a.metadata, self.category_local, from_src=self.from_src - ), - 
mock.call( - coord_b.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_c.metadata, self.category_local, from_src=self.from_src - ), - mock.call( - coord_a.metadata, - self.category_local, - from_src=self.from_src, - from_local=True, - ), - mock.call( - coord_b.metadata, - self.category_local, - from_src=self.from_src, - from_local=True, - ), - mock.call( - coord_c.metadata, - self.category_local, - from_src=self.from_src, - from_local=True, - ), - ] - actual = self.m_get_prepared_item.call_args_list - for call in actual: - self.assertIn(call, expected) - - -class Test__get_prepared_item(tests.IrisTest): - def setUp(self): - PreparedItem = namedtuple("PreparedItem", ["metadata"]) - self.resolve = Resolve() - self.prepared_dim_metadata_src = sentinel.prepared_dim_metadata_src - self.prepared_dim_metadata_tgt = sentinel.prepared_dim_metadata_tgt - self.prepared_items_dim = PreparedItem( - metadata=_PreparedMetadata( - combined=None, - src=self.prepared_dim_metadata_src, - tgt=self.prepared_dim_metadata_tgt, - ) - ) - self.prepared_aux_metadata_src = sentinel.prepared_aux_metadata_src - self.prepared_aux_metadata_tgt = sentinel.prepared_aux_metadata_tgt - self.prepared_items_aux = PreparedItem( - metadata=_PreparedMetadata( - combined=None, - src=self.prepared_aux_metadata_src, - tgt=self.prepared_aux_metadata_tgt, - ) - ) - self.prepared_scalar_metadata_src = ( - sentinel.prepared_scalar_metadata_src - ) - self.prepared_scalar_metadata_tgt = ( - sentinel.prepared_scalar_metadata_tgt - ) - self.prepared_items_scalar = PreparedItem( - metadata=_PreparedMetadata( - combined=None, - src=self.prepared_scalar_metadata_src, - tgt=self.prepared_scalar_metadata_tgt, - ) - ) - self.resolve.prepared_category = _CategoryItems( - items_dim=[self.prepared_items_dim], - items_aux=[self.prepared_items_aux], - items_scalar=[self.prepared_items_scalar], - ) - self.resolve.mapping = {0: 10} - self.m_create_prepared_item = self.patch( - 
"iris.common.resolve.Resolve._create_prepared_item" - ) - self.local_dim_metadata = sentinel.local_dim_metadata - self.local_aux_metadata = sentinel.local_aux_metadata - self.local_scalar_metadata = sentinel.local_scalar_metadata - self.local_coord = sentinel.local_coord - self.local_coord_dims = (0,) - self.local_items_dim = _Item( - metadata=self.local_dim_metadata, - coord=self.local_coord, - dims=self.local_coord_dims, - ) - self.local_items_aux = _Item( - metadata=self.local_aux_metadata, - coord=self.local_coord, - dims=self.local_coord_dims, - ) - self.local_items_scalar = _Item( - metadata=self.local_scalar_metadata, - coord=self.local_coord, - dims=self.local_coord_dims, - ) - self.category_local = _CategoryItems( - items_dim=[self.local_items_dim], - items_aux=[self.local_items_aux], - items_scalar=[self.local_items_scalar], - ) - - def test_missing_prepared_coord__from_src(self): - metadata = sentinel.missing - category_local = None - result = self.resolve._get_prepared_item(metadata, category_local) - self.assertIsNone(result) - - def test_missing_prepared_coord__from_tgt(self): - metadata = sentinel.missing - category_local = None - result = self.resolve._get_prepared_item( - metadata, category_local, from_src=False - ) - self.assertIsNone(result) - - def test_get_prepared_dim_coord__from_src(self): - metadata = self.prepared_dim_metadata_src - category_local = None - result = self.resolve._get_prepared_item(metadata, category_local) - self.assertEqual(self.prepared_items_dim, result) - - def test_get_prepared_dim_coord__from_tgt(self): - metadata = self.prepared_dim_metadata_tgt - category_local = None - result = self.resolve._get_prepared_item( - metadata, category_local, from_src=False - ) - self.assertEqual(self.prepared_items_dim, result) - - def test_get_prepared_aux_coord__from_src(self): - metadata = self.prepared_aux_metadata_src - category_local = None - result = self.resolve._get_prepared_item(metadata, category_local) - 
self.assertEqual(self.prepared_items_aux, result) - - def test_get_prepared_aux_coord__from_tgt(self): - metadata = self.prepared_aux_metadata_tgt - category_local = None - result = self.resolve._get_prepared_item( - metadata, category_local, from_src=False - ) - self.assertEqual(self.prepared_items_aux, result) - - def test_get_prepared_scalar_coord__from_src(self): - metadata = self.prepared_scalar_metadata_src - category_local = None - result = self.resolve._get_prepared_item(metadata, category_local) - self.assertEqual(self.prepared_items_scalar, result) - - def test_get_prepared_scalar_coord__from_tgt(self): - metadata = self.prepared_scalar_metadata_tgt - category_local = None - result = self.resolve._get_prepared_item( - metadata, category_local, from_src=False - ) - self.assertEqual(self.prepared_items_scalar, result) - - def test_missing_local_coord__from_src(self): - metadata = sentinel.missing - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_local=True - ) - self.assertIsNone(result) - - def test_missing_local_coord__from_tgt(self): - metadata = sentinel.missing - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_src=False, from_local=True - ) - self.assertIsNone(result) - - def test_get_local_dim_coord__from_src(self): - created_local_item = sentinel.created_local_item - self.m_create_prepared_item.return_value = created_local_item - metadata = self.local_dim_metadata - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_local=True - ) - expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_dim)) - self.assertEqual(expected, self.resolve.prepared_category.items_dim[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) - dims = (self.resolve.mapping[self.local_coord_dims[0]],) - expected = [ - mock.call( - self.local_coord, - dims, - src_metadata=metadata, - tgt_metadata=None, - 
) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_get_local_dim_coord__from_tgt(self): - created_local_item = sentinel.created_local_item - self.m_create_prepared_item.return_value = created_local_item - metadata = self.local_dim_metadata - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_src=False, from_local=True - ) - expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_dim)) - self.assertEqual(expected, self.resolve.prepared_category.items_dim[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) - dims = self.local_coord_dims - expected = [ - mock.call( - self.local_coord, - dims, - src_metadata=None, - tgt_metadata=metadata, - ) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_get_local_aux_coord__from_src(self): - created_local_item = sentinel.created_local_item - self.m_create_prepared_item.return_value = created_local_item - metadata = self.local_aux_metadata - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_local=True - ) - expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) - self.assertEqual(expected, self.resolve.prepared_category.items_aux[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) - dims = (self.resolve.mapping[self.local_coord_dims[0]],) - expected = [ - mock.call( - self.local_coord, - dims, - src_metadata=metadata, - tgt_metadata=None, - ) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_get_local_aux_coord__from_tgt(self): - created_local_item = sentinel.created_local_item - self.m_create_prepared_item.return_value = created_local_item - metadata = self.local_aux_metadata - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_src=False, 
from_local=True - ) - expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_aux)) - self.assertEqual(expected, self.resolve.prepared_category.items_aux[1]) - self.assertEqual(1, self.m_create_prepared_item.call_count) - dims = self.local_coord_dims - expected = [ - mock.call( - self.local_coord, - dims, - src_metadata=None, - tgt_metadata=metadata, - ) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_get_local_scalar_coord__from_src(self): - created_local_item = sentinel.created_local_item - self.m_create_prepared_item.return_value = created_local_item - metadata = self.local_scalar_metadata - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_local=True - ) - expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - self.assertEqual( - expected, self.resolve.prepared_category.items_scalar[1] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - dims = (self.resolve.mapping[self.local_coord_dims[0]],) - expected = [ - mock.call( - self.local_coord, - dims, - src_metadata=metadata, - tgt_metadata=None, - ) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - def test_get_local_scalar_coord__from_tgt(self): - created_local_item = sentinel.created_local_item - self.m_create_prepared_item.return_value = created_local_item - metadata = self.local_scalar_metadata - result = self.resolve._get_prepared_item( - metadata, self.category_local, from_src=False, from_local=True - ) - expected = created_local_item - self.assertEqual(expected, result) - self.assertEqual(2, len(self.resolve.prepared_category.items_scalar)) - self.assertEqual( - expected, self.resolve.prepared_category.items_scalar[1] - ) - self.assertEqual(1, self.m_create_prepared_item.call_count) - dims = self.local_coord_dims - expected = [ - 
mock.call( - self.local_coord, - dims, - src_metadata=None, - tgt_metadata=metadata, - ) - ] - self.assertEqual(expected, self.m_create_prepared_item.call_args_list) - - -class Test_cube(tests.IrisTest): - def setUp(self): - self.shape = (2, 3) - self.data = np.zeros(np.multiply(*self.shape), dtype=np.int8).reshape( - self.shape - ) - self.bad_data = np.zeros(np.multiply(*self.shape), dtype=np.int8) - self.resolve = Resolve() - self.resolve.map_rhs_to_lhs = True - self.resolve._broadcast_shape = self.shape - self.cube_metadata = CubeMetadata( - standard_name="air_temperature", - long_name="air temp", - var_name="airT", - units=Unit("K"), - attributes={}, - cell_methods=(), - ) - lhs_cube = Cube(self.data) - lhs_cube.metadata = self.cube_metadata - self.resolve.lhs_cube = lhs_cube - rhs_cube = Cube(self.data) - rhs_cube.metadata = self.cube_metadata - self.resolve.rhs_cube = rhs_cube - self.m_add_dim_coord = self.patch("iris.cube.Cube.add_dim_coord") - self.m_add_aux_coord = self.patch("iris.cube.Cube.add_aux_coord") - self.m_add_aux_factory = self.patch("iris.cube.Cube.add_aux_factory") - self.m_coord = self.patch("iris.cube.Cube.coord") - # - # prepared coordinates - # - prepared_category = _CategoryItems( - items_dim=[], items_aux=[], items_scalar=[] - ) - # prepared dim coordinates - self.prepared_dim_0_metadata = _PreparedMetadata( - combined=sentinel.prepared_dim_0_metadata_combined, - src=None, - tgt=None, - ) - self.prepared_dim_0_points = sentinel.prepared_dim_0_points - self.prepared_dim_0_bounds = sentinel.prepared_dim_0_bounds - self.prepared_dim_0_dims = (0,) - self.prepared_dim_0_coord = mock.Mock(metadata=None) - self.prepared_dim_0_container = mock.Mock( - return_value=self.prepared_dim_0_coord - ) - self.prepared_dim_0 = _PreparedItem( - metadata=self.prepared_dim_0_metadata, - points=self.prepared_dim_0_points, - bounds=self.prepared_dim_0_bounds, - dims=self.prepared_dim_0_dims, - container=self.prepared_dim_0_container, - ) - 
prepared_category.items_dim.append(self.prepared_dim_0) - self.prepared_dim_1_metadata = _PreparedMetadata( - combined=sentinel.prepared_dim_1_metadata_combined, - src=None, - tgt=None, - ) - self.prepared_dim_1_points = sentinel.prepared_dim_1_points - self.prepared_dim_1_bounds = sentinel.prepared_dim_1_bounds - self.prepared_dim_1_dims = (1,) - self.prepared_dim_1_coord = mock.Mock(metadata=None) - self.prepared_dim_1_container = mock.Mock( - return_value=self.prepared_dim_1_coord - ) - self.prepared_dim_1 = _PreparedItem( - metadata=self.prepared_dim_1_metadata, - points=self.prepared_dim_1_points, - bounds=self.prepared_dim_1_bounds, - dims=self.prepared_dim_1_dims, - container=self.prepared_dim_1_container, - ) - prepared_category.items_dim.append(self.prepared_dim_1) - - # prepared auxiliary coordinates - self.prepared_aux_0_metadata = _PreparedMetadata( - combined=sentinel.prepared_aux_0_metadata_combined, - src=None, - tgt=None, - ) - self.prepared_aux_0_points = sentinel.prepared_aux_0_points - self.prepared_aux_0_bounds = sentinel.prepared_aux_0_bounds - self.prepared_aux_0_dims = (0,) - self.prepared_aux_0_coord = mock.Mock(metadata=None) - self.prepared_aux_0_container = mock.Mock( - return_value=self.prepared_aux_0_coord - ) - self.prepared_aux_0 = _PreparedItem( - metadata=self.prepared_aux_0_metadata, - points=self.prepared_aux_0_points, - bounds=self.prepared_aux_0_bounds, - dims=self.prepared_aux_0_dims, - container=self.prepared_aux_0_container, - ) - prepared_category.items_aux.append(self.prepared_aux_0) - self.prepared_aux_1_metadata = _PreparedMetadata( - combined=sentinel.prepared_aux_1_metadata_combined, - src=None, - tgt=None, - ) - self.prepared_aux_1_points = sentinel.prepared_aux_1_points - self.prepared_aux_1_bounds = sentinel.prepared_aux_1_bounds - self.prepared_aux_1_dims = (1,) - self.prepared_aux_1_coord = mock.Mock(metadata=None) - self.prepared_aux_1_container = mock.Mock( - return_value=self.prepared_aux_1_coord - ) - 
self.prepared_aux_1 = _PreparedItem( - metadata=self.prepared_aux_1_metadata, - points=self.prepared_aux_1_points, - bounds=self.prepared_aux_1_bounds, - dims=self.prepared_aux_1_dims, - container=self.prepared_aux_1_container, - ) - prepared_category.items_aux.append(self.prepared_aux_1) - - # prepare scalar coordinates - self.prepared_scalar_0_metadata = _PreparedMetadata( - combined=sentinel.prepared_scalar_0_metadata_combined, - src=None, - tgt=None, - ) - self.prepared_scalar_0_points = sentinel.prepared_scalar_0_points - self.prepared_scalar_0_bounds = sentinel.prepared_scalar_0_bounds - self.prepared_scalar_0_dims = () - self.prepared_scalar_0_coord = mock.Mock(metadata=None) - self.prepared_scalar_0_container = mock.Mock( - return_value=self.prepared_scalar_0_coord - ) - self.prepared_scalar_0 = _PreparedItem( - metadata=self.prepared_scalar_0_metadata, - points=self.prepared_scalar_0_points, - bounds=self.prepared_scalar_0_bounds, - dims=self.prepared_scalar_0_dims, - container=self.prepared_scalar_0_container, - ) - prepared_category.items_scalar.append(self.prepared_scalar_0) - self.prepared_scalar_1_metadata = _PreparedMetadata( - combined=sentinel.prepared_scalar_1_metadata_combined, - src=None, - tgt=None, - ) - self.prepared_scalar_1_points = sentinel.prepared_scalar_1_points - self.prepared_scalar_1_bounds = sentinel.prepared_scalar_1_bounds - self.prepared_scalar_1_dims = () - self.prepared_scalar_1_coord = mock.Mock(metadata=None) - self.prepared_scalar_1_container = mock.Mock( - return_value=self.prepared_scalar_1_coord - ) - self.prepared_scalar_1 = _PreparedItem( - metadata=self.prepared_scalar_1_metadata, - points=self.prepared_scalar_1_points, - bounds=self.prepared_scalar_1_bounds, - dims=self.prepared_scalar_1_dims, - container=self.prepared_scalar_1_container, - ) - prepared_category.items_scalar.append(self.prepared_scalar_1) - # - # prepared factories - # - prepared_factories = [] - self.aux_factory = sentinel.aux_factory - 
self.prepared_factory_container = mock.Mock( - return_value=self.aux_factory - ) - self.prepared_factory_metadata_a = _PreparedMetadata( - combined=sentinel.prepared_factory_metadata_a_combined, - src=None, - tgt=None, - ) - self.prepared_factory_metadata_b = _PreparedMetadata( - combined=sentinel.prepared_factory_metadata_b_combined, - src=None, - tgt=None, - ) - self.prepared_factory_metadata_c = _PreparedMetadata( - combined=sentinel.prepared_factory_metadata_c_combined, - src=None, - tgt=None, - ) - self.prepared_factory_dependencies = dict( - name_a=self.prepared_factory_metadata_a, - name_b=self.prepared_factory_metadata_b, - name_c=self.prepared_factory_metadata_c, - ) - self.prepared_factory = _PreparedFactory( - container=self.prepared_factory_container, - dependencies=self.prepared_factory_dependencies, - ) - prepared_factories.append(self.prepared_factory) - self.prepared_factory_side_effect = ( - sentinel.prepared_factory_coord_a, - sentinel.prepared_factory_coord_b, - sentinel.prepared_factory_coord_c, - ) - self.m_coord.side_effect = self.prepared_factory_side_effect - self.resolve.prepared_category = prepared_category - self.resolve.prepared_factories = prepared_factories - - def test_no_resolved_shape(self): - self.resolve._broadcast_shape = None - data = None - emsg = "Cannot resolve resultant cube, as no candidate cubes have been provided" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve.cube(data) - - def test_bad_data_shape(self): - emsg = "Cannot resolve resultant cube, as the provided data must have shape" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve.cube(self.bad_data) - - def test_bad_data_shape__inplace(self): - self.resolve.lhs_cube = Cube(self.bad_data) - emsg = "Cannot resolve resultant cube in-place" - with self.assertRaisesRegex(ValueError, emsg): - _ = self.resolve.cube(self.data, in_place=True) - - def _check(self): - # check dim coordinate 0 - self.assertEqual(1, 
self.prepared_dim_0.container.call_count) - expected = [ - mock.call( - self.prepared_dim_0_points, bounds=self.prepared_dim_0_bounds - ) - ] - self.assertEqual( - expected, self.prepared_dim_0.container.call_args_list - ) - self.assertEqual( - self.prepared_dim_0_coord.metadata, - self.prepared_dim_0_metadata.combined, - ) - # check dim coordinate 1 - self.assertEqual(1, self.prepared_dim_1.container.call_count) - expected = [ - mock.call( - self.prepared_dim_1_points, bounds=self.prepared_dim_1_bounds - ) - ] - self.assertEqual( - expected, self.prepared_dim_1.container.call_args_list - ) - self.assertEqual( - self.prepared_dim_1_coord.metadata, - self.prepared_dim_1_metadata.combined, - ) - # check add_dim_coord - self.assertEqual(2, self.m_add_dim_coord.call_count) - expected = [ - mock.call(self.prepared_dim_0_coord, self.prepared_dim_0_dims), - mock.call(self.prepared_dim_1_coord, self.prepared_dim_1_dims), - ] - self.assertEqual(expected, self.m_add_dim_coord.call_args_list) - - # check aux coordinate 0 - self.assertEqual(1, self.prepared_aux_0.container.call_count) - expected = [ - mock.call( - self.prepared_aux_0_points, bounds=self.prepared_aux_0_bounds - ) - ] - self.assertEqual( - expected, self.prepared_aux_0.container.call_args_list - ) - self.assertEqual( - self.prepared_aux_0_coord.metadata, - self.prepared_aux_0_metadata.combined, - ) - # check aux coordinate 1 - self.assertEqual(1, self.prepared_aux_1.container.call_count) - expected = [ - mock.call( - self.prepared_aux_1_points, bounds=self.prepared_aux_1_bounds - ) - ] - self.assertEqual( - expected, self.prepared_aux_1.container.call_args_list - ) - self.assertEqual( - self.prepared_aux_1_coord.metadata, - self.prepared_aux_1_metadata.combined, - ) - # check scalar coordinate 0 - self.assertEqual(1, self.prepared_scalar_0.container.call_count) - expected = [ - mock.call( - self.prepared_scalar_0_points, - bounds=self.prepared_scalar_0_bounds, - ) - ] - self.assertEqual( - expected, 
self.prepared_scalar_0.container.call_args_list - ) - self.assertEqual( - self.prepared_scalar_0_coord.metadata, - self.prepared_scalar_0_metadata.combined, - ) - # check scalar coordinate 1 - self.assertEqual(1, self.prepared_scalar_1.container.call_count) - expected = [ - mock.call( - self.prepared_scalar_1_points, - bounds=self.prepared_scalar_1_bounds, - ) - ] - self.assertEqual( - expected, self.prepared_scalar_1.container.call_args_list - ) - self.assertEqual( - self.prepared_scalar_1_coord.metadata, - self.prepared_scalar_1_metadata.combined, - ) - # check add_aux_coord - self.assertEqual(4, self.m_add_aux_coord.call_count) - expected = [ - mock.call(self.prepared_aux_0_coord, self.prepared_aux_0_dims), - mock.call(self.prepared_aux_1_coord, self.prepared_aux_1_dims), - mock.call( - self.prepared_scalar_0_coord, self.prepared_scalar_0_dims - ), - mock.call( - self.prepared_scalar_1_coord, self.prepared_scalar_1_dims - ), - ] - self.assertEqual(expected, self.m_add_aux_coord.call_args_list) - - # check auxiliary factories - self.assertEqual(1, self.m_add_aux_factory.call_count) - expected = [mock.call(self.aux_factory)] - self.assertEqual(expected, self.m_add_aux_factory.call_args_list) - self.assertEqual(1, self.prepared_factory_container.call_count) - expected = [ - mock.call( - **{ - name: value - for name, value in zip( - sorted(self.prepared_factory_dependencies.keys()), - self.prepared_factory_side_effect, - ) - } - ) - ] - self.assertEqual( - expected, self.prepared_factory_container.call_args_list - ) - self.assertEqual(3, self.m_coord.call_count) - expected = [ - mock.call(self.prepared_factory_metadata_a.combined), - mock.call(self.prepared_factory_metadata_b.combined), - mock.call(self.prepared_factory_metadata_c.combined), - ] - self.assertEqual(expected, self.m_coord.call_args_list) - - def test_resolve(self): - result = self.resolve.cube(self.data) - self.assertEqual(self.cube_metadata, result.metadata) - self._check() - 
self.assertIsNot(self.resolve.lhs_cube, result) - - def test_resolve__inplace(self): - result = self.resolve.cube(self.data, in_place=True) - self.assertEqual(self.cube_metadata, result.metadata) - self._check() - self.assertIs(self.resolve.lhs_cube, result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/concatenate/__init__.py b/lib/iris/tests/unit/concatenate/__init__.py deleted file mode 100644 index cf671a6553..0000000000 --- a/lib/iris/tests/unit/concatenate/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris._concatenate` package.""" diff --git a/lib/iris/tests/unit/concatenate/test__CubeSignature.py b/lib/iris/tests/unit/concatenate/test__CubeSignature.py deleted file mode 100644 index b3870a7901..0000000000 --- a/lib/iris/tests/unit/concatenate/test__CubeSignature.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test class :class:`iris._concatenate._CubeSignature`.""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from cf_units import Unit -import numpy as np - -from iris._concatenate import _CubeSignature as CubeSignature -from iris.coords import DimCoord -from iris.cube import Cube -from iris.util import new_axis - - -class Test__coordinate_dim_metadata_equality(tests.IrisTest): - def setUp(self): - nt = 10 - data = np.arange(nt, dtype=np.float32) - cube = Cube(data, standard_name="air_temperature", units="K") - # Temporal coordinate. 
- t_units = Unit("hours since 1970-01-01 00:00:00", calendar="gregorian") - t_coord = DimCoord( - points=np.arange(nt), standard_name="time", units=t_units - ) - cube.add_dim_coord(t_coord, 0) - - # Increasing 1D time-series cube. - self.series_inc_cube = cube - self.series_inc = CubeSignature(self.series_inc_cube) - - # Decreasing 1D time-series cube. - self.series_dec_cube = self.series_inc_cube.copy() - self.series_dec_cube.remove_coord("time") - t_tmp = DimCoord( - points=t_coord.points[::-1], standard_name="time", units=t_units - ) - self.series_dec_cube.add_dim_coord(t_tmp, 0) - self.series_dec = CubeSignature(self.series_dec_cube) - - # Scalar 0D time-series cube with scalar time coordinate. - cube = Cube(0, standard_name="air_temperature", units="K") - cube.add_aux_coord( - DimCoord(points=nt, standard_name="time", units=t_units) - ) - self.scalar_cube = cube - - def test_scalar_non_common_axis(self): - scalar = CubeSignature(self.scalar_cube) - self.assertNotEqual(self.series_inc.dim_metadata, scalar.dim_metadata) - self.assertNotEqual(self.series_dec.dim_metadata, scalar.dim_metadata) - - def test_1d_single_value_common_axis(self): - # Manually promote scalar time cube to be a 1d cube. 
- single = CubeSignature(new_axis(self.scalar_cube, "time")) - self.assertEqual(self.series_inc.dim_metadata, single.dim_metadata) - self.assertEqual(self.series_dec.dim_metadata, single.dim_metadata) - - def test_increasing_common_axis(self): - series_inc = self.series_inc - series_dec = self.series_dec - self.assertEqual(series_inc.dim_metadata, series_inc.dim_metadata) - self.assertNotEqual(series_inc.dim_metadata, series_dec.dim_metadata) - - def test_decreasing_common_axis(self): - series_inc = self.series_inc - series_dec = self.series_dec - self.assertNotEqual(series_dec.dim_metadata, series_inc.dim_metadata) - self.assertEqual(series_dec.dim_metadata, series_dec.dim_metadata) - - def test_circular(self): - series_inc = self.series_inc - circular_cube = self.series_inc_cube.copy() - circular_cube.coord("time").circular = True - circular = CubeSignature(circular_cube) - self.assertNotEqual(circular.dim_metadata, series_inc.dim_metadata) - self.assertEqual(circular.dim_metadata, circular.dim_metadata) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/concatenate/test_concatenate.py b/lib/iris/tests/unit/concatenate/test_concatenate.py deleted file mode 100644 index 2af568f077..0000000000 --- a/lib/iris/tests/unit/concatenate/test_concatenate.py +++ /dev/null @@ -1,358 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._concatenate.concatenate.py`.""" - -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - -import cf_units -import numpy as np -import numpy.ma as ma - -from iris._concatenate import concatenate -from iris._lazy_data import as_lazy_data -import iris.coords -import iris.cube -from iris.exceptions import ConcatenateError - - -class TestEpoch(tests.IrisTest): - def simple_1d_time_cubes(self, reftimes, coords_points): - cubes = [] - data_points = [273, 275, 278, 277, 274] - for reftime, coord_points in zip(reftimes, coords_points): - cube = iris.cube.Cube( - np.array(data_points, dtype=np.float32), - standard_name="air_temperature", - units="K", - ) - unit = cf_units.Unit(reftime, calendar="gregorian") - coord = iris.coords.DimCoord( - points=np.array(coord_points, dtype=np.float32), - standard_name="time", - units=unit, - ) - cube.add_dim_coord(coord, 0) - cubes.append(cube) - return cubes - - def test_concat_1d_with_same_time_units(self): - reftimes = [ - "hours since 1970-01-01 00:00:00", - "hours since 1970-01-01 00:00:00", - ] - coords_points = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10]] - cubes = self.simple_1d_time_cubes(reftimes, coords_points) - result = concatenate(cubes) - self.assertEqual(len(result), 1) - self.assertEqual(result[0].shape, (10,)) - - -class TestMessages(tests.IrisTest): - def setUp(self): - data = np.arange(24, dtype=np.float32).reshape(2, 3, 4) - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - # Time coord - t_unit = cf_units.Unit( - "hours since 1970-01-01 00:00:00", calendar="gregorian" - ) - t_coord = iris.coords.DimCoord( - points=np.arange(2, dtype=np.float32), - standard_name="time", - units=t_unit, - ) - cube.add_dim_coord(t_coord, 0) - # Lats and lons - x_coord = iris.coords.DimCoord( - points=np.arange(3, dtype=np.float32), - standard_name="longitude", - units="degrees", - ) - cube.add_dim_coord(x_coord, 1) - y_coord = iris.coords.DimCoord( - points=np.arange(4, dtype=np.float32), - standard_name="latitude", - units="degrees", - ) - 
cube.add_dim_coord(y_coord, 2) - # Scalars - cube.add_aux_coord(iris.coords.AuxCoord([0], "height", units="m")) - # Aux Coords - cube.add_aux_coord( - iris.coords.AuxCoord(data, long_name="wibble", units="1"), - data_dims=(0, 1, 2), - ) - cube.add_aux_coord( - iris.coords.AuxCoord([0, 1, 2], long_name="foo", units="1"), - data_dims=(1,), - ) - cube.add_cell_measure( - iris.coords.CellMeasure([0, 1, 2], long_name="bar", units="1"), - data_dims=(1,), - ) - cube.add_ancillary_variable( - iris.coords.AncillaryVariable( - [0, 1, 2], long_name="baz", units="1" - ), - data_dims=(1,), - ) - self.cube = cube - - def test_definition_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.units = "1" - exc_regexp = "Cube metadata differs for phenomenon: *" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_dimensions_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_coord("latitude") - exc_regexp = "Dimension coordinates differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_dimensions_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.coord("latitude").long_name = "bob" - exc_regexp = "Dimension coordinates metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_aux_coords_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_coord("foo") - exc_regexp = "Auxiliary coordinates differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_aux_coords_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.coord("foo").units = "m" - exc_regexp = "Auxiliary coordinates metadata differ: .* != .*" - with 
self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_scalar_coords_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_coord("height") - exc_regexp = "Scalar coordinates differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_scalar_coords_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.coord("height").long_name = "alice" - exc_regexp = "Scalar coordinates values or metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_cell_measure_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_cell_measure("bar") - exc_regexp = "Cell measures differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_cell_measure_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.cell_measure("bar").units = "m" - exc_regexp = "Cell measures metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_ancillary_variable_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.remove_ancillary_variable("baz") - exc_regexp = "Ancillary variables differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_ancillary_variable_metadata_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.ancillary_variable("baz").units = "m" - exc_regexp = "Ancillary variables metadata differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_ndim_difference_message(self): - cube_1 = self.cube - 
cube_2 = iris.cube.Cube( - np.arange(5, dtype=np.float32), - standard_name="air_temperature", - units="K", - ) - x_coord = iris.coords.DimCoord( - points=np.arange(5, dtype=np.float32), - standard_name="longitude", - units="degrees", - ) - cube_2.add_dim_coord(x_coord, 0) - exc_regexp = "Data dimensions differ: [0-9] != [0-9]" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - def test_datatype_difference_message(self): - cube_1 = self.cube - cube_2 = cube_1.copy() - cube_2.data.dtype = np.float64 - exc_regexp = "Data types differ: .* != .*" - with self.assertRaisesRegex(ConcatenateError, exc_regexp): - _ = concatenate([cube_1, cube_2], True) - - -class TestOrder(tests.IrisTest): - def _make_cube(self, points, bounds=None): - nx = 4 - data = np.arange(len(points) * nx).reshape(len(points), nx) - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - lat = iris.coords.DimCoord(points, "latitude", bounds=bounds) - lon = iris.coords.DimCoord(np.arange(nx), "longitude") - cube.add_dim_coord(lat, 0) - cube.add_dim_coord(lon, 1) - return cube - - def test_asc_points(self): - top = self._make_cube([10, 30, 50, 70, 90]) - bottom = self._make_cube([-90, -70, -50, -30, -10]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_asc_bounds(self): - top = self._make_cube([22.5, 67.5], [[0, 45], [45, 90]]) - bottom = self._make_cube([-67.5, -22.5], [[-90, -45], [-45, 0]]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_asc_points_with_singleton_ordered(self): - top = self._make_cube([5]) - bottom = self._make_cube([15, 25]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_asc_points_with_singleton_unordered(self): - top = self._make_cube([25]) - bottom = self._make_cube([5, 15]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def 
test_asc_bounds_with_singleton_ordered(self): - top = self._make_cube([5], [[0, 10]]) - bottom = self._make_cube([15, 25], [[10, 20], [20, 30]]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_asc_bounds_with_singleton_unordered(self): - top = self._make_cube([25], [[20, 30]]) - bottom = self._make_cube([5, 15], [[0, 10], [10, 20]]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_desc_points(self): - top = self._make_cube([90, 70, 50, 30, 10]) - bottom = self._make_cube([-10, -30, -50, -70, -90]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_desc_bounds(self): - top = self._make_cube([67.5, 22.5], [[90, 45], [45, 0]]) - bottom = self._make_cube([-22.5, -67.5], [[0, -45], [-45, -90]]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_desc_points_with_singleton_ordered(self): - top = self._make_cube([25]) - bottom = self._make_cube([15, 5]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_desc_points_with_singleton_unordered(self): - top = self._make_cube([5]) - bottom = self._make_cube([25, 15]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_desc_bounds_with_singleton_ordered(self): - top = self._make_cube([25], [[30, 20]]) - bottom = self._make_cube([15, 5], [[20, 10], [10, 0]]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_desc_bounds_with_singleton_unordered(self): - top = self._make_cube([5], [[10, 0]]) - bottom = self._make_cube([25, 15], [[30, 20], [20, 10]]) - result = concatenate([top, bottom]) - self.assertEqual(len(result), 1) - - def test_points_all_singleton(self): - top = self._make_cube([5]) - bottom = self._make_cube([15]) - result1 = concatenate([top, bottom]) - result2 = concatenate([bottom, top]) - self.assertEqual(len(result1), 1) - self.assertEqual(len(result2), 1) - 
self.assertEqual(result1, result2) - - def test_asc_bounds_all_singleton(self): - top = self._make_cube([5], [0, 10]) - bottom = self._make_cube([15], [10, 20]) - result1 = concatenate([top, bottom]) - result2 = concatenate([bottom, top]) - self.assertEqual(len(result1), 1) - self.assertEqual(len(result2), 1) - self.assertEqual(result1, result2) - - def test_desc_bounds_all_singleton(self): - top = self._make_cube([5], [10, 0]) - bottom = self._make_cube([15], [20, 10]) - result1 = concatenate([top, bottom]) - result2 = concatenate([bottom, top]) - self.assertEqual(len(result1), 1) - self.assertEqual(len(result2), 1) - self.assertEqual(result1, result2) - - -class TestConcatenate__dask(tests.IrisTest): - def build_lazy_cube(self, points, bounds=None, nx=4): - data = np.arange(len(points) * nx).reshape(len(points), nx) - data = as_lazy_data(data) - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - lat = iris.coords.DimCoord(points, "latitude", bounds=bounds) - lon = iris.coords.DimCoord(np.arange(nx), "longitude") - cube.add_dim_coord(lat, 0) - cube.add_dim_coord(lon, 1) - return cube - - def test_lazy_concatenate(self): - c1 = self.build_lazy_cube([1, 2]) - c2 = self.build_lazy_cube([3, 4, 5]) - (cube,) = concatenate([c1, c2]) - self.assertTrue(cube.has_lazy_data()) - self.assertFalse(ma.isMaskedArray(cube.data)) - - def test_lazy_concatenate_masked_array_mixed_deferred(self): - c1 = self.build_lazy_cube([1, 2]) - c2 = self.build_lazy_cube([3, 4, 5]) - c2.data = np.ma.masked_greater(c2.data, 3) - (cube,) = concatenate([c1, c2]) - self.assertTrue(cube.has_lazy_data()) - self.assertTrue(ma.isMaskedArray(cube.data)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/config/__init__.py b/lib/iris/tests/unit/config/__init__.py deleted file mode 100644 index 38806c7db8..0000000000 --- a/lib/iris/tests/unit/config/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris 
and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.config` module.""" diff --git a/lib/iris/tests/unit/config/test_NetCDF.py b/lib/iris/tests/unit/config/test_NetCDF.py deleted file mode 100644 index c7f7564e4e..0000000000 --- a/lib/iris/tests/unit/config/test_NetCDF.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.config.NetCDF` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import warnings - -import iris.config - - -class Test(tests.IrisTest): - def setUp(self): - self.options = iris.config.NetCDF() - - def test_basic(self): - self.assertFalse(self.options.conventions_override) - - def test_enabled(self): - self.options.conventions_override = True - self.assertTrue(self.options.conventions_override) - - def test_bad_value(self): - # A bad value should be ignored and replaced with the default value. 
- bad_value = "wibble" - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("always") - self.options.conventions_override = bad_value - self.assertFalse(self.options.conventions_override) - exp_wmsg = "Attempting to set invalid value {!r}".format(bad_value) - self.assertRegex(str(w[0].message), exp_wmsg) - - def test__contextmgr(self): - with self.options.context(conventions_override=True): - self.assertTrue(self.options.conventions_override) - self.assertFalse(self.options.conventions_override) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/constraints/__init__.py b/lib/iris/tests/unit/constraints/__init__.py deleted file mode 100644 index 03a987b1a1..0000000000 --- a/lib/iris/tests/unit/constraints/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris._constraints` module.""" diff --git a/lib/iris/tests/unit/constraints/test_NameConstraint.py b/lib/iris/tests/unit/constraints/test_NameConstraint.py deleted file mode 100644 index 46aea25331..0000000000 --- a/lib/iris/tests/unit/constraints/test_NameConstraint.py +++ /dev/null @@ -1,229 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris._constraints.NameConstraint` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest.mock import Mock, sentinel - -from iris._constraints import NameConstraint - - -class Test___init__(tests.IrisTest): - def setUp(self): - self.default = "none" - - def test_default(self): - constraint = NameConstraint() - self.assertEqual(constraint.standard_name, self.default) - self.assertEqual(constraint.long_name, self.default) - self.assertEqual(constraint.var_name, self.default) - self.assertEqual(constraint.STASH, self.default) - - def test_standard_name(self): - standard_name = sentinel.standard_name - constraint = NameConstraint(standard_name=standard_name) - self.assertEqual(constraint.standard_name, standard_name) - constraint = NameConstraint(standard_name=standard_name) - self.assertEqual(constraint.standard_name, standard_name) - - def test_long_name(self): - long_name = sentinel.long_name - constraint = NameConstraint(long_name=long_name) - self.assertEqual(constraint.standard_name, self.default) - self.assertEqual(constraint.long_name, long_name) - constraint = NameConstraint(standard_name=None, long_name=long_name) - self.assertIsNone(constraint.standard_name) - self.assertEqual(constraint.long_name, long_name) - - def test_var_name(self): - var_name = sentinel.var_name - constraint = NameConstraint(var_name=var_name) - self.assertEqual(constraint.standard_name, self.default) - self.assertEqual(constraint.long_name, self.default) - self.assertEqual(constraint.var_name, var_name) - constraint = NameConstraint( - standard_name=None, long_name=None, var_name=var_name - ) - self.assertIsNone(constraint.standard_name) - self.assertIsNone(constraint.long_name) - self.assertEqual(constraint.var_name, var_name) - - def test_STASH(self): - STASH = sentinel.STASH - constraint = NameConstraint(STASH=STASH) - self.assertEqual(constraint.standard_name, self.default) - self.assertEqual(constraint.long_name, self.default) - self.assertEqual(constraint.var_name, self.default) - 
self.assertEqual(constraint.STASH, STASH) - constraint = NameConstraint( - standard_name=None, long_name=None, var_name=None, STASH=STASH - ) - self.assertIsNone(constraint.standard_name) - self.assertIsNone(constraint.long_name) - self.assertIsNone(constraint.var_name) - self.assertEqual(constraint.STASH, STASH) - - -class Test__cube_func(tests.IrisTest): - def setUp(self): - self.standard_name = sentinel.standard_name - self.long_name = sentinel.long_name - self.var_name = sentinel.var_name - self.STASH = sentinel.STASH - self.cube = Mock( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - attributes=dict(STASH=self.STASH), - ) - - def test_standard_name(self): - # Match. - constraint = NameConstraint(standard_name=self.standard_name) - self.assertTrue(constraint._cube_func(self.cube)) - # Match. - constraint = NameConstraint(standard_name=self.standard_name) - self.assertTrue(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint(standard_name="wibble") - self.assertFalse(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint(standard_name="wibble") - self.assertFalse(constraint._cube_func(self.cube)) - - def test_long_name(self): - # Match. - constraint = NameConstraint(long_name=self.long_name) - self.assertTrue(constraint._cube_func(self.cube)) - # Match. - constraint = NameConstraint( - standard_name=self.standard_name, long_name=self.long_name - ) - self.assertTrue(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint(long_name=None) - self.assertFalse(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint( - standard_name=None, long_name=self.long_name - ) - self.assertFalse(constraint._cube_func(self.cube)) - - def test_var_name(self): - # Match. - constraint = NameConstraint(var_name=self.var_name) - self.assertTrue(constraint._cube_func(self.cube)) - # Match. 
- constraint = NameConstraint( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - ) - self.assertTrue(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint(var_name=None) - self.assertFalse(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint( - standard_name=None, long_name=None, var_name=self.var_name - ) - self.assertFalse(constraint._cube_func(self.cube)) - - def test_STASH(self): - # Match. - constraint = NameConstraint(STASH=self.STASH) - self.assertTrue(constraint._cube_func(self.cube)) - # Match. - constraint = NameConstraint( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - STASH=self.STASH, - ) - self.assertTrue(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint(STASH=None) - self.assertFalse(constraint._cube_func(self.cube)) - # No match. - constraint = NameConstraint( - standard_name=None, long_name=None, var_name=None, STASH=self.STASH - ) - self.assertFalse(constraint._cube_func(self.cube)) - - -class Test___repr__(tests.IrisTest): - def setUp(self): - self.standard_name = sentinel.standard_name - self.long_name = sentinel.long_name - self.var_name = sentinel.var_name - self.STASH = sentinel.STASH - self.msg = "NameConstraint({})" - self.f_standard_name = "standard_name={!r}".format(self.standard_name) - self.f_long_name = "long_name={!r}".format(self.long_name) - self.f_var_name = "var_name={!r}".format(self.var_name) - self.f_STASH = "STASH={!r}".format(self.STASH) - - def test(self): - constraint = NameConstraint() - expected = self.msg.format("") - self.assertEqual(repr(constraint), expected) - - def test_standard_name(self): - constraint = NameConstraint(standard_name=self.standard_name) - expected = self.msg.format(self.f_standard_name) - self.assertEqual(repr(constraint), expected) - - def test_long_name(self): - constraint = NameConstraint(long_name=self.long_name) - expected = 
self.msg.format(self.f_long_name) - self.assertEqual(repr(constraint), expected) - constraint = NameConstraint( - standard_name=self.standard_name, long_name=self.long_name - ) - args = "{}, {}".format(self.f_standard_name, self.f_long_name) - expected = self.msg.format(args) - self.assertEqual(repr(constraint), expected) - - def test_var_name(self): - constraint = NameConstraint(var_name=self.var_name) - expected = self.msg.format(self.f_var_name) - self.assertEqual(repr(constraint), expected) - constraint = NameConstraint( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - ) - args = "{}, {}, {}".format( - self.f_standard_name, self.f_long_name, self.f_var_name - ) - expected = self.msg.format(args) - self.assertEqual(repr(constraint), expected) - - def test_STASH(self): - constraint = NameConstraint(STASH=self.STASH) - expected = self.msg.format(self.f_STASH) - self.assertEqual(repr(constraint), expected) - constraint = NameConstraint( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - STASH=self.STASH, - ) - args = "{}, {}, {}, {}".format( - self.f_standard_name, - self.f_long_name, - self.f_var_name, - self.f_STASH, - ) - expected = self.msg.format(args) - self.assertEqual(repr(constraint), expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_categorisation/__init__.py b/lib/iris/tests/unit/coord_categorisation/__init__.py deleted file mode 100644 index 18fe8f2482..0000000000 --- a/lib/iris/tests/unit/coord_categorisation/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.coord_categorisation` module.""" diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py b/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py deleted file mode 100644 index b7c59ff566..0000000000 --- a/lib/iris/tests/unit/coord_categorisation/test_add_categorised_coord.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.coord_categorisation.add_categorised_coord`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import CALENDARS as calendars -from cf_units import Unit -import numpy as np - -from iris.coord_categorisation import add_categorised_coord, add_day_of_year -from iris.coords import DimCoord -from iris.cube import Cube - - -class Test_add_categorised_coord(tests.IrisTest): - def setUp(self): - # Factor out common variables and objects. - self.cube = mock.Mock(name="cube", coords=mock.Mock(return_value=[])) - self.coord = mock.Mock( - name="coord", points=np.arange(12).reshape(3, 4) - ) - self.units = "units" - self.vectorised = mock.Mock(name="vectorized_result") - - def test_vectorise_call(self): - # Check that the function being passed through gets called with - # numpy.vectorize, before being applied to the points array. - # The reason we use numpy.vectorize is to support multi-dimensional - # coordinate points. 
- def fn(coord, v): - return v ** 2 - - with mock.patch( - "numpy.vectorize", return_value=self.vectorised - ) as vectorise_patch: - with mock.patch("iris.coords.AuxCoord") as aux_coord_constructor: - add_categorised_coord( - self.cube, "foobar", self.coord, fn, units=self.units - ) - - # Check the constructor of AuxCoord gets called with the - # appropriate arguments. - # Start with the vectorised function. - vectorise_patch.assert_called_once_with(fn) - # Check the vectorize wrapper gets called with the appropriate args. - self.vectorised.assert_called_once_with(self.coord, self.coord.points) - # Check the AuxCoord constructor itself. - aux_coord_constructor.assert_called_once_with( - self.vectorised(self.coord, self.coord.points), - units=self.units, - attributes=self.coord.attributes.copy(), - ) - # And check adding the aux coord to the cube mock. - self.cube.add_aux_coord.assert_called_once_with( - aux_coord_constructor(), self.cube.coord_dims(self.coord) - ) - - def test_string_vectorised(self): - # Check that special case handling of a vectorized string returning - # function is taking place. 
- def fn(coord, v): - return "0123456789"[:v] - - with mock.patch( - "numpy.vectorize", return_value=self.vectorised - ) as vectorise_patch: - with mock.patch("iris.coords.AuxCoord") as aux_coord_constructor: - add_categorised_coord( - self.cube, "foobar", self.coord, fn, units=self.units - ) - - self.assertEqual( - aux_coord_constructor.call_args[0][0], - vectorise_patch(fn, otypes=[object])( - self.coord, self.coord.points - ).astype("|S64"), - ) - - -class Test_add_day_of_year(tests.IrisTest): - def setUp(self): - self.expected = { - "standard": np.array(list(range(360, 367)) + list(range(1, 4))), - "gregorian": np.array(list(range(360, 367)) + list(range(1, 4))), - "proleptic_gregorian": np.array( - list(range(360, 367)) + list(range(1, 4)) - ), - "noleap": np.array(list(range(359, 366)) + list(range(1, 4))), - "julian": np.array(list(range(360, 367)) + list(range(1, 4))), - "all_leap": np.array(list(range(360, 367)) + list(range(1, 4))), - "365_day": np.array(list(range(359, 366)) + list(range(1, 4))), - "366_day": np.array(list(range(360, 367)) + list(range(1, 4))), - "360_day": np.array(list(range(355, 361)) + list(range(1, 5))), - } - - def make_cube(self, calendar): - n_times = 10 - cube = Cube(np.arange(n_times)) - time_coord = DimCoord( - np.arange(n_times), - standard_name="time", - units=Unit("days since 1980-12-25", calendar=calendar), - ) - cube.add_dim_coord(time_coord, 0) - return cube - - def test_calendars(self): - for calendar in calendars: - # Skip the Julian calendar due to - # https://github.com/Unidata/netcdftime/issues/13 - # Remove this if block once the issue is resolved. - if calendar == "julian": - continue - cube = self.make_cube(calendar) - add_day_of_year(cube, "time") - points = cube.coord("day_of_year").points - expected_points = self.expected[calendar] - msg = "Test failed for the following calendar: {}." 
- self.assertArrayEqual( - points, expected_points, err_msg=msg.format(calendar) - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py b/lib/iris/tests/unit/coord_categorisation/test_add_hour.py deleted file mode 100644 index 86230c84b9..0000000000 --- a/lib/iris/tests/unit/coord_categorisation/test_add_hour.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test coordinate categorisation function add_hour. -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import cf_units -import numpy as np - -import iris -import iris.coord_categorisation as ccat - - -class Test_add_hour(tests.IrisTest): - def setUp(self): - # make a series of 'hour numbers' for the time - hour_numbers = np.arange(0, 200, 5, dtype=np.int32) - - # use hour numbers as data values also (don't actually use this for - # anything) - cube = iris.cube.Cube( - hour_numbers, long_name="test cube", units="metres" - ) - - time_coord = iris.coords.DimCoord( - hour_numbers, - standard_name="time", - units=cf_units.Unit("hours since epoch", "gregorian"), - ) - cube.add_dim_coord(time_coord, 0) - - self.hour_numbers = hour_numbers - self.cube = cube - self.time_coord = time_coord - - def test_bad_coord(self): - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - ccat.add_hour(self.cube, "DOES NOT EXIST", name="my_hour") - - def test_explicit_result_name_specify_coord_by_name(self): - coord_name = "my_hour" - msg = "Missing/incorrectly named result for add_hour" - - # Specify source coordinate by name - cube = self.cube - ccat.add_hour(cube, "time", name=coord_name) - result_coords = cube.coords(coord_name) - self.assertEqual(len(result_coords), 1, 
msg) - - def test_explicit_result_name_specify_coord_by_reference(self): - coord_name = "my_hour" - msg = "Missing/incorrectly named result for add_hour" - - # Specify source coordinate by coordinate reference - cube = self.cube - time = cube.coord("time") - ccat.add_hour(cube, time, name=coord_name) - result_coords = cube.coords(coord_name) - self.assertEqual(len(result_coords), 1, msg) - - def test_basic(self): - coord_name = "my_hour" - cube = self.cube - time_coord = self.time_coord - expected_coord = iris.coords.AuxCoord( - self.hour_numbers % 24, long_name=coord_name, units="1" - ) - - ccat.add_hour(cube, time_coord, coord_name) - - self.assertEqual(cube.coord(coord_name), expected_coord) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/__init__.py b/lib/iris/tests/unit/coord_systems/__init__.py deleted file mode 100644 index 39d4d25f73..0000000000 --- a/lib/iris/tests/unit/coord_systems/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.coord_systems` module.""" diff --git a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py b/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py deleted file mode 100644 index 99a7c9f59b..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_AlbersEqualArea.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.coord_systems.AlbersEqualArea` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs - -from iris.coord_systems import AlbersEqualArea, GeogCS - - -class Test_as_cartopy_crs(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 0.0 - self.longitude_of_central_meridian = 0.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.false_easting = 0.0 - self.false_northing = 0.0 - self.standard_parallels = (-18.0, -36.0) - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.aea_cs = AlbersEqualArea( - self.latitude_of_projection_origin, - self.longitude_of_central_meridian, - self.false_easting, - self.false_northing, - self.standard_parallels, - ellipsoid=self.ellipsoid, - ) - - def test_crs_creation(self): - res = self.aea_cs.as_cartopy_crs() - globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - expected = ccrs.AlbersEqualArea( - self.longitude_of_central_meridian, - self.latitude_of_projection_origin, - self.false_easting, - self.false_northing, - self.standard_parallels, - globe=globe, - ) - self.assertEqual(res, expected) - - def test_fail_too_few_parallels(self): - emsg = "parallels" - with self.assertRaisesRegex(ValueError, emsg): - AlbersEqualArea(standard_parallels=()) - - def test_fail_too_many_parallels(self): - emsg = "parallels" - with self.assertRaisesRegex(ValueError, emsg): - AlbersEqualArea(standard_parallels=(1, 2, 3)) - - -class Test_as_cartopy_projection(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 0.0 - self.longitude_of_central_meridian = 0.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.false_easting = 0.0 - self.false_northing = 0.0 - self.standard_parallels = (-18.0, -36.0) - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.aea_cs = AlbersEqualArea( - self.latitude_of_projection_origin, - self.longitude_of_central_meridian, - 
self.false_easting, - self.false_northing, - self.standard_parallels, - ellipsoid=self.ellipsoid, - ) - - def test_projection_creation(self): - res = self.aea_cs.as_cartopy_projection() - globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - expected = ccrs.AlbersEqualArea( - self.latitude_of_projection_origin, - self.longitude_of_central_meridian, - self.false_easting, - self.false_northing, - self.standard_parallels, - globe=globe, - ) - self.assertEqual(res, expected) - - -class Test_init_defaults(tests.IrisTest): - def test_set_optional_args(self): - # Check that setting optional arguments works as expected. - crs = AlbersEqualArea( - longitude_of_central_meridian=123, - latitude_of_projection_origin=-17, - false_easting=100, - false_northing=-200, - standard_parallels=(-37, 21.4), - ) - - self.assertEqualAndKind(crs.longitude_of_central_meridian, 123.0) - self.assertEqualAndKind(crs.latitude_of_projection_origin, -17.0) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -200.0) - self.assertEqual(len(crs.standard_parallels), 2) - self.assertEqualAndKind(crs.standard_parallels[0], -37.0) - self.assertEqualAndKind(crs.standard_parallels[1], 21.4) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. - self.assertEqualAndKind(crs.longitude_of_central_meridian, 0.0) - self.assertEqualAndKind(crs.latitude_of_projection_origin, 0.0) - self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - self.assertEqual(len(crs.standard_parallels), 2) - self.assertEqualAndKind(crs.standard_parallels[0], 20.0) - self.assertEqualAndKind(crs.standard_parallels[1], 50.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. 
- crs = AlbersEqualArea() - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = AlbersEqualArea( - longitude_of_central_meridian=None, - latitude_of_projection_origin=None, - standard_parallels=None, - false_easting=None, - false_northing=None, - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_GeogCS.py b/lib/iris/tests/unit/coord_systems/test_GeogCS.py deleted file mode 100644 index f3f9531dbb..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_GeogCS.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.GeogCS` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.coord_systems import GeogCS - - -class Test_init_defaults(tests.IrisTest): - # NOTE: most of the testing for GeogCS is in the legacy test module - # 'iris.tests.test_coordsystem'. - # This class *only* tests the defaults for optional constructor args. - - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) argument works. - crs = GeogCS(1.0, longitude_of_prime_meridian=-85) - self.assertEqualAndKind(crs.longitude_of_prime_meridian, -85.0) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. 
- radius = float(crs.semi_major_axis) - self.assertEqualAndKind(crs.semi_major_axis, radius) # just the kind - self.assertEqualAndKind(crs.semi_minor_axis, radius) - self.assertEqualAndKind(crs.inverse_flattening, 0.0) - self.assertEqualAndKind(crs.longitude_of_prime_meridian, 0.0) - - def test_no_optional_args(self): - # Check expected properties with no optional args. - crs = GeogCS(1.0) - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected properties with optional args=None. - crs = GeogCS( - 1.0, - semi_minor_axis=None, - inverse_flattening=None, - longitude_of_prime_meridian=None, - ) - self._check_crs_defaults(crs) - - def test_zero_inverse_flattening_on_perfect_sphere(self): - # allow inverse_flattening to be 0 for a perfect sphere - # i.e. semi-major axis defined, semi-minor is None. - crs = GeogCS( - 1.0, - semi_minor_axis=None, - inverse_flattening=0.0, - longitude_of_prime_meridian=None, - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_Geostationary.py b/lib/iris/tests/unit/coord_systems/test_Geostationary.py deleted file mode 100644 index cc3c8384db..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_Geostationary.py +++ /dev/null @@ -1,116 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.Geostationary` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs - -from iris.coord_systems import GeogCS, Geostationary - - -class Test(tests.IrisTest): - def setUp(self): - # Set everything to non-default values. - self.latitude_of_projection_origin = 0 # For now, Cartopy needs =0. 
- self.longitude_of_projection_origin = 123.0 - self.perspective_point_height = 9999.0 - self.sweep_angle_axis = "x" - self.false_easting = 100.0 - self.false_northing = -200.0 - - self.semi_major_axis = 4000.0 - self.semi_minor_axis = 3900.0 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - - # Actual and expected coord system can be re-used for - # Geostationary.test_crs_creation and test_projection_creation. - self.expected = ccrs.Geostationary( - central_longitude=self.longitude_of_projection_origin, - satellite_height=self.perspective_point_height, - false_easting=self.false_easting, - false_northing=self.false_northing, - globe=self.globe, - sweep_axis=self.sweep_angle_axis, - ) - self.geo_cs = Geostationary( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.perspective_point_height, - self.sweep_angle_axis, - self.false_easting, - self.false_northing, - self.ellipsoid, - ) - - def test_crs_creation(self): - res = self.geo_cs.as_cartopy_crs() - self.assertEqual(res, self.expected) - - def test_projection_creation(self): - res = self.geo_cs.as_cartopy_projection() - self.assertEqual(res, self.expected) - - def test_non_zero_lat(self): - with self.assertRaisesRegex(ValueError, "Non-zero latitude"): - Geostationary( - 0.1, - self.longitude_of_projection_origin, - self.perspective_point_height, - self.sweep_angle_axis, - self.false_easting, - self.false_northing, - self.ellipsoid, - ) - - def test_invalid_sweep(self): - with self.assertRaisesRegex(ValueError, "Invalid sweep_angle_axis"): - Geostationary( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.perspective_point_height, - "a", - self.false_easting, - self.false_northing, - self.ellipsoid, - ) - - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. 
- crs = Geostationary( - 0, 0, 1000, "y", false_easting=100, false_northing=-200 - ) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -200.0) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. - self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = Geostationary(0, 0, 1000, "y") - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = Geostationary( - 0, 0, 1000, "y", false_easting=None, false_northing=None - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py b/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py deleted file mode 100644 index 971ee06293..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_LambertAzimuthalEqualArea.py +++ /dev/null @@ -1,127 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.coord_systems.LambertAzimuthalEqualArea` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs - -from iris.coord_systems import GeogCS, LambertAzimuthalEqualArea - - -class Test_as_cartopy_crs(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 90.0 - self.longitude_of_projection_origin = 0.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.false_easting = 0.0 - self.false_northing = 0.0 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.laea_cs = LambertAzimuthalEqualArea( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.false_easting, - self.false_northing, - ellipsoid=self.ellipsoid, - ) - - def test_crs_creation(self): - res = self.laea_cs.as_cartopy_crs() - globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - expected = ccrs.LambertAzimuthalEqualArea( - self.longitude_of_projection_origin, - self.latitude_of_projection_origin, - self.false_easting, - self.false_northing, - globe=globe, - ) - self.assertEqual(res, expected) - - -class Test_as_cartopy_projection(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 0.0 - self.longitude_of_projection_origin = 0.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.false_easting = 0.0 - self.false_northing = 0.0 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.laea_cs = LambertAzimuthalEqualArea( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.false_easting, - self.false_northing, - ellipsoid=self.ellipsoid, - ) - - def test_projection_creation(self): - res = self.laea_cs.as_cartopy_projection() - globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - expected = ccrs.LambertAzimuthalEqualArea( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - 
self.false_easting, - self.false_northing, - globe=globe, - ) - self.assertEqual(res, expected) - - -class Test_init_defaults(tests.IrisTest): - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. - crs = LambertAzimuthalEqualArea( - longitude_of_projection_origin=123, - latitude_of_projection_origin=-37, - false_easting=100, - false_northing=-200, - ) - self.assertEqualAndKind(crs.longitude_of_projection_origin, 123.0) - self.assertEqualAndKind(crs.latitude_of_projection_origin, -37.0) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -200.0) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. - self.assertEqualAndKind(crs.longitude_of_projection_origin, 0.0) - self.assertEqualAndKind(crs.latitude_of_projection_origin, 0.0) - self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = LambertAzimuthalEqualArea() - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = LambertAzimuthalEqualArea( - longitude_of_projection_origin=None, - latitude_of_projection_origin=None, - false_easting=None, - false_northing=None, - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_LambertConformal.py b/lib/iris/tests/unit/coord_systems/test_LambertConformal.py deleted file mode 100644 index 7ba89208b1..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_LambertConformal.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.LambertConformal` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.coord_systems import LambertConformal - - -class Test_init_defaults(tests.IrisTest): - # NOTE: most of the testing for LambertConformal is in the legacy test - # module 'iris.tests.test_coordsystem'. - # This class *only* tests the defaults for optional constructor args. - - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. - # (Except secant_latitudes, which are done separately). - crs = LambertConformal( - central_lat=25.3, - central_lon=-172, - false_easting=100, - false_northing=-200, - ) - self.assertEqualAndKind(crs.central_lat, 25.3) - self.assertEqualAndKind(crs.central_lon, -172.0) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -200.0) - - def test_set_one_parallel(self): - # Check that setting the optional (non-ellipse) args works. - # (Except secant_latitudes, which are done separately). - crs = LambertConformal(secant_latitudes=-44) - self.assertEqual(len(crs.secant_latitudes), 1) - self.assertEqualAndKind(crs.secant_latitudes[0], -44.0) - - def test_set_two_parallels(self): - # Check that setting the optional (non-ellipse) args works. - # (Except secant_latitudes, which are done separately). - crs = LambertConformal(secant_latitudes=[43, -7]) - self.assertEqual(len(crs.secant_latitudes), 2) - self.assertEqualAndKind(crs.secant_latitudes[0], 43.0) - self.assertEqualAndKind(crs.secant_latitudes[1], -7.0) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. 
- self.assertEqualAndKind(crs.central_lat, 39.0) - self.assertEqualAndKind(crs.central_lon, -96.0) - self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - self.assertEqual(len(crs.secant_latitudes), 2) - self.assertEqualAndKind(crs.secant_latitudes[0], 33.0) - self.assertEqualAndKind(crs.secant_latitudes[1], 45.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = LambertConformal() - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = LambertConformal( - central_lat=None, - central_lon=None, - false_easting=None, - false_northing=None, - secant_latitudes=None, - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_Mercator.py b/lib/iris/tests/unit/coord_systems/test_Mercator.py deleted file mode 100644 index 33efaef9da..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_Mercator.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.Mercator` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs - -from iris.coord_systems import GeogCS, Mercator - - -class Test_Mercator__basics(tests.IrisTest): - def setUp(self): - self.tm = Mercator( - longitude_of_projection_origin=90.0, - ellipsoid=GeogCS(6377563.396, 6356256.909), - ) - - def test_construction(self): - self.assertXMLElement(self.tm, ("coord_systems", "Mercator.xml")) - - def test_repr(self): - expected = ( - "Mercator(longitude_of_projection_origin=90.0, " - "ellipsoid=GeogCS(semi_major_axis=6377563.396, " - "semi_minor_axis=6356256.909), " - "standard_parallel=0.0)" - ) - self.assertEqual(expected, repr(self.tm)) - - -class Test_init_defaults(tests.IrisTest): - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. - crs = Mercator( - longitude_of_projection_origin=27, standard_parallel=157.4 - ) - self.assertEqualAndKind(crs.longitude_of_projection_origin, 27.0) - self.assertEqualAndKind(crs.standard_parallel, 157.4) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. - self.assertEqualAndKind(crs.longitude_of_projection_origin, 0.0) - self.assertEqualAndKind(crs.standard_parallel, 0.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = Mercator() - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = Mercator( - longitude_of_projection_origin=None, standard_parallel=None - ) - self._check_crs_defaults(crs) - - -class Test_Mercator__as_cartopy_crs(tests.IrisTest): - def test_simple(self): - # Check that a projection set up with all the defaults is correctly - # converted to a cartopy CRS. 
- merc_cs = Mercator() - res = merc_cs.as_cartopy_crs() - # expected = ccrs.Mercator(globe=ccrs.Globe()) - expected = ccrs.Mercator(globe=ccrs.Globe(), latitude_true_scale=0.0) - self.assertEqual(res, expected) - - def test_extra_kwargs(self): - # Check that a projection with non-default values is correctly - # converted to a cartopy CRS. - longitude_of_projection_origin = 90.0 - true_scale_lat = 14.0 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) - - merc_cs = Mercator( - longitude_of_projection_origin, - ellipsoid=ellipsoid, - standard_parallel=true_scale_lat, - ) - - expected = ccrs.Mercator( - central_longitude=longitude_of_projection_origin, - globe=ccrs.Globe( - semimajor_axis=6377563.396, - semiminor_axis=6356256.909, - ellipse=None, - ), - latitude_true_scale=true_scale_lat, - ) - - res = merc_cs.as_cartopy_crs() - self.assertEqual(res, expected) - - -class Test_as_cartopy_projection(tests.IrisTest): - def test_simple(self): - # Check that a projection set up with all the defaults is correctly - # converted to a cartopy projection. 
- merc_cs = Mercator() - res = merc_cs.as_cartopy_projection() - expected = ccrs.Mercator(globe=ccrs.Globe(), latitude_true_scale=0.0) - self.assertEqual(res, expected) - - def test_extra_kwargs(self): - longitude_of_projection_origin = 90.0 - true_scale_lat = 14.0 - ellipsoid = GeogCS( - semi_major_axis=6377563.396, semi_minor_axis=6356256.909 - ) - - merc_cs = Mercator( - longitude_of_projection_origin, - ellipsoid=ellipsoid, - standard_parallel=true_scale_lat, - ) - - expected = ccrs.Mercator( - central_longitude=longitude_of_projection_origin, - globe=ccrs.Globe( - semimajor_axis=6377563.396, - semiminor_axis=6356256.909, - ellipse=None, - ), - latitude_true_scale=true_scale_lat, - ) - - res = merc_cs.as_cartopy_projection() - self.assertEqual(res, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_Orthographic.py b/lib/iris/tests/unit/coord_systems/test_Orthographic.py deleted file mode 100644 index ffcbecf55c..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_Orthographic.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.Orthographic` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs - -from iris.coord_systems import GeogCS, Orthographic - - -class Test_as_cartopy_crs(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 0.0 - self.longitude_of_projection_origin = 0.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.ortho_cs = Orthographic( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - ellipsoid=self.ellipsoid, - ) - - def test_crs_creation(self): - res = self.ortho_cs.as_cartopy_crs() - globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - expected = ccrs.Orthographic( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - globe=globe, - ) - self.assertEqual(res, expected) - - -class Test_as_cartopy_projection(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 0.0 - self.longitude_of_projection_origin = 0.0 - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.ortho_cs = Orthographic( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - ellipsoid=self.ellipsoid, - ) - - def test_projection_creation(self): - res = self.ortho_cs.as_cartopy_projection() - globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - expected = ccrs.Orthographic( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - globe=globe, - ) - self.assertEqual(res, expected) - - -class Test_init_defaults(tests.IrisTest): - # NOTE: most of the testing for Orthographic.__init__ is elsewhere. - # This class *only* tests the defaults for optional constructor args. 
- - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. - crs = Orthographic(0, 0, false_easting=100, false_northing=-203.7) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -203.7) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. - self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = Orthographic(0, 0) - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = Orthographic(0, 0, false_easting=None, false_northing=None) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_RotatedPole.py b/lib/iris/tests/unit/coord_systems/test_RotatedPole.py deleted file mode 100644 index dbb7a05bca..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_RotatedPole.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.RotatedPole` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import cartopy -import cartopy.crs as ccrs - -from iris.coord_systems import RotatedGeogCS - - -class Test_init(tests.IrisTest): - def setUp(self): - self.pole_lon = 171.77 - self.pole_lat = 49.55 - self.rotation_about_new_pole = 180.0 - self.rp_crs = RotatedGeogCS( - self.pole_lat, self.pole_lon, self.rotation_about_new_pole - ) - - def test_crs_creation(self): - self.assertEqual(self.pole_lon, self.rp_crs.grid_north_pole_longitude) - self.assertEqual(self.pole_lat, self.rp_crs.grid_north_pole_latitude) - self.assertEqual( - self.rotation_about_new_pole, self.rp_crs.north_pole_grid_longitude - ) - - def test_as_cartopy_crs(self): - if cartopy.__version__ < "0.12": - with mock.patch("warnings.warn") as warn: - accrs = self.rp_crs.as_cartopy_crs() - self.assertEqual(warn.call_count, 1) - else: - accrs = self.rp_crs.as_cartopy_crs() - expected = ccrs.RotatedGeodetic( - self.pole_lon, self.pole_lat, self.rotation_about_new_pole - ) - self.assertEqual( - sorted(accrs.proj4_init.split(" +")), - sorted(expected.proj4_init.split(" +")), - ) - - def test_as_cartopy_projection(self): - if cartopy.__version__ < "0.12": - with mock.patch("warnings.warn") as warn: - _ = self.rp_crs.as_cartopy_projection() - self.assertEqual(warn.call_count, 1) - else: - accrsp = self.rp_crs.as_cartopy_projection() - expected = ccrs.RotatedPole( - self.pole_lon, self.pole_lat, self.rotation_about_new_pole - ) - self.assertEqual( - sorted(accrsp.proj4_init.split(" +")), - sorted(expected.proj4_init.split(" +")), - ) - - def _check_crs_default(self, crs): - # Check for property defaults when no kwargs options are set. - # NOTE: except ellipsoid, which is done elsewhere. - self.assertEqualAndKind(crs.north_pole_grid_longitude, 0.0) - - def test_optional_args_missing(self): - # Check that unused 'north_pole_grid_longitude' defaults to 0.0. 
- crs = RotatedGeogCS(self.pole_lon, self.pole_lat) - self._check_crs_default(crs) - - def test_optional_args_None(self): - # Check that 'north_pole_grid_longitude=None' defaults to 0.0. - crs = RotatedGeogCS( - self.pole_lon, self.pole_lat, north_pole_grid_longitude=None - ) - self._check_crs_default(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_Stereographic.py b/lib/iris/tests/unit/coord_systems/test_Stereographic.py deleted file mode 100644 index fac411f9d5..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_Stereographic.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.Stereographic` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.coord_systems import Stereographic - - -class Test_init_defaults(tests.IrisTest): - # NOTE: most of the testing for Stereographic is in the legacy test module - # 'iris.tests.test_coordsystem'. - # This class *only* tests the defaults for optional constructor args. - - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. - crs = Stereographic( - 0, 0, false_easting=100, false_northing=-203.7, true_scale_lat=77 - ) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -203.7) - self.assertEqualAndKind(crs.true_scale_lat, 77.0) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. 
- self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - self.assertIsNone(crs.true_scale_lat) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = Stereographic(0, 0) - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = Stereographic( - 0, 0, false_easting=None, false_northing=None, true_scale_lat=None - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py b/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py deleted file mode 100644 index 95b80333c2..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_TransverseMercator.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.TransverseMercator` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.coord_systems import TransverseMercator - - -class Test_init_defaults(tests.IrisTest): - # NOTE: most of the testing for TransverseMercator is in the legacy test - # module 'iris.tests.test_coordsystem'. - # This class *only* tests the defaults for optional constructor args. - - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. 
- crs = TransverseMercator( - 0, - 50, - false_easting=100, - false_northing=-203.7, - scale_factor_at_central_meridian=1.057, - ) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -203.7) - self.assertEqualAndKind(crs.scale_factor_at_central_meridian, 1.057) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. - self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - self.assertEqualAndKind(crs.scale_factor_at_central_meridian, 1.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = TransverseMercator(0, 50) - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = TransverseMercator( - 0, - 50, - false_easting=None, - false_northing=None, - scale_factor_at_central_meridian=None, - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py b/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py deleted file mode 100644 index 56498e40fa..0000000000 --- a/lib/iris/tests/unit/coord_systems/test_VerticalPerspective.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coord_systems.VerticalPerspective` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import cartopy.crs as ccrs - -from iris.coord_systems import GeogCS, VerticalPerspective - - -class Test(tests.IrisTest): - def setUp(self): - self.latitude_of_projection_origin = 0.0 - self.longitude_of_projection_origin = 0.0 - self.perspective_point_height = 38204820000.0 - self.false_easting = 0.0 - self.false_northing = 0.0 - - self.semi_major_axis = 6377563.396 - self.semi_minor_axis = 6356256.909 - self.ellipsoid = GeogCS(self.semi_major_axis, self.semi_minor_axis) - self.globe = ccrs.Globe( - semimajor_axis=self.semi_major_axis, - semiminor_axis=self.semi_minor_axis, - ellipse=None, - ) - - # Actual and expected coord system can be re-used for - # VerticalPerspective.test_crs_creation and test_projection_creation. - self.expected = ccrs.NearsidePerspective( - central_longitude=self.longitude_of_projection_origin, - central_latitude=self.latitude_of_projection_origin, - satellite_height=self.perspective_point_height, - false_easting=self.false_easting, - false_northing=self.false_northing, - globe=self.globe, - ) - self.vp_cs = VerticalPerspective( - self.latitude_of_projection_origin, - self.longitude_of_projection_origin, - self.perspective_point_height, - self.false_easting, - self.false_northing, - self.ellipsoid, - ) - - def test_crs_creation(self): - res = self.vp_cs.as_cartopy_crs() - self.assertEqual(res, self.expected) - - def test_projection_creation(self): - res = self.vp_cs.as_cartopy_projection() - self.assertEqual(res, self.expected) - - def test_set_optional_args(self): - # Check that setting the optional (non-ellipse) args works. - crs = VerticalPerspective( - 0, 0, 1000, false_easting=100, false_northing=-203.7 - ) - self.assertEqualAndKind(crs.false_easting, 100.0) - self.assertEqualAndKind(crs.false_northing, -203.7) - - def _check_crs_defaults(self, crs): - # Check for property defaults when no kwargs options were set. - # NOTE: except ellipsoid, which is done elsewhere. 
- self.assertEqualAndKind(crs.false_easting, 0.0) - self.assertEqualAndKind(crs.false_northing, 0.0) - - def test_no_optional_args(self): - # Check expected defaults with no optional args. - crs = VerticalPerspective(0, 0, 1000) - self._check_crs_defaults(crs) - - def test_optional_args_None(self): - # Check expected defaults with optional args=None. - crs = VerticalPerspective( - 0, 0, 1000, false_easting=None, false_northing=None - ) - self._check_crs_defaults(crs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/__init__.py b/lib/iris/tests/unit/coords/__init__.py deleted file mode 100644 index 10cee9db8b..0000000000 --- a/lib/iris/tests/unit/coords/__init__.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :mod:`iris.coords` module. - -Provides test methods and classes common to -:class:`~iris.tests.unit.coords.test_AuxCoord` and -:class:`~iris.tests.unit.coords.test_DimCoord`. - -""" - -import dask.array as da -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import is_lazy_data - - -def setup_test_arrays(self, shape, masked=False): - # Create concrete and lazy coordinate points and bounds test arrays, - # given a desired coord shape. - # If masked=True, also add masked arrays with some or no masked data, - # for both points and bounds, lazy and real. - n_pts = np.prod(shape) - # Note: the values must be integral for testing integer dtypes. 
- points = 10.0 * np.arange(n_pts, dtype=float).reshape(shape) - lower = points - 2.0 - upper = points + 2.0 - bounds = np.stack((lower, upper), axis=-1) - self.pts_real = points - self.pts_lazy = da.from_array(points, points.shape) - self.bds_real = bounds - self.bds_lazy = da.from_array(bounds, bounds.shape) - if masked: - mpoints = ma.array(points) - self.no_masked_pts_real = mpoints - self.no_masked_pts_lazy = da.from_array( - mpoints, mpoints.shape, asarray=False - ) - mpoints = ma.array(mpoints, copy=True) - mpoints[0] = ma.masked - self.masked_pts_real = mpoints - self.masked_pts_lazy = da.from_array( - mpoints, mpoints.shape, asarray=False - ) - mbounds = ma.array(bounds) - self.no_masked_bds_real = mbounds - self.no_masked_bds_lazy = da.from_array( - mbounds, mbounds.shape, asarray=False - ) - mbounds = ma.array(mbounds, copy=True) - mbounds[0] = ma.masked - self.masked_bds_real = mbounds - self.masked_bds_lazy = da.from_array( - mbounds, mbounds.shape, asarray=False - ) - - -def is_real_data(array): - # A parallel to :func:`iris._lazy_data.is_lazy_data`. - # Not just "not lazy" : ensure it is a 'real' array (i.e. numpy). - return isinstance(array, np.ndarray) - - -def arrays_share_data(a1, a2): - # Check whether 2 real arrays with the same content view the same data. - # For an ndarray x, x.base will either be None (if x owns its data) or a - # reference to the array which owns its data (if x is a view). - return ( - a1 is a2 - or a1.base is a2 - or a2.base is a1 - or a1.base is a2.base - and a1.base is not None - ) - - -def lazyness_string(data): - # Represent the lazyness of an array as a string. - return "lazy" if is_lazy_data(data) else "real" - - -def coords_all_dtypes_and_lazynesses(self, coord_class): - # Generate coords with all possible types of points and bounds, and all - # of the given dtypes. 
- points_types = ["real", "lazy"] - bounds_types = ["no", "real", "lazy"] - # Test a few specific combinations of points+bounds dtypes, including - # cases where they are different. - dtype_pairs = [ - (np.float64, np.float64), - (np.int16, np.int16), - (np.int16, np.float32), - (np.float64, np.int32), - ] - for pts_dtype, bds_dtype in dtype_pairs: - for points_type_name in points_types: - for bounds_type_name in bounds_types: - pts = np.asarray(self.pts_real, dtype=pts_dtype) - bds = np.asarray(self.bds_real, dtype=bds_dtype) - if points_type_name == "lazy": - pts = da.from_array(pts, pts.shape) - if bounds_type_name == "lazy": - bds = da.from_array(bds, bds.shape) - elif bounds_type_name == "no": - bds = None - coord = coord_class(pts, bounds=bds) - result = (coord, points_type_name, bounds_type_name) - yield result - - -class CoordTestMixin: - def setupTestArrays(self, shape=(3,), masked=False): - setup_test_arrays(self, shape=shape, masked=masked) - - def assertArraysShareData(self, a1, a2, *args, **kwargs): - # Check that two arrays are both real, same dtype, and based on the - # same underlying data (so changing one will change the other). - self.assertIsRealArray(a1) - self.assertIsRealArray(a2) - self.assertEqual(a1.dtype, a2.dtype) - self.assertTrue(arrays_share_data(a1, a2), *args, **kwargs) - - def assertArraysDoNotShareData(self, a1, a2, *args, **kwargs): - self.assertFalse(arrays_share_data(a1, a2), *args, **kwargs) - - def assertIsRealArray(self, array, *args, **kwargs): - # Check that the arg is a real array. - self.assertTrue(is_real_data(array), *args, **kwargs) - - def assertIsLazyArray(self, array, *args, **kwargs): - # Check that the arg is a lazy array. - self.assertTrue(is_lazy_data(array), *args, **kwargs) - - def assertEqualRealArraysAndDtypes(self, a1, a2, *args, **kwargs): - # Check that two arrays are real, equal, and have same dtype. 
- self.assertIsRealArray(a1) - self.assertIsRealArray(a2) - self.assertEqual(a1.dtype, a2.dtype) - self.assertArrayEqual(a1, a2) - - def assertEqualLazyArraysAndDtypes(self, a1, a2, *args, **kwargs): - # Check that two arrays are lazy, equal, and have same dtype. - self.assertIsLazyArray(a1) - self.assertIsLazyArray(a2) - self.assertEqual(a1.dtype, a2.dtype) - self.assertArrayEqual(a1.compute(), a2.compute()) diff --git a/lib/iris/tests/unit/coords/test_AncillaryVariable.py b/lib/iris/tests/unit/coords/test_AncillaryVariable.py deleted file mode 100644 index 4d520ac414..0000000000 --- a/lib/iris/tests/unit/coords/test_AncillaryVariable.py +++ /dev/null @@ -1,705 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coords.AncillaryVariable` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import Unit -import dask.array as da -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data -from iris.coords import AncillaryVariable -from iris.cube import Cube -from iris.tests.unit.coords import CoordTestMixin, lazyness_string - - -def data_all_dtypes_and_lazynesses(self): - # Generate ancillary variables with real and lazy data, and a few different - # dtypes. - data_types = ["real", "lazy"] - dtypes = [np.int16, np.int32, np.float32, np.float64] - for dtype in dtypes: - for data_type_name in data_types: - data = np.asarray(self.data_real, dtype=dtype) - if data_type_name == "lazy": - data = da.from_array(data, data.shape) - ancill_var = AncillaryVariable(data) - result = (ancill_var, data_type_name) - yield result - - -class AncillaryVariableTestMixin(CoordTestMixin): - # Define a 2-D default array shape. 
- def setupTestArrays(self, shape=(2, 3), masked=False): - # Create concrete and lazy data test arrays, given a desired shape. - # If masked=True, also add masked arrays with some or no masked data. - n_vals = np.prod(shape) - # Note: the values must be integral for testing integer dtypes. - values = 100.0 + 10.0 * np.arange(n_vals, dtype=float).reshape(shape) - self.data_real = values - self.data_lazy = da.from_array(values, values.shape) - - if masked: - mvalues = ma.array(values) - self.no_masked_data_real = mvalues - self.no_masked_data_lazy = da.from_array( - mvalues, mvalues.shape, asarray=False - ) - mvalues = ma.array(mvalues, copy=True) - mvalues[0] = ma.masked - self.masked_data_real = mvalues - self.masked_data_lazy = da.from_array( - mvalues, mvalues.shape, asarray=False - ) - - -class Test__init__(tests.IrisTest, AncillaryVariableTestMixin): - # Test for AncillaryVariable creation, with real / lazy data - def setUp(self): - self.setupTestArrays(masked=True) - - def test_lazyness_and_dtype_combinations(self): - for (ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses( - self, - ): - data = ancill_var.core_data() - # Check properties of data. - if data_lazyness == "real": - # Real data. - if ancill_var.dtype == self.data_real.dtype: - self.assertArraysShareData( - data, - self.data_real, - "Data values are not the same " - "data as the provided array.", - ) - self.assertIsNot( - data, - self.data_real, - "Data array is the same instance as the provided " - "array.", - ) - else: - # the original data values were cast to a test dtype. - check_data = self.data_real.astype(ancill_var.dtype) - self.assertEqualRealArraysAndDtypes(data, check_data) - else: - # Lazy data : the core data may be promoted to float. - check_data = self.data_lazy.astype(data.dtype) - self.assertEqualLazyArraysAndDtypes(data, check_data) - # The realisation type should be correct, though. 
- target_dtype = ancill_var.dtype - self.assertEqual(ancill_var.data.dtype, target_dtype) - - def test_no_masked_data_real(self): - data = self.no_masked_data_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertEqual(ma.count_masked(data), 0) - ancill_var = AncillaryVariable(data) - self.assertFalse(ancill_var.has_lazy_data()) - self.assertTrue(ma.isMaskedArray(ancill_var.data)) - self.assertEqual(ma.count_masked(ancill_var.data), 0) - - def test_no_masked_data_lazy(self): - data = self.no_masked_data_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertEqual(ma.count_masked(computed), 0) - ancill_var = AncillaryVariable(data) - self.assertTrue(ancill_var.has_lazy_data()) - self.assertTrue(ma.isMaskedArray(ancill_var.data)) - self.assertEqual(ma.count_masked(ancill_var.data), 0) - - def test_masked_data_real(self): - data = self.masked_data_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertTrue(ma.count_masked(data)) - ancill_var = AncillaryVariable(data) - self.assertFalse(ancill_var.has_lazy_data()) - self.assertTrue(ma.isMaskedArray(ancill_var.data)) - self.assertTrue(ma.count_masked(ancill_var.data)) - - def test_masked_data_lazy(self): - data = self.masked_data_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertTrue(ma.count_masked(computed)) - ancill_var = AncillaryVariable(data) - self.assertTrue(ancill_var.has_lazy_data()) - self.assertTrue(ma.isMaskedArray(ancill_var.data)) - self.assertTrue(ma.count_masked(ancill_var.data)) - - -class Test_core_data(tests.IrisTest, AncillaryVariableTestMixin): - # Test for AncillaryVariable.core_data() with various lazy/real data. 
- def setUp(self): - self.setupTestArrays() - - def test_real_data(self): - ancill_var = AncillaryVariable(self.data_real) - result = ancill_var.core_data() - self.assertArraysShareData( - result, - self.data_real, - "core_data() do not share data with the internal array.", - ) - - def test_lazy_data(self): - ancill_var = AncillaryVariable(self.data_lazy) - result = ancill_var.core_data() - self.assertEqualLazyArraysAndDtypes(result, self.data_lazy) - - def test_lazy_points_realise(self): - ancill_var = AncillaryVariable(self.data_lazy) - real_data = ancill_var.data - result = ancill_var.core_data() - self.assertEqualRealArraysAndDtypes(result, real_data) - - -class Test_lazy_data(tests.IrisTest, AncillaryVariableTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - ancill_var = AncillaryVariable(self.data_real) - result = ancill_var.lazy_data() - self.assertEqualLazyArraysAndDtypes(result, self.data_lazy) - - def test_lazy_core(self): - ancill_var = AncillaryVariable(self.data_lazy) - result = ancill_var.lazy_data() - self.assertIs(result, self.data_lazy) - - -class Test_has_lazy_data(tests.IrisTest, AncillaryVariableTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - ancill_var = AncillaryVariable(self.data_real) - result = ancill_var.has_lazy_data() - self.assertFalse(result) - - def test_lazy_core(self): - ancill_var = AncillaryVariable(self.data_lazy) - result = ancill_var.has_lazy_data() - self.assertTrue(result) - - def test_lazy_core_realise(self): - ancill_var = AncillaryVariable(self.data_lazy) - ancill_var.data - result = ancill_var.has_lazy_data() - self.assertFalse(result) - - -class Test__getitem__(tests.IrisTest, AncillaryVariableTestMixin): - # Test for AncillaryVariable indexing with various types of data. 
- def setUp(self): - self.setupTestArrays() - - def test_partial_slice_data_copy(self): - parent_ancill_var = AncillaryVariable([1.0, 2.0, 3.0]) - sub_ancill_var = parent_ancill_var[:1] - values_before_change = sub_ancill_var.data.copy() - parent_ancill_var.data[:] = -999.9 - self.assertArrayEqual(sub_ancill_var.data, values_before_change) - - def test_full_slice_data_copy(self): - parent_ancill_var = AncillaryVariable([1.0, 2.0, 3.0]) - sub_ancill_var = parent_ancill_var[:] - values_before_change = sub_ancill_var.data.copy() - parent_ancill_var.data[:] = -999.9 - self.assertArrayEqual(sub_ancill_var.data, values_before_change) - - def test_dtypes(self): - # Index ancillary variables with real+lazy data, and either an int or - # floating dtype. - # Check that dtypes remain the same in all cases, taking the dtypes - # directly from the core data as we have no masking). - for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses( - self - ): - - sub_ancill_var = main_ancill_var[:2, 1] - - ancill_var_dtype = main_ancill_var.dtype - msg = ( - "Indexing main_ancill_var of dtype {} with {} data changed" - "dtype of {} to {}." - ) - - sub_data = sub_ancill_var.core_data() - self.assertEqual( - sub_data.dtype, - ancill_var_dtype, - msg.format( - ancill_var_dtype, data_lazyness, "data", sub_data.dtype - ), - ) - - def test_lazyness(self): - # Index ancillary variables with real+lazy data, and either an int or - # floating dtype. - # Check that lazy data stays lazy and real stays real, in all cases. - for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses( - self - ): - - sub_ancill_var = main_ancill_var[:2, 1] - - msg = ( - "Indexing main_ancill_var of dtype {} with {} data " - "changed laziness of {} from {!r} to {!r}." 
- ) - ancill_var_dtype = main_ancill_var.dtype - sub_data_lazyness = lazyness_string(sub_ancill_var.core_data()) - self.assertEqual( - sub_data_lazyness, - data_lazyness, - msg.format( - ancill_var_dtype, - data_lazyness, - "data", - data_lazyness, - sub_data_lazyness, - ), - ) - - def test_real_data_copies(self): - # Index ancillary variables with real+lazy data. - # In all cases, check that any real arrays are copied by the indexing. - for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses( - self - ): - - sub_ancill_var = main_ancill_var[:2, 1] - - msg = ( - "Indexed ancillary variable with {} data " - "does not have its own separate {} array." - ) - if data_lazyness == "real": - main_data = main_ancill_var.core_data() - sub_data = sub_ancill_var.core_data() - sub_main_data = main_data[:2, 1] - self.assertEqualRealArraysAndDtypes(sub_data, sub_main_data) - self.assertArraysDoNotShareData( - sub_data, - sub_main_data, - msg.format(data_lazyness, "points"), - ) - - -class Test_copy(tests.IrisTest, AncillaryVariableTestMixin): - # Test for AncillaryVariable.copy() with various types of data. - def setUp(self): - self.setupTestArrays() - - def test_lazyness(self): - # Copy ancillary variables with real+lazy data, and either an int or - # floating dtype. - # Check that lazy data stays lazy and real stays real, in all cases. - for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses( - self - ): - - ancill_var_dtype = main_ancill_var.dtype - copied_ancill_var = main_ancill_var.copy() - - msg = ( - "Copying main_ancill_var of dtype {} with {} data " - "changed lazyness of {} from {!r} to {!r}." - ) - - copied_data_lazyness = lazyness_string( - copied_ancill_var.core_data() - ) - self.assertEqual( - copied_data_lazyness, - data_lazyness, - msg.format( - ancill_var_dtype, - data_lazyness, - "points", - data_lazyness, - copied_data_lazyness, - ), - ) - - def test_realdata_copies(self): - # Copy ancillary variables with real+lazy data. 
- # In all cases, check that any real arrays are copies, not views. - for (main_ancill_var, data_lazyness) in data_all_dtypes_and_lazynesses( - self - ): - - copied_ancill_var = main_ancill_var.copy() - - msg = ( - "Copied ancillary variable with {} data " - "does not have its own separate {} array." - ) - - if data_lazyness == "real": - main_data = main_ancill_var.core_data() - copied_data = copied_ancill_var.core_data() - self.assertEqualRealArraysAndDtypes(main_data, copied_data) - self.assertArraysDoNotShareData( - main_data, copied_data, msg.format(data_lazyness, "points") - ) - - -class Test_data__getter(tests.IrisTest, AncillaryVariableTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_mutable_real_data(self): - # Check that ancill_var.data returns a modifiable array, and changes - # to it are reflected to the ancillary_var. - data = np.array([1.0, 2.0, 3.0, 4.0]) - ancill_var = AncillaryVariable(data) - initial_values = data.copy() - ancill_var.data[1:2] += 33.1 - result = ancill_var.data - self.assertFalse(np.all(result == initial_values)) - - def test_real_data(self): - # Getting real data does not change or copy them. - ancill_var = AncillaryVariable(self.data_real) - result = ancill_var.data - self.assertArraysShareData( - result, - self.data_real, - "Data values do not share data with the provided array.", - ) - - def test_lazy_data(self): - # Getting lazy data realises them. - ancill_var = AncillaryVariable(self.data_lazy) - self.assertTrue(ancill_var.has_lazy_data()) - result = ancill_var.data - self.assertFalse(ancill_var.has_lazy_data()) - self.assertEqualRealArraysAndDtypes(result, self.data_real) - - -class Test_data__setter(tests.IrisTest, AncillaryVariableTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_set_real(self): - # Setting new real data does not make a copy. 
- ancill_var = AncillaryVariable(self.data_real) - new_data = self.data_real + 102.3 - ancill_var.data = new_data - result = ancill_var.core_data() - self.assertArraysShareData( - result, - new_data, - "Data values do not share data with the assigned array.", - ) - - def test_fail_bad_shape(self): - # Setting real data requires matching shape. - ancill_var = AncillaryVariable([1.0, 2.0]) - msg = r"Require data with shape \(2,\), got \(3,\)" - with self.assertRaisesRegex(ValueError, msg): - ancill_var.data = np.array([1.0, 2.0, 3.0]) - - def test_real_set_lazy(self): - # Setting new lazy data does not make a copy. - ancill_var = AncillaryVariable(self.data_real) - new_data = self.data_lazy + 102.3 - ancill_var.data = new_data - result = ancill_var.core_data() - self.assertEqualLazyArraysAndDtypes(result, new_data) - - -class Test__str__(tests.IrisTest): - def test_non_time_values(self): - ancillary_var = AncillaryVariable( - np.array([2, 5, 9]), - standard_name="height", - long_name="height of detector", - var_name="height", - units="m", - attributes={"notes": "Measured from sea level"}, - ) - expected = "\n".join( - [ - "AncillaryVariable : height / (m)", - " data: [2, 5, 9]", - " shape: (3,)", - " dtype: int64", - " standard_name: 'height'", - " long_name: 'height of detector'", - " var_name: 'height'", - " attributes:", - " notes 'Measured from sea level'", - ] - ) - self.assertEqual(expected, ancillary_var.__str__()) - - def test_time_values(self): - ancillary_var = AncillaryVariable( - np.array([2, 5, 9]), - units="hours since 1970-01-01 01:00", - long_name="time of previous valid detection", - ) - expected = "\n".join( - [ - ( - "AncillaryVariable : time of previous valid detection / " - "(hours since 1970-01-01 01:00, gregorian calendar)" - ), - ( - " data: [1970-01-01 03:00:00, 1970-01-01 06:00:00, " - "1970-01-01 10:00:00]" - ), - " shape: (3,)", - " dtype: int64", - " long_name: 'time of previous valid detection'", - ] - ) - self.assertEqual(expected, 
ancillary_var.__str__()) - - -class Test__repr__(tests.IrisTest): - def test_non_time_values(self): - ancillary_var = AncillaryVariable( - np.array([2, 5, 9]), - standard_name="height", - long_name="height of detector", - var_name="height", - units="m", - attributes={"notes": "Measured from sea level"}, - ) - expected = "" - self.assertEqual(expected, ancillary_var.__repr__()) - - def test_time_values(self): - ancillary_var = AncillaryVariable( - np.array([2, 5, 9]), - units="hours since 1970-01-01 01:00", - long_name="time of previous valid detection", - ) - expected = ( - "" - ) - self.assertEqual(expected, ancillary_var.__repr__()) - - -class Test___binary_operator__(tests.IrisTest, AncillaryVariableTestMixin): - # Test maths operations on on real+lazy data. - def setUp(self): - self.setupTestArrays() - - self.real_ancill_var = AncillaryVariable(self.data_real) - self.lazy_ancill_var = AncillaryVariable(self.data_lazy) - - self.test_combinations = [ - (self.real_ancill_var, self.data_real, "real"), - (self.lazy_ancill_var, self.data_lazy, "lazy"), - ] - - def _check(self, result_ancill_var, expected_data, lazyness): - # Test each operation on - data = result_ancill_var.core_data() - if lazyness == "real": - self.assertEqualRealArraysAndDtypes(expected_data, data) - else: - self.assertEqualLazyArraysAndDtypes(expected_data, data) - - def test_add(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = ancill_var + 10 - expected_data = orig_data + 10 - self._check(result, expected_data, data_lazyness) - - def test_add_inplace(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - ancill_var += 10 - expected_data = orig_data + 10 - self._check(ancill_var, expected_data, data_lazyness) - - def test_right_add(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = 10 + ancill_var - expected_data = 10 + orig_data - self._check(result, expected_data, data_lazyness) - - def 
test_subtract(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = ancill_var - 10 - expected_data = orig_data - 10 - self._check(result, expected_data, data_lazyness) - - def test_subtract_inplace(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - ancill_var -= 10 - expected_data = orig_data - 10 - self._check(ancill_var, expected_data, data_lazyness) - - def test_right_subtract(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = 10 - ancill_var - expected_data = 10 - orig_data - self._check(result, expected_data, data_lazyness) - - def test_multiply(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = ancill_var * 10 - expected_data = orig_data * 10 - self._check(result, expected_data, data_lazyness) - - def test_multiply_inplace(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - ancill_var *= 10 - expected_data = orig_data * 10 - self._check(ancill_var, expected_data, data_lazyness) - - def test_right_multiply(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = 10 * ancill_var - expected_data = 10 * orig_data - self._check(result, expected_data, data_lazyness) - - def test_divide(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = ancill_var / 10 - expected_data = orig_data / 10 - self._check(result, expected_data, data_lazyness) - - def test_divide_inplace(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - ancill_var /= 10 - expected_data = orig_data / 10 - self._check(ancill_var, expected_data, data_lazyness) - - def test_right_divide(self): - for (ancill_var, orig_data, data_lazyness) in self.test_combinations: - result = 10 / ancill_var - expected_data = 10 / orig_data - self._check(result, expected_data, data_lazyness) - - def test_negative(self): - for (ancill_var, orig_data, 
data_lazyness) in self.test_combinations: - result = -ancill_var - expected_data = -orig_data - self._check(result, expected_data, data_lazyness) - - -class Test_has_bounds(tests.IrisTest): - def test(self): - ancillary_var = AncillaryVariable(np.array([2, 9, 5])) - self.assertFalse(ancillary_var.has_bounds()) - - -class Test_convert_units(tests.IrisTest): - def test_preserves_lazy(self): - test_data = np.array([[11.1, 12.2, 13.3], [21.4, 22.5, 23.6]]) - lazy_data = as_lazy_data(test_data) - ancill_var = AncillaryVariable(data=lazy_data, units="m") - ancill_var.convert_units("ft") - self.assertTrue(ancill_var.has_lazy_data()) - test_data_ft = Unit("m").convert(test_data, "ft") - self.assertArrayAllClose(ancill_var.data, test_data_ft) - - -class Test_is_compatible(tests.IrisTest): - def setUp(self): - self.ancill_var = AncillaryVariable( - [1.0, 8.0, 22.0], standard_name="number_of_observations", units="1" - ) - self.modified_ancill_var = self.ancill_var.copy() - - def test_not_compatible_diff_name(self): - # Different name() - not compatible - self.modified_ancill_var.rename("air_temperature") - self.assertFalse( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) - - def test_not_compatible_diff_units(self): - # Different units- not compatible - self.modified_ancill_var.units = "m" - self.assertFalse( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) - - def test_not_compatible_diff_common_attrs(self): - # Different common attributes - not compatible. - self.ancill_var.attributes["source"] = "A" - self.modified_ancill_var.attributes["source"] = "B" - self.assertFalse( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) - - def test_compatible_diff_data(self): - # Different data values - compatible. - self.modified_ancill_var.data = [10.0, 20.0, 100.0] - self.assertTrue( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) - - def test_compatible_diff_var_name(self): - # Different var_name (but same name()) - compatible. 
- self.modified_ancill_var.var_name = "obs_num" - self.assertTrue( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) - - def test_compatible_diff_non_common_attributes(self): - # Different non-common attributes - compatible. - self.ancill_var.attributes["source"] = "A" - self.modified_ancill_var.attributes["origin"] = "B" - self.assertTrue( - self.ancill_var.is_compatible(self.modified_ancill_var) - ) - - def test_compatible_ignore_common_attribute(self): - # ignore different common attributes - compatible. - self.ancill_var.attributes["source"] = "A" - self.modified_ancill_var.attributes["source"] = "B" - self.assertTrue( - self.ancill_var.is_compatible( - self.modified_ancill_var, ignore="source" - ) - ) - - -class TestEquality(tests.IrisTest): - def test_nanpoints_eq_self(self): - av1 = AncillaryVariable([1.0, np.nan, 2.0]) - self.assertEqual(av1, av1) - - def test_nanpoints_eq_copy(self): - av1 = AncillaryVariable([1.0, np.nan, 2.0]) - av2 = av1.copy() - self.assertEqual(av1, av2) - - -class Test_cube_dims(tests.IrisTest): - def test(self): - # Check that "coord.cube_dims(cube)" calls "cube.coord_dims(coord)". - mock_dims_result = mock.sentinel.AV_DIMS - mock_dims_call = mock.Mock(return_value=mock_dims_result) - mock_cube = mock.Mock(Cube, ancillary_variable_dims=mock_dims_call) - test_var = AncillaryVariable([1], long_name="test_name") - - result = test_var.cube_dims(mock_cube) - self.assertEqual(result, mock_dims_result) - self.assertEqual(mock_dims_call.call_args_list, [mock.call(test_var)]) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/test_AuxCoord.py b/lib/iris/tests/unit/coords/test_AuxCoord.py deleted file mode 100644 index e6cd8ac821..0000000000 --- a/lib/iris/tests/unit/coords/test_AuxCoord.py +++ /dev/null @@ -1,809 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.coords.AuxCoord` class. - -Note: a lot of these methods are actually defined by the :class:`Coord` class, -but can only be tested on concrete instances (DimCoord or AuxCoord). - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from cf_units import Unit -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data -from iris.coords import AuxCoord -from iris.tests.unit.coords import ( - CoordTestMixin, - coords_all_dtypes_and_lazynesses, - lazyness_string, -) - - -class AuxCoordTestMixin(CoordTestMixin): - # Define a 2-D default array shape. - def setupTestArrays(self, shape=(2, 3), masked=False): - super().setupTestArrays(shape, masked=masked) - - -class Test__init__(tests.IrisTest, AuxCoordTestMixin): - # Test for AuxCoord creation, with various combinations of points and - # bounds = real / lazy / None. - def setUp(self): - self.setupTestArrays(masked=True) - - def test_lazyness_and_dtype_combinations(self): - for ( - coord, - points_type_name, - bounds_type_name, - ) in coords_all_dtypes_and_lazynesses(self, AuxCoord): - pts = coord.core_points() - bds = coord.core_bounds() - # Check properties of points. - if points_type_name == "real": - # Real points. - if coord.dtype == self.pts_real.dtype: - self.assertArraysShareData( - pts, - self.pts_real, - "Points are not the same data as the provided array.", - ) - self.assertIsNot( - pts, - self.pts_real, - "Points array is the same instance as the provided " - "array.", - ) - else: - # the original points were cast to a test dtype. - check_pts = self.pts_real.astype(coord.dtype) - self.assertEqualRealArraysAndDtypes(pts, check_pts) - else: - # Lazy points : the core data may be promoted to float. 
- check_pts = self.pts_lazy.astype(pts.dtype) - self.assertEqualLazyArraysAndDtypes(pts, check_pts) - # The realisation type should be correct, though. - target_dtype = coord.dtype - self.assertEqual(coord.points.dtype, target_dtype) - - # Check properties of bounds. - if bounds_type_name == "real": - # Real bounds. - if coord.bounds_dtype == self.bds_real.dtype: - self.assertArraysShareData( - bds, - self.bds_real, - "Bounds are not the same data as the provided array.", - ) - self.assertIsNot( - pts, - self.pts_real, - "Bounds array is the same instance as the provided " - "array.", - ) - else: - # the original bounds were cast to a test dtype. - check_bds = self.bds_real.astype(coord.bounds_dtype) - self.assertEqualRealArraysAndDtypes(bds, check_bds) - elif bounds_type_name == "lazy": - # Lazy points : the core data may be promoted to float. - check_bds = self.bds_lazy.astype(bds.dtype) - self.assertEqualLazyArraysAndDtypes(bds, check_bds) - # The realisation type should be correct, though. 
- target_dtype = coord.bounds_dtype - self.assertEqual(coord.bounds.dtype, target_dtype) - - def test_fail_bounds_shape_mismatch(self): - bds_shape = list(self.bds_real.shape) - bds_shape[0] += 1 - bds_wrong = np.zeros(bds_shape) - msg = "Bounds shape must be compatible with points shape" - with self.assertRaisesRegex(ValueError, msg): - AuxCoord(self.pts_real, bounds=bds_wrong) - - def test_no_masked_pts_real(self): - data = self.no_masked_pts_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertEqual(ma.count_masked(data), 0) - coord = AuxCoord(data) - self.assertFalse(coord.has_lazy_points()) - self.assertTrue(ma.isMaskedArray(coord.points)) - self.assertEqual(ma.count_masked(coord.points), 0) - - def test_no_masked_pts_lazy(self): - data = self.no_masked_pts_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertEqual(ma.count_masked(computed), 0) - coord = AuxCoord(data) - self.assertTrue(coord.has_lazy_points()) - self.assertTrue(ma.isMaskedArray(coord.points)) - self.assertEqual(ma.count_masked(coord.points), 0) - - def test_masked_pts_real(self): - data = self.masked_pts_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertTrue(ma.count_masked(data)) - coord = AuxCoord(data) - self.assertFalse(coord.has_lazy_points()) - self.assertTrue(ma.isMaskedArray(coord.points)) - self.assertTrue(ma.count_masked(coord.points)) - - def test_masked_pts_lazy(self): - data = self.masked_pts_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertTrue(ma.count_masked(computed)) - coord = AuxCoord(data) - self.assertTrue(coord.has_lazy_points()) - self.assertTrue(ma.isMaskedArray(coord.points)) - self.assertTrue(ma.count_masked(coord.points)) - - def test_no_masked_bds_real(self): - data = self.no_masked_bds_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertEqual(ma.count_masked(data), 0) - coord = AuxCoord(self.pts_real, bounds=data) - self.assertFalse(coord.has_lazy_bounds()) - 
self.assertTrue(ma.isMaskedArray(coord.bounds)) - self.assertEqual(ma.count_masked(coord.bounds), 0) - - def test_no_masked_bds_lazy(self): - data = self.no_masked_bds_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertEqual(ma.count_masked(computed), 0) - coord = AuxCoord(self.pts_real, bounds=data) - self.assertTrue(coord.has_lazy_bounds()) - self.assertTrue(ma.isMaskedArray(coord.bounds)) - self.assertEqual(ma.count_masked(coord.bounds), 0) - - def test_masked_bds_real(self): - data = self.masked_bds_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertTrue(ma.count_masked(data)) - coord = AuxCoord(self.pts_real, bounds=data) - self.assertFalse(coord.has_lazy_bounds()) - self.assertTrue(ma.isMaskedArray(coord.bounds)) - self.assertTrue(ma.count_masked(coord.bounds)) - - def test_masked_bds_lazy(self): - data = self.masked_bds_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertTrue(ma.count_masked(computed)) - coord = AuxCoord(self.pts_real, bounds=data) - self.assertTrue(coord.has_lazy_bounds()) - self.assertTrue(ma.isMaskedArray(coord.bounds)) - self.assertTrue(ma.count_masked(coord.bounds)) - - -class Test_core_points(tests.IrisTest, AuxCoordTestMixin): - # Test for AuxCoord.core_points() with various types of points and bounds. 
- def setUp(self): - self.setupTestArrays() - - def test_real_points(self): - coord = AuxCoord(self.pts_real) - result = coord.core_points() - self.assertArraysShareData( - result, - self.pts_real, - "core_points() do not share data with the internal array.", - ) - - def test_lazy_points(self): - coord = AuxCoord(self.pts_lazy) - result = coord.core_points() - self.assertEqualLazyArraysAndDtypes(result, self.pts_lazy) - - def test_lazy_points_realise(self): - coord = AuxCoord(self.pts_lazy) - real_points = coord.points - result = coord.core_points() - self.assertEqualRealArraysAndDtypes(result, real_points) - - -class Test_core_bounds(tests.IrisTest, AuxCoordTestMixin): - # Test for AuxCoord.core_bounds() with various types of points and bounds. - def setUp(self): - self.setupTestArrays() - - def test_no_bounds(self): - coord = AuxCoord(self.pts_real) - result = coord.core_bounds() - self.assertIsNone(result) - - def test_real_bounds(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - result = coord.core_bounds() - self.assertArraysShareData( - result, - self.bds_real, - "core_bounds() do not share data with the internal array.", - ) - - def test_lazy_bounds(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - result = coord.core_bounds() - self.assertEqualLazyArraysAndDtypes(result, self.bds_lazy) - - def test_lazy_bounds_realise(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - real_bounds = coord.bounds - result = coord.core_bounds() - self.assertEqualRealArraysAndDtypes(result, real_bounds) - - -class Test_lazy_points(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - coord = AuxCoord(self.pts_real) - result = coord.lazy_points() - self.assertEqualLazyArraysAndDtypes(result, self.pts_lazy) - - def test_lazy_core(self): - coord = AuxCoord(self.pts_lazy) - result = coord.lazy_points() - self.assertIs(result, self.pts_lazy) - - -class 
Test_lazy_bounds(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_no_bounds(self): - coord = AuxCoord(self.pts_real) - result = coord.lazy_bounds() - self.assertIsNone(result) - - def test_real_core(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - result = coord.lazy_bounds() - self.assertEqualLazyArraysAndDtypes(result, self.bds_lazy) - - def test_lazy_core(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - result = coord.lazy_bounds() - self.assertIs(result, self.bds_lazy) - - -class Test_has_lazy_points(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - coord = AuxCoord(self.pts_real) - result = coord.has_lazy_points() - self.assertFalse(result) - - def test_lazy_core(self): - coord = AuxCoord(self.pts_lazy) - result = coord.has_lazy_points() - self.assertTrue(result) - - def test_lazy_core_realise(self): - coord = AuxCoord(self.pts_lazy) - coord.points - result = coord.has_lazy_points() - self.assertFalse(result) - - -class Test_has_lazy_bounds(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - result = coord.has_lazy_bounds() - self.assertFalse(result) - - def test_lazy_core(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - result = coord.has_lazy_bounds() - self.assertTrue(result) - - def test_lazy_core_realise(self): - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - coord.bounds - result = coord.has_lazy_bounds() - self.assertFalse(result) - - -class Test_bounds_dtype(tests.IrisTest, AuxCoordTestMixin): - def test_i16(self): - test_dtype = np.int16 - coord = AuxCoord([1], bounds=np.array([[0, 4]], dtype=test_dtype)) - result = coord.bounds_dtype - self.assertEqual(result, test_dtype) - - def test_u16(self): - test_dtype = np.uint16 - coord = AuxCoord([1], bounds=np.array([[0, 4]], 
dtype=test_dtype)) - result = coord.bounds_dtype - self.assertEqual(result, test_dtype) - - def test_f16(self): - test_dtype = np.float16 - coord = AuxCoord([1], bounds=np.array([[0, 4]], dtype=test_dtype)) - result = coord.bounds_dtype - self.assertEqual(result, test_dtype) - - -class Test__getitem__(tests.IrisTest, AuxCoordTestMixin): - # Test for AuxCoord indexing with various types of points and bounds. - def setUp(self): - self.setupTestArrays() - - def test_partial_slice_data_copy(self): - parent_coord = AuxCoord([1.0, 2.0, 3.0]) - sub_coord = parent_coord[:1] - values_before_change = sub_coord.points.copy() - parent_coord.points[:] = -999.9 - self.assertArrayEqual(sub_coord.points, values_before_change) - - def test_full_slice_data_copy(self): - parent_coord = AuxCoord([1.0, 2.0, 3.0]) - sub_coord = parent_coord[:] - values_before_change = sub_coord.points.copy() - parent_coord.points[:] = -999.9 - self.assertArrayEqual(sub_coord.points, values_before_change) - - def test_dtypes(self): - # Index coords with all combinations of real+lazy points+bounds, and - # either an int or floating dtype. - # Check that dtypes remain the same in all cases, taking the dtypes - # directly from the core points and bounds (as we have no masking). - for ( - main_coord, - points_type_name, - bounds_type_name, - ) in coords_all_dtypes_and_lazynesses(self, AuxCoord): - - sub_coord = main_coord[:2, 1] - - coord_dtype = main_coord.dtype - msg = ( - "Indexing main_coord of dtype {} " - "with {} points and {} bounds " - "changed dtype of {} to {}." 
- ) - - sub_points = sub_coord.core_points() - self.assertEqual( - sub_points.dtype, - coord_dtype, - msg.format( - coord_dtype, - points_type_name, - bounds_type_name, - "points", - sub_points.dtype, - ), - ) - - if bounds_type_name != "no": - sub_bounds = sub_coord.core_bounds() - main_bounds_dtype = main_coord.bounds_dtype - self.assertEqual( - sub_bounds.dtype, - main_bounds_dtype, - msg.format( - main_bounds_dtype, - points_type_name, - bounds_type_name, - "bounds", - sub_bounds.dtype, - ), - ) - - def test_lazyness(self): - # Index coords with all combinations of real+lazy points+bounds, and - # either an int or floating dtype. - # Check that lazy data stays lazy and real stays real, in all cases. - for ( - main_coord, - points_type_name, - bounds_type_name, - ) in coords_all_dtypes_and_lazynesses(self, AuxCoord): - - sub_coord = main_coord[:2, 1] - - msg = ( - "Indexing coord of dtype {} " - "with {} points and {} bounds " - "changed laziness of {} from {!r} to {!r}." - ) - coord_dtype = main_coord.dtype - sub_points_lazyness = lazyness_string(sub_coord.core_points()) - self.assertEqual( - sub_points_lazyness, - points_type_name, - msg.format( - coord_dtype, - points_type_name, - bounds_type_name, - "points", - points_type_name, - sub_points_lazyness, - ), - ) - - if bounds_type_name != "no": - sub_bounds_lazy = lazyness_string(sub_coord.core_bounds()) - self.assertEqual( - sub_bounds_lazy, - bounds_type_name, - msg.format( - coord_dtype, - points_type_name, - bounds_type_name, - "bounds", - bounds_type_name, - sub_bounds_lazy, - ), - ) - - def test_real_data_copies(self): - # Index coords with all combinations of real+lazy points+bounds. - # In all cases, check that any real arrays are copied by the indexing. 
- for ( - main_coord, - points_lazyness, - bounds_lazyness, - ) in coords_all_dtypes_and_lazynesses(self, AuxCoord): - - sub_coord = main_coord[:2, 1] - - msg = ( - "Indexed coord with {} points and {} bounds " - "does not have its own separate {} array." - ) - if points_lazyness == "real": - main_points = main_coord.core_points() - sub_points = sub_coord.core_points() - sub_main_points = main_points[:2, 1] - self.assertEqualRealArraysAndDtypes( - sub_points, sub_main_points - ) - self.assertArraysDoNotShareData( - sub_points, - sub_main_points, - msg.format(points_lazyness, bounds_lazyness, "points"), - ) - - if bounds_lazyness == "real": - main_bounds = main_coord.core_bounds() - sub_bounds = sub_coord.core_bounds() - sub_main_bounds = main_bounds[:2, 1] - self.assertEqualRealArraysAndDtypes( - sub_bounds, sub_main_bounds - ) - self.assertArraysDoNotShareData( - sub_bounds, - sub_main_bounds, - msg.format(points_lazyness, bounds_lazyness, "bounds"), - ) - - -class Test_copy(tests.IrisTest, AuxCoordTestMixin): - # Test for AuxCoord.copy() with various types of points and bounds. - def setUp(self): - self.setupTestArrays() - - def test_lazyness(self): - # Copy coords with all combinations of real+lazy points+bounds, and - # either an int or floating dtype. - # Check that lazy data stays lazy and real stays real, in all cases. - for ( - main_coord, - points_lazyness, - bounds_lazyness, - ) in coords_all_dtypes_and_lazynesses(self, AuxCoord): - - coord_dtype = main_coord.dtype - copied_coord = main_coord.copy() - - msg = ( - "Copying main_coord of dtype {} " - "with {} points and {} bounds " - "changed lazyness of {} from {!r} to {!r}." 
- ) - - copied_pts_lazyness = lazyness_string(copied_coord.core_points()) - self.assertEqual( - copied_pts_lazyness, - points_lazyness, - msg.format( - coord_dtype, - points_lazyness, - bounds_lazyness, - "points", - points_lazyness, - copied_pts_lazyness, - ), - ) - - if bounds_lazyness != "no": - copied_bds_lazy = lazyness_string(copied_coord.core_bounds()) - self.assertEqual( - copied_bds_lazy, - bounds_lazyness, - msg.format( - coord_dtype, - points_lazyness, - bounds_lazyness, - "bounds", - bounds_lazyness, - copied_bds_lazy, - ), - ) - - def test_realdata_copies(self): - # Copy coords with all combinations of real+lazy points+bounds. - # In all cases, check that any real arrays are copies, not views. - for ( - main_coord, - points_lazyness, - bounds_lazyness, - ) in coords_all_dtypes_and_lazynesses(self, AuxCoord): - - copied_coord = main_coord.copy() - - msg = ( - "Copied coord with {} points and {} bounds " - "does not have its own separate {} array." - ) - - if points_lazyness == "real": - main_points = main_coord.core_points() - copied_points = copied_coord.core_points() - self.assertEqualRealArraysAndDtypes(main_points, copied_points) - self.assertArraysDoNotShareData( - main_points, - copied_points, - msg.format(points_lazyness, bounds_lazyness, "points"), - ) - - if bounds_lazyness == "real": - main_bounds = main_coord.core_bounds() - copied_bounds = copied_coord.core_bounds() - self.assertEqualRealArraysAndDtypes(main_bounds, copied_bounds) - self.assertArraysDoNotShareData( - main_bounds, - copied_bounds, - msg.format(points_lazyness, bounds_lazyness, "bounds"), - ) - - -class Test_points__getter(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_mutable_real_points(self): - # Check that coord.points returns a modifiable array, and changes to it - # are reflected to the coord. 
- data = np.array([1.0, 2.0, 3.0, 4.0]) - coord = AuxCoord(data) - initial_values = data.copy() - coord.points[1:2] += 33.1 - result = coord.points - self.assertFalse(np.all(result == initial_values)) - - def test_real_points(self): - # Getting real points does not change or copy them. - coord = AuxCoord(self.pts_real) - result = coord.points - self.assertArraysShareData( - result, - self.pts_real, - "Points do not share data with the provided array.", - ) - - def test_lazy_points(self): - # Getting lazy points realises them. - coord = AuxCoord(self.pts_lazy) - self.assertTrue(coord.has_lazy_points()) - result = coord.points - self.assertFalse(coord.has_lazy_points()) - self.assertEqualRealArraysAndDtypes(result, self.pts_real) - - def test_real_points_with_real_bounds(self): - # Getting real points does not change real bounds. - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - coord.points - result = coord.core_bounds() - self.assertArraysShareData( - result, - self.bds_real, - "Bounds do not share data with the provided array.", - ) - - def test_real_points_with_lazy_bounds(self): - # Getting real points does not touch lazy bounds. - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - coord.points - self.assertTrue(coord.has_lazy_bounds()) - - def test_lazy_points_with_real_bounds(self): - # Getting lazy points does not affect real bounds. - coord = AuxCoord(self.pts_lazy, bounds=self.bds_real) - coord.points - result = coord.core_bounds() - self.assertEqualRealArraysAndDtypes(result, self.bds_real) - - def test_lazy_points_with_lazy_bounds(self): - # Getting lazy points does not touch lazy bounds. - coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy) - coord.points - self.assertTrue(coord.has_lazy_bounds()) - - -class Test_points__setter(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_set_real(self): - # Setting new real points does not make a copy. 
- coord = AuxCoord(self.pts_real) - new_pts = self.pts_real + 102.3 - coord.points = new_pts - result = coord.core_points() - self.assertArraysShareData( - result, - new_pts, - "Points do not share data with the assigned array.", - ) - - def test_fail_bad_shape(self): - # Setting real points requires matching shape. - coord = AuxCoord([1.0, 2.0]) - msg = r"Require data with shape \(2,\), got \(3,\)" - with self.assertRaisesRegex(ValueError, msg): - coord.points = np.array([1.0, 2.0, 3.0]) - - def test_real_set_lazy(self): - # Setting new lazy points does not make a copy. - coord = AuxCoord(self.pts_real) - new_pts = self.pts_lazy + 102.3 - coord.points = new_pts - result = coord.core_points() - self.assertEqualLazyArraysAndDtypes(result, new_pts) - - def test_set_points_with_lazy_bounds(self): - # Setting points does not touch lazy bounds. - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - new_pts = self.pts_real + 102.3 - coord.points = new_pts - result = coord.core_bounds() - self.assertEqualLazyArraysAndDtypes(result, self.bds_lazy) - - -class Test_bounds__getter(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_mutable_real_bounds(self): - # Check that coord.bounds returns a modifiable array, and changes to it - # are reflected to the coord. - pts_data = np.array([1.5, 2.5]) - bds_data = np.array([[1.4, 1.6], [2.4, 2.6]]) - coord = AuxCoord(pts_data, bounds=bds_data) - initial_values = bds_data.copy() - coord.bounds[1:2] += 33.1 - result = coord.bounds - self.assertFalse(np.all(result == initial_values)) - - def test_real_bounds(self): - # Getting real bounds does not change or copy them. - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - result = coord.bounds - self.assertArraysShareData( - result, - self.bds_real, - "Bounds do not share data with the provided array.", - ) - - def test_lazy_bounds(self): - # Getting lazy bounds realises them. 
- coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - self.assertTrue(coord.has_lazy_bounds()) - result = coord.bounds - self.assertFalse(coord.has_lazy_bounds()) - self.assertEqualRealArraysAndDtypes(result, self.bds_real) - - def test_lazy_bounds_with_lazy_points(self): - # Getting lazy bounds does not fetch the points. - coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy) - coord.bounds - self.assertTrue(coord.has_lazy_points()) - - -class Test_bounds__setter(tests.IrisTest, AuxCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_set_real_bounds(self): - # Setting new real bounds does not make a copy. - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - new_bounds = self.bds_real + 102.3 - coord.bounds = new_bounds - result = coord.core_bounds() - self.assertArraysShareData( - result, - new_bounds, - "Bounds do not share data with the assigned array.", - ) - - def test_fail_bad_shape(self): - # Setting real points requires matching shape. - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - msg = "must be compatible with points shape" - with self.assertRaisesRegex(ValueError, msg): - coord.bounds = np.array([1.0, 2.0, 3.0]) - - def test_set_lazy_bounds(self): - # Setting new lazy bounds. - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - new_bounds = self.bds_lazy + 102.3 - coord.bounds = new_bounds - result = coord.core_bounds() - self.assertEqualLazyArraysAndDtypes(result, new_bounds) - - def test_set_bounds_with_lazy_points(self): - # Setting bounds does not change lazy points. 
- coord = AuxCoord(self.pts_lazy, bounds=self.bds_real) - new_bounds = self.bds_real + 102.3 - coord.bounds = new_bounds - self.assertTrue(coord.has_lazy_points()) - - -class Test_convert_units(tests.IrisTest): - def test_preserves_lazy(self): - test_bounds = np.array( - [ - [[11.0, 12.0], [12.0, 13.0], [13.0, 14.0]], - [[21.0, 22.0], [22.0, 23.0], [23.0, 24.0]], - ] - ) - test_points = np.array([[11.1, 12.2, 13.3], [21.4, 22.5, 23.6]]) - lazy_points = as_lazy_data(test_points) - lazy_bounds = as_lazy_data(test_bounds) - coord = AuxCoord(points=lazy_points, bounds=lazy_bounds, units="m") - coord.convert_units("ft") - self.assertTrue(coord.has_lazy_points()) - self.assertTrue(coord.has_lazy_bounds()) - test_points_ft = Unit("m").convert(test_points, "ft") - test_bounds_ft = Unit("m").convert(test_bounds, "ft") - self.assertArrayAllClose(coord.points, test_points_ft) - self.assertArrayAllClose(coord.bounds, test_bounds_ft) - - -class TestEquality(tests.IrisTest): - def test_nanpoints_eq_self(self): - co1 = AuxCoord([1.0, np.nan, 2.0]) - self.assertEqual(co1, co1) - - def test_nanpoints_eq_copy(self): - co1 = AuxCoord([1.0, np.nan, 2.0]) - co2 = co1.copy() - self.assertEqual(co1, co2) - - def test_nanbounds_eq_self(self): - co1 = AuxCoord([15.0, 25.0], bounds=[[14.0, 16.0], [24.0, np.nan]]) - self.assertEqual(co1, co1) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/test_Cell.py b/lib/iris/tests/unit/coords/test_Cell.py deleted file mode 100644 index 650f9ded6c..0000000000 --- a/lib/iris/tests/unit/coords/test_Cell.py +++ /dev/null @@ -1,209 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coords.Cell` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import datetime -from unittest import mock - -import cftime -import numpy as np - -from iris.coords import Cell -from iris.time import PartialDateTime - - -class Test___common_cmp__(tests.IrisTest): - def assert_raises_on_comparison(self, cell, other, exception_type, regexp): - with self.assertRaisesRegex(exception_type, regexp): - cell < other - with self.assertRaisesRegex(exception_type, regexp): - cell <= other - with self.assertRaisesRegex(exception_type, regexp): - cell > other - with self.assertRaisesRegex(exception_type, regexp): - cell >= other - - def test_cftime_cell(self): - # Check that cell comparison when the cell contains - # cftime.datetime objects raises an exception otherwise - # this will fall back to id comparison producing unreliable - # results. - cell = Cell(cftime.datetime(2010, 3, 21)) - dt = mock.Mock(timetuple=mock.Mock()) - self.assert_raises_on_comparison( - cell, dt, TypeError, "determine the order of cftime" - ) - self.assert_raises_on_comparison( - cell, 23, TypeError, "determine the order of cftime" - ) - self.assert_raises_on_comparison( - cell, "hello", TypeError, "Unexpected type.*str" - ) - - def test_cftime_other(self): - # Check that cell comparison to a cftime.datetime object - # raises an exception otherwise this will fall back to id comparison - # producing unreliable results. - dt = cftime.datetime(2010, 3, 21) - cell = Cell(mock.Mock(timetuple=mock.Mock())) - self.assert_raises_on_comparison( - cell, dt, TypeError, "determine the order of cftime" - ) - - def test_PartialDateTime_bounded_cell(self): - # Check that bounded comparisions to a PartialDateTime - # raise an exception. These are not supported as they - # depend on the calendar. 
- dt = PartialDateTime(month=6) - cell = Cell( - datetime.datetime(2010, 1, 1), - bound=[ - datetime.datetime(2010, 1, 1), - datetime.datetime(2011, 1, 1), - ], - ) - self.assert_raises_on_comparison( - cell, dt, TypeError, "bounded region for datetime" - ) - - def test_PartialDateTime_unbounded_cell(self): - # Check that cell comparison works with PartialDateTimes. - dt = PartialDateTime(month=6) - cell = Cell(cftime.datetime(2010, 3, 1)) - self.assertLess(cell, dt) - self.assertGreater(dt, cell) - self.assertLessEqual(cell, dt) - self.assertGreaterEqual(dt, cell) - - def test_datetime_unbounded_cell(self): - # Check that cell comparison works with datetimes. - dt = datetime.datetime(2000, 6, 15) - cell = Cell(datetime.datetime(2000, 1, 1)) - # Note the absence of the inverse of these - # e.g. self.assertGreater(dt, cell). - # See http://bugs.python.org/issue8005 - self.assertLess(cell, dt) - self.assertLessEqual(cell, dt) - - def test_0D_numpy_array(self): - # Check that cell comparison works with 0D numpy arrays - - cell = Cell(1.3) - - self.assertGreater(np.array(1.5), cell) - self.assertLess(np.array(1.1), cell) - self.assertGreaterEqual(np.array(1.3), cell) - self.assertLessEqual(np.array(1.3), cell) - - def test_len_1_numpy_array(self): - # Check that cell comparison works with numpy arrays of len=1 - - cell = Cell(1.3) - - self.assertGreater(np.array([1.5]), cell) - self.assertLess(np.array([1.1]), cell) - self.assertGreaterEqual(np.array([1.3]), cell) - self.assertLessEqual(np.array([1.3]), cell) - - -class Test___eq__(tests.IrisTest): - def test_datetimelike(self): - # Check that cell equality works with objects with a "timetuple". - dt = mock.Mock(timetuple=mock.Mock()) - cell = mock.MagicMock( - spec=Cell, point=datetime.datetime(2010, 3, 21), bound=None - ) - _ = cell == dt - cell.__eq__.assert_called_once_with(dt) - - def test_datetimelike_bounded_cell(self): - # Check that equality with a datetime-like bounded cell - # raises an error. 
This is not supported as it - # depends on the calendar which is not always known from - # the datetime-like bound objects. - other = mock.Mock(timetuple=mock.Mock()) - cell = Cell( - point=object(), - bound=[ - mock.Mock(timetuple=mock.Mock()), - mock.Mock(timetuple=mock.Mock()), - ], - ) - with self.assertRaisesRegex(TypeError, "bounded region for datetime"): - cell == other - - def test_PartialDateTime_other(self): - cell = Cell(datetime.datetime(2010, 3, 2)) - # A few simple cases. - self.assertEqual(cell, PartialDateTime(month=3)) - self.assertNotEqual(cell, PartialDateTime(month=3, hour=12)) - self.assertNotEqual(cell, PartialDateTime(month=4)) - - -class Test_contains_point(tests.IrisTest): - def test_datetimelike_bounded_cell(self): - point = object() - cell = Cell( - point=object(), - bound=[ - mock.Mock(timetuple=mock.Mock()), - mock.Mock(timetuple=mock.Mock()), - ], - ) - with self.assertRaisesRegex(TypeError, "bounded region for datetime"): - cell.contains_point(point) - - def test_datetimelike_point(self): - point = mock.Mock(timetuple=mock.Mock()) - cell = Cell(point=object(), bound=[object(), object()]) - with self.assertRaisesRegex(TypeError, "bounded region for datetime"): - cell.contains_point(point) - - -class Test_numpy_comparison(tests.IrisTest): - """ - Unit tests to check that the results of comparisons with numpy types can be - used as truth values.""" - - def test_cell_lhs(self): - cell = Cell(point=1.5) - n = np.float64(1.2) - - try: - bool(cell < n) - bool(cell <= n) - bool(cell > n) - bool(cell >= n) - bool(cell == n) - bool(cell != n) - except: # noqa - self.fail( - "Result of comparison could not be used as a truth value" - ) - - def test_cell_rhs(self): - cell = Cell(point=1.5) - n = np.float64(1.2) - - try: - bool(n < cell) - bool(n <= cell) - bool(n > cell) - bool(n >= cell) - bool(n == cell) - bool(n != cell) - except: # noqa - self.fail( - "Result of comparison could not be used as a truth value" - ) - - -if __name__ == 
"__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/test_CellMeasure.py b/lib/iris/tests/unit/coords/test_CellMeasure.py deleted file mode 100644 index 0bd66c6e98..0000000000 --- a/lib/iris/tests/unit/coords/test_CellMeasure.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coords.CellMeasure` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris._lazy_data import as_lazy_data -from iris.coords import CellMeasure -from iris.cube import Cube - - -class Tests(tests.IrisTest): - def setUp(self): - self.values = np.array((10.0, 12.0, 16.0, 9.0)) - self.measure = CellMeasure( - self.values, - units="m^2", - standard_name="cell_area", - long_name="measured_area", - var_name="area", - attributes={"notes": "1m accuracy"}, - ) - - def test_invalid_measure(self): - msg = "measure must be 'area' or 'volume', got 'length'" - with self.assertRaisesRegex(ValueError, msg): - self.measure.measure = "length" - - def test_set_measure(self): - v = "volume" - self.measure.measure = v - self.assertEqual(self.measure.measure, v) - - def test_data(self): - self.assertArrayEqual(self.measure.data, self.values) - - def test_set_data(self): - new_vals = np.array((1.0, 2.0, 3.0, 4.0)) - self.measure.data = new_vals - self.assertArrayEqual(self.measure.data, new_vals) - - def test_set_data__int(self): - new_vals = np.array((1, 2, 3, 4), dtype=np.int32) - self.measure.data = new_vals - self.assertArrayEqual(self.measure.data, new_vals) - - def test_set_data__uint(self): - new_vals = np.array((1, 2, 3, 4), dtype=np.uint32) - self.measure.data = new_vals - self.assertArrayEqual(self.measure.data, 
new_vals) - - def test_set_data__lazy(self): - new_vals = as_lazy_data(np.array((1.0, 2.0, 3.0, 4.0))) - self.measure.data = new_vals - self.assertArrayEqual(self.measure.data, new_vals) - - def test_data_different_shape(self): - new_vals = np.array((1.0, 2.0, 3.0)) - msg = "Require data with shape." - with self.assertRaisesRegex(ValueError, msg): - self.measure.data = new_vals - - def test_shape(self): - self.assertEqual(self.measure.shape, (4,)) - - def test_ndim(self): - self.assertEqual(self.measure.ndim, 1) - - def test___getitem__(self): - sub_measure = self.measure[2] - self.assertArrayEqual(self.values[2], sub_measure.data) - - def test___getitem__data_copy(self): - # Check that a sliced cell measure has independent data. - sub_measure = self.measure[1:3] - old_values = sub_measure.data.copy() - # Change the original one. - self.measure.data[:] = 0.0 - # Check the new one has not changed. - self.assertArrayEqual(sub_measure.data, old_values) - - def test_copy(self): - new_vals = np.array((7.0, 8.0)) - copy_measure = self.measure.copy(new_vals) - self.assertArrayEqual(copy_measure.data, new_vals) - - def test___str__(self): - expected = "\n".join( - [ - "CellMeasure : cell_area / (m^2)", - " data: [10., 12., 16., 9.]", - " shape: (4,)", - " dtype: float64", - " standard_name: 'cell_area'", - " long_name: 'measured_area'", - " var_name: 'area'", - " attributes:", - " notes '1m accuracy'", - " measure: 'area'", - ] - ) - self.assertEqual(self.measure.__str__(), expected) - - def test___repr__(self): - expected = ( - "" - ) - self.assertEqual(expected, self.measure.__repr__()) - - def test__eq__(self): - self.assertEqual(self.measure, self.measure) - - -class Test_cube_dims(tests.IrisTest): - def test(self): - # Check that "coord.cube_dims(cube)" calls "cube.coord_dims(coord)". 
- mock_dims_result = mock.sentinel.CM_DIMS - mock_dims_call = mock.Mock(return_value=mock_dims_result) - mock_cube = mock.Mock(Cube, cell_measure_dims=mock_dims_call) - test_cm = CellMeasure([1], long_name="test_name") - - result = test_cm.cube_dims(mock_cube) - self.assertEqual(result, mock_dims_result) - self.assertEqual(mock_dims_call.call_args_list, [mock.call(test_cm)]) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/test_CellMethod.py b/lib/iris/tests/unit/coords/test_CellMethod.py deleted file mode 100644 index b10fd41834..0000000000 --- a/lib/iris/tests/unit/coords/test_CellMethod.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.coords.CellMethod`. -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from iris.common import BaseMetadata -from iris.coords import AuxCoord, CellMethod - - -class Test(tests.IrisTest): - def setUp(self): - self.method = "mean" - - def _check(self, token, coord, default=False): - result = CellMethod(self.method, coords=coord) - token = token if not default else BaseMetadata.DEFAULT_NAME - expected = "{}: {}".format(self.method, token) - self.assertEqual(str(result), expected) - - def test_coord_standard_name(self): - token = "air_temperature" - coord = AuxCoord(1, standard_name=token) - self._check(token, coord) - - def test_coord_long_name(self): - token = "long_name" - coord = AuxCoord(1, long_name=token) - self._check(token, coord) - - def test_coord_long_name_default(self): - token = "long name" # includes space - coord = AuxCoord(1, long_name=token) - self._check(token, coord, default=True) - - def test_coord_var_name(self): - token = "var_name" - coord = AuxCoord(1, var_name=token) - self._check(token, coord) - - def test_coord_var_name_fail(self): - token = "var name" # includes space - emsg = "is not a valid NetCDF variable name" - with self.assertRaisesRegex(ValueError, emsg): - AuxCoord(1, var_name=token) - - def test_coord_stash(self): - token = "stash" - coord = AuxCoord(1, attributes=dict(STASH=token)) - self._check(token, coord, default=True) - - def test_coord_stash_default(self): - token = "_stash" # includes leading underscore - coord = AuxCoord(1, attributes=dict(STASH=token)) - self._check(token, coord, default=True) - - def test_string(self): - token = "air_temperature" - result = CellMethod(self.method, coords=token) - expected = "{}: {}".format(self.method, token) - self.assertEqual(str(result), expected) - - def test_string_default(self): - token = "air temperature" # includes space - result = CellMethod(self.method, coords=token) - expected = "{}: unknown".format(self.method) - self.assertEqual(str(result), expected) - - def test_mixture(self): - token = 
"air_temperature" - coord = AuxCoord(1, standard_name=token) - result = CellMethod(self.method, coords=[coord, token]) - expected = "{}: {}, {}".format(self.method, token, token) - self.assertEqual(str(result), expected) - - def test_mixture_default(self): - token = "air temperature" # includes space - coord = AuxCoord(1, long_name=token) - result = CellMethod(self.method, coords=[coord, token]) - expected = "{}: unknown, unknown".format(self.method) - self.assertEqual(str(result), expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/test_Coord.py b/lib/iris/tests/unit/coords/test_Coord.py deleted file mode 100644 index 43170b6c4e..0000000000 --- a/lib/iris/tests/unit/coords/test_Coord.py +++ /dev/null @@ -1,1084 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coords.Coord` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import collections -from unittest import mock -import warnings - -import dask.array as da -import numpy as np - -import iris -from iris.coords import AuxCoord, Coord, DimCoord -from iris.cube import Cube -from iris.exceptions import UnitConversionError -from iris.tests.unit.coords import CoordTestMixin - -Pair = collections.namedtuple("Pair", "points bounds") - - -class Test_nearest_neighbour_index__ascending(tests.IrisTest): - def setUp(self): - points = [0.0, 90.0, 180.0, 270.0] - self.coord = DimCoord(points, circular=False, units="degrees") - - def _test_nearest_neighbour_index( - self, target, bounds=None, circular=False - ): - _bounds = [[-20, 10], [10, 100], [100, 260], [260, 340]] - ext_pnts = [-70, -10, 110, 275, 370] - if bounds is True: - self.coord.bounds = _bounds - else: - self.coord.bounds = bounds - self.coord.circular = circular - results = [self.coord.nearest_neighbour_index(ind) for ind in ext_pnts] - self.assertEqual(results, target) - - def test_nobounds(self): - target = [0, 0, 1, 3, 3] - self._test_nearest_neighbour_index(target) - - def test_nobounds_circular(self): - target = [3, 0, 1, 3, 0] - self._test_nearest_neighbour_index(target, circular=True) - - def test_bounded(self): - target = [0, 0, 2, 3, 3] - self._test_nearest_neighbour_index(target, bounds=True) - - def test_bounded_circular(self): - target = [3, 0, 2, 3, 0] - self._test_nearest_neighbour_index(target, bounds=True, circular=True) - - def test_bounded_overlapping(self): - _bounds = [[-20, 50], [10, 150], [100, 300], [260, 340]] - target = [0, 0, 1, 2, 3] - self._test_nearest_neighbour_index(target, bounds=_bounds) - - def test_bounded_disjointed(self): - _bounds = [[-20, 10], [80, 170], [180, 190], [240, 340]] - target = [0, 0, 1, 3, 3] - self._test_nearest_neighbour_index(target, bounds=_bounds) - - def test_scalar(self): - self.coord = DimCoord([0], circular=False, units="degrees") - target = [0, 0, 0, 0, 0] - 
self._test_nearest_neighbour_index(target) - - -class Test_nearest_neighbour_index__descending(tests.IrisTest): - def setUp(self): - points = [270.0, 180.0, 90.0, 0.0] - self.coord = DimCoord(points, circular=False, units="degrees") - - def _test_nearest_neighbour_index( - self, target, bounds=False, circular=False - ): - _bounds = [[340, 260], [260, 100], [100, 10], [10, -20]] - ext_pnts = [-70, -10, 110, 275, 370] - if bounds: - self.coord.bounds = _bounds - self.coord.circular = circular - results = [self.coord.nearest_neighbour_index(ind) for ind in ext_pnts] - self.assertEqual(results, target) - - def test_nobounds(self): - target = [3, 3, 2, 0, 0] - self._test_nearest_neighbour_index(target) - - def test_nobounds_circular(self): - target = [0, 3, 2, 0, 3] - self._test_nearest_neighbour_index(target, circular=True) - - def test_bounded(self): - target = [3, 3, 1, 0, 0] - self._test_nearest_neighbour_index(target, bounds=True) - - def test_bounded_circular(self): - target = [0, 3, 1, 0, 3] - self._test_nearest_neighbour_index(target, bounds=True, circular=True) - - -class Test_guess_bounds(tests.IrisTest): - def setUp(self): - self.coord = DimCoord( - np.array([-160, -120, 0, 30, 150, 170]), - units="degree", - standard_name="longitude", - circular=True, - ) - - def test_non_circular(self): - self.coord.circular = False - self.coord.guess_bounds() - target = np.array( - [ - [-180.0, -140.0], - [-140.0, -60.0], - [-60.0, 15.0], - [15.0, 90.0], - [90.0, 160.0], - [160.0, 180.0], - ] - ) - self.assertArrayEqual(target, self.coord.bounds) - - def test_circular_increasing(self): - self.coord.guess_bounds() - target = np.array( - [ - [-175.0, -140.0], - [-140.0, -60.0], - [-60.0, 15.0], - [15.0, 90.0], - [90.0, 160.0], - [160.0, 185.0], - ] - ) - self.assertArrayEqual(target, self.coord.bounds) - - def test_circular_decreasing(self): - self.coord.points = self.coord.points[::-1] - self.coord.guess_bounds() - target = np.array( - [ - [185.0, 160.0], - [160.0, 90.0], - 
[90.0, 15.0], - [15.0, -60.0], - [-60.0, -140.0], - [-140.0, -175.0], - ] - ) - self.assertArrayEqual(target, self.coord.bounds) - - def test_circular_increasing_alt_range(self): - self.coord.points = np.array([10, 30, 90, 150, 210, 220]) - self.coord.guess_bounds() - target = np.array( - [ - [-65.0, 20.0], - [20.0, 60.0], - [60.0, 120.0], - [120.0, 180.0], - [180.0, 215.0], - [215.0, 295.0], - ] - ) - self.assertArrayEqual(target, self.coord.bounds) - - def test_circular_decreasing_alt_range(self): - self.coord.points = np.array([10, 30, 90, 150, 210, 220])[::-1] - self.coord.guess_bounds() - target = np.array( - [ - [295, 215], - [215, 180], - [180, 120], - [120, 60], - [60, 20], - [20, -65], - ] - ) - self.assertArrayEqual(target, self.coord.bounds) - - -class Test_guess_bounds__default_enabled_latitude_clipping(tests.IrisTest): - def test_all_inside(self): - lat = DimCoord([-10, 0, 20], units="degree", standard_name="latitude") - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-15, -5], [-5, 10], [10, 30]]) - - def test_points_inside_bounds_outside(self): - lat = DimCoord([-80, 0, 70], units="degree", standard_name="latitude") - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-90, -40], [-40, 35], [35, 90]]) - - def test_points_inside_bounds_outside_grid_latitude(self): - lat = DimCoord( - [-80, 0, 70], units="degree", standard_name="grid_latitude" - ) - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-90, -40], [-40, 35], [35, 90]]) - - def test_points_to_edges_bounds_outside(self): - lat = DimCoord([-90, 0, 90], units="degree", standard_name="latitude") - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-90, -45], [-45, 45], [45, 90]]) - - def test_points_outside(self): - lat = DimCoord( - [-100, 0, 120], units="degree", standard_name="latitude" - ) - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-150, -50], [-50, 60], [60, 180]]) - - def test_points_inside_bounds_outside_wrong_unit(self): - lat = DimCoord([-80, 
0, 70], units="feet", standard_name="latitude") - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-120, -40], [-40, 35], [35, 105]]) - - def test_points_inside_bounds_outside_wrong_name(self): - lat = DimCoord([-80, 0, 70], units="degree", standard_name="longitude") - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-120, -40], [-40, 35], [35, 105]]) - - def test_points_inside_bounds_outside_wrong_name_2(self): - lat = DimCoord( - [-80, 0, 70], units="degree", long_name="other_latitude" - ) - lat.guess_bounds() - self.assertArrayEqual(lat.bounds, [[-120, -40], [-40, 35], [35, 105]]) - - -class Test_cell(tests.IrisTest): - def _mock_coord(self): - coord = mock.Mock( - spec=Coord, - ndim=1, - points=np.array([mock.sentinel.time]), - bounds=np.array([[mock.sentinel.lower, mock.sentinel.upper]]), - ) - return coord - - def test_time_as_object(self): - # Ensure Coord.cell() converts the point/bound values to - # "datetime" objects. - coord = self._mock_coord() - coord.units.num2date = mock.Mock( - side_effect=[ - mock.sentinel.datetime, - (mock.sentinel.datetime_lower, mock.sentinel.datetime_upper), - ] - ) - cell = Coord.cell(coord, 0) - self.assertIs(cell.point, mock.sentinel.datetime) - self.assertEqual( - cell.bound, - (mock.sentinel.datetime_lower, mock.sentinel.datetime_upper), - ) - self.assertEqual( - coord.units.num2date.call_args_list, - [ - mock.call((mock.sentinel.time,)), - mock.call((mock.sentinel.lower, mock.sentinel.upper)), - ], - ) - - -class Test_collapsed(tests.IrisTest, CoordTestMixin): - def test_serialize(self): - # Collapse a string AuxCoord, causing it to be serialised. 
- string = Pair( - np.array(["two", "four", "six", "eight"]), - np.array( - [ - ["one", "three"], - ["three", "five"], - ["five", "seven"], - ["seven", "nine"], - ] - ), - ) - string_nobounds = Pair(np.array(["ecks", "why", "zed"]), None) - string_multi = Pair( - np.array(["three", "six", "nine"]), - np.array( - [ - ["one", "two", "four", "five"], - ["four", "five", "seven", "eight"], - ["seven", "eight", "ten", "eleven"], - ] - ), - ) - - def _serialize(data): - return "|".join(str(item) for item in data.flatten()) - - for units in ["unknown", "no_unit"]: - for points, bounds in [string, string_nobounds, string_multi]: - coord = AuxCoord(points=points, bounds=bounds, units=units) - collapsed_coord = coord.collapsed() - self.assertArrayEqual( - collapsed_coord.points, _serialize(points) - ) - if bounds is not None: - for index in np.ndindex(bounds.shape[1:]): - index_slice = (slice(None),) + tuple(index) - self.assertArrayEqual( - collapsed_coord.bounds[index_slice], - _serialize(bounds[index_slice]), - ) - - def test_dim_1d(self): - # Numeric coords should not be serialised. - coord = DimCoord( - points=np.array([2, 4, 6, 8]), - bounds=np.array([[1, 3], [3, 5], [5, 7], [7, 9]]), - ) - for units in ["unknown", "no_unit", 1, "K"]: - coord.units = units - collapsed_coord = coord.collapsed() - self.assertArrayEqual( - collapsed_coord.points, np.mean(coord.points) - ) - self.assertArrayEqual( - collapsed_coord.bounds, - [[coord.bounds.min(), coord.bounds.max()]], - ) - - def test_lazy_points(self): - # Lazy points should stay lazy after collapse. 
- coord = AuxCoord(points=da.from_array(np.arange(5), chunks=5)) - collapsed_coord = coord.collapsed() - self.assertTrue(collapsed_coord.has_lazy_bounds()) - self.assertTrue(collapsed_coord.has_lazy_points()) - - def test_numeric_nd(self): - coord = AuxCoord( - points=np.array([[1, 2, 4, 5], [4, 5, 7, 8], [7, 8, 10, 11]]) - ) - - collapsed_coord = coord.collapsed() - self.assertArrayEqual(collapsed_coord.points, np.array([6])) - self.assertArrayEqual(collapsed_coord.bounds, np.array([[1, 11]])) - - # Test partially collapsing one dimension... - collapsed_coord = coord.collapsed(1) - self.assertArrayEqual( - collapsed_coord.points, np.array([3.0, 6.0, 9.0]) - ) - self.assertArrayEqual( - collapsed_coord.bounds, np.array([[1, 5], [4, 8], [7, 11]]) - ) - - # ... and the other - collapsed_coord = coord.collapsed(0) - self.assertArrayEqual(collapsed_coord.points, np.array([4, 5, 7, 8])) - self.assertArrayEqual( - collapsed_coord.bounds, - np.array([[1, 7], [2, 8], [4, 10], [5, 11]]), - ) - - def test_numeric_nd_bounds_all(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - - collapsed_coord = coord.collapsed() - self.assertArrayEqual(collapsed_coord.points, np.array([55])) - self.assertArrayEqual(collapsed_coord.bounds, np.array([[-2, 112]])) - - def test_numeric_nd_bounds_second(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - collapsed_coord = coord.collapsed(1) - self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) - self.assertArrayEqual( - collapsed_coord.bounds, np.array([[-2, 32], [38, 72], [78, 112]]) - ) - - def test_numeric_nd_bounds_first(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - # ... and the other.. 
- collapsed_coord = coord.collapsed(0) - self.assertArrayEqual( - collapsed_coord.points, np.array([40, 50, 60, 70]) - ) - self.assertArrayEqual( - collapsed_coord.bounds, - np.array([[-2, 82], [8, 92], [18, 102], [28, 112]]), - ) - - def test_numeric_nd_bounds_last(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_real) - # ... and again with -ve dimension specification. - collapsed_coord = coord.collapsed(-1) - self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) - self.assertArrayEqual( - collapsed_coord.bounds, np.array([[-2, 32], [38, 72], [78, 112]]) - ) - - def test_lazy_nd_bounds_all(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - - collapsed_coord = coord.collapsed() - - # Note that the new points get recalculated from the lazy bounds - # and so end up as lazy - self.assertTrue(collapsed_coord.has_lazy_points()) - self.assertTrue(collapsed_coord.has_lazy_bounds()) - - self.assertArrayEqual(collapsed_coord.points, np.array([55])) - self.assertArrayEqual(collapsed_coord.bounds, da.array([[-2, 112]])) - - def test_lazy_nd_bounds_second(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - - collapsed_coord = coord.collapsed(1) - self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) - self.assertArrayEqual( - collapsed_coord.bounds, np.array([[-2, 32], [38, 72], [78, 112]]) - ) - - def test_lazy_nd_bounds_first(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - - collapsed_coord = coord.collapsed(0) - self.assertArrayEqual( - collapsed_coord.points, np.array([40, 50, 60, 70]) - ) - self.assertArrayEqual( - collapsed_coord.bounds, - np.array([[-2, 82], [8, 92], [18, 102], [28, 112]]), - ) - - def test_lazy_nd_bounds_last(self): - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_real, bounds=self.bds_lazy) - - collapsed_coord = coord.collapsed(-1) - 
self.assertArrayEqual(collapsed_coord.points, np.array([15, 55, 95])) - self.assertArrayEqual( - collapsed_coord.bounds, np.array([[-2, 32], [38, 72], [78, 112]]) - ) - - def test_lazy_nd_points_and_bounds(self): - - self.setupTestArrays((3, 4)) - coord = AuxCoord(self.pts_lazy, bounds=self.bds_lazy) - - collapsed_coord = coord.collapsed() - - self.assertTrue(collapsed_coord.has_lazy_points()) - self.assertTrue(collapsed_coord.has_lazy_bounds()) - - self.assertArrayEqual(collapsed_coord.points, da.array([55])) - self.assertArrayEqual(collapsed_coord.bounds, da.array([[-2, 112]])) - - -class Test_is_compatible(tests.IrisTest): - def setUp(self): - self.test_coord = AuxCoord([1.0]) - self.other_coord = self.test_coord.copy() - - def test_noncommon_array_attrs_compatible(self): - # Non-common array attributes should be ok. - self.test_coord.attributes["array_test"] = np.array([1.0, 2, 3]) - self.assertTrue(self.test_coord.is_compatible(self.other_coord)) - - def test_matching_array_attrs_compatible(self): - # Matching array attributes should be ok. - self.test_coord.attributes["array_test"] = np.array([1.0, 2, 3]) - self.other_coord.attributes["array_test"] = np.array([1.0, 2, 3]) - self.assertTrue(self.test_coord.is_compatible(self.other_coord)) - - def test_different_array_attrs_incompatible(self): - # Differing array attributes should make coords incompatible. - self.test_coord.attributes["array_test"] = np.array([1.0, 2, 3]) - self.other_coord.attributes["array_test"] = np.array([1.0, 2, 777.7]) - self.assertFalse(self.test_coord.is_compatible(self.other_coord)) - - -class Test_contiguous_bounds(tests.IrisTest): - def test_1d_coord_no_bounds_warning(self): - coord = DimCoord([0, 1, 2], standard_name="latitude") - msg = ( - "Coordinate 'latitude' is not bounded, guessing contiguous " - "bounds." 
- ) - with warnings.catch_warnings(): - # Cause all warnings to raise Exceptions - warnings.simplefilter("error") - with self.assertRaisesRegex(Warning, msg): - coord.contiguous_bounds() - - def test_2d_coord_no_bounds_error(self): - coord = AuxCoord(np.array([[0, 0], [5, 5]]), standard_name="latitude") - emsg = "Guessing bounds of 2D coords is not currently supported" - with self.assertRaisesRegex(ValueError, emsg): - coord.contiguous_bounds() - - def test__sanity_check_bounds_call(self): - coord = DimCoord([5, 15, 25], bounds=[[0, 10], [10, 20], [20, 30]]) - with mock.patch( - "iris.coords.Coord._sanity_check_bounds" - ) as bounds_check: - coord.contiguous_bounds() - bounds_check.assert_called_once() - - def test_1d_coord(self): - coord = DimCoord( - [2, 4, 6], - standard_name="latitude", - bounds=[[1, 3], [3, 5], [5, 7]], - ) - expected = np.array([1, 3, 5, 7]) - result = coord.contiguous_bounds() - self.assertArrayEqual(result, expected) - - def test_1d_coord_discontiguous(self): - coord = DimCoord( - [2, 4, 6], - standard_name="latitude", - bounds=[[1, 3], [4, 5], [5, 7]], - ) - expected = np.array([1, 4, 5, 7]) - result = coord.contiguous_bounds() - self.assertArrayEqual(result, expected) - - def test_2d_lon_bounds(self): - coord = AuxCoord( - np.array([[1, 3], [1, 3]]), - bounds=np.array( - [[[0, 2, 2, 0], [2, 4, 4, 2]], [[0, 2, 2, 0], [2, 4, 4, 2]]] - ), - ) - expected = np.array([[0, 2, 4], [0, 2, 4], [0, 2, 4]]) - result = coord.contiguous_bounds() - self.assertArrayEqual(result, expected) - - def test_2d_lat_bounds(self): - coord = AuxCoord( - np.array([[1, 1], [3, 3]]), - bounds=np.array( - [[[0, 0, 2, 2], [0, 0, 2, 2]], [[2, 2, 4, 4], [2, 2, 4, 4]]] - ), - ) - expected = np.array([[0, 0, 0], [2, 2, 2], [4, 4, 4]]) - result = coord.contiguous_bounds() - self.assertArrayEqual(result, expected) - - -class Test_is_contiguous(tests.IrisTest): - def test_no_bounds(self): - coord = DimCoord([1, 3]) - result = coord.is_contiguous() - self.assertFalse(result) - 
- def test__discontiguity_in_bounds_call(self): - # Check that :meth:`iris.coords.Coord._discontiguity_in_bounds` is - # called. - coord = DimCoord([1, 3], bounds=[[0, 2], [2, 4]]) - with mock.patch( - "iris.coords.Coord._discontiguity_in_bounds" - ) as discontiguity_check: - # Discontiguity returns two objects that are unpacked in - # `coord.is_contiguous`. - discontiguity_check.return_value = [None, None] - coord.is_contiguous(rtol=1e-1, atol=1e-3) - discontiguity_check.assert_called_with(rtol=1e-1, atol=1e-3) - - -class Test__discontiguity_in_bounds(tests.IrisTest): - def setUp(self): - self.points_3by3 = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]) - self.lon_bounds_3by3 = np.array( - [ - [[0, 2, 2, 0], [2, 4, 4, 2], [4, 6, 6, 4]], - [[0, 2, 2, 0], [2, 4, 4, 2], [4, 6, 6, 4]], - [[0, 2, 2, 0], [2, 4, 4, 2], [4, 6, 6, 4]], - ] - ) - self.lat_bounds_3by3 = np.array( - [ - [[0, 0, 2, 2], [0, 0, 2, 2], [0, 0, 2, 2]], - [[2, 2, 4, 4], [2, 2, 4, 4], [2, 2, 4, 4]], - [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], - ] - ) - - def test_1d_contiguous(self): - coord = DimCoord( - [-20, 0, 20], bounds=[[-30, -10], [-10, 10], [10, 30]] - ) - contiguous, diffs = coord._discontiguity_in_bounds() - self.assertTrue(contiguous) - self.assertArrayEqual(diffs, np.zeros(2)) - - def test_1d_discontiguous(self): - coord = DimCoord([10, 20, 40], bounds=[[5, 15], [15, 25], [35, 45]]) - contiguous, diffs = coord._discontiguity_in_bounds() - self.assertFalse(contiguous) - self.assertArrayEqual(diffs, np.array([0, 10])) - - def test_1d_one_cell(self): - # Test a 1D coord with a single cell. 
- coord = DimCoord(20, bounds=[[10, 30]]) - contiguous, diffs = coord._discontiguity_in_bounds() - self.assertTrue(contiguous) - self.assertArrayEqual(diffs, np.array([])) - - def test_2d_contiguous_both_dirs(self): - coord = AuxCoord(self.points_3by3, bounds=self.lon_bounds_3by3) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertTrue(contiguous) - self.assertTrue(not diffs_along_x.any()) - self.assertTrue(not diffs_along_y.any()) - - def test_2d_discontiguous_along_x(self): - coord = AuxCoord( - self.points_3by3[:, ::2], bounds=self.lon_bounds_3by3[:, ::2, :] - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertFalse(contiguous) - self.assertArrayEqual( - diffs_along_x, np.array([True, True, True]).reshape(3, 1) - ) - self.assertTrue(not diffs_along_y.any()) - - def test_2d_discontiguous_along_y(self): - coord = AuxCoord( - self.points_3by3[::2, :], bounds=self.lat_bounds_3by3[::2, :, :] - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertFalse(contiguous) - self.assertTrue(not diffs_along_x.any()) - self.assertArrayEqual(diffs_along_y, np.array([[True, True, True]])) - - def test_2d_discontiguous_along_x_and_y(self): - coord = AuxCoord( - np.array([[1, 5], [3, 5]]), - bounds=np.array( - [[[0, 2, 2, 0], [4, 6, 6, 4]], [[2, 4, 4, 2], [4, 6, 6, 4]]] - ), - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - exp_x_diffs = np.array([True, False]).reshape(2, 1) - exp_y_diffs = np.array([True, False]).reshape(1, 2) - self.assertFalse(contiguous) - self.assertArrayEqual(diffs_along_x, exp_x_diffs) - self.assertArrayEqual(diffs_along_y, exp_y_diffs) - - def test_2d_contiguous_along_x_atol(self): - coord = AuxCoord( - self.points_3by3[:, ::2], bounds=self.lon_bounds_3by3[:, ::2, :] - ) - # Set a high atol that allows small discontiguities. 
- contiguous, diffs = coord._discontiguity_in_bounds(atol=5) - diffs_along_x, diffs_along_y = diffs - self.assertTrue(contiguous) - self.assertArrayEqual( - diffs_along_x, np.array([False, False, False]).reshape(3, 1) - ) - self.assertTrue(not diffs_along_y.any()) - - def test_2d_one_cell(self): - # Test a 2D coord with a single cell, where the coord has shape (1, 1). - coord = AuxCoord( - self.points_3by3[:1, :1], bounds=self.lon_bounds_3by3[:1, :1, :] - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - expected_diffs = np.array([], dtype=np.int64) - self.assertTrue(contiguous) - self.assertArrayEqual(diffs_along_x, expected_diffs.reshape(1, 0)) - self.assertArrayEqual(diffs_along_y, expected_diffs.reshape(0, 1)) - - def test_2d_one_cell_along_x(self): - # Test a 2D coord with a single cell along the x axis, where the coord - # has shape (2, 1). - coord = AuxCoord( - self.points_3by3[:, :1], bounds=self.lat_bounds_3by3[:, :1, :] - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertTrue(contiguous) - self.assertTrue(not diffs_along_x.any()) - self.assertArrayEqual(diffs_along_y, np.array([0, 0]).reshape(2, 1)) - - def test_2d_one_cell_along_y(self): - # Test a 2D coord with a single cell along the y axis, where the coord - # has shape (1, 2). - coord = AuxCoord( - self.points_3by3[:1, :], bounds=self.lon_bounds_3by3[:1, :, :] - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertTrue(contiguous) - self.assertTrue(not diffs_along_x.any()) - self.assertTrue(not diffs_along_y.any()) - - def test_2d_contiguous_mod_360(self): - # Test that longitude coordinates are adjusted by the 360 modulus when - # calculating the discontiguities in contiguous bounds. 
- coord = AuxCoord( - [[175, -175], [175, -175]], - standard_name="longitude", - bounds=np.array( - [ - [[170, 180, 180, 170], [-180, -170, -170, -180]], - [[170, 180, 180, 170], [-180, -170, -170, -180]], - ] - ), - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertTrue(contiguous) - self.assertTrue(not diffs_along_x.any()) - self.assertTrue(not diffs_along_y.any()) - - def test_2d_discontiguous_mod_360(self): - # Test that longitude coordinates are adjusted by the 360 modulus when - # calculating the discontiguities in contiguous bounds. - coord = AuxCoord( - [[175, -175], [175, -175]], - standard_name="longitude", - bounds=np.array( - [ - [[170, 180, 180, 170], [10, 20, 20, 10]], - [[170, 180, 180, 170], [10, 20, 20, 10]], - ] - ), - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertFalse(contiguous) - self.assertArrayEqual(diffs_along_x, np.array([[True], [True]])) - self.assertTrue(not diffs_along_y.any()) - - def test_2d_contiguous_mod_360_not_longitude(self): - # Test that non-longitude coordinates are not adjusted by the 360 - # modulus when calculating the discontiguities in contiguous bounds. - coord = AuxCoord( - [[-150, 350], [-150, 350]], - standard_name="height", - bounds=np.array( - [ - [[-400, 100, 100, -400], [100, 600, 600, 100]], - [[-400, 100, 100, -400], [100, 600, 600, 100]], - ] - ), - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertTrue(contiguous) - self.assertTrue(not diffs_along_x.any()) - self.assertTrue(not diffs_along_y.any()) - - def test_2d_discontiguous_mod_360_not_longitude(self): - # Test that non-longitude coordinates are not adjusted by the 360 - # modulus when calculating the discontiguities in discontiguous bounds. 
- coord = AuxCoord( - [[-150, 350], [-150, 350]], - standard_name="height", - bounds=np.array( - [ - [[-400, 100, 100, -400], [200, 600, 600, 200]], - [[-400, 100, 100, -400], [200, 600, 600, 200]], - ] - ), - ) - contiguous, diffs = coord._discontiguity_in_bounds() - diffs_along_x, diffs_along_y = diffs - self.assertFalse(contiguous) - self.assertArrayEqual(diffs_along_x, np.array([[True], [True]])) - self.assertTrue(not diffs_along_y.any()) - - -class Test__sanity_check_bounds(tests.IrisTest): - def test_coord_1d_2_bounds(self): - # Check that a 1d coord with 2 bounds does not raise an error. - coord = iris.coords.DimCoord( - [0, 1], standard_name="latitude", bounds=[[0, 1], [1, 2]] - ) - coord._sanity_check_bounds() - - def test_coord_1d_no_bounds(self): - coord = iris.coords.DimCoord([0, 1], standard_name="latitude") - emsg = ( - "Contiguous bounds are only defined for 1D coordinates with " - "2 bounds." - ) - with self.assertRaisesRegex(ValueError, emsg): - coord._sanity_check_bounds() - - def test_coord_1d_1_bounds(self): - coord = iris.coords.DimCoord( - [0, 1], standard_name="latitude", bounds=np.array([[0], [1]]) - ) - emsg = ( - "Contiguous bounds are only defined for 1D coordinates with " - "2 bounds." - ) - with self.assertRaisesRegex(ValueError, emsg): - coord._sanity_check_bounds() - - def test_coord_2d_4_bounds(self): - coord = iris.coords.AuxCoord( - [[0, 0], [1, 1]], - standard_name="latitude", - bounds=np.array( - [[[0, 0, 1, 1], [0, 0, 1, 1]], [[1, 1, 2, 2], [1, 1, 2, 2]]] - ), - ) - coord._sanity_check_bounds() - - def test_coord_2d_no_bounds(self): - coord = iris.coords.AuxCoord( - [[0, 0], [1, 1]], standard_name="latitude" - ) - emsg = ( - "Contiguous bounds are only defined for 2D coordinates with " - "4 bounds." 
- ) - with self.assertRaisesRegex(ValueError, emsg): - coord._sanity_check_bounds() - - def test_coord_2d_2_bounds(self): - coord = iris.coords.AuxCoord( - [[0, 0], [1, 1]], - standard_name="latitude", - bounds=np.array([[[0, 1], [0, 1]], [[1, 2], [1, 2]]]), - ) - emsg = ( - "Contiguous bounds are only defined for 2D coordinates with " - "4 bounds." - ) - with self.assertRaisesRegex(ValueError, emsg): - coord._sanity_check_bounds() - - def test_coord_3d(self): - coord = iris.coords.AuxCoord( - np.zeros((2, 2, 2)), standard_name="height" - ) - emsg = ( - "Contiguous bounds are not defined for coordinates with more " - "than 2 dimensions." - ) - with self.assertRaisesRegex(ValueError, emsg): - coord._sanity_check_bounds() - - -class Test_convert_units(tests.IrisTest): - def test_convert_unknown_units(self): - coord = iris.coords.AuxCoord(1, units="unknown") - emsg = ( - "Cannot convert from unknown units. " - 'The "units" attribute may be set directly.' - ) - with self.assertRaisesRegex(UnitConversionError, emsg): - coord.convert_units("degrees") - - -class Test___str__(tests.IrisTest): - def test_short_time_interval(self): - coord = DimCoord( - [5], standard_name="time", units="days since 1970-01-01" - ) - expected = "\n".join( - [ - "DimCoord : time / (days since 1970-01-01, gregorian calendar)", - " points: [1970-01-06 00:00:00]", - " shape: (1,)", - " dtype: int64", - " standard_name: 'time'", - ] - ) - result = coord.__str__() - self.assertEqual(expected, result) - - def test_short_time_interval__bounded(self): - coord = DimCoord( - [5, 6], standard_name="time", units="days since 1970-01-01" - ) - coord.guess_bounds() - expected = "\n".join( - [ - "DimCoord : time / (days since 1970-01-01, gregorian calendar)", - " points: [1970-01-06 00:00:00, 1970-01-07 00:00:00]", - " bounds: [", - " [1970-01-05 12:00:00, 1970-01-06 12:00:00],", - " [1970-01-06 12:00:00, 1970-01-07 12:00:00]]", - " shape: (2,) bounds(2, 2)", - " dtype: int64", - " standard_name: 'time'", - ] 
- ) - result = coord.__str__() - self.assertEqual(expected, result) - - def test_long_time_interval(self): - coord = DimCoord( - [5], standard_name="time", units="years since 1970-01-01" - ) - expected = "\n".join( - [ - "DimCoord : time / (years since 1970-01-01, gregorian calendar)", - " points: [5]", - " shape: (1,)", - " dtype: int64", - " standard_name: 'time'", - ] - ) - result = coord.__str__() - self.assertEqual(expected, result) - - def test_long_time_interval__bounded(self): - coord = DimCoord( - [5, 6], standard_name="time", units="years since 1970-01-01" - ) - coord.guess_bounds() - expected = "\n".join( - [ - "DimCoord : time / (years since 1970-01-01, gregorian calendar)", - " points: [5, 6]", - " bounds: [", - " [4.5, 5.5],", - " [5.5, 6.5]]", - " shape: (2,) bounds(2, 2)", - " dtype: int64", - " standard_name: 'time'", - ] - ) - result = coord.__str__() - self.assertEqual(expected, result) - - def test_non_time_unit(self): - coord = DimCoord([1.0]) - expected = "\n".join( - [ - "DimCoord : unknown / (unknown)", - " points: [1.]", - " shape: (1,)", - " dtype: float64", - ] - ) - result = coord.__str__() - self.assertEqual(expected, result) - - -class TestClimatology(tests.IrisTest): - # Variety of tests for the climatological property of a coord. - # Only using AuxCoord since there is no different behaviour between Aux - # and DimCoords for this property. 
- - def test_create(self): - coord = AuxCoord( - points=[0, 1], - bounds=[[0, 1], [1, 2]], - units="days since 1970-01-01", - climatological=True, - ) - self.assertTrue(coord.climatological) - - def test_create_no_bounds_no_set(self): - with self.assertRaisesRegex(ValueError, "Cannot set.*no bounds exist"): - AuxCoord( - points=[0, 1], - units="days since 1970-01-01", - climatological=True, - ) - - def test_create_no_time_no_set(self): - emsg = "Cannot set climatological .* valid time reference units.*" - with self.assertRaisesRegex(TypeError, emsg): - AuxCoord( - points=[0, 1], bounds=[[0, 1], [1, 2]], climatological=True - ) - - def test_absent(self): - coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]]) - self.assertFalse(coord.climatological) - - def test_absent_no_bounds_no_set(self): - coord = AuxCoord(points=[0, 1], units="days since 1970-01-01") - with self.assertRaisesRegex(ValueError, "Cannot set.*no bounds exist"): - coord.climatological = True - - def test_absent_no_time_no_set(self): - coord = AuxCoord(points=[0, 1], bounds=[[0, 1], [1, 2]]) - emsg = "Cannot set climatological .* valid time reference units.*" - with self.assertRaisesRegex(TypeError, emsg): - coord.climatological = True - - def test_absent_no_bounds_unset(self): - coord = AuxCoord(points=[0, 1]) - coord.climatological = False - self.assertFalse(coord.climatological) - - def test_bounds_set(self): - coord = AuxCoord( - points=[0, 1], - bounds=[[0, 1], [1, 2]], - units="days since 1970-01-01", - ) - coord.climatological = True - self.assertTrue(coord.climatological) - - def test_bounds_unset(self): - coord = AuxCoord( - points=[0, 1], - bounds=[[0, 1], [1, 2]], - units="days since 1970-01-01", - climatological=True, - ) - coord.climatological = False - self.assertFalse(coord.climatological) - - def test_remove_bounds(self): - coord = AuxCoord( - points=[0, 1], - bounds=[[0, 1], [1, 2]], - units="days since 1970-01-01", - climatological=True, - ) - coord.bounds = None - 
self.assertFalse(coord.climatological) - - def test_change_units(self): - coord = AuxCoord( - points=[0, 1], - bounds=[[0, 1], [1, 2]], - units="days since 1970-01-01", - climatological=True, - ) - self.assertTrue(coord.climatological) - coord.units = "K" - self.assertFalse(coord.climatological) - - -class Test___init____abstractmethod(tests.IrisTest): - def test(self): - emsg = ( - "Can't instantiate abstract class Coord with abstract" - " methods __init__" - ) - with self.assertRaisesRegex(TypeError, emsg): - _ = Coord(points=[0, 1]) - - -class Test_cube_dims(tests.IrisTest): - def test(self): - # Check that "coord.cube_dims(cube)" calls "cube.coord_dims(coord)". - mock_dims_result = mock.sentinel.COORD_DIMS - mock_dims_call = mock.Mock(return_value=mock_dims_result) - mock_cube = mock.Mock(Cube, coord_dims=mock_dims_call) - test_coord = AuxCoord([1], long_name="test_name") - - result = test_coord.cube_dims(mock_cube) - self.assertEqual(result, mock_dims_result) - self.assertEqual( - mock_dims_call.call_args_list, [mock.call(test_coord)] - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/test_DimCoord.py b/lib/iris/tests/unit/coords/test_DimCoord.py deleted file mode 100644 index e10d228ef6..0000000000 --- a/lib/iris/tests/unit/coords/test_DimCoord.py +++ /dev/null @@ -1,614 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.coords.DimCoord` class. - -Note: a lot of these methods are actually defined by the :class:`Coord` class, -but can only be tested on concrete instances (DimCoord or AuxCoord). - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.coords import DimCoord -from iris.tests.unit.coords import ( - CoordTestMixin, - coords_all_dtypes_and_lazynesses, - lazyness_string, -) - - -class DimCoordTestMixin(CoordTestMixin): - # Define a 1-D default array shape. - def setupTestArrays(self, shape=(3,), masked=False): - super().setupTestArrays(shape, masked=masked) - - -class Test__init__(tests.IrisTest, DimCoordTestMixin): - # Test for DimCoord creation, with various combinations of points and - # bounds = real / lazy / None. - def setUp(self): - self.setupTestArrays(masked=True) - - def test_lazyness_and_dtype_combinations(self): - for ( - coord, - points_type_name, - bounds_type_name, - ) in coords_all_dtypes_and_lazynesses(self, DimCoord): - pts = coord.core_points() - bds = coord.core_bounds() - # Check properties of points. - # Points array should not be identical to the reference one. - self.assertArraysDoNotShareData( - pts, - self.pts_real, - "Points are the same data as the provided array.", - ) - # the original points array was cast to a test dtype. - check_pts = self.pts_real.astype(coord.dtype) - self.assertEqualRealArraysAndDtypes(pts, check_pts) - - # Check properties of bounds. - if bounds_type_name != "no": - # Bounds array should not be the reference data. - self.assertArraysDoNotShareData( - bds, - self.bds_real, - "Bounds are the same data as the provided array.", - ) - # the original bounds array was cast to a test dtype. 
- check_bds = self.bds_real.astype(coord.bounds_dtype) - self.assertEqualRealArraysAndDtypes(bds, check_bds) - - def test_fail_bounds_shape_mismatch(self): - bds_shape = list(self.bds_real.shape) - bds_shape[0] += 1 - bds_wrong = np.zeros(bds_shape) - msg = "The shape of the 'unknown' DimCoord bounds array should be" - with self.assertRaisesRegex(ValueError, msg): - DimCoord(self.pts_real, bounds=bds_wrong) - - def test_fail_nonmonotonic(self): - msg = "must be strictly monotonic" - with self.assertRaisesRegex(ValueError, msg): - DimCoord([1, 2, 0, 3]) - - def test_no_masked_pts_real(self): - data = self.no_masked_pts_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertEqual(ma.count_masked(data), 0) - coord = DimCoord(data) - self.assertFalse(coord.has_lazy_points()) - self.assertFalse(ma.isMaskedArray(coord.points)) - self.assertEqual(ma.count_masked(coord.points), 0) - - def test_no_masked_pts_lazy(self): - data = self.no_masked_pts_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertEqual(ma.count_masked(computed), 0) - coord = DimCoord(data) - # DimCoord always realises its points. 
- self.assertFalse(coord.has_lazy_points()) - self.assertFalse(ma.isMaskedArray(coord.points)) - - def test_masked_pts_real(self): - data = self.masked_pts_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertTrue(ma.count_masked(data)) - emsg = "points array must not be masked" - with self.assertRaisesRegex(TypeError, emsg): - DimCoord(data) - - def test_masked_pts_lazy(self): - data = self.masked_pts_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertTrue(ma.count_masked(computed)) - emsg = "points array must not be masked" - with self.assertRaisesRegex(TypeError, emsg): - DimCoord(data) - - def test_no_masked_bds_real(self): - data = self.no_masked_bds_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertEqual(ma.count_masked(data), 0) - coord = DimCoord(self.pts_real, bounds=data) - self.assertFalse(coord.has_lazy_bounds()) - self.assertFalse(ma.isMaskedArray(coord.bounds)) - self.assertEqual(ma.count_masked(coord.bounds), 0) - - def test_no_masked_bds_lazy(self): - data = self.no_masked_bds_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertEqual(ma.count_masked(computed), 0) - coord = DimCoord(self.pts_real, bounds=data) - # DimCoord always realises its bounds. 
- self.assertFalse(coord.has_lazy_bounds()) - self.assertFalse(ma.isMaskedArray(coord.bounds)) - - def test_masked_bds_real(self): - data = self.masked_bds_real - self.assertTrue(ma.isMaskedArray(data)) - self.assertTrue(ma.count_masked(data)) - emsg = "bounds array must not be masked" - with self.assertRaisesRegex(TypeError, emsg): - DimCoord(self.pts_real, bounds=data) - - def test_masked_bds_lazy(self): - data = self.masked_bds_lazy - computed = data.compute() - self.assertTrue(ma.isMaskedArray(computed)) - self.assertTrue(ma.count_masked(computed)) - emsg = "bounds array must not be masked" - with self.assertRaisesRegex(TypeError, emsg): - DimCoord(self.pts_real, bounds=data) - - -class Test_core_points(tests.IrisTest, DimCoordTestMixin): - # Test for DimCoord.core_points() with various types of points and bounds. - def setUp(self): - self.setupTestArrays() - - def test_real_points(self): - data = self.pts_real - coord = DimCoord(data) - result = coord.core_points() - self.assertArraysDoNotShareData( - result, - self.pts_real, - "core_points() are the same data as the internal array.", - ) - - def test_lazy_points(self): - lazy_data = self.pts_lazy - coord = DimCoord(lazy_data) - result = coord.core_points() - self.assertEqualRealArraysAndDtypes(result, self.pts_real) - - -class Test_core_bounds(tests.IrisTest, DimCoordTestMixin): - # Test for DimCoord.core_bounds() with various types of points and bounds. 
- def setUp(self): - self.setupTestArrays() - - def test_no_bounds(self): - coord = DimCoord(self.pts_real) - result = coord.core_bounds() - self.assertIsNone(result) - - def test_real_bounds(self): - coord = DimCoord(self.pts_real, bounds=self.bds_real) - result = coord.core_bounds() - self.assertArraysDoNotShareData( - result, - self.bds_real, - "core_bounds() are the same data as the internal array.", - ) - - def test_lazy_bounds(self): - coord = DimCoord(self.pts_real, bounds=self.bds_lazy) - result = coord.core_bounds() - self.assertEqualRealArraysAndDtypes(result, self.bds_real) - - -class Test_lazy_points(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - coord = DimCoord(self.pts_real) - result = coord.lazy_points() - self.assertEqualLazyArraysAndDtypes(result, self.pts_lazy) - - def test_lazy_core(self): - coord = DimCoord(self.pts_lazy) - result = coord.lazy_points() - self.assertEqualLazyArraysAndDtypes(result, self.pts_lazy) - # NOTE: identity, as in "result is self.pts_lazy" does *NOT* work. - - -class Test_lazy_bounds(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_no_bounds(self): - coord = DimCoord(self.pts_real) - result = coord.lazy_bounds() - self.assertIsNone(result) - - def test_real_core(self): - coord = DimCoord(self.pts_real, bounds=self.bds_real) - result = coord.lazy_bounds() - self.assertEqualLazyArraysAndDtypes(result, self.bds_lazy) - - def test_lazy_core(self): - coord = DimCoord(self.pts_real, bounds=self.bds_lazy) - result = coord.lazy_bounds() - self.assertEqualLazyArraysAndDtypes(result, self.bds_lazy) - # NOTE: identity, as in "result is self.bds_lazy" does *NOT* work. 
- - -class Test_has_lazy_points(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - coord = DimCoord(self.pts_real) - result = coord.has_lazy_points() - self.assertFalse(result) - - def test_lazy_core(self): - coord = DimCoord(self.pts_lazy) - result = coord.has_lazy_points() - self.assertFalse(result) - - -class Test_has_lazy_bounds(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_core(self): - coord = DimCoord(self.pts_real, bounds=self.bds_real) - result = coord.has_lazy_bounds() - self.assertFalse(result) - - def test_lazy_core(self): - coord = DimCoord(self.pts_real, bounds=self.bds_lazy) - result = coord.has_lazy_bounds() - self.assertFalse(result) - - -class Test_bounds_dtype(tests.IrisTest): - def test_i16(self): - test_dtype = np.int16 - coord = DimCoord([1], bounds=np.array([[0, 4]], dtype=test_dtype)) - result = coord.bounds_dtype - self.assertEqual(result, test_dtype) - - def test_u16(self): - test_dtype = np.uint16 - coord = DimCoord([1], bounds=np.array([[0, 4]], dtype=test_dtype)) - result = coord.bounds_dtype - self.assertEqual(result, test_dtype) - - def test_f16(self): - test_dtype = np.float16 - coord = DimCoord([1], bounds=np.array([[0, 4]], dtype=test_dtype)) - result = coord.bounds_dtype - self.assertEqual(result, test_dtype) - - -class Test__getitem__(tests.IrisTest, DimCoordTestMixin): - # Test for DimCoord indexing with various types of points and bounds. - def setUp(self): - self.setupTestArrays() - - def test_dtypes(self): - # Index coords with all combinations of real+lazy points+bounds, and - # either an int or floating dtype. - # Check that dtypes remain the same in all cases, taking the dtypes - # directly from the core points and bounds (as we have no masking). 
- for ( - main_coord, - points_type_name, - bounds_type_name, - ) in coords_all_dtypes_and_lazynesses(self, DimCoord): - - sub_coord = main_coord[:2] - - coord_dtype = main_coord.dtype - msg = ( - "Indexing main_coord of dtype {} " - "with {} points and {} bounds " - "changed dtype of {} to {}." - ) - - sub_points = sub_coord.core_points() - self.assertEqual( - sub_points.dtype, - coord_dtype, - msg.format( - coord_dtype, - points_type_name, - bounds_type_name, - "points", - sub_points.dtype, - ), - ) - - if bounds_type_name != "no": - sub_bounds = sub_coord.core_bounds() - main_bounds_dtype = main_coord.bounds_dtype - self.assertEqual( - sub_bounds.dtype, - main_bounds_dtype, - msg.format( - main_bounds_dtype, - points_type_name, - bounds_type_name, - "bounds", - sub_bounds.dtype, - ), - ) - - def test_lazyness(self): - # Index coords with all combinations of real+lazy points+bounds, and - # either an int or floating dtype. - # Check that lazy data stays lazy and real stays real, in all cases. - for ( - main_coord, - points_type_name, - bounds_type_name, - ) in coords_all_dtypes_and_lazynesses(self, DimCoord): - # N.B. 'points_type_name' and 'bounds_type_name' in the iteration - # are the original types (lazy/real/none) of the points+bounds, - # but the DimCoord itself only ever has real data. - if points_type_name == "lazy": - points_type_name = "real" - if bounds_type_name == "lazy": - bounds_type_name = "real" - - sub_coord = main_coord[:2] - - msg = ( - "Indexing coord of dtype {} " - "with {} points and {} bounds " - 'changed "lazyness" of {} from {!r} to {!r}.' 
- ) - coord_dtype = main_coord.dtype - sub_points_lazyness = lazyness_string(sub_coord.core_points()) - self.assertEqual( - sub_points_lazyness, - points_type_name, - msg.format( - coord_dtype, - points_type_name, - bounds_type_name, - "points", - points_type_name, - sub_points_lazyness, - ), - ) - - if bounds_type_name != "no": - sub_bounds_lazy = lazyness_string(sub_coord.core_bounds()) - self.assertEqual( - sub_bounds_lazy, - bounds_type_name, - msg.format( - coord_dtype, - points_type_name, - bounds_type_name, - "bounds", - bounds_type_name, - sub_bounds_lazy, - ), - ) - - def test_real_data_copies(self): - # Index coords with all combinations of real+lazy points+bounds. - # In all cases, check that any real arrays are copied by the indexing. - for ( - main_coord, - points_lazyness, - bounds_lazyness, - ) in coords_all_dtypes_and_lazynesses(self, DimCoord): - - sub_coord = main_coord[:2] - - msg = ( - "Indexed coord with {} points and {} bounds " - "does not have its own separate {} array." - ) - if points_lazyness == "real": - main_points = main_coord.core_points() - sub_points = sub_coord.core_points() - sub_main_points = main_points[:2] - self.assertEqualRealArraysAndDtypes( - sub_points, sub_main_points - ) - self.assertArraysDoNotShareData( - sub_points, - sub_main_points, - msg.format(points_lazyness, bounds_lazyness, "points"), - ) - - if bounds_lazyness == "real": - main_bounds = main_coord.core_bounds() - sub_bounds = sub_coord.core_bounds() - sub_main_bounds = main_bounds[:2] - self.assertEqualRealArraysAndDtypes( - sub_bounds, sub_main_bounds - ) - self.assertArraysDoNotShareData( - sub_bounds, - sub_main_bounds, - msg.format(points_lazyness, bounds_lazyness, "bounds"), - ) - - -class Test_copy(tests.IrisTest, DimCoordTestMixin): - # Test for DimCoord.copy() with various types of points and bounds. 
- def setUp(self): - self.setupTestArrays() - - def test_writable_points(self): - coord1 = DimCoord( - np.arange(5), bounds=[[0, 1], [1, 2], [2, 3], [3, 4], [4, 5]] - ) - coord2 = coord1.copy() - msg = "destination is read-only" - - with self.assertRaisesRegex(ValueError, msg): - coord1.points[:] = 0 - - with self.assertRaisesRegex(ValueError, msg): - coord2.points[:] = 0 - - with self.assertRaisesRegex(ValueError, msg): - coord1.bounds[:] = 0 - - with self.assertRaisesRegex(ValueError, msg): - coord2.bounds[:] = 0 - - def test_realdata_readonly(self): - # Copy coords with all combinations of real+lazy points+bounds. - # In all cases, check that data arrays are read-only. - for ( - main_coord, - points_type_name, - bounds_type_name, - ) in coords_all_dtypes_and_lazynesses(self, DimCoord): - - copied_coord = main_coord.copy() - - copied_points = copied_coord.core_points() - expected_error_msg = "output array is read-only" - with self.assertRaisesRegex(ValueError, expected_error_msg): - copied_points[:1] += 33 - - if bounds_type_name != "no": - copied_bounds = copied_coord.core_bounds() - with self.assertRaisesRegex(ValueError, expected_error_msg): - copied_bounds[:1] += 33 - - -class Test_points__getter(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_points(self): - # Getting real points returns a copy - coord = DimCoord(self.pts_real) - result = coord.core_points() - self.assertArraysDoNotShareData( - result, - self.pts_real, - "Points are the same array as the provided data.", - ) - - -class Test_points__setter(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_set_real(self): - # Setting points copies the data - coord = DimCoord(self.pts_real) - new_pts = self.pts_real + 102.3 - coord.points = new_pts - result = coord.core_points() - self.assertArraysDoNotShareData( - result, new_pts, "Points are the same data as the assigned array." 
- ) - - def test_fail_bad_shape(self): - # Setting real points requires matching shape. - points = [1.0, 2.0] - coord = DimCoord(points) - msg = r"Require data with shape \(2,\), got \(3,\)" - with self.assertRaisesRegex(ValueError, msg): - coord.points = np.array([1.0, 2.0, 3.0]) - self.assertArrayEqual(coord.points, points) - - def test_fail_not_monotonic(self): - # Setting real points requires that they are monotonic. - coord = DimCoord(self.pts_real, bounds=self.bds_real) - msg = "strictly monotonic" - with self.assertRaisesRegex(ValueError, msg): - coord.points = np.array([3.0, 1.0, 2.0]) - self.assertArrayEqual(coord.points, self.pts_real) - - def test_set_lazy(self): - # Setting new lazy points realises them. - coord = DimCoord(self.pts_real) - new_pts = self.pts_lazy + 102.3 - coord.points = new_pts - result = coord.core_points() - self.assertEqualRealArraysAndDtypes(result, new_pts.compute()) - - def test_copy_array(self): - # Assigning points creates a copy - pts = np.array([1, 2, 3]) - coord = DimCoord(pts) - pts[1] = 5 - self.assertEqual(coord.points[1], 2) - - -class Test_bounds__getter(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_real_bounds(self): - # Getting real bounds does not change or copy them. - coord = DimCoord(self.pts_real, bounds=self.bds_real) - result = coord.bounds - self.assertArraysDoNotShareData( - result, - self.bds_real, - "Bounds are the same array as the provided data.", - ) - - -class Test_bounds__setter(tests.IrisTest, DimCoordTestMixin): - def setUp(self): - self.setupTestArrays() - - def test_set_real(self): - # Setting bounds does not copy, but makes a readonly view. 
- coord = DimCoord(self.pts_real, bounds=self.bds_real) - new_bounds = self.bds_real + 102.3 - coord.bounds = new_bounds - result = coord.core_bounds() - self.assertArraysDoNotShareData( - result, - new_bounds, - "Bounds are the same data as the assigned array.", - ) - - def test_fail_bad_shape(self): - # Setting real points requires matching shape. - coord = DimCoord(self.pts_real, bounds=self.bds_real) - msg = "The shape of the 'unknown' DimCoord bounds array should be" - with self.assertRaisesRegex(ValueError, msg): - coord.bounds = np.array([1.0, 2.0, 3.0]) - self.assertArrayEqual(coord.bounds, self.bds_real) - - def test_fail_not_monotonic(self): - # Setting real bounds requires that they are monotonic. - coord = DimCoord(self.pts_real, bounds=self.bds_real) - msg = "strictly monotonic" - with self.assertRaisesRegex(ValueError, msg): - coord.bounds = np.array([[3.0, 2.0], [1.0, 0.0], [2.0, 1.0]]) - self.assertArrayEqual(coord.bounds, self.bds_real) - - def test_set_lazy(self): - # Setting new lazy bounds realises them. - coord = DimCoord(self.pts_real, bounds=self.bds_lazy) - new_bounds = self.bds_lazy + 102.3 - coord.bounds = new_bounds - result = coord.core_bounds() - self.assertEqualRealArraysAndDtypes(result, new_bounds.compute()) - - def test_copy_array(self): - # Assigning bounds creates a copy - pts = np.array([2, 4, 6]) - bnds = np.array([[1, 3], [3, 5], [5, 7]]) - coord = DimCoord(pts, bounds=bnds) - bnds[1, 1] = 10 - self.assertEqual(coord.bounds[1, 1], 5) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py b/lib/iris/tests/unit/coords/test__DimensionalMetadata.py deleted file mode 100644 index fd10a6f264..0000000000 --- a/lib/iris/tests/unit/coords/test__DimensionalMetadata.py +++ /dev/null @@ -1,1082 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.coords._DimensionalMetadata` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - - -from cf_units import Unit -import numpy as np - -import iris._lazy_data as lazy -from iris.coord_systems import GeogCS -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - DimCoord, - _DimensionalMetadata, -) -from iris.experimental.ugrid.mesh import Connectivity -from iris.tests.stock import climatology_3d as cube_with_climatology -from iris.tests.stock.mesh import sample_meshcoord - - -class Test___init____abstractmethod(tests.IrisTest): - def test(self): - emsg = ( - "Can't instantiate abstract class _DimensionalMetadata with " - "abstract methods __init__" - ) - with self.assertRaisesRegex(TypeError, emsg): - _ = _DimensionalMetadata(0) - - -class Mixin__string_representations: - """ - Common testcode for generic `__str__`, `__repr__` and `summary` methods. - - Effectively, __str__ and __repr__ are thin wrappers around `summary`. - These are used by all the subclasses : notably Coord/DimCoord/AuxCoord, - but also AncillaryVariable, CellMeasure and MeshCoord. - - There are a lot of different aspects to consider: - - * different object classes with different class-specific properties - * changing with array sizes + dimensionalities - * masked data - * data types : int, float, string and (special) dates - * for Coords, handling of bounds - * "summary" controls (also can be affected by numpy printoptions). - - NOTE: since the details of formatting are important to us here, the basic - test method is to check printout results against an exact 'snapshot' - embedded (visibly) in the test itself. - - """ - - def repr_str_strings(self, dm, linewidth=55): - """ - Return a simple combination of repr and str printouts. - - N.B. 
we control linewidth to make the outputs easier to compare. - """ - with np.printoptions(linewidth=linewidth): - result = repr(dm) + "\n" + str(dm) - return result - - def sample_data(self, datatype=float, units="m", shape=(5,), masked=False): - """Make a sample data array for a test _DimensionalMetadata object.""" - # Get an actual Unit - units = Unit(units) - if units.calendar: - # fix string datatypes for date-based units - datatype = float - - # Get a dtype - dtype = np.dtype(datatype) - - # Make suitable test values for type/shape/masked - length = int(np.prod(shape)) - if dtype.kind == "U": - # String content. - digit_strs = [str(i) * (i + 1) for i in range(0, 10)] - if length < 10: - # ['0', '11', '222, '3333', ..] - values = np.array(digit_strs[:length]) - else: - # [... '9999999999', '0', '11' ....] - indices = [(i % 10) for i in range(length)] - values = np.array(digit_strs)[indices] - else: - # numeric content : a simple [0, 1, 2 ...] - values = np.arange(length).astype(dtype) - - if masked: - if np.prod(shape) >= 3: - # Mask 1 in 3 points : [x -- x x -- x ...] - i_firstmasked = 1 - else: - # Few points, mask 1 in 3 starting at 0 [-- x x -- x x -- ...] - i_firstmasked = 0 - masked_points = [(i % 3) == i_firstmasked for i in range(length)] - values = np.ma.masked_array(values, mask=masked_points) - - values = values.reshape(shape) - return values - - # Make a sample Coord, as _DimensionalMetadata is abstract and this is the - # obvious concrete subclass to use for testing - def sample_coord( - self, - datatype=float, - dates=False, - units="m", - long_name="x", - shape=(5,), - masked=False, - bounded=False, - dimcoord=False, - lazy_points=False, - lazy_bounds=False, - *coord_args, - **coord_kwargs, - ): - if masked: - dimcoord = False - if dates: - # Use a pre-programmed date unit. - units = Unit("days since 1970-03-5") - if not isinstance(units, Unit): - # This operation is *not* a no-op, it will wipe calendars ! 
- units = Unit(units) - values = self.sample_data( - datatype=datatype, units=units, shape=shape, masked=masked - ) - cls = DimCoord if dimcoord else AuxCoord - coord = cls( - points=values, - units=units, - long_name=long_name, - *coord_args, - **coord_kwargs, - ) - if bounded or lazy_bounds: - if shape == (1,): - # Guess-bounds doesn't work ! - val = coord.points[0] - bounds = [val - 10, val + 10] - # NB preserve masked/unmasked : avoid converting masks to NaNs - if np.ma.isMaskedArray(coord.points): - array = np.ma.array - else: - array = np.array - coord.bounds = array(bounds) - else: - coord.guess_bounds() - if lazy_points: - coord.points = lazy.as_lazy_data(coord.points) - if lazy_bounds: - coord.bounds = lazy.as_lazy_data(coord.bounds) - return coord - - def coord_representations(self, *args, **kwargs): - """ - Create a test coord and return its string representations. - - Pass args+kwargs to 'sample_coord' and return the 'repr_str_strings'. - - """ - coord = self.sample_coord(*args, **kwargs) - return self.repr_str_strings(coord) - - def assertLines(self, list_of_expected_lines, string_result): - """ - Assert equality between a result and expected output lines. - - For convenience, the 'expected lines' are joined with a '\\n', - because a list of strings is nicer to construct in code. - They should then match the actual result, which is a simple string. - - """ - self.assertEqual(list_of_expected_lines, string_result.split("\n")) - - -class Test__print_common(Mixin__string_representations, tests.IrisTest): - """ - Test aspects of __str__ and __repr__ output common to all - _DimensionalMetadata instances. - I.E. those from CFVariableMixin, plus values array (data-manager). 
- - Aspects : - * standard_name: - * long_name: - * var_name: - * attributes - * units - * shape - * dtype - - """ - - def test_simple(self): - result = self.coord_representations() - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_minimal(self): - result = self.coord_representations( - long_name=None, units=None, shape=(1,) - ) - expected = [ - "", - "AuxCoord : unknown / (unknown)", - " points: [0.]", - " shape: (1,)", - " dtype: float64", - ] - self.assertLines(expected, result) - - def test_names(self): - result = self.coord_representations( - standard_name="height", long_name="this", var_name="x_var" - ) - expected = [ - "", - "AuxCoord : height / (m)", - " points: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " standard_name: 'height'", - " long_name: 'this'", - " var_name: 'x_var'", - ] - self.assertLines(expected, result) - - def test_bounded(self): - result = self.coord_representations(shape=(3,), bounded=True) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0., 1., 2.]", - " bounds: [", - " [-0.5, 0.5],", - " [ 0.5, 1.5],", - " [ 1.5, 2.5]]", - " shape: (3,) bounds(3, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_masked(self): - result = self.coord_representations(masked=True) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0.0, -- , 2.0, 3.0, -- ]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_dtype_int(self): - result = self.coord_representations(units="1", datatype=np.int16) - expected = [ - "", - "AuxCoord : x / (1)", - " points: [0, 1, 2, 3, 4]", - " shape: (5,)", - " dtype: int16", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_dtype_date(self): - # Note: test with a date 'longer' than the built-in one in - # 
'sample_coord(dates=True)', because it includes a time-of-day - full_date_unit = Unit( - "days since 1892-05-17 03:00:25", calendar="360_day" - ) - result = self.coord_representations(units=full_date_unit) - expected = [ - ( - "" - ), - ( - "AuxCoord : x / (days since 1892-05-17 03:00:25, " - "360_day calendar)" - ), - " points: [", - " 1892-05-17 03:00:25, 1892-05-18 03:00:25,", - " 1892-05-19 03:00:25, 1892-05-20 03:00:25,", - " 1892-05-21 03:00:25]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_attributes(self): - # NOTE: scheduled for future change, to put each attribute on a line - coord = self.sample_coord( - attributes={ - "array": np.arange(7.0), - "list": [1, 2, 3], - "empty": [], - "None": None, - "string": "this", - "long_long_long_long_long_name": 3, - "other": ( - "long_long_long_long_long_long_long_long_" - "long_long_long_long_long_long_long_long_value" - ), - "float": 4.3, - } - ) - result = self.repr_str_strings(coord) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - " attributes:", - " array [0. 1. 2. 3. 4. 5. 6.]", - " list [1, 2, 3]", - " empty []", - " None None", - " string 'this'", - " long_long_long_long_long_name 3", - ( - " other " - "'long_long_long_long_long_long_long_long_" - "long_long_long_long_long_long..." 
- ), - " float 4.3", - ] - self.assertLines(expected, result) - - def test_lazy_points(self): - result = self.coord_representations(lazy_points=True) - expected = [ - " shape(5,)>", - "AuxCoord : x / (m)", - " points: ", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_lazy_bounds(self): - result = self.coord_representations(lazy_bounds=True) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0., 1., 2., 3., 4.]", - " bounds: ", - " shape: (5,) bounds(5, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_lazy_points_and_bounds(self): - result = self.coord_representations(lazy_points=True, lazy_bounds=True) - expected = [ - "+bounds shape(5,)>", - "AuxCoord : x / (m)", - " points: ", - " bounds: ", - " shape: (5,) bounds(5, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_scalar(self): - result = self.coord_representations(shape=(1,), bounded=True) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0.]", - " bounds: [[-10., 10.]]", - " shape: (1,) bounds(1, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_scalar_masked(self): - result = self.coord_representations( - shape=(1,), bounded=True, masked=True - ) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [--]", - " bounds: [[--, --]]", - " shape: (1,) bounds(1, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_length_short(self): - result = self.coord_representations(shape=(2,), bounded=True) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0., 1.]", - " bounds: [", - " [-0.5, 0.5],", - " [ 0.5, 1.5]]", - " shape: (2,) bounds(2, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_length_medium(self): - # Where bounds are truncated, but points not. 
- result = self.coord_representations(shape=(14,), bounded=True) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [", - " 0., 1., 2., 3., 4., 5., 6., 7., 8.,", - " 9., 10., 11., 12., 13.]", - " bounds: [", - " [-0.5, 0.5],", - " [ 0.5, 1.5],", - " ...,", - " [11.5, 12.5],", - " [12.5, 13.5]]", - " shape: (14,) bounds(14, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_length_long(self): - # Completely truncated representations - result = self.coord_representations(shape=(150,), bounded=True) - expected = [ - ( - "" - ), - "AuxCoord : x / (m)", - " points: [ 0., 1., ..., 148., 149.]", - " bounds: [", - " [ -0.5, 0.5],", - " [ 0.5, 1.5],", - " ...,", - " [147.5, 148.5],", - " [148.5, 149.5]]", - " shape: (150,) bounds(150, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_strings(self): - result = self.coord_representations(datatype=str) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0 , 11 , 222 , 3333 , 44444]", - " shape: (5,)", - " dtype: ", - "AuxCoord : x / (m)", - " points: [", - " 0 , 11 , 222 ,", - " 3333 , 44444 , 555555 ,", - " 6666666 , 77777777 , 888888888 ,", - " 9999999999, 0 , 11 ,", - " 222 , 3333 , 44444 ]", - " shape: (15,)", - " dtype: ", - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", - " points: [1970-03-05 00:00:00, 1970-03-06 00:00:00]", - " shape: (2,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_dates_scalar(self): - # Printouts for a scalar date coord. - # Demonstrate that a "typical" datetime coord can print with the date - # value visible in the repr. - long_time_unit = Unit("hours since 2025-03-23 01:00:00") - coord = self.sample_coord( - standard_name="time", - long_name=None, - shape=(1,), - units=long_time_unit, - ) - # Do this one with a default linewidth, not our default reduced one, so - # that we can get the date value in the repr output. 
- result = self.repr_str_strings(coord, linewidth=None) - expected = [ - ( - "" - ), - ( - "AuxCoord : time / (hours since 2025-03-23 01:00:00, " - "gregorian calendar)" - ), - " points: [2025-03-23 01:00:00]", - " shape: (1,)", - " dtype: float64", - " standard_name: 'time'", - ] - self.assertLines(expected, result) - - def test_dates_bounds(self): - result = self.coord_representations(dates=True, bounded=True) - expected = [ - "", - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", - " points: [", - " 1970-03-05 00:00:00, 1970-03-06 00:00:00,", - " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", - " 1970-03-09 00:00:00]", - " bounds: [", - " [1970-03-04 12:00:00, 1970-03-05 12:00:00],", - " [1970-03-05 12:00:00, 1970-03-06 12:00:00],", - " [1970-03-06 12:00:00, 1970-03-07 12:00:00],", - " [1970-03-07 12:00:00, 1970-03-08 12:00:00],", - " [1970-03-08 12:00:00, 1970-03-09 12:00:00]]", - " shape: (5,) bounds(5, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_dates_masked(self): - result = self.coord_representations(dates=True, masked=True) - expected = [ - "", - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", - " points: [", - " 1970-03-05 00:00:00, -- ,", - " 1970-03-07 00:00:00, 1970-03-08 00:00:00,", - " -- ]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_untypical_bounds(self): - # Check printing when n-bounds > 2 - coord = self.sample_coord() - bounds = coord.points.reshape((5, 1)) + np.array([[-3.0, -2, 2, 3]]) - coord.bounds = bounds - result = self.repr_str_strings(coord) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0., 1., 2., 3., 4.]", - " bounds: [", - " [-3., -2., 2., 3.],", - " [-2., -1., 3., 4.],", - " ...,", - " [ 0., 1., 5., 6.],", - " [ 1., 2., 6., 7.]]", - " shape: (5,) bounds(5, 4)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def 
test_multidimensional(self): - # Demonstrate formatting of multdimensional arrays - result = self.coord_representations(shape=(7, 5, 3)) - # This one is a bit unavoidably long .. - expected = [ - "", - "AuxCoord : x / (m)", - " points: [", - " [[ 0., 1., 2.],", - " [ 3., 4., 5.],", - " ...,", - " [ 9., 10., 11.],", - " [ 12., 13., 14.]],", - " ", - " [[ 15., 16., 17.],", - " [ 18., 19., 20.],", - " ...,", - " [ 24., 25., 26.],", - " [ 27., 28., 29.]],", - " ", - " ...,", - " ", - " [[ 75., 76., 77.],", - " [ 78., 79., 80.],", - " ...,", - " [ 84., 85., 86.],", - " [ 87., 88., 89.]],", - " ", - " [[ 90., 91., 92.],", - " [ 93., 94., 95.],", - " ...,", - " [ 99., 100., 101.],", - " [102., 103., 104.]]]", - " shape: (7, 5, 3)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_multidimensional_small(self): - # Demonstrate that a small-enough multidim will print in the repr. - result = self.coord_representations(shape=(2, 2), datatype=int) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [", - " [0, 1],", - " [2, 3]]", - " shape: (2, 2)", - " dtype: int64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_integers_short(self): - result = self.coord_representations(datatype=np.int16) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0, 1, 2, 3, 4]", - " shape: (5,)", - " dtype: int16", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_integers_masked(self): - result = self.coord_representations(datatype=int, masked=True) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0 , --, 2 , 3 , --]", - " shape: (5,)", - " dtype: int64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_integers_masked_long(self): - result = self.coord_representations( - shape=(20,), datatype=int, masked=True - ) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0 , --, ..., 18, --]", - " shape: (20,)", - " dtype: int64", - " long_name: 'x'", - 
] - self.assertLines(expected, result) - - -class Test__print_Coord(Mixin__string_representations, tests.IrisTest): - """ - Test Coord-specific aspects of __str__ and __repr__ output. - - Aspects : - * DimCoord / AuxCoord - * coord_system - * climatological - * circular - - """ - - def test_dimcoord(self): - result = self.coord_representations(dimcoord=True) - expected = [ - "", - "DimCoord : x / (m)", - " points: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_coord_system(self): - result = self.coord_representations(coord_system=GeogCS(1000.0)) - expected = [ - "", - "AuxCoord : x / (m)", - " points: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - " coord_system: GeogCS(1000.0)", - ] - self.assertLines(expected, result) - - def test_climatological(self): - cube = cube_with_climatology() - coord = cube.coord("time") - coord = coord[:1] # Just to make it a bit shorter - result = self.repr_str_strings(coord) - expected = [ - ( - "" - ), - ( - "DimCoord : time / (days since 1970-01-01 00:00:00-00, " - "gregorian calendar)" - ), - " points: [2001-01-10 00:00:00]", - " bounds: [[2001-01-10 00:00:00, 2011-01-10 00:00:00]]", - " shape: (1,) bounds(1, 2)", - " dtype: float64", - " standard_name: 'time'", - " climatological: True", - ] - self.assertLines(expected, result) - - def test_circular(self): - coord = self.sample_coord(shape=(2,), dimcoord=True) - coord.circular = True - result = self.repr_str_strings(coord) - expected = [ - "", - "DimCoord : x / (m)", - " points: [0., 1.]", - " shape: (2,)", - " dtype: float64", - " long_name: 'x'", - " circular: True", - ] - self.assertLines(expected, result) - - -class Test__print_noncoord(Mixin__string_representations, tests.IrisTest): - """ - Limited testing of other _DimensionalMetadata subclasses. 
- - * AncillaryVariable - * CellMeasure - * Connectivity - * MeshCoord - - """ - - def test_ancillary(self): - # Check we can print an AncillaryVariable - # Practically, ~identical to an AuxCoord, but without bounds, and the - # array is called 'data'. - data = self.sample_data() - ancil = AncillaryVariable(data, long_name="v_aux", units="m s-1") - result = self.repr_str_strings(ancil) - expected = [ - "", - "AncillaryVariable : v_aux / (m s-1)", - " data: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'v_aux'", - ] - self.assertLines(expected, result) - - def test_cellmeasure(self): - # Check we can print an AncillaryVariable - # N.B. practically, identical to an AuxCoord (without bounds) - # Check we can print an AncillaryVariable - # Practically, ~identical to an AuxCoord, but without bounds, and the - # array is called 'data'. - data = self.sample_data() - cell_measure = CellMeasure( - data, measure="area", long_name="cell_area", units="m^2" - ) - result = self.repr_str_strings(cell_measure) - expected = [ - "", - "CellMeasure : cell_area / (m^2)", - " data: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'cell_area'", - " measure: 'area'", - ] - self.assertLines(expected, result) - - def test_connectivity(self): - # Check we can print a Connectivity - # Like a Coord, but always print : cf_role, location_axis, start_index - data = self.sample_data(shape=(3, 2), datatype=int) - conn = Connectivity( - data, cf_role="edge_node_connectivity", long_name="enc", units="1" - ) - result = self.repr_str_strings(conn) - expected = [ - "", - "Connectivity : enc / (1)", - " data: [", - " [0, 1],", - " [2, 3],", - " [4, 5]]", - " shape: (3, 2)", - " dtype: int64", - " long_name: 'enc'", - " cf_role: 'edge_node_connectivity'", - " start_index: 0", - " location_axis: 0", - ] - self.assertLines(expected, result) - - def test_connectivity__start_index(self): - # Check we can print a Connectivity - # Like a Coord, but 
always print : cf_role, location_axis, start_index - data = self.sample_data(shape=(3, 2), datatype=int) - conn = Connectivity( - data + 1, - start_index=1, - cf_role="edge_node_connectivity", - long_name="enc", - units="1", - ) - result = self.repr_str_strings(conn) - expected = [ - "", - "Connectivity : enc / (1)", - " data: [", - " [1, 2],", - " [3, 4],", - " [5, 6]]", - " shape: (3, 2)", - " dtype: int64", - " long_name: 'enc'", - " cf_role: 'edge_node_connectivity'", - " start_index: 1", - " location_axis: 0", - ] - self.assertLines(expected, result) - - def test_connectivity__location_axis(self): - # Check we can print a Connectivity - # Like a Coord, but always print : cf_role, location_axis, start_index - data = self.sample_data(shape=(3, 2), datatype=int) - conn = Connectivity( - data.transpose(), - location_axis=1, - cf_role="edge_node_connectivity", - long_name="enc", - units="1", - ) - result = self.repr_str_strings(conn) - expected = [ - "", - "Connectivity : enc / (1)", - " data: [", - " [0, 2, 4],", - " [1, 3, 5]]", - " shape: (2, 3)", - " dtype: int64", - " long_name: 'enc'", - " cf_role: 'edge_node_connectivity'", - " start_index: 0", - " location_axis: 1", - ] - self.assertLines(expected, result) - - def test_meshcoord(self): - meshco = sample_meshcoord() - meshco.mesh.long_name = "test_mesh" # For stable printout of the Mesh - result = self.repr_str_strings(meshco) - expected = [ - ( - "" - ), - "MeshCoord : longitude / (degrees_east)", - " mesh: ", - " location: 'face'", - " points: [3100, 3101, 3102]", - " bounds: [", - " [1100, 1101, 1102, 1103],", - " [1104, 1105, 1106, 1107],", - " [1108, 1109, 1110, 1111]]", - " shape: (3,) bounds(3, 4)", - " dtype: int64", - " standard_name: 'longitude'", - " long_name: 'long-name'", - " attributes:", - " a 1", - " b 'c'", - " axis: 'x'", - ] - self.assertLines(expected, result) - - -class Test_summary(Mixin__string_representations, tests.IrisTest): - """ - Test the controls of the 'summary' method. 
- """ - - def test_shorten(self): - coord = self.sample_coord() - expected = self.repr_str_strings(coord) - result = coord.summary(shorten=True) + "\n" + coord.summary() - self.assertEqual(expected, result) - - def test_max_values__default(self): - coord = self.sample_coord() - result = coord.summary() - expected = [ - "AuxCoord : x / (m)", - " points: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_max_values__2(self): - coord = self.sample_coord() - result = coord.summary(max_values=2) - expected = [ - "AuxCoord : x / (m)", - " points: [0., 1., ..., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_max_values__bounded__2(self): - coord = self.sample_coord(bounded=True) - result = coord.summary(max_values=2) - expected = [ - "AuxCoord : x / (m)", - " points: [0., 1., ..., 3., 4.]", - " bounds: [", - " [-0.5, 0.5],", - " [ 0.5, 1.5],", - " ...,", - " [ 2.5, 3.5],", - " [ 3.5, 4.5]]", - " shape: (5,) bounds(5, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_max_values__0(self): - coord = self.sample_coord(bounded=True) - result = coord.summary(max_values=0) - expected = [ - "AuxCoord : x / (m)", - " points: [...]", - " bounds: [...]", - " shape: (5,) bounds(5, 2)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_linewidth__default(self): - coord = self.sample_coord() - coord.points = coord.points + 1000.003 # Make the output numbers wider - result = coord.summary() - expected = [ - "AuxCoord : x / (m)", - " points: [1000.003, 1001.003, 1002.003, 1003.003, 1004.003]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - # Show that, when unset, it follows the numpy setting - with np.printoptions(linewidth=35): - result = coord.summary() - expected = [ - 
"AuxCoord : x / (m)", - " points: [", - " 1000.003, 1001.003,", - " 1002.003, 1003.003,", - " 1004.003]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - def test_linewidth__set(self): - coord = self.sample_coord() - coord.points = coord.points + 1000.003 # Make the output numbers wider - expected = [ - "AuxCoord : x / (m)", - " points: [", - " 1000.003, 1001.003,", - " 1002.003, 1003.003,", - " 1004.003]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - result = coord.summary(linewidth=35) - self.assertLines(expected, result) - - with np.printoptions(linewidth=999): - # Show that, when set, it ignores the numpy setting - result = coord.summary(linewidth=35) - self.assertLines(expected, result) - - def test_convert_dates(self): - coord = self.sample_coord(dates=True) - result = coord.summary() - expected = [ - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", - " points: [", - ( - " 1970-03-05 00:00:00, 1970-03-06 00:00:00, " - "1970-03-07 00:00:00," - ), - " 1970-03-08 00:00:00, 1970-03-09 00:00:00]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - result = coord.summary(convert_dates=False) - expected = [ - "AuxCoord : x / (days since 1970-03-5, gregorian calendar)", - " points: [0., 1., 2., 3., 4.]", - " shape: (5,)", - " dtype: float64", - " long_name: 'x'", - ] - self.assertLines(expected, result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/cube/__init__.py b/lib/iris/tests/unit/cube/__init__.py deleted file mode 100644 index 7852593e21..0000000000 --- a/lib/iris/tests/unit/cube/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.cube` module.""" diff --git a/lib/iris/tests/unit/cube/test_Cube.py b/lib/iris/tests/unit/cube/test_Cube.py deleted file mode 100644 index 7d56b505bd..0000000000 --- a/lib/iris/tests/unit/cube/test_Cube.py +++ /dev/null @@ -1,3167 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.cube.Cube` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from itertools import permutations -from unittest import mock - -from cf_units import Unit -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_lazy_data -import iris.analysis -from iris.analysis import MEAN, Aggregator, WeightedAggregator -import iris.aux_factory -from iris.aux_factory import HybridHeightFactory -from iris.common.metadata import BaseMetadata -import iris.coords -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - CellMethod, - DimCoord, -) -from iris.cube import Cube -import iris.exceptions -from iris.exceptions import ( - AncillaryVariableNotFoundError, - CellMeasureNotFoundError, - CoordinateNotFoundError, - UnitConversionError, -) -import iris.tests.stock as stock -from iris.tests.stock.mesh import ( - sample_mesh, - sample_mesh_cube, - sample_meshcoord, -) - - -class Test___init___data(tests.IrisTest): - def test_ndarray(self): - # np.ndarray should be allowed through - data = np.arange(12).reshape(3, 4) - cube = Cube(data) - self.assertEqual(type(cube.data), np.ndarray) - self.assertArrayEqual(cube.data, data) - - def test_masked(self): - # ma.MaskedArray should be allowed through - data = ma.masked_greater(np.arange(12).reshape(3, 4), 1) - cube = Cube(data) - self.assertEqual(type(cube.data), ma.MaskedArray) - 
self.assertMaskedArrayEqual(cube.data, data) - - def test_masked_no_mask(self): - # ma.MaskedArray should be allowed through even if it has no mask - data = ma.masked_array(np.arange(12).reshape(3, 4), False) - cube = Cube(data) - self.assertEqual(type(cube.data), ma.MaskedArray) - self.assertMaskedArrayEqual(cube.data, data) - - def test_matrix(self): - # Subclasses of np.ndarray should be coerced back to np.ndarray. - # (Except for np.ma.MaskedArray.) - data = np.matrix([[1, 2, 3], [4, 5, 6]]) - cube = Cube(data) - self.assertEqual(type(cube.data), np.ndarray) - self.assertArrayEqual(cube.data, data) - - -class Test_data_dtype_fillvalue(tests.IrisTest): - def _sample_data( - self, dtype=("f4"), masked=False, fill_value=None, lazy=False - ): - data = np.arange(6).reshape((2, 3)) - dtype = np.dtype(dtype) - data = data.astype(dtype) - if masked: - data = ma.masked_array( - data, mask=[[0, 1, 0], [0, 0, 0]], fill_value=fill_value - ) - if lazy: - data = as_lazy_data(data) - return data - - def _sample_cube( - self, dtype=("f4"), masked=False, fill_value=None, lazy=False - ): - data = self._sample_data( - dtype=dtype, masked=masked, fill_value=fill_value, lazy=lazy - ) - cube = Cube(data) - return cube - - def test_realdata_change(self): - # Check re-assigning real data. - cube = self._sample_cube() - self.assertEqual(cube.dtype, np.float32) - new_dtype = np.dtype("i4") - new_data = self._sample_data(dtype=new_dtype) - cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) - - def test_realmaskdata_change(self): - # Check re-assigning real masked data. 
- cube = self._sample_cube(masked=True, fill_value=1234) - self.assertEqual(cube.dtype, np.float32) - new_dtype = np.dtype("i4") - new_fill_value = 4321 - new_data = self._sample_data( - masked=True, fill_value=new_fill_value, dtype=new_dtype - ) - cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) - self.assertEqual(cube.data.fill_value, new_fill_value) - - def test_lazydata_change(self): - # Check re-assigning lazy data. - cube = self._sample_cube(lazy=True) - self.assertEqual(cube.core_data().dtype, np.float32) - new_dtype = np.dtype("f8") - new_data = self._sample_data(new_dtype, lazy=True) - cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) - - def test_lazymaskdata_change(self): - # Check re-assigning lazy masked data. - cube = self._sample_cube(masked=True, fill_value=1234, lazy=True) - self.assertEqual(cube.core_data().dtype, np.float32) - new_dtype = np.dtype("f8") - new_fill_value = 4321 - new_data = self._sample_data( - dtype=new_dtype, masked=True, fill_value=new_fill_value, lazy=True - ) - cube.data = new_data - self.assertIs(cube.core_data(), new_data) - self.assertEqual(cube.dtype, new_dtype) - self.assertEqual(cube.data.fill_value, new_fill_value) - - def test_lazydata_realise(self): - # Check touching lazy data. - cube = self._sample_cube(lazy=True) - data = cube.data - self.assertIs(cube.core_data(), data) - self.assertEqual(cube.dtype, np.float32) - - def test_lazymaskdata_realise(self): - # Check touching masked lazy data. 
- fill_value = 27.3 - cube = self._sample_cube(masked=True, fill_value=fill_value, lazy=True) - data = cube.data - self.assertIs(cube.core_data(), data) - self.assertEqual(cube.dtype, np.float32) - self.assertEqual(data.fill_value, np.float32(fill_value)) - - def test_realmaskedconstantint_realise(self): - masked_data = ma.masked_array([666], mask=True) - masked_constant = masked_data[0] - cube = Cube(masked_constant) - data = cube.data - self.assertTrue(ma.isMaskedArray(data)) - self.assertNotIsInstance(data, ma.core.MaskedConstant) - - def test_lazymaskedconstantint_realise(self): - dtype = np.dtype("i2") - masked_data = ma.masked_array([666], mask=True, dtype=dtype) - masked_constant = masked_data[0] - masked_constant_lazy = as_lazy_data(masked_constant) - cube = Cube(masked_constant_lazy) - data = cube.data - self.assertTrue(ma.isMaskedArray(data)) - self.assertNotIsInstance(data, ma.core.MaskedConstant) - - def test_lazydata___getitem__dtype(self): - fill_value = 1234 - dtype = np.dtype("int16") - masked_array = ma.masked_array( - np.arange(5), - mask=[0, 0, 1, 0, 0], - fill_value=fill_value, - dtype=dtype, - ) - lazy_masked_array = as_lazy_data(masked_array) - cube = Cube(lazy_masked_array) - subcube = cube[3:] - self.assertEqual(subcube.dtype, dtype) - self.assertEqual(subcube.data.fill_value, fill_value) - - -class Test_extract(tests.IrisTest): - def test_scalar_cube_exists(self): - # Ensure that extract is able to extract a scalar cube. - constraint = iris.Constraint(name="a1") - cube = Cube(1, long_name="a1") - res = cube.extract(constraint) - self.assertIs(res, cube) - - def test_scalar_cube_noexists(self): - # Ensure that extract does not return a non-matching scalar cube. - constraint = iris.Constraint(name="a2") - cube = Cube(1, long_name="a1") - res = cube.extract(constraint) - self.assertIs(res, None) - - def test_scalar_cube_coord_match(self): - # Ensure that extract is able to extract a scalar cube according to - # constrained scalar coordinate. 
- constraint = iris.Constraint(scalar_coord=0) - cube = Cube(1, long_name="a1") - coord = iris.coords.AuxCoord(0, long_name="scalar_coord") - cube.add_aux_coord(coord, None) - res = cube.extract(constraint) - self.assertIs(res, cube) - - def test_scalar_cube_coord_nomatch(self): - # Ensure that extract is not extracting a scalar cube with scalar - # coordinate that does not match the constraint. - constraint = iris.Constraint(scalar_coord=1) - cube = Cube(1, long_name="a1") - coord = iris.coords.AuxCoord(0, long_name="scalar_coord") - cube.add_aux_coord(coord, None) - res = cube.extract(constraint) - self.assertIs(res, None) - - def test_1d_cube_exists(self): - # Ensure that extract is able to extract from a 1d cube. - constraint = iris.Constraint(name="a1") - cube = Cube([1], long_name="a1") - res = cube.extract(constraint) - self.assertIs(res, cube) - - def test_1d_cube_noexists(self): - # Ensure that extract does not return a non-matching 1d cube. - constraint = iris.Constraint(name="a2") - cube = Cube([1], long_name="a1") - res = cube.extract(constraint) - self.assertIs(res, None) - - -class Test_xml(tests.IrisTest): - def test_checksum_ignores_masked_values(self): - # Mask out an single element. - data = ma.arange(12).reshape(3, 4) - data[1, 2] = ma.masked - cube = Cube(data) - self.assertCML(cube) - - # If we change the underlying value before masking it, the - # checksum should be unaffected. 
- data = ma.arange(12).reshape(3, 4) - data[1, 2] = 42 - data[1, 2] = ma.masked - cube = Cube(data) - self.assertCML(cube) - - def test_byteorder_default(self): - cube = Cube(np.arange(3)) - self.assertIn("byteorder", cube.xml()) - - def test_byteorder_false(self): - cube = Cube(np.arange(3)) - self.assertNotIn("byteorder", cube.xml(byteorder=False)) - - def test_byteorder_true(self): - cube = Cube(np.arange(3)) - self.assertIn("byteorder", cube.xml(byteorder=True)) - - def test_cell_measures(self): - cube = stock.simple_3d_w_multidim_coords() - cm_a = iris.coords.CellMeasure( - np.zeros(cube.shape[-2:]), measure="area", units="1" - ) - cube.add_cell_measure(cm_a, (1, 2)) - cm_v = iris.coords.CellMeasure( - np.zeros(cube.shape), - measure="volume", - long_name="madeup", - units="m3", - ) - cube.add_cell_measure(cm_v, (0, 1, 2)) - self.assertCML(cube) - - def test_ancils(self): - cube = stock.simple_2d_w_multidim_coords() - av = iris.coords.AncillaryVariable( - np.zeros(cube.shape), long_name="xy", var_name="vxy", units="1" - ) - cube.add_ancillary_variable(av, (0, 1)) - self.assertCML(cube) - - -class Test_collapsed__lazy(tests.IrisTest): - def setUp(self): - self.data = np.arange(6.0).reshape((2, 3)) - self.lazydata = as_lazy_data(self.data) - cube = Cube(self.lazydata) - for i_dim, name in enumerate(("y", "x")): - npts = cube.shape[i_dim] - coord = DimCoord(np.arange(npts), long_name=name) - cube.add_dim_coord(coord, i_dim) - self.cube = cube - - def test_dim0_lazy(self): - cube_collapsed = self.cube.collapsed("y", MEAN) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, [1.5, 2.5, 3.5]) - self.assertFalse(cube_collapsed.has_lazy_data()) - - def test_dim1_lazy(self): - cube_collapsed = self.cube.collapsed("x", MEAN) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual(cube_collapsed.data, [1.0, 4.0]) - self.assertFalse(cube_collapsed.has_lazy_data()) - - def test_multidims(self): - # 
Check that MEAN works with multiple dims. - cube_collapsed = self.cube.collapsed(("x", "y"), MEAN) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAllClose(cube_collapsed.data, 2.5) - - def test_non_lazy_aggregator(self): - # An aggregator which doesn't have a lazy function should still work. - dummy_agg = Aggregator( - "custom_op", lambda x, axis=None: np.mean(x, axis=axis) - ) - result = self.cube.collapsed("x", dummy_agg) - self.assertFalse(result.has_lazy_data()) - self.assertArrayEqual(result.data, np.mean(self.data, axis=1)) - - -class Test_collapsed__multidim_weighted(tests.IrisTest): - def setUp(self): - self.data = np.arange(6.0).reshape((2, 3)) - self.lazydata = as_lazy_data(self.data) - # Test cubes wth (same-valued) real and lazy data - cube_real = Cube(self.data) - for i_dim, name in enumerate(("y", "x")): - npts = cube_real.shape[i_dim] - coord = DimCoord(np.arange(npts), long_name=name) - cube_real.add_dim_coord(coord, i_dim) - self.cube_real = cube_real - self.cube_lazy = cube_real.copy(data=self.lazydata) - # Test weights and expected result for a y-collapse - self.y_weights = np.array([0.3, 0.5]) - self.full_weights_y = np.broadcast_to( - self.y_weights.reshape((2, 1)), cube_real.shape - ) - self.expected_result_y = np.array([1.875, 2.875, 3.875]) - # Test weights and expected result for an x-collapse - self.x_weights = np.array([0.7, 0.4, 0.6]) - self.full_weights_x = np.broadcast_to( - self.x_weights.reshape((1, 3)), cube_real.shape - ) - self.expected_result_x = np.array([0.941176, 3.941176]) - - def test_weighted_fullweights_real_y(self): - # Supplying full-shape weights for collapsing over a single dimension. - cube_collapsed = self.cube_real.collapsed( - "y", MEAN, weights=self.full_weights_y - ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) - - def test_weighted_fullweights_lazy_y(self): - # Full-shape weights, lazy data : Check lazy result, same values as real calc. 
- cube_collapsed = self.cube_lazy.collapsed( - "y", MEAN, weights=self.full_weights_y - ) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) - - def test_weighted_1dweights_real_y(self): - # 1-D weights, real data : Check same results as full-shape. - cube_collapsed = self.cube_real.collapsed( - "y", MEAN, weights=self.y_weights - ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) - - def test_weighted_1dweights_lazy_y(self): - # 1-D weights, lazy data : Check lazy result, same values as real calc. - cube_collapsed = self.cube_lazy.collapsed( - "y", MEAN, weights=self.y_weights - ) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_y - ) - - def test_weighted_fullweights_real_x(self): - # Full weights, real data, ** collapse X ** : as for 'y' case above - cube_collapsed = self.cube_real.collapsed( - "x", MEAN, weights=self.full_weights_x - ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) - - def test_weighted_fullweights_lazy_x(self): - # Full weights, lazy data, ** collapse X ** : as for 'y' case above - cube_collapsed = self.cube_lazy.collapsed( - "x", MEAN, weights=self.full_weights_x - ) - self.assertTrue(cube_collapsed.has_lazy_data()) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) - - def test_weighted_1dweights_real_x(self): - # 1-D weights, real data, ** collapse X ** : as for 'y' case above - cube_collapsed = self.cube_real.collapsed( - "x", MEAN, weights=self.x_weights - ) - self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) - - def test_weighted_1dweights_lazy_x(self): - # 1-D weights, lazy data, ** collapse X ** : as for 'y' case above - cube_collapsed = self.cube_lazy.collapsed( - "x", MEAN, weights=self.x_weights - ) - self.assertTrue(cube_collapsed.has_lazy_data()) - 
self.assertArrayAlmostEqual( - cube_collapsed.data, self.expected_result_x - ) - - -class Test_collapsed__cellmeasure_ancils(tests.IrisTest): - def setUp(self): - cube = Cube(np.arange(6.0).reshape((2, 3))) - for i_dim, name in enumerate(("y", "x")): - npts = cube.shape[i_dim] - coord = DimCoord(np.arange(npts), long_name=name) - cube.add_dim_coord(coord, i_dim) - self.ancillary_variable = AncillaryVariable([0, 1], long_name="foo") - cube.add_ancillary_variable(self.ancillary_variable, 0) - self.cell_measure = CellMeasure([0, 1], long_name="bar") - cube.add_cell_measure(self.cell_measure, 0) - self.cube = cube - - def test_ancillary_variables_and_cell_measures_kept(self): - cube_collapsed = self.cube.collapsed("x", MEAN) - self.assertEqual( - cube_collapsed.ancillary_variables(), [self.ancillary_variable] - ) - self.assertEqual(cube_collapsed.cell_measures(), [self.cell_measure]) - - def test_ancillary_variables_and_cell_measures_removed(self): - cube_collapsed = self.cube.collapsed("y", MEAN) - self.assertEqual(cube_collapsed.ancillary_variables(), []) - self.assertEqual(cube_collapsed.cell_measures(), []) - - -class Test_collapsed__warning(tests.IrisTest): - def setUp(self): - self.cube = Cube([[1, 2], [1, 2]]) - lat = DimCoord([1, 2], standard_name="latitude") - lon = DimCoord([1, 2], standard_name="longitude") - grid_lat = AuxCoord([1, 2], standard_name="grid_latitude") - grid_lon = AuxCoord([1, 2], standard_name="grid_longitude") - wibble = AuxCoord([1, 2], long_name="wibble") - - self.cube.add_dim_coord(lat, 0) - self.cube.add_dim_coord(lon, 1) - self.cube.add_aux_coord(grid_lat, 0) - self.cube.add_aux_coord(grid_lon, 1) - self.cube.add_aux_coord(wibble, 1) - - def _aggregator(self, uses_weighting): - # Returns a mock aggregator with a mocked method (uses_weighting) - # which returns the given True/False condition. 
- aggregator = mock.Mock(spec=WeightedAggregator, lazy_func=None) - aggregator.cell_method = None - aggregator.uses_weighting = mock.Mock(return_value=uses_weighting) - - return aggregator - - def _assert_warn_collapse_without_weight(self, coords, warn): - # Ensure that warning is raised. - msg = "Collapsing spatial coordinate {!r} without weighting" - for coord in coords: - self.assertIn(mock.call(msg.format(coord)), warn.call_args_list) - - def _assert_nowarn_collapse_without_weight(self, coords, warn): - # Ensure that warning is not rised. - msg = "Collapsing spatial coordinate {!r} without weighting" - for coord in coords: - self.assertNotIn(mock.call(msg.format(coord)), warn.call_args_list) - - def test_lat_lon_noweighted_aggregator(self): - # Collapse latitude coordinate with unweighted aggregator. - aggregator = mock.Mock(spec=Aggregator, lazy_func=None) - aggregator.cell_method = None - coords = ["latitude", "longitude"] - - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator, somekeyword="bla") - - self._assert_nowarn_collapse_without_weight(coords, warn) - - def test_lat_lon_weighted_aggregator(self): - # Collapse latitude coordinate with weighted aggregator without - # providing weights. - aggregator = self._aggregator(False) - coords = ["latitude", "longitude"] - - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator) - - coords = [coord for coord in coords if "latitude" in coord] - self._assert_warn_collapse_without_weight(coords, warn) - - def test_lat_lon_weighted_aggregator_with_weights(self): - # Collapse latitude coordinate with a weighted aggregators and - # providing suitable weights. 
- weights = np.array([[0.1, 0.5], [0.3, 0.2]]) - aggregator = self._aggregator(True) - coords = ["latitude", "longitude"] - - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator, weights=weights) - - self._assert_nowarn_collapse_without_weight(coords, warn) - - def test_lat_lon_weighted_aggregator_alt(self): - # Collapse grid_latitude coordinate with weighted aggregator without - # providing weights. Tests coordinate matching logic. - aggregator = self._aggregator(False) - coords = ["grid_latitude", "grid_longitude"] - - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator) - - coords = [coord for coord in coords if "latitude" in coord] - self._assert_warn_collapse_without_weight(coords, warn) - - def test_no_lat_weighted_aggregator_mixed(self): - # Collapse grid_latitude and an unmatched coordinate (not lat/lon) - # with weighted aggregator without providing weights. - # Tests coordinate matching logic. - aggregator = self._aggregator(False) - coords = ["wibble"] - - with mock.patch("warnings.warn") as warn: - self.cube.collapsed(coords, aggregator) - - self._assert_nowarn_collapse_without_weight(coords, warn) - - -class Test_summary(tests.IrisTest): - def setUp(self): - self.cube = Cube(0) - - def test_cell_datetime_objects(self): - self.cube.add_aux_coord(AuxCoord(42, units="hours since epoch")) - summary = self.cube.summary() - self.assertIn("1970-01-02 18:00:00", summary) - - def test_scalar_str_coord(self): - str_value = "foo" - self.cube.add_aux_coord(AuxCoord(str_value)) - summary = self.cube.summary() - self.assertIn(str_value, summary) - - def test_ancillary_variable(self): - cube = Cube(np.arange(6).reshape(2, 3)) - av = AncillaryVariable([1, 2], "status_flag") - cube.add_ancillary_variable(av, 0) - expected_summary = ( - "unknown / (unknown) (-- : 2; -- : 3)\n" - " Ancillary variables:\n" - " status_flag x -" - ) - self.assertEqual(expected_summary, cube.summary()) - - def 
test_similar_coords(self): - coord1 = AuxCoord( - 42, long_name="foo", attributes=dict(bar=np.array([2, 5])) - ) - coord2 = coord1.copy() - coord2.attributes = dict(bar="baz") - for coord in [coord1, coord2]: - self.cube.add_aux_coord(coord) - self.assertIn("baz", self.cube.summary()) - - def test_long_components(self): - # Check that components with long names 'stretch' the printout correctly. - cube = Cube(np.zeros((20, 20, 20)), units=1) - dimco = DimCoord(np.arange(20), long_name="dimco") - auxco = AuxCoord(np.zeros(20), long_name="auxco") - ancil = AncillaryVariable(np.zeros(20), long_name="ancil") - cellm = CellMeasure(np.zeros(20), long_name="cellm") - cube.add_dim_coord(dimco, 0) - cube.add_aux_coord(auxco, 0) - cube.add_cell_measure(cellm, 1) - cube.add_ancillary_variable(ancil, 2) - - original_summary = cube.summary() - long_name = "long_name______________________________________" - for component in (dimco, auxco, ancil, cellm): - # For each (type of) component, set a long name so the header columns get shifted. - old_name = component.name() - component.rename(long_name) - new_summary = cube.summary() - component.rename( - old_name - ) # Put each back the way it was afterwards - - # Check that the resulting 'stretched' output has dimension columns aligned correctly. - lines = new_summary.split("\n") - header = lines[0] - colon_inds = [ - i_char for i_char, char in enumerate(header) if char == ":" - ] - for line in lines[1:]: - # Replace all '-' with 'x' to make checking easier, and add a final buffer space. - line = line.replace("-", "x") + " " - if " x " in line: - # For lines with any columns : check that columns are where expected - for col_ind in colon_inds: - # Chop out chars before+after each expected column. 
- self.assertEqual( - line[col_ind - 1 : col_ind + 2], " x " - ) - - # Finally also: compare old with new, but replacing new name and ignoring spacing differences - def collapse_space(string): - # Replace all multiple spaces with a single space. - while " " in string: - string = string.replace(" ", " ") - return string - - self.assertEqual( - collapse_space(new_summary).replace(long_name, old_name), - collapse_space(original_summary), - ) - - -class Test_is_compatible(tests.IrisTest): - def setUp(self): - self.test_cube = Cube([1.0]) - self.other_cube = self.test_cube.copy() - - def test_noncommon_array_attrs_compatible(self): - # Non-common array attributes should be ok. - self.test_cube.attributes["array_test"] = np.array([1.0, 2, 3]) - self.assertTrue(self.test_cube.is_compatible(self.other_cube)) - - def test_matching_array_attrs_compatible(self): - # Matching array attributes should be ok. - self.test_cube.attributes["array_test"] = np.array([1.0, 2, 3]) - self.other_cube.attributes["array_test"] = np.array([1.0, 2, 3]) - self.assertTrue(self.test_cube.is_compatible(self.other_cube)) - - def test_different_array_attrs_incompatible(self): - # Differing array attributes should make the cubes incompatible. 
- self.test_cube.attributes["array_test"] = np.array([1.0, 2, 3]) - self.other_cube.attributes["array_test"] = np.array([1.0, 2, 777.7]) - self.assertFalse(self.test_cube.is_compatible(self.other_cube)) - - -class Test_aggregated_by(tests.IrisTest): - def setUp(self): - self.cube = Cube(np.arange(44).reshape(4, 11)) - - val_coord = AuxCoord( - [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val" - ) - label_coord = AuxCoord( - [ - "alpha", - "alpha", - "beta", - "beta", - "alpha", - "gamma", - "alpha", - "alpha", - "alpha", - "gamma", - "beta", - ], - long_name="label", - units="no_unit", - ) - simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name="simple_agg") - spanning_coord = AuxCoord( - np.arange(44).reshape(4, 11), long_name="spanning" - ) - spanning_label_coord = AuxCoord( - np.arange(1, 441, 10).reshape(4, 11).astype(str), - long_name="span_label", - units="no_unit", - ) - - self.cube.add_aux_coord(simple_agg_coord, 0) - self.cube.add_aux_coord(val_coord, 1) - self.cube.add_aux_coord(label_coord, 1) - self.cube.add_aux_coord(spanning_coord, (0, 1)) - self.cube.add_aux_coord(spanning_label_coord, (0, 1)) - - self.mock_agg = mock.Mock(spec=Aggregator) - self.mock_agg.cell_method = [] - self.mock_agg.aggregate = mock.Mock( - return_value=mock.Mock(dtype="object") - ) - self.mock_agg.aggregate_shape = mock.Mock(return_value=()) - self.mock_agg.lazy_func = None - self.mock_agg.post_process = mock.Mock(side_effect=lambda x, y, z: x) - - self.ancillary_variable = AncillaryVariable( - [0, 1, 2, 3], long_name="foo" - ) - self.cube.add_ancillary_variable(self.ancillary_variable, 0) - self.cell_measure = CellMeasure([0, 1, 2, 3], long_name="bar") - self.cube.add_cell_measure(self.cell_measure, 0) - - def test_2d_coord_simple_agg(self): - # For 2d coords, slices of aggregated coord should be the same as - # aggregated slices. 
- res_cube = self.cube.aggregated_by("simple_agg", self.mock_agg) - for res_slice, cube_slice in zip( - res_cube.slices("simple_agg"), self.cube.slices("simple_agg") - ): - cube_slice_agg = cube_slice.aggregated_by( - "simple_agg", self.mock_agg - ) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) - self.assertEqual( - res_slice.coord("span_label"), - cube_slice_agg.coord("span_label"), - ) - - def test_agg_by_label(self): - # Aggregate a cube on a string coordinate label where label - # and val entries are not in step; the resulting cube has a val - # coord of bounded cells and a label coord of single string entries. - res_cube = self.cube.aggregated_by("label", self.mock_agg) - val_coord = AuxCoord( - np.array([1.0, 0.5, 1.0]), - bounds=np.array([[0, 2], [0, 1], [0, 2]]), - long_name="val", - ) - label_coord = AuxCoord( - np.array(["alpha", "beta", "gamma"]), - long_name="label", - units="no_unit", - ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - - def test_agg_by_label_bounded(self): - # Aggregate a cube on a string coordinate label where label - # and val entries are not in step; the resulting cube has a val - # coord of bounded cells and a label coord of single string entries. 
- val_points = self.cube.coord("val").points - self.cube.coord("val").bounds = np.array( - [val_points - 0.5, val_points + 0.5] - ).T - res_cube = self.cube.aggregated_by("label", self.mock_agg) - val_coord = AuxCoord( - np.array([1.0, 0.5, 1.0]), - bounds=np.array([[-0.5, 2.5], [-0.5, 1.5], [-0.5, 2.5]]), - long_name="val", - ) - label_coord = AuxCoord( - np.array(["alpha", "beta", "gamma"]), - long_name="label", - units="no_unit", - ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - - def test_2d_agg_by_label(self): - res_cube = self.cube.aggregated_by("label", self.mock_agg) - # For 2d coord, slices of aggregated coord should be the same as - # aggregated slices. - for res_slice, cube_slice in zip( - res_cube.slices("val"), self.cube.slices("val") - ): - cube_slice_agg = cube_slice.aggregated_by("label", self.mock_agg) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) - - def test_agg_by_val(self): - # Aggregate a cube on a numeric coordinate val where label - # and val entries are not in step; the resulting cube has a label - # coord with serialised labels from the aggregated cells. - res_cube = self.cube.aggregated_by("val", self.mock_agg) - val_coord = AuxCoord(np.array([0, 1, 2]), long_name="val") - exp0 = "alpha|alpha|beta|alpha|alpha|gamma" - exp1 = "beta|alpha|beta" - exp2 = "gamma|alpha" - label_coord = AuxCoord( - np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" - ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - - def test_2d_agg_by_val(self): - res_cube = self.cube.aggregated_by("val", self.mock_agg) - # For 2d coord, slices of aggregated coord should be the same as - # aggregated slices. 
- for res_slice, cube_slice in zip( - res_cube.slices("val"), self.cube.slices("val") - ): - cube_slice_agg = cube_slice.aggregated_by("val", self.mock_agg) - self.assertEqual( - res_slice.coord("spanning"), cube_slice_agg.coord("spanning") - ) - - def test_single_string_aggregation(self): - aux_coords = [ - (AuxCoord(["a", "b", "a"], long_name="foo"), 0), - (AuxCoord(["a", "a", "a"], long_name="bar"), 0), - ] - cube = iris.cube.Cube( - np.arange(12).reshape(3, 4), aux_coords_and_dims=aux_coords - ) - result = cube.aggregated_by("foo", MEAN) - self.assertEqual(result.shape, (2, 4)) - self.assertEqual( - result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") - ) - - def test_ancillary_variables_and_cell_measures_kept(self): - cube_agg = self.cube.aggregated_by("val", self.mock_agg) - self.assertEqual( - cube_agg.ancillary_variables(), [self.ancillary_variable] - ) - self.assertEqual(cube_agg.cell_measures(), [self.cell_measure]) - - def test_ancillary_variables_and_cell_measures_removed(self): - cube_agg = self.cube.aggregated_by("simple_agg", self.mock_agg) - self.assertEqual(cube_agg.ancillary_variables(), []) - self.assertEqual(cube_agg.cell_measures(), []) - - -class Test_aggregated_by__lazy(tests.IrisTest): - def setUp(self): - self.data = np.arange(44).reshape(4, 11) - self.lazydata = as_lazy_data(self.data) - self.cube = Cube(self.lazydata) - - val_coord = AuxCoord( - [0, 0, 0, 1, 1, 2, 0, 0, 2, 0, 1], long_name="val" - ) - label_coord = AuxCoord( - [ - "alpha", - "alpha", - "beta", - "beta", - "alpha", - "gamma", - "alpha", - "alpha", - "alpha", - "gamma", - "beta", - ], - long_name="label", - units="no_unit", - ) - simple_agg_coord = AuxCoord([1, 1, 2, 2], long_name="simple_agg") - - self.label_mean = np.array( - [ - [4.0 + 1.0 / 3.0, 5.0, 7.0], - [15.0 + 1.0 / 3.0, 16.0, 18.0], - [26.0 + 1.0 / 3.0, 27.0, 29.0], - [37.0 + 1.0 / 3.0, 38.0, 40.0], - ] - ) - self.val_mean = np.array( - [ - [4.0 + 1.0 / 6.0, 5.0 + 2.0 / 3.0, 6.5], - [15.0 + 1.0 / 6.0, 
16.0 + 2.0 / 3.0, 17.5], - [26.0 + 1.0 / 6.0, 27.0 + 2.0 / 3.0, 28.5], - [37.0 + 1.0 / 6.0, 38.0 + 2.0 / 3.0, 39.5], - ] - ) - - self.cube.add_aux_coord(simple_agg_coord, 0) - self.cube.add_aux_coord(val_coord, 1) - self.cube.add_aux_coord(label_coord, 1) - - def test_agg_by_label__lazy(self): - # Aggregate a cube on a string coordinate label where label - # and val entries are not in step; the resulting cube has a val - # coord of bounded cells and a label coord of single string entries. - res_cube = self.cube.aggregated_by("label", MEAN) - val_coord = AuxCoord( - np.array([1.0, 0.5, 1.0]), - bounds=np.array([[0, 2], [0, 1], [0, 2]]), - long_name="val", - ) - label_coord = AuxCoord( - np.array(["alpha", "beta", "gamma"]), - long_name="label", - units="no_unit", - ) - self.assertTrue(res_cube.has_lazy_data()) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - self.assertArrayEqual(res_cube.data, self.label_mean) - self.assertFalse(res_cube.has_lazy_data()) - - def test_agg_by_val__lazy(self): - # Aggregate a cube on a numeric coordinate val where label - # and val entries are not in step; the resulting cube has a label - # coord with serialised labels from the aggregated cells. 
- res_cube = self.cube.aggregated_by("val", MEAN) - val_coord = AuxCoord(np.array([0, 1, 2]), long_name="val") - exp0 = "alpha|alpha|beta|alpha|alpha|gamma" - exp1 = "beta|alpha|beta" - exp2 = "gamma|alpha" - label_coord = AuxCoord( - np.array((exp0, exp1, exp2)), long_name="label", units="no_unit" - ) - self.assertTrue(res_cube.has_lazy_data()) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("label"), label_coord) - self.assertArrayEqual(res_cube.data, self.val_mean) - self.assertFalse(res_cube.has_lazy_data()) - - def test_single_string_aggregation__lazy(self): - aux_coords = [ - (AuxCoord(["a", "b", "a"], long_name="foo"), 0), - (AuxCoord(["a", "a", "a"], long_name="bar"), 0), - ] - cube = iris.cube.Cube( - as_lazy_data(np.arange(12).reshape(3, 4)), - aux_coords_and_dims=aux_coords, - ) - means = np.array([[4.0, 5.0, 6.0, 7.0], [4.0, 5.0, 6.0, 7.0]]) - result = cube.aggregated_by("foo", MEAN) - self.assertTrue(result.has_lazy_data()) - self.assertEqual(result.shape, (2, 4)) - self.assertEqual( - result.coord("bar"), AuxCoord(["a|a", "a"], long_name="bar") - ) - self.assertArrayEqual(result.data, means) - self.assertFalse(result.has_lazy_data()) - - -class Test_rolling_window(tests.IrisTest): - def setUp(self): - self.cube = Cube(np.arange(6)) - self.multi_dim_cube = Cube(np.arange(36).reshape(6, 6)) - val_coord = DimCoord([0, 1, 2, 3, 4, 5], long_name="val") - month_coord = AuxCoord( - ["jan", "feb", "mar", "apr", "may", "jun"], long_name="month" - ) - extra_coord = AuxCoord([0, 1, 2, 3, 4, 5], long_name="extra") - self.cube.add_dim_coord(val_coord, 0) - self.cube.add_aux_coord(month_coord, 0) - self.multi_dim_cube.add_dim_coord(val_coord, 0) - self.multi_dim_cube.add_aux_coord(extra_coord, 1) - self.ancillary_variable = AncillaryVariable( - [0, 1, 2, 0, 1, 2], long_name="foo" - ) - self.multi_dim_cube.add_ancillary_variable(self.ancillary_variable, 1) - self.cell_measure = CellMeasure([0, 1, 2, 0, 1, 2], long_name="bar") - 
self.multi_dim_cube.add_cell_measure(self.cell_measure, 1) - - self.mock_agg = mock.Mock(spec=Aggregator) - self.mock_agg.aggregate = mock.Mock(return_value=np.empty([4])) - self.mock_agg.post_process = mock.Mock(side_effect=lambda x, y, z: x) - - def test_string_coord(self): - # Rolling window on a cube that contains a string coordinate. - res_cube = self.cube.rolling_window("val", self.mock_agg, 3) - val_coord = DimCoord( - np.array([1, 2, 3, 4]), - bounds=np.array([[0, 2], [1, 3], [2, 4], [3, 5]]), - long_name="val", - ) - month_coord = AuxCoord( - np.array( - ["jan|feb|mar", "feb|mar|apr", "mar|apr|may", "apr|may|jun"] - ), - bounds=np.array( - [ - ["jan", "mar"], - ["feb", "apr"], - ["mar", "may"], - ["apr", "jun"], - ] - ), - long_name="month", - ) - self.assertEqual(res_cube.coord("val"), val_coord) - self.assertEqual(res_cube.coord("month"), month_coord) - - def test_kwargs(self): - # Rolling window with missing data not tolerated - window = 2 - self.cube.data = ma.array( - self.cube.data, mask=([True, False, False, False, True, False]) - ) - res_cube = self.cube.rolling_window( - "val", iris.analysis.MEAN, window, mdtol=0 - ) - expected_result = ma.array( - [-99.0, 1.5, 2.5, -99.0, -99.0], - mask=[True, False, False, True, True], - dtype=np.float64, - ) - self.assertMaskedArrayEqual(expected_result, res_cube.data) - - def test_ancillary_variables_and_cell_measures_kept(self): - res_cube = self.multi_dim_cube.rolling_window("val", self.mock_agg, 3) - self.assertEqual( - res_cube.ancillary_variables(), [self.ancillary_variable] - ) - self.assertEqual(res_cube.cell_measures(), [self.cell_measure]) - - def test_ancillary_variables_and_cell_measures_removed(self): - res_cube = self.multi_dim_cube.rolling_window( - "extra", self.mock_agg, 3 - ) - self.assertEqual(res_cube.ancillary_variables(), []) - self.assertEqual(res_cube.cell_measures(), []) - - -class Test_slices_dim_order(tests.IrisTest): - """ - This class tests the capability of iris.cube.Cube.slices(), 
including its - ability to correctly re-order the dimensions. - """ - - def setUp(self): - """ - setup a 4D iris cube, each dimension is length 1. - The dimensions are; - dim1: time - dim2: height - dim3: latitude - dim4: longitude - """ - self.cube = iris.cube.Cube(np.array([[[[8.0]]]])) - self.cube.add_dim_coord(iris.coords.DimCoord([0], "time"), [0]) - self.cube.add_dim_coord(iris.coords.DimCoord([0], "height"), [1]) - self.cube.add_dim_coord(iris.coords.DimCoord([0], "latitude"), [2]) - self.cube.add_dim_coord(iris.coords.DimCoord([0], "longitude"), [3]) - - @staticmethod - def expected_cube_setup(dim1name, dim2name, dim3name): - """ - input: - ------ - dim1name: str - name of the first dimension coordinate - dim2name: str - name of the second dimension coordinate - dim3name: str - name of the third dimension coordinate - output: - ------ - cube: iris cube - iris cube with the specified axis holding the data 8 - """ - cube = iris.cube.Cube(np.array([[[8.0]]])) - cube.add_dim_coord(iris.coords.DimCoord([0], dim1name), [0]) - cube.add_dim_coord(iris.coords.DimCoord([0], dim2name), [1]) - cube.add_dim_coord(iris.coords.DimCoord([0], dim3name), [2]) - return cube - - def check_order(self, dim1, dim2, dim3, dim_to_remove): - """ - does two things: - (1) slices the 4D cube in dim1, dim2, dim3 (and removes the scalar - coordinate) and - (2) sets up a 3D cube with dim1, dim2, dim3. - input: - ----- - dim1: str - name of first dimension - dim2: str - name of second dimension - dim3: str - name of third dimension - dim_to_remove: str - name of the dimension that transforms into a scalar coordinate - when slicing the cube. 
- output: - ------ - sliced_cube: 3D cube - the cube that results if slicing the original cube - expected_cube: 3D cube - a cube set up with the axis corresponding to the dims - """ - sliced_cube = next(self.cube.slices([dim1, dim2, dim3])) - sliced_cube.remove_coord(dim_to_remove) - expected_cube = self.expected_cube_setup(dim1, dim2, dim3) - self.assertEqual(sliced_cube, expected_cube) - - def test_all_permutations(self): - for perm in permutations(["time", "height", "latitude", "longitude"]): - self.check_order(*perm) - - -@tests.skip_data -class Test_slices_over(tests.IrisTest): - def setUp(self): - self.cube = stock.realistic_4d() - # Define expected iterators for 1D and 2D test cases. - self.exp_iter_1d = range( - len(self.cube.coord("model_level_number").points) - ) - self.exp_iter_2d = np.ndindex(6, 70, 1, 1) - # Define maximum number of interations for particularly long - # (and so time-consuming) iterators. - self.long_iterator_max = 5 - - def test_1d_slice_coord_given(self): - res = self.cube.slices_over(self.cube.coord("model_level_number")) - for i, res_cube in zip(self.exp_iter_1d, res): - expected = self.cube[:, i] - self.assertEqual(res_cube, expected) - - def test_1d_slice_nonexistent_coord_given(self): - with self.assertRaises(CoordinateNotFoundError): - _ = self.cube.slices_over(self.cube.coord("wibble")) - - def test_1d_slice_coord_name_given(self): - res = self.cube.slices_over("model_level_number") - for i, res_cube in zip(self.exp_iter_1d, res): - expected = self.cube[:, i] - self.assertEqual(res_cube, expected) - - def test_1d_slice_nonexistent_coord_name_given(self): - with self.assertRaises(CoordinateNotFoundError): - _ = self.cube.slices_over("wibble") - - def test_1d_slice_dimension_given(self): - res = self.cube.slices_over(1) - for i, res_cube in zip(self.exp_iter_1d, res): - expected = self.cube[:, i] - self.assertEqual(res_cube, expected) - - def test_1d_slice_nonexistent_dimension_given(self): - with 
self.assertRaisesRegex(ValueError, "iterator over a dimension"): - _ = self.cube.slices_over(self.cube.ndim + 1) - - def test_2d_slice_coord_given(self): - # Slicing over these two dimensions returns 420 2D cubes, so only check - # cubes up to `self.long_iterator_max` to keep test runtime sensible. - res = self.cube.slices_over( - [self.cube.coord("time"), self.cube.coord("model_level_number")] - ) - for ct in range(self.long_iterator_max): - indices = list(next(self.exp_iter_2d)) - # Replace the dimensions not iterated over with spanning slices. - indices[2] = indices[3] = slice(None) - expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) - - def test_2d_slice_nonexistent_coord_given(self): - with self.assertRaises(CoordinateNotFoundError): - _ = self.cube.slices_over( - [self.cube.coord("time"), self.cube.coord("wibble")] - ) - - def test_2d_slice_coord_name_given(self): - # Slicing over these two dimensions returns 420 2D cubes, so only check - # cubes up to `self.long_iterator_max` to keep test runtime sensible. - res = self.cube.slices_over(["time", "model_level_number"]) - for ct in range(self.long_iterator_max): - indices = list(next(self.exp_iter_2d)) - # Replace the dimensions not iterated over with spanning slices. - indices[2] = indices[3] = slice(None) - expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) - - def test_2d_slice_nonexistent_coord_name_given(self): - with self.assertRaises(CoordinateNotFoundError): - _ = self.cube.slices_over(["time", "wibble"]) - - def test_2d_slice_dimension_given(self): - # Slicing over these two dimensions returns 420 2D cubes, so only check - # cubes up to `self.long_iterator_max` to keep test runtime sensible. - res = self.cube.slices_over([0, 1]) - for ct in range(self.long_iterator_max): - indices = list(next(self.exp_iter_2d)) - # Replace the dimensions not iterated over with spanning slices. 
- indices[2] = indices[3] = slice(None) - expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) - - def test_2d_slice_reversed_dimension_given(self): - # Confirm that reversing the order of the dimensions returns the same - # results as the above test. - res = self.cube.slices_over([1, 0]) - for ct in range(self.long_iterator_max): - indices = list(next(self.exp_iter_2d)) - # Replace the dimensions not iterated over with spanning slices. - indices[2] = indices[3] = slice(None) - expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) - - def test_2d_slice_nonexistent_dimension_given(self): - with self.assertRaisesRegex(ValueError, "iterator over a dimension"): - _ = self.cube.slices_over([0, self.cube.ndim + 1]) - - def test_multidim_slice_coord_given(self): - # Slicing over surface altitude returns 100x100 2D cubes, so only check - # cubes up to `self.long_iterator_max` to keep test runtime sensible. - res = self.cube.slices_over("surface_altitude") - # Define special ndindex iterator for the different dims sliced over. - nditer = np.ndindex(1, 1, 100, 100) - for ct in range(self.long_iterator_max): - indices = list(next(nditer)) - # Replace the dimensions not iterated over with spanning slices. - indices[0] = indices[1] = slice(None) - expected = self.cube[tuple(indices)] - self.assertEqual(next(res), expected) - - def test_duplicate_coordinate_given(self): - res = self.cube.slices_over([1, 1]) - for i, res_cube in zip(self.exp_iter_1d, res): - expected = self.cube[:, i] - self.assertEqual(res_cube, expected) - - def test_non_orthogonal_coordinates_given(self): - res = self.cube.slices_over(["model_level_number", "sigma"]) - for i, res_cube in zip(self.exp_iter_1d, res): - expected = self.cube[:, i] - self.assertEqual(res_cube, expected) - - def test_nodimension(self): - # Slicing over no dimension should return the whole cube. 
- res = self.cube.slices_over([]) - self.assertEqual(next(res), self.cube) - - -def create_cube(lon_min, lon_max, bounds=False): - n_lons = max(lon_min, lon_max) - min(lon_max, lon_min) - data = np.arange(4 * 3 * n_lons, dtype="f4").reshape(4, 3, -1) - data = as_lazy_data(data) - cube = Cube(data, standard_name="x_wind", units="ms-1") - cube.add_dim_coord( - iris.coords.DimCoord( - [0, 20, 40, 80], long_name="level_height", units="m" - ), - 0, - ) - cube.add_aux_coord( - iris.coords.AuxCoord( - [1.0, 0.9, 0.8, 0.6], long_name="sigma", units="1" - ), - 0, - ) - cube.add_dim_coord( - iris.coords.DimCoord([-45, 0, 45], "latitude", units="degrees"), 1 - ) - step = 1 if lon_max > lon_min else -1 - circular = abs(lon_max - lon_min) == 360 - cube.add_dim_coord( - iris.coords.DimCoord( - np.arange(lon_min, lon_max, step), - "longitude", - units="degrees", - circular=circular, - ), - 2, - ) - if bounds: - cube.coord("longitude").guess_bounds() - cube.add_aux_coord( - iris.coords.AuxCoord( - np.arange(3 * n_lons).reshape(3, -1) * 10, - "surface_altitude", - units="m", - ), - [1, 2], - ) - cube.add_aux_factory( - iris.aux_factory.HybridHeightFactory( - cube.coord("level_height"), - cube.coord("sigma"), - cube.coord("surface_altitude"), - ) - ) - return cube - - -# Ensure all the other coordinates and factories are correctly preserved. -class Test_intersection__Metadata(tests.IrisTest): - def test_metadata(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(170, 190)) - self.assertCMLApproxData(result) - - def test_metadata_wrapped(self): - cube = create_cube(-180, 180) - result = cube.intersection(longitude=(170, 190)) - self.assertCMLApproxData(result) - - -# Explicitly check the handling of `circular` on the result. 
-class Test_intersection__Circular(tests.IrisTest): - def test_regional(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.coord("longitude").circular) - - def test_regional_wrapped(self): - cube = create_cube(-180, 180) - result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.coord("longitude").circular) - - def test_global(self): - cube = create_cube(-180, 180) - result = cube.intersection(longitude=(-180, 180)) - self.assertTrue(result.coord("longitude").circular) - - def test_global_wrapped(self): - cube = create_cube(-180, 180) - result = cube.intersection(longitude=(10, 370)) - self.assertTrue(result.coord("longitude").circular) - - -# Check the various error conditions. -class Test_intersection__Invalid(tests.IrisTest): - def test_reversed_min_max(self): - cube = create_cube(0, 360) - with self.assertRaises(ValueError): - cube.intersection(longitude=(30, 10)) - - def test_dest_too_large(self): - cube = create_cube(0, 360) - with self.assertRaises(ValueError): - cube.intersection(longitude=(30, 500)) - - def test_src_too_large(self): - cube = create_cube(0, 400) - with self.assertRaises(ValueError): - cube.intersection(longitude=(10, 30)) - - def test_missing_coord(self): - cube = create_cube(0, 360) - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - cube.intersection(parrots=(10, 30)) - - def test_multi_dim_coord(self): - cube = create_cube(0, 360) - with self.assertRaises(iris.exceptions.CoordinateMultiDimError): - cube.intersection(surface_altitude=(10, 30)) - - def test_null_region(self): - # 10 <= v < 10 - cube = create_cube(0, 360) - with self.assertRaises(IndexError): - cube.intersection(longitude=(10, 10, False, False)) - - -class Test_intersection__Lazy(tests.IrisTest): - def test_real_data(self): - cube = create_cube(0, 360) - cube.data - result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.has_lazy_data()) - 
self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) - - def test_real_data_wrapped(self): - cube = create_cube(-180, 180) - cube.data - result = cube.intersection(longitude=(170, 190)) - self.assertFalse(result.has_lazy_data()) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_lazy_data(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(170, 190)) - self.assertTrue(result.has_lazy_data()) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) - - def test_lazy_data_wrapped(self): - cube = create_cube(-180, 180) - result = cube.intersection(longitude=(170, 190)) - self.assertTrue(result.has_lazy_data()) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(170, 191) - ) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - -class Test_intersection_Points(tests.IrisTest): - def test_ignore_bounds(self): - cube = create_cube(0, 30, bounds=True) - result = cube.intersection(longitude=(9.5, 12.5), ignore_bounds=True) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(10, 13) - ) - self.assertArrayEqual(result.coord("longitude").bounds[0], [9.5, 10.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [11.5, 12.5] - ) - - -# Check what happens with a regional, points-only circular intersection -# coordinate. 
-class Test_intersection__RegionalSrcModulus(tests.IrisTest): - def test_request_subset(self): - cube = create_cube(40, 60) - result = cube.intersection(longitude=(45, 50)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(45, 51) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(5, 11)) - - def test_request_left(self): - cube = create_cube(40, 60) - result = cube.intersection(longitude=(35, 45)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(40, 46) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 6)) - - def test_request_right(self): - cube = create_cube(40, 60) - result = cube.intersection(longitude=(55, 65)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(55, 60) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(15, 20)) - - def test_request_superset(self): - cube = create_cube(40, 60) - result = cube.intersection(longitude=(35, 65)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(40, 60) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 20)) - - def test_request_subset_modulus(self): - cube = create_cube(40, 60) - result = cube.intersection(longitude=(45 + 360, 50 + 360)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(45 + 360, 51 + 360) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(5, 11)) - - def test_request_left_modulus(self): - cube = create_cube(40, 60) - result = cube.intersection(longitude=(35 + 360, 45 + 360)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(40 + 360, 46 + 360) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 6)) - - def test_request_right_modulus(self): - cube = create_cube(40, 60) - result = cube.intersection(longitude=(55 + 360, 65 + 360)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(55 + 360, 60 + 360) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(15, 20)) - - def test_request_superset_modulus(self): - 
cube = create_cube(40, 60) - result = cube.intersection(longitude=(35 + 360, 65 + 360)) - self.assertArrayEqual( - result.coord("longitude").points, np.arange(40 + 360, 60 + 360) - ) - self.assertArrayEqual(result.data[0, 0], np.arange(0, 20)) - - def test_tolerance_f4(self): - cube = create_cube(0, 5) - cube.coord("longitude").points = np.array( - [0.0, 3.74999905, 7.49999809, 11.24999714, 14.99999619], dtype="f4" - ) - result = cube.intersection(longitude=(0, 5)) - self.assertArrayAlmostEqual( - result.coord("longitude").points, np.array([0.0, 3.74999905]) - ) - self.assertArrayEqual(result.data[0, 0], np.array([0, 1])) - - def test_tolerance_f8(self): - cube = create_cube(0, 5) - cube.coord("longitude").points = np.array( - [0.0, 3.74999905, 7.49999809, 11.24999714, 14.99999619], dtype="f8" - ) - result = cube.intersection(longitude=(0, 5)) - self.assertArrayAlmostEqual( - result.coord("longitude").points, np.array([0.0, 3.74999905]) - ) - self.assertArrayEqual(result.data[0, 0], np.array([0, 1])) - - -# Check what happens with a global, points-only circular intersection -# coordinate. 
-class Test_intersection__GlobalSrcModulus(tests.IrisTest): - def test_global_wrapped_extreme_increasing_base_period(self): - # Ensure that we can correctly handle points defined at (base + period) - cube = create_cube(-180.0, 180.0) - lons = cube.coord("longitude") - # Redefine longitude so that points at (base + period) - lons.points = np.linspace(-180.0, 180, lons.points.size) - result = cube.intersection( - longitude=(lons.points.min(), lons.points.max()) - ) - self.assertArrayEqual(result.data, cube.data) - - def test_global_wrapped_extreme_decreasing_base_period(self): - # Ensure that we can correctly handle points defined at (base + period) - cube = create_cube(180.0, -180.0) - lons = cube.coord("longitude") - # Redefine longitude so that points at (base + period) - lons.points = np.linspace(180.0, -180.0, lons.points.size) - result = cube.intersection( - longitude=(lons.points.min(), lons.points.max()) - ) - self.assertArrayEqual(result.data, cube.data) - - def test_global(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(0, 360)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], 359) - self.assertEqual(result.data[0, 0, 0], 0) - self.assertEqual(result.data[0, 0, -1], 359) - - def test_global_wrapped(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(-180, 180)) - self.assertEqual(result.coord("longitude").points[0], -180) - self.assertEqual(result.coord("longitude").points[-1], 179) - self.assertEqual(result.data[0, 0, 0], 180) - self.assertEqual(result.data[0, 0, -1], 179) - - def test_aux_coord(self): - cube = create_cube(0, 360) - cube.replace_coord( - iris.coords.AuxCoord.from_coord(cube.coord("longitude")) - ) - result = cube.intersection(longitude=(0, 360)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], 359) - self.assertEqual(result.data[0, 0, 0], 0) - 
self.assertEqual(result.data[0, 0, -1], 359) - - def test_aux_coord_wrapped(self): - cube = create_cube(0, 360) - cube.replace_coord( - iris.coords.AuxCoord.from_coord(cube.coord("longitude")) - ) - result = cube.intersection(longitude=(-180, 180)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], -1) - self.assertEqual(result.data[0, 0, 0], 0) - self.assertEqual(result.data[0, 0, -1], 359) - - def test_aux_coord_non_contiguous_wrapped(self): - cube = create_cube(0, 360) - coord = iris.coords.AuxCoord.from_coord(cube.coord("longitude")) - coord.points = (coord.points * 1.5) % 360 - cube.replace_coord(coord) - result = cube.intersection(longitude=(-90, 90)) - self.assertEqual(result.coord("longitude").points[0], 0) - self.assertEqual(result.coord("longitude").points[-1], 90) - self.assertEqual(result.data[0, 0, 0], 0) - self.assertEqual(result.data[0, 0, -1], 300) - - def test_decrementing(self): - cube = create_cube(360, 0) - result = cube.intersection(longitude=(40, 60)) - self.assertEqual(result.coord("longitude").points[0], 60) - self.assertEqual(result.coord("longitude").points[-1], 40) - self.assertEqual(result.data[0, 0, 0], 300) - self.assertEqual(result.data[0, 0, -1], 320) - - def test_decrementing_wrapped(self): - cube = create_cube(360, 0) - result = cube.intersection(longitude=(-10, 10)) - self.assertEqual(result.coord("longitude").points[0], 10) - self.assertEqual(result.coord("longitude").points[-1], -10) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_no_wrap_after_modulus(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(170 + 360, 190 + 360)) - self.assertEqual(result.coord("longitude").points[0], 170 + 360) - self.assertEqual(result.coord("longitude").points[-1], 190 + 360) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) - - def 
test_wrap_after_modulus(self): - cube = create_cube(-180, 180) - result = cube.intersection(longitude=(170 + 360, 190 + 360)) - self.assertEqual(result.coord("longitude").points[0], 170 + 360) - self.assertEqual(result.coord("longitude").points[-1], 190 + 360) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_select_by_coord(self): - cube = create_cube(0, 360) - coord = iris.coords.DimCoord(0, "longitude", units="degrees") - result = cube.intersection(iris.coords.CoordExtent(coord, 10, 30)) - self.assertEqual(result.coord("longitude").points[0], 10) - self.assertEqual(result.coord("longitude").points[-1], 30) - self.assertEqual(result.data[0, 0, 0], 10) - self.assertEqual(result.data[0, 0, -1], 30) - - def test_inclusive_exclusive(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(170, 190, True, False)) - self.assertEqual(result.coord("longitude").points[0], 170) - self.assertEqual(result.coord("longitude").points[-1], 189) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 189) - - def test_exclusive_inclusive(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(170, 190, False)) - self.assertEqual(result.coord("longitude").points[0], 171) - self.assertEqual(result.coord("longitude").points[-1], 190) - self.assertEqual(result.data[0, 0, 0], 171) - self.assertEqual(result.data[0, 0, -1], 190) - - def test_exclusive_exclusive(self): - cube = create_cube(0, 360) - result = cube.intersection(longitude=(170, 190, False, False)) - self.assertEqual(result.coord("longitude").points[0], 171) - self.assertEqual(result.coord("longitude").points[-1], 189) - self.assertEqual(result.data[0, 0, 0], 171) - self.assertEqual(result.data[0, 0, -1], 189) - - def test_single_point(self): - # 10 <= v <= 10 - cube = create_cube(0, 360) - result = cube.intersection(longitude=(10, 10)) - self.assertEqual(result.coord("longitude").points[0], 10) - 
self.assertEqual(result.coord("longitude").points[-1], 10) - self.assertEqual(result.data[0, 0, 0], 10) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_two_points(self): - # -1.5 <= v <= 0.5 - cube = create_cube(0, 360) - result = cube.intersection(longitude=(-1.5, 0.5)) - self.assertEqual(result.coord("longitude").points[0], -1) - self.assertEqual(result.coord("longitude").points[-1], 0) - self.assertEqual(result.data[0, 0, 0], 359) - self.assertEqual(result.data[0, 0, -1], 0) - - def test_wrap_radians(self): - cube = create_cube(0, 360) - cube.coord("longitude").convert_units("radians") - result = cube.intersection(longitude=(-1, 0.5)) - self.assertArrayAllClose( - result.coord("longitude").points, np.arange(-57, 29) * np.pi / 180 - ) - self.assertEqual(result.data[0, 0, 0], 303) - self.assertEqual(result.data[0, 0, -1], 28) - - def test_tolerance_bug(self): - # Floating point changes introduced by wrapping mean - # the resulting coordinate values are not equal to their - # equivalents. This led to a bug that this test checks. - cube = create_cube(0, 400) - cube.coord("longitude").points = np.linspace(-179.55, 179.55, 400) - result = cube.intersection(longitude=(125, 145)) - self.assertArrayAlmostEqual( - result.coord("longitude").points, - cube.coord("longitude").points[339:361], - ) - - def test_tolerance_bug_wrapped(self): - cube = create_cube(0, 400) - cube.coord("longitude").points = np.linspace(-179.55, 179.55, 400) - result = cube.intersection(longitude=(-190, -170)) - # Expected result is the last 11 and first 11 points. - expected = np.append( - cube.coord("longitude").points[389:] - 360.0, - cube.coord("longitude").points[:11], - ) - self.assertArrayAlmostEqual(result.coord("longitude").points, expected) - - -# Check what happens with a global, points-and-bounds circular -# intersection coordinate. 
-class Test_intersection__ModulusBounds(tests.IrisTest): - def test_global_wrapped_extreme_increasing_base_period(self): - # Ensure that we can correctly handle bounds defined at (base + period) - cube = create_cube(-180.0, 180.0, bounds=True) - lons = cube.coord("longitude") - result = cube.intersection( - longitude=(lons.bounds.min(), lons.bounds.max()) - ) - self.assertArrayEqual(result.data, cube.data) - - def test_global_wrapped_extreme_decreasing_base_period(self): - # Ensure that we can correctly handle bounds defined at (base + period) - cube = create_cube(180.0, -180.0, bounds=True) - lons = cube.coord("longitude") - result = cube.intersection( - longitude=(lons.bounds.min(), lons.bounds.max()) - ) - self.assertArrayEqual(result.data, cube.data) - - def test_misaligned_points_inside(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(169.75, 190.25)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [169.5, 170.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [189.5, 190.5] - ) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) - - def test_misaligned_points_outside(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(170.25, 189.75)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [169.5, 170.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [189.5, 190.5] - ) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) - - def test_misaligned_bounds(self): - cube = create_cube(-180, 180, bounds=True) - result = cube.intersection(longitude=(0, 360)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [-0.5, 0.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [358.5, 359.5] - ) - self.assertEqual(result.data[0, 0, 0], 180) - self.assertEqual(result.data[0, 0, -1], 179) - - def 
test_misaligned_bounds_decreasing(self): - cube = create_cube(180, -180, bounds=True) - result = cube.intersection(longitude=(0, 360)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [359.5, 358.5] - ) - self.assertArrayEqual(result.coord("longitude").points[-1], 0) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [0.5, -0.5] - ) - self.assertEqual(result.data[0, 0, 0], 181) - self.assertEqual(result.data[0, 0, -1], 180) - - def test_aligned_inclusive(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(170.5, 189.5)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [169.5, 170.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [189.5, 190.5] - ) - self.assertEqual(result.data[0, 0, 0], 170) - self.assertEqual(result.data[0, 0, -1], 190) - - def test_aligned_exclusive(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(170.5, 189.5, False, False)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [170.5, 171.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [188.5, 189.5] - ) - self.assertEqual(result.data[0, 0, 0], 171) - self.assertEqual(result.data[0, 0, -1], 189) - - def test_aligned_bounds_at_modulus(self): - cube = create_cube(-179.5, 180.5, bounds=True) - result = cube.intersection(longitude=(0, 360)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0, 1]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [359, 360]) - self.assertEqual(result.data[0, 0, 0], 180) - self.assertEqual(result.data[0, 0, -1], 179) - - def test_negative_aligned_bounds_at_modulus(self): - cube = create_cube(0.5, 360.5, bounds=True) - result = cube.intersection(longitude=(-180, 180)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-180, -179] - ) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [179, 180]) - self.assertEqual(result.data[0, 0, 0], 180) - 
self.assertEqual(result.data[0, 0, -1], 179) - - def test_negative_misaligned_points_inside(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(-10.25, 10.25)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-10.5, -9.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [9.5, 10.5] - ) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_negative_misaligned_points_outside(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(-9.75, 9.75)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-10.5, -9.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [9.5, 10.5] - ) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_negative_aligned_inclusive(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(-10.5, 10.5)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-11.5, -10.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [10.5, 11.5] - ) - self.assertEqual(result.data[0, 0, 0], 349) - self.assertEqual(result.data[0, 0, -1], 11) - - def test_negative_aligned_exclusive(self): - cube = create_cube(0, 360, bounds=True) - result = cube.intersection(longitude=(-10.5, 10.5, False, False)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [-10.5, -9.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [9.5, 10.5] - ) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_decrementing(self): - cube = create_cube(360, 0, bounds=True) - result = cube.intersection(longitude=(40, 60)) - self.assertArrayEqual( - result.coord("longitude").bounds[0], [60.5, 59.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [40.5, 39.5] - ) - self.assertEqual(result.data[0, 0, 0], 
300) - self.assertEqual(result.data[0, 0, -1], 320) - - def test_decrementing_wrapped(self): - cube = create_cube(360, 0, bounds=True) - result = cube.intersection(longitude=(-10, 10)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [10.5, 9.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [-9.5, -10.5] - ) - self.assertEqual(result.data[0, 0, 0], 350) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_numerical_tolerance(self): - # test the tolerance on the coordinate value is not causing a - # modulus wrapping - cube = create_cube(28.5, 68.5, bounds=True) - result = cube.intersection(longitude=(27.74, 68.61)) - result_lons = result.coord("longitude") - self.assertAlmostEqual(result_lons.points[0], 28.5) - self.assertAlmostEqual(result_lons.points[-1], 67.5) - dtype = result_lons.dtype - np.testing.assert_array_almost_equal( - result_lons.bounds[0], np.array([28.0, 29.0], dtype=dtype) - ) - np.testing.assert_array_almost_equal( - result_lons.bounds[-1], np.array([67.0, 68.0], dtype=dtype) - ) - - def test_numerical_tolerance_wrapped(self): - # test the tolerance on the coordinate value causes modulus wrapping - # where appropriate - cube = create_cube(0.5, 3600.5, bounds=True) - lons = cube.coord("longitude") - lons.points = lons.points / 10 - lons.bounds = lons.bounds / 10 - result = cube.intersection(longitude=(-60, 60)) - result_lons = result.coord("longitude") - self.assertAlmostEqual(result_lons.points[0], -60.05) - self.assertAlmostEqual(result_lons.points[-1], 60.05) - dtype = result_lons.dtype - np.testing.assert_array_almost_equal( - result_lons.bounds[0], np.array([-60.1, -60.0], dtype=dtype) - ) - np.testing.assert_array_almost_equal( - result_lons.bounds[-1], np.array([60.0, 60.1], dtype=dtype) - ) - - def test_ignore_bounds_wrapped(self): - # Test `ignore_bounds` fully ignores bounds when wrapping - cube = create_cube(0, 360, bounds=True) - result = cube.intersection( - longitude=(10.25, 370.25), 
ignore_bounds=True - ) - # Expect points 11..370 not bounds [9.5, 10.5] .. [368.5, 369.5] - self.assertArrayEqual( - result.coord("longitude").bounds[0], [10.5, 11.5] - ) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [369.5, 370.5] - ) - self.assertEqual(result.data[0, 0, 0], 11) - self.assertEqual(result.data[0, 0, -1], 10) - - def test_within_cell(self): - # Test cell is included when it entirely contains the requested range - cube = create_cube(0, 10, bounds=True) - result = cube.intersection(longitude=(0.7, 0.8)) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [0.5, 1.5]) - self.assertEqual(result.data[0, 0, 0], 1) - self.assertEqual(result.data[0, 0, -1], 1) - - def test_threshold_half(self): - cube = create_cube(0, 10, bounds=True) - result = cube.intersection(longitude=(1, 6.999), threshold=0.5) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [5.5, 6.5]) - self.assertEqual(result.data[0, 0, 0], 1) - self.assertEqual(result.data[0, 0, -1], 6) - - def test_threshold_full(self): - cube = create_cube(0, 10, bounds=True) - result = cube.intersection(longitude=(0.5, 7.499), threshold=1) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual(result.coord("longitude").bounds[-1], [5.5, 6.5]) - self.assertEqual(result.data[0, 0, 0], 1) - self.assertEqual(result.data[0, 0, -1], 6) - - def test_threshold_wrapped(self): - # Test that a cell is wrapped to `maximum` if required to exceed - # the threshold - cube = create_cube(-180, 180, bounds=True) - result = cube.intersection(longitude=(0.4, 360.4), threshold=0.2) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [359.5, 360.5] - ) - self.assertEqual(result.data[0, 0, 0], 181) - 
self.assertEqual(result.data[0, 0, -1], 180) - - def test_threshold_wrapped_gap(self): - # Test that a cell is wrapped to `maximum` if required to exceed - # the threshold (even with a gap in the range) - cube = create_cube(-180, 180, bounds=True) - result = cube.intersection(longitude=(0.4, 360.35), threshold=0.2) - self.assertArrayEqual(result.coord("longitude").bounds[0], [0.5, 1.5]) - self.assertArrayEqual( - result.coord("longitude").bounds[-1], [359.5, 360.5] - ) - self.assertEqual(result.data[0, 0, 0], 181) - self.assertEqual(result.data[0, 0, -1], 180) - - -def unrolled_cube(): - data = np.arange(5, dtype="f4") - cube = Cube(data) - cube.add_aux_coord( - iris.coords.AuxCoord( - [5.0, 10.0, 8.0, 5.0, 3.0], "longitude", units="degrees" - ), - 0, - ) - cube.add_aux_coord( - iris.coords.AuxCoord([1.0, 3.0, -2.0, -1.0, -4.0], "latitude"), 0 - ) - return cube - - -# Check what happens with a "unrolled" scatter-point data with a circular -# intersection coordinate. -class Test_intersection__ScatterModulus(tests.IrisTest): - def test_subset(self): - cube = unrolled_cube() - result = cube.intersection(longitude=(5, 8)) - self.assertArrayEqual(result.coord("longitude").points, [5, 8, 5]) - self.assertArrayEqual(result.data, [0, 2, 3]) - - def test_subset_wrapped(self): - cube = unrolled_cube() - result = cube.intersection(longitude=(5 + 360, 8 + 360)) - self.assertArrayEqual( - result.coord("longitude").points, [365, 368, 365] - ) - self.assertArrayEqual(result.data, [0, 2, 3]) - - def test_superset(self): - cube = unrolled_cube() - result = cube.intersection(longitude=(0, 15)) - self.assertArrayEqual( - result.coord("longitude").points, [5, 10, 8, 5, 3] - ) - self.assertArrayEqual(result.data, np.arange(5)) - - -# Test the API of the cube interpolation method. 
-class Test_interpolate(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_2d() - - self.scheme = mock.Mock(name="interpolation scheme") - self.interpolator = mock.Mock(name="interpolator") - self.interpolator.return_value = mock.sentinel.RESULT - self.scheme.interpolator.return_value = self.interpolator - self.collapse_coord = True - - def test_api(self): - sample_points = (("foo", 0.5), ("bar", 0.6)) - result = self.cube.interpolate( - sample_points, self.scheme, self.collapse_coord - ) - self.scheme.interpolator.assert_called_once_with( - self.cube, ("foo", "bar") - ) - self.interpolator.assert_called_once_with( - (0.5, 0.6), collapse_scalar=self.collapse_coord - ) - self.assertIs(result, mock.sentinel.RESULT) - - -class Test_regrid(tests.IrisTest): - def test(self): - # Test that Cube.regrid() just defers to the regridder of the - # given scheme. - - # Define a fake scheme and its associated regridder which just - # capture their arguments and return them in place of the - # regridded cube. - class FakeRegridder: - def __init__(self, *args): - self.args = args - - def __call__(self, cube): - return self.args + (cube,) - - class FakeScheme: - def regridder(self, src, target): - return FakeRegridder(self, src, target) - - cube = Cube(0) - scheme = FakeScheme() - result = cube.regrid(mock.sentinel.TARGET, scheme) - self.assertEqual(result, (scheme, cube, mock.sentinel.TARGET, cube)) - - -class Test_copy(tests.IrisTest): - def _check_copy(self, cube, cube_copy): - self.assertIsNot(cube_copy, cube) - self.assertEqual(cube_copy, cube) - self.assertIsNot(cube_copy.data, cube.data) - if ma.isMaskedArray(cube.data): - self.assertMaskedArrayEqual(cube_copy.data, cube.data) - if cube.data.mask is not ma.nomask: - # "No mask" is a constant : all other cases must be distinct. 
- self.assertIsNot(cube_copy.data.mask, cube.data.mask) - else: - self.assertArrayEqual(cube_copy.data, cube.data) - - def test(self): - cube = stock.simple_3d() - self._check_copy(cube, cube.copy()) - - def test_copy_ancillary_variables(self): - cube = stock.simple_3d() - avr = AncillaryVariable([2, 3], long_name="foo") - cube.add_ancillary_variable(avr, 0) - self._check_copy(cube, cube.copy()) - - def test_copy_cell_measures(self): - cube = stock.simple_3d() - cms = CellMeasure([2, 3], long_name="foo") - cube.add_cell_measure(cms, 0) - self._check_copy(cube, cube.copy()) - - def test__masked_emptymask(self): - cube = Cube(ma.array([0, 1])) - self._check_copy(cube, cube.copy()) - - def test__masked_arraymask(self): - cube = Cube(ma.array([0, 1], mask=[True, False])) - self._check_copy(cube, cube.copy()) - - def test__scalar(self): - cube = Cube(0) - self._check_copy(cube, cube.copy()) - - def test__masked_scalar_emptymask(self): - cube = Cube(ma.array(0)) - self._check_copy(cube, cube.copy()) - - def test__masked_scalar_arraymask(self): - cube = Cube(ma.array(0, mask=False)) - self._check_copy(cube, cube.copy()) - - def test__lazy(self): - cube = Cube(as_lazy_data(np.array([1, 0]))) - self._check_copy(cube, cube.copy()) - - -def _add_test_meshcube(self, nomesh=False, n_z=2, **meshcoord_kwargs): - """ - Common setup action : Create a standard mesh test cube with a variety of coords, and save the cube and various of - its components as properties of the 'self' TestCase. 
- - """ - nomesh_faces = 5 if nomesh else None - cube, parts = sample_mesh_cube( - nomesh_faces=nomesh_faces, n_z=n_z, with_parts=True, **meshcoord_kwargs - ) - mesh, zco, mesh_dimco, auxco_x, meshx, meshy = parts - self.mesh = mesh - self.dimco_z = zco - self.dimco_mesh = mesh_dimco - if not nomesh: - self.meshco_x = meshx - self.meshco_y = meshy - self.auxco_x = auxco_x - self.allcoords = [meshx, meshy, zco, mesh_dimco, auxco_x] - self.cube = cube - - -class Test_coords__mesh_coords(tests.IrisTest): - """ - Checking *only* the new "mesh_coords" keyword of the coord/coords methods. - - This is *not* attached to the existing tests for this area, as they are - very old and patchy legacy tests. See: iris.tests.test_cdm.TestQueryCoord. - - """ - - def setUp(self): - # Create a standard test cube with a variety of types of coord. - _add_test_meshcube(self) - - def _assert_lists_equal(self, items_a, items_b): - """ - Check that two lists of coords, cubes etc contain the same things. - Lists must contain the same items, including any repeats, but can be in - a different order. - - """ - # Compare (and thus sort) by their *common* metadata. - def sortkey(item): - return BaseMetadata.from_metadata(item.metadata) - - items_a = sorted(items_a, key=sortkey) - items_b = sorted(items_b, key=sortkey) - self.assertEqual(items_a, items_b) - - def test_coords__all__meshcoords_default(self): - # coords() includes mesh-coords along with the others. - result = self.cube.coords() - expected = self.allcoords - self._assert_lists_equal(expected, result) - - def test_coords__all__meshcoords_only(self): - # Coords(mesh_coords=True) returns only mesh-coords. - result = self.cube.coords(mesh_coords=True) - expected = [self.meshco_x, self.meshco_y] - self._assert_lists_equal(expected, result) - - def test_coords__all__meshcoords_omitted(self): - # Coords(mesh_coords=False) omits the mesh-coords. 
- result = self.cube.coords(mesh_coords=False) - expected = set(self.allcoords) - set([self.meshco_x, self.meshco_y]) - self._assert_lists_equal(expected, result) - - def test_coords__axis__meshcoords(self): - # Coord (singular) with axis + mesh_coords=True - result = self.cube.coord(axis="x", mesh_coords=True) - self.assertIs(result, self.meshco_x) - - def test_coords__dimcoords__meshcoords(self): - # dim_coords and mesh_coords should be mutually exclusive. - result = self.cube.coords(dim_coords=True, mesh_coords=True) - self.assertEqual(result, []) - - def test_coords__nodimcoords__meshcoords(self): - # When mesh_coords=True, dim_coords=False should have no effect. - result = self.cube.coords(dim_coords=False, mesh_coords=True) - expected = [self.meshco_x, self.meshco_y] - self._assert_lists_equal(expected, result) - - -class Test_mesh(tests.IrisTest): - def setUp(self): - # Create a standard test cube with a variety of types of coord. - _add_test_meshcube(self) - - def test_mesh(self): - result = self.cube.mesh - self.assertIs(result, self.mesh) - - def test_no_mesh(self): - # Replace standard setUp cube with a no-mesh version. - _add_test_meshcube(self, nomesh=True) - result = self.cube.mesh - self.assertIsNone(result) - - -class Test_location(tests.IrisTest): - def setUp(self): - # Create a standard test cube with a variety of types of coord. - _add_test_meshcube(self) - - def test_no_mesh(self): - # Replace standard setUp cube with a no-mesh version. - _add_test_meshcube(self, nomesh=True) - result = self.cube.location - self.assertIsNone(result) - - def test_mesh(self): - cube = self.cube - result = cube.location - self.assertEqual(result, self.meshco_x.location) - - def test_alternate_location(self): - # Replace standard setUp cube with an edge-based version. 
- _add_test_meshcube(self, location="edge") - cube = self.cube - result = cube.location - self.assertEqual(result, "edge") - - -class Test_mesh_dim(tests.IrisTest): - def setUp(self): - # Create a standard test cube with a variety of types of coord. - _add_test_meshcube(self) - - def test_no_mesh(self): - # Replace standard setUp cube with a no-mesh version. - _add_test_meshcube(self, nomesh=True) - result = self.cube.mesh_dim() - self.assertIsNone(result) - - def test_mesh(self): - cube = self.cube - result = cube.mesh_dim() - self.assertEqual(result, 1) - - def test_alternate(self): - # Replace standard setUp cube with an edge-based version. - _add_test_meshcube(self, location="edge") - cube = self.cube - # Transpose the cube : the mesh dim is then 0 - cube.transpose() - result = cube.mesh_dim() - self.assertEqual(result, 0) - - -class Test__init__mesh(tests.IrisTest): - """ - Test that creation with mesh-coords functions, and prevents a cube having - incompatible mesh-coords. - - """ - - def setUp(self): - # Create a standard test mesh and other useful components. - mesh = sample_mesh() - meshco = sample_meshcoord(mesh=mesh) - self.mesh = mesh - self.meshco = meshco - self.nz = 2 - self.n_faces = meshco.shape[0] - - def test_mesh(self): - # Create a new cube from some of the parts. - nz, n_faces = self.nz, self.n_faces - dimco_z = DimCoord(np.arange(nz), long_name="z") - dimco_mesh = DimCoord(np.arange(n_faces), long_name="x") - meshco = self.meshco - cube = Cube( - np.zeros((nz, n_faces)), - dim_coords_and_dims=[(dimco_z, 0), (dimco_mesh, 1)], - aux_coords_and_dims=[(meshco, 1)], - ) - self.assertEqual(cube.mesh, meshco.mesh) - - def test_fail_dim_meshcoord(self): - # As "test_mesh", but attempt to use the meshcoord as a dim-coord. - # This should not be allowed. 
- nz, n_faces = self.nz, self.n_faces - dimco_z = DimCoord(np.arange(nz), long_name="z") - meshco = self.meshco - with self.assertRaisesRegex(ValueError, "may not be an AuxCoord"): - Cube( - np.zeros((nz, n_faces)), - dim_coords_and_dims=[(dimco_z, 0), (meshco, 1)], - ) - - def test_multi_meshcoords(self): - meshco_x = sample_meshcoord(axis="x", mesh=self.mesh) - meshco_y = sample_meshcoord(axis="y", mesh=self.mesh) - n_faces = meshco_x.shape[0] - cube = Cube( - np.zeros(n_faces), - aux_coords_and_dims=[(meshco_x, 0), (meshco_y, 0)], - ) - self.assertEqual(cube.mesh, meshco_x.mesh) - - def test_multi_meshcoords_same_axis(self): - # *Not* an error, as long as the coords are distinguishable. - meshco_1 = sample_meshcoord(axis="x", mesh=self.mesh) - meshco_2 = sample_meshcoord(axis="x", mesh=self.mesh) - # Can't make these different at creation, owing to the limited - # constructor args, but we can adjust common metadata afterwards. - meshco_2.rename("junk_name") - - n_faces = meshco_1.shape[0] - cube = Cube( - np.zeros(n_faces), - aux_coords_and_dims=[(meshco_1, 0), (meshco_2, 0)], - ) - self.assertEqual(cube.mesh, meshco_1.mesh) - - def test_fail_meshcoords_different_locations(self): - # Same as successful 'multi_mesh', but different locations. - # N.B. 
must have a mesh with n-faces == n-edges to test this - mesh = sample_mesh(n_faces=7, n_edges=7) - meshco_1 = sample_meshcoord(axis="x", mesh=mesh, location="face") - meshco_2 = sample_meshcoord(axis="y", mesh=mesh, location="edge") - # They should still have the same *shape* (or would fail anyway) - self.assertEqual(meshco_1.shape, meshco_2.shape) - n_faces = meshco_1.shape[0] - msg = "does not match existing cube location" - with self.assertRaisesRegex(ValueError, msg): - Cube( - np.zeros(n_faces), - aux_coords_and_dims=[(meshco_1, 0), (meshco_2, 0)], - ) - - def test_meshcoords_equal_meshes(self): - meshco_x = sample_meshcoord(axis="x") - meshco_y = sample_meshcoord(axis="y") - n_faces = meshco_x.shape[0] - Cube( - np.zeros(n_faces), - aux_coords_and_dims=[(meshco_x, 0), (meshco_y, 0)], - ) - - def test_fail_meshcoords_different_meshes(self): - meshco_x = sample_meshcoord(axis="x") - meshco_y = sample_meshcoord(axis="y") # Own (different) mesh - meshco_y.mesh.long_name = "new_name" - n_faces = meshco_x.shape[0] - with self.assertRaisesRegex(ValueError, "Mesh.* does not match"): - Cube( - np.zeros(n_faces), - aux_coords_and_dims=[(meshco_x, 0), (meshco_y, 0)], - ) - - def test_fail_meshcoords_different_dims(self): - # Same as 'test_mesh', but meshcoords on different dimensions. - # Replace standard setup with one where n_z == n_faces. - n_z, n_faces = 4, 4 - mesh = sample_mesh(n_faces=n_faces) - meshco_x = sample_meshcoord(mesh=mesh, axis="x") - meshco_y = sample_meshcoord(mesh=mesh, axis="y") - msg = "does not match existing cube mesh dimension" - with self.assertRaisesRegex(ValueError, msg): - Cube( - np.zeros((n_z, n_faces)), - aux_coords_and_dims=[(meshco_x, 1), (meshco_y, 0)], - ) - - -class Test__add_aux_coord__mesh(tests.IrisTest): - """ - Test that "Cube.add_aux_coord" functions with a mesh-coord, and prevents a - cube having incompatible mesh-coords. 
- - """ - - def setUp(self): - _add_test_meshcube(self) - # Remove the existing "meshco_y", so we can add similar ones without - # needing to distinguish from the existing. - self.cube.remove_coord(self.meshco_y) - - def test_add_compatible(self): - cube = self.cube - meshco_y = self.meshco_y - # Add the y-meshco back into the cube. - cube.add_aux_coord(meshco_y, 1) - self.assertIn(meshco_y, cube.coords(mesh_coords=True)) - - def test_add_multiple(self): - # Show that we can add extra mesh coords. - cube = self.cube - meshco_y = self.meshco_y - # Add the y-meshco back into the cube. - cube.add_aux_coord(meshco_y, 1) - # Make a duplicate y-meshco, renamed so it can add into the cube. - new_meshco_y = meshco_y.copy() - new_meshco_y.rename("alternative") - cube.add_aux_coord(new_meshco_y, 1) - self.assertEqual(len(cube.coords(mesh_coords=True)), 3) - - def test_add_equal_mesh(self): - # Make a duplicate y-meshco, and rename so it can add into the cube. - cube = self.cube - # Create 'meshco_y' duplicate, but a new mesh - meshco_y = sample_meshcoord(axis="y") - cube.add_aux_coord(meshco_y, 1) - self.assertIn(meshco_y, cube.coords(mesh_coords=True)) - - def test_fail_different_mesh(self): - # Make a duplicate y-meshco, and rename so it can add into the cube. - cube = self.cube - # Create 'meshco_y' duplicate, but a new mesh - meshco_y = sample_meshcoord(axis="y") - meshco_y.mesh.long_name = "new_name" - msg = "does not match existing cube mesh" - with self.assertRaisesRegex(ValueError, msg): - cube.add_aux_coord(meshco_y, 1) - - def test_fail_different_location(self): - # Make a new mesh with equal n_faces and n_edges - mesh = sample_mesh(n_faces=4, n_edges=4) - # Re-make the test objects based on that. - _add_test_meshcube(self, mesh=mesh) - cube = self.cube - cube.remove_coord(self.meshco_y) # Remove y-coord, as in setUp() - # Create a new meshco_y, same mesh but based on edges. 
- meshco_y = sample_meshcoord(axis="y", mesh=self.mesh, location="edge") - msg = "does not match existing cube location" - with self.assertRaisesRegex(ValueError, msg): - cube.add_aux_coord(meshco_y, 1) - - def test_fail_different_dimension(self): - # Re-make the test objects with the non-mesh dim equal in length. - n_faces = self.cube.shape[1] - _add_test_meshcube(self, n_z=n_faces) - cube = self.cube - meshco_y = self.meshco_y - cube.remove_coord(meshco_y) # Remove y-coord, as in setUp() - - # Attempt to re-attach the 'y' meshcoord, to a different cube dimension. - msg = "does not match existing cube mesh dimension" - with self.assertRaisesRegex(ValueError, msg): - cube.add_aux_coord(meshco_y, 0) - - -class Test__add_dim_coord__mesh(tests.IrisTest): - """ - Test that "Cube.add_dim_coord" cannot work with a mesh-coord. - - """ - - def test(self): - # Create a mesh with only 2 faces, so coord *can't* be non-monotonic. - mesh = sample_mesh(n_faces=2) - meshco = sample_meshcoord(mesh=mesh) - cube = Cube([0, 1]) - with self.assertRaisesRegex(ValueError, "may not be an AuxCoord"): - cube.add_dim_coord(meshco, 0) - - -class Test__eq__mesh(tests.IrisTest): - """ - Check that cubes with meshes support == as expected. - - Note: there is no special code for this in iris.cube.Cube : it is - provided by the coord comparisons. - - """ - - def setUp(self): - # Create a 'standard' test cube. - _add_test_meshcube(self) - - def test_copied_cube_match(self): - cube = self.cube - cube2 = cube.copy() - self.assertEqual(cube, cube2) - - def test_equal_mesh_match(self): - cube1 = self.cube - # re-create an identical cube, using the same mesh. - _add_test_meshcube(self) - cube2 = self.cube - self.assertEqual(cube1, cube2) - - def test_new_mesh_different(self): - cube1 = self.cube - # re-create an identical cube, using a different mesh. 
- _add_test_meshcube(self) - self.cube.mesh.long_name = "new_name" - cube2 = self.cube - self.assertNotEqual(cube1, cube2) - - -class Test_dtype(tests.IrisTest): - def setUp(self): - self.dtypes = ( - np.dtype("int"), - np.dtype("uint"), - np.dtype("bool"), - np.dtype("float"), - ) - - def test_real_data(self): - for dtype in self.dtypes: - data = np.array([0, 1], dtype=dtype) - cube = Cube(data) - self.assertEqual(cube.dtype, dtype) - - def test_real_data_masked__mask_unset(self): - for dtype in self.dtypes: - data = ma.array([0, 1], dtype=dtype) - cube = Cube(data) - self.assertEqual(cube.dtype, dtype) - - def test_real_data_masked__mask_set(self): - for dtype in self.dtypes: - data = ma.array([0, 1], dtype=dtype) - data[0] = ma.masked - cube = Cube(data) - self.assertEqual(cube.dtype, dtype) - - def test_lazy_data(self): - for dtype in self.dtypes: - data = np.array([0, 1], dtype=dtype) - cube = Cube(as_lazy_data(data)) - self.assertEqual(cube.dtype, dtype) - # Check that accessing the dtype does not trigger loading - # of the data. - self.assertTrue(cube.has_lazy_data()) - - def test_lazy_data_masked__mask_unset(self): - for dtype in self.dtypes: - data = ma.array([0, 1], dtype=dtype) - cube = Cube(as_lazy_data(data)) - self.assertEqual(cube.dtype, dtype) - # Check that accessing the dtype does not trigger loading - # of the data. - self.assertTrue(cube.has_lazy_data()) - - def test_lazy_data_masked__mask_set(self): - for dtype in self.dtypes: - data = ma.array([0, 1], dtype=dtype) - data[0] = ma.masked - cube = Cube(as_lazy_data(data)) - self.assertEqual(cube.dtype, dtype) - # Check that accessing the dtype does not trigger loading - # of the data. 
- self.assertTrue(cube.has_lazy_data()) - - -class TestSubset(tests.IrisTest): - def test_scalar_coordinate(self): - cube = Cube(0, long_name="apricot", units="1") - cube.add_aux_coord(DimCoord([0], long_name="banana", units="1")) - result = cube.subset(cube.coord("banana")) - self.assertEqual(cube, result) - - def test_dimensional_coordinate(self): - cube = Cube(np.zeros((4)), long_name="tinned_peach", units="1") - cube.add_dim_coord( - DimCoord([0, 1, 2, 3], long_name="sixteen_ton_weight", units="1"), - 0, - ) - result = cube.subset(cube.coord("sixteen_ton_weight")) - self.assertEqual(cube, result) - - def test_missing_coordinate(self): - cube = Cube(0, long_name="raspberry", units="1") - cube.add_aux_coord(DimCoord([0], long_name="loganberry", units="1")) - bad_coord = DimCoord([0], long_name="tiger", units="1") - self.assertRaises(CoordinateNotFoundError, cube.subset, bad_coord) - - def test_different_coordinate(self): - cube = Cube(0, long_name="raspberry", units="1") - cube.add_aux_coord(DimCoord([0], long_name="loganberry", units="1")) - different_coord = DimCoord([2], long_name="loganberry", units="1") - result = cube.subset(different_coord) - self.assertEqual(result, None) - - def test_different_coordinate_vector(self): - cube = Cube([0, 1], long_name="raspberry", units="1") - cube.add_dim_coord( - DimCoord([0, 1], long_name="loganberry", units="1"), 0 - ) - different_coord = DimCoord([2], long_name="loganberry", units="1") - result = cube.subset(different_coord) - self.assertEqual(result, None) - - def test_not_coordinate(self): - cube = Cube(0, long_name="peach", units="1") - cube.add_aux_coord(DimCoord([0], long_name="crocodile", units="1")) - self.assertRaises(ValueError, cube.subset, "Pointed Stick") - - -class Test_add_metadata(tests.IrisTest): - def test_add_dim_coord(self): - cube = Cube(np.arange(3)) - x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") - cube.add_dim_coord(x_coord, 0) - self.assertEqual(cube.coord("x"), x_coord) - - def 
test_add_aux_coord(self): - cube = Cube(np.arange(6).reshape(2, 3)) - x_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="x") - cube.add_aux_coord(x_coord, [0, 1]) - self.assertEqual(cube.coord("x"), x_coord) - - def test_add_cell_measure(self): - cube = Cube(np.arange(6).reshape(2, 3)) - a_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="area" - ) - cube.add_cell_measure(a_cell_measure, [0, 1]) - self.assertEqual(cube.cell_measure("area"), a_cell_measure) - - def test_add_ancillary_variable(self): - cube = Cube(np.arange(6).reshape(2, 3)) - ancillary_variable = AncillaryVariable( - data=np.arange(6).reshape(2, 3), long_name="detection quality" - ) - cube.add_ancillary_variable(ancillary_variable, [0, 1]) - self.assertEqual( - cube.ancillary_variable("detection quality"), ancillary_variable - ) - - def test_add_valid_aux_factory(self): - cube = Cube(np.arange(8).reshape(2, 2, 2)) - delta = AuxCoord(points=[0, 1], long_name="delta", units="m") - sigma = AuxCoord(points=[0, 1], long_name="sigma") - orog = AuxCoord(np.arange(4).reshape(2, 2), units="m") - cube.add_aux_coord(delta, 0) - cube.add_aux_coord(sigma, 0) - cube.add_aux_coord(orog, (1, 2)) - factory = HybridHeightFactory(delta=delta, sigma=sigma, orography=orog) - self.assertIsNone(cube.add_aux_factory(factory)) - - def test_error_for_add_invalid_aux_factory(self): - cube = Cube(np.arange(8).reshape(2, 2, 2), long_name="bar") - delta = AuxCoord(points=[0, 1], long_name="delta", units="m") - sigma = AuxCoord(points=[0, 1], long_name="sigma") - orog = AuxCoord(np.arange(4).reshape(2, 2), units="m", long_name="foo") - cube.add_aux_coord(delta, 0) - cube.add_aux_coord(sigma, 0) - # Note orography is not added to the cube here - factory = HybridHeightFactory(delta=delta, sigma=sigma, orography=orog) - expected_error = ( - "foo coordinate for factory is not present on cube " "bar" - ) - with self.assertRaisesRegex(ValueError, expected_error): - cube.add_aux_factory(factory) - - 
-class Test_remove_metadata(tests.IrisTest): - def setUp(self): - cube = Cube(np.arange(6).reshape(2, 3)) - x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") - cube.add_dim_coord(x_coord, 1) - z_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="z") - cube.add_aux_coord(z_coord, [0, 1]) - a_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="area" - ) - self.b_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="other_area" - ) - cube.add_cell_measure(a_cell_measure, [0, 1]) - cube.add_cell_measure(self.b_cell_measure, [0, 1]) - ancillary_variable = AncillaryVariable( - data=np.arange(6).reshape(2, 3), long_name="Quality of Detection" - ) - cube.add_ancillary_variable(ancillary_variable, [0, 1]) - self.cube = cube - - def test_remove_dim_coord(self): - self.cube.remove_coord(self.cube.coord("x")) - self.assertEqual(self.cube.coords("x"), []) - - def test_remove_aux_coord(self): - self.cube.remove_coord(self.cube.coord("z")) - self.assertEqual(self.cube.coords("z"), []) - - def test_remove_cell_measure(self): - self.cube.remove_cell_measure(self.cube.cell_measure("area")) - self.assertEqual( - self.cube._cell_measures_and_dims, [(self.b_cell_measure, (0, 1))] - ) - - def test_remove_cell_measure_by_name(self): - self.cube.remove_cell_measure("area") - self.assertEqual( - self.cube._cell_measures_and_dims, [(self.b_cell_measure, (0, 1))] - ) - - def test_fail_remove_cell_measure_by_name(self): - with self.assertRaises(CellMeasureNotFoundError): - self.cube.remove_cell_measure("notarea") - - def test_remove_ancilliary_variable(self): - self.cube.remove_ancillary_variable( - self.cube.ancillary_variable("Quality of Detection") - ) - self.assertEqual(self.cube._ancillary_variables_and_dims, []) - - def test_remove_ancilliary_variable_by_name(self): - self.cube.remove_ancillary_variable("Quality of Detection") - self.assertEqual(self.cube._ancillary_variables_and_dims, []) - - def 
test_fail_remove_ancilliary_variable_by_name(self): - with self.assertRaises(AncillaryVariableNotFoundError): - self.cube.remove_ancillary_variable("notname") - - -class Test__getitem_CellMeasure(tests.IrisTest): - def setUp(self): - cube = Cube(np.arange(6).reshape(2, 3)) - x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") - cube.add_dim_coord(x_coord, 1) - y_coord = DimCoord(points=np.array([5, 6]), long_name="y") - cube.add_dim_coord(y_coord, 0) - z_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="z") - cube.add_aux_coord(z_coord, [0, 1]) - a_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="area" - ) - cube.add_cell_measure(a_cell_measure, [0, 1]) - self.cube = cube - - def test_cell_measure_2d(self): - result = self.cube[0:2, 0:2] - self.assertEqual(len(result.cell_measures()), 1) - self.assertEqual(result.shape, result.cell_measures()[0].data.shape) - - def test_cell_measure_1d(self): - result = self.cube[0, 0:2] - self.assertEqual(len(result.cell_measures()), 1) - self.assertEqual(result.shape, result.cell_measures()[0].data.shape) - - -class Test__getitem_AncillaryVariables(tests.IrisTest): - def setUp(self): - cube = Cube(np.arange(6).reshape(2, 3)) - x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") - cube.add_dim_coord(x_coord, 1) - y_coord = DimCoord(points=np.array([5, 6]), long_name="y") - cube.add_dim_coord(y_coord, 0) - z_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="z") - cube.add_aux_coord(z_coord, [0, 1]) - a_ancillary_variable = AncillaryVariable( - data=np.arange(6).reshape(2, 3), long_name="foo" - ) - cube.add_ancillary_variable(a_ancillary_variable, [0, 1]) - self.cube = cube - - def test_ancillary_variables_2d(self): - result = self.cube[0:2, 0:2] - self.assertEqual(len(result.ancillary_variables()), 1) - self.assertEqual( - result.shape, result.ancillary_variables()[0].data.shape - ) - - def test_ancillary_variables_1d(self): - result = self.cube[0, 0:2] - 
self.assertEqual(len(result.ancillary_variables()), 1) - self.assertEqual( - result.shape, result.ancillary_variables()[0].data.shape - ) - - -class TestAncillaryVariables(tests.IrisTest): - def setUp(self): - cube = Cube(10 * np.arange(6).reshape(2, 3)) - self.ancill_var = AncillaryVariable( - np.arange(6).reshape(2, 3), - standard_name="number_of_observations", - units="1", - ) - cube.add_ancillary_variable(self.ancill_var, [0, 1]) - self.cube = cube - - def test_get_ancillary_variable(self): - ancill_var = self.cube.ancillary_variable("number_of_observations") - self.assertEqual(ancill_var, self.ancill_var) - - def test_get_ancillary_variables(self): - ancill_vars = self.cube.ancillary_variables("number_of_observations") - self.assertEqual(len(ancill_vars), 1) - self.assertEqual(ancill_vars[0], self.ancill_var) - - def test_get_ancillary_variable_obj(self): - ancill_vars = self.cube.ancillary_variables(self.ancill_var) - self.assertEqual(len(ancill_vars), 1) - self.assertEqual(ancill_vars[0], self.ancill_var) - - def test_fail_get_ancillary_variables(self): - with self.assertRaises(AncillaryVariableNotFoundError): - self.cube.ancillary_variable("other_ancill_var") - - def test_fail_get_ancillary_variables_obj(self): - ancillary_variable = self.ancill_var.copy() - ancillary_variable.long_name = "Number of observations at site" - with self.assertRaises(AncillaryVariableNotFoundError): - self.cube.ancillary_variable(ancillary_variable) - - def test_ancillary_variable_dims(self): - ancill_var_dims = self.cube.ancillary_variable_dims(self.ancill_var) - self.assertEqual(ancill_var_dims, (0, 1)) - - def test_fail_ancill_variable_dims(self): - ancillary_variable = self.ancill_var.copy() - ancillary_variable.long_name = "Number of observations at site" - with self.assertRaises(AncillaryVariableNotFoundError): - self.cube.ancillary_variable_dims(ancillary_variable) - - def test_ancillary_variable_dims_by_name(self): - ancill_var_dims = self.cube.ancillary_variable_dims( - 
"number_of_observations" - ) - self.assertEqual(ancill_var_dims, (0, 1)) - - def test_fail_ancillary_variable_dims_by_name(self): - with self.assertRaises(AncillaryVariableNotFoundError): - self.cube.ancillary_variable_dims("notname") - - -class TestCellMeasures(tests.IrisTest): - def setUp(self): - cube = Cube(np.arange(6).reshape(2, 3)) - x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") - cube.add_dim_coord(x_coord, 1) - z_coord = AuxCoord(points=np.arange(6).reshape(2, 3), long_name="z") - cube.add_aux_coord(z_coord, [0, 1]) - self.a_cell_measure = CellMeasure( - np.arange(6).reshape(2, 3), long_name="area", units="m2" - ) - cube.add_cell_measure(self.a_cell_measure, [0, 1]) - self.cube = cube - - def test_get_cell_measure(self): - cm = self.cube.cell_measure("area") - self.assertEqual(cm, self.a_cell_measure) - - def test_get_cell_measures(self): - cms = self.cube.cell_measures() - self.assertEqual(len(cms), 1) - self.assertEqual(cms[0], self.a_cell_measure) - - def test_get_cell_measures_obj(self): - cms = self.cube.cell_measures(self.a_cell_measure) - self.assertEqual(len(cms), 1) - self.assertEqual(cms[0], self.a_cell_measure) - - def test_fail_get_cell_measure(self): - with self.assertRaises(CellMeasureNotFoundError): - _ = self.cube.cell_measure("notarea") - - def test_fail_get_cell_measures_obj(self): - a_cell_measure = self.a_cell_measure.copy() - a_cell_measure.units = "km2" - with self.assertRaises(CellMeasureNotFoundError): - _ = self.cube.cell_measure(a_cell_measure) - - def test_cell_measure_dims(self): - cm_dims = self.cube.cell_measure_dims(self.a_cell_measure) - self.assertEqual(cm_dims, (0, 1)) - - def test_fail_cell_measure_dims(self): - a_cell_measure = self.a_cell_measure.copy() - a_cell_measure.units = "km2" - with self.assertRaises(CellMeasureNotFoundError): - _ = self.cube.cell_measure_dims(a_cell_measure) - - def test_cell_measure_dims_by_name(self): - cm_dims = self.cube.cell_measure_dims("area") - self.assertEqual(cm_dims, 
(0, 1)) - - def test_fail_cell_measure_dims_by_name(self): - with self.assertRaises(CellMeasureNotFoundError): - self.cube.cell_measure_dims("notname") - - -class Test_transpose(tests.IrisTest): - def setUp(self): - self.data = np.arange(24).reshape(3, 2, 4) - self.cube = Cube(self.data) - self.lazy_cube = Cube(as_lazy_data(self.data)) - - def test_lazy_data(self): - cube = self.lazy_cube - cube.transpose() - self.assertTrue(cube.has_lazy_data()) - self.assertArrayEqual(self.data.T, cube.data) - - def test_real_data(self): - self.cube.transpose() - self.assertFalse(self.cube.has_lazy_data()) - self.assertIs(self.data.base, self.cube.data.base) - self.assertArrayEqual(self.data.T, self.cube.data) - - def test_real_data__new_order(self): - new_order = [2, 0, 1] - self.cube.transpose(new_order) - self.assertFalse(self.cube.has_lazy_data()) - self.assertIs(self.data.base, self.cube.data.base) - self.assertArrayEqual(self.data.transpose(new_order), self.cube.data) - - def test_lazy_data__new_order(self): - new_order = [2, 0, 1] - cube = self.lazy_cube - cube.transpose(new_order) - self.assertTrue(cube.has_lazy_data()) - self.assertArrayEqual(self.data.transpose(new_order), cube.data) - - def test_lazy_data__transpose_order_ndarray(self): - # Check that a transpose order supplied as an array does not trip up - # a dask transpose operation. 
- new_order = np.array([2, 0, 1]) - cube = self.lazy_cube - cube.transpose(new_order) - self.assertTrue(cube.has_lazy_data()) - self.assertArrayEqual(self.data.transpose(new_order), cube.data) - - def test_bad_transpose_order(self): - exp_emsg = "Incorrect number of dimensions" - with self.assertRaisesRegex(ValueError, exp_emsg): - self.cube.transpose([1]) - - def test_dim_coords(self): - x_coord = DimCoord(points=np.array([2, 3, 4]), long_name="x") - self.cube.add_dim_coord(x_coord, 0) - self.cube.transpose() - self.assertEqual(self.cube._dim_coords_and_dims, [(x_coord, 2)]) - - def test_aux_coords(self): - x_coord = AuxCoord( - points=np.array([[2, 3], [8, 4], [7, 9]]), long_name="x" - ) - self.cube.add_aux_coord(x_coord, (0, 1)) - self.cube.transpose() - self.assertEqual(self.cube._aux_coords_and_dims, [(x_coord, (2, 1))]) - - def test_cell_measures(self): - area_cm = CellMeasure( - np.arange(12).reshape(3, 4), long_name="area of cells" - ) - self.cube.add_cell_measure(area_cm, (0, 2)) - self.cube.transpose() - self.assertEqual( - self.cube._cell_measures_and_dims, [(area_cm, (2, 0))] - ) - - def test_ancillary_variables(self): - ancill_var = AncillaryVariable( - data=np.arange(8).reshape(2, 4), long_name="instrument error" - ) - self.cube.add_ancillary_variable(ancill_var, (1, 2)) - self.cube.transpose() - self.assertEqual( - self.cube._ancillary_variables_and_dims, [(ancill_var, (1, 0))] - ) - - -class Test_convert_units(tests.IrisTest): - def test_convert_unknown_units(self): - cube = iris.cube.Cube(1) - emsg = ( - "Cannot convert from unknown units. " - 'The "cube.units" attribute may be set directly.' 
- ) - with self.assertRaisesRegex(UnitConversionError, emsg): - cube.convert_units("mm day-1") - - def test_preserves_lazy(self): - real_data = np.arange(12.0).reshape((3, 4)) - lazy_data = as_lazy_data(real_data) - cube = iris.cube.Cube(lazy_data, units="m") - real_data_ft = Unit("m").convert(real_data, "ft") - cube.convert_units("ft") - self.assertTrue(cube.has_lazy_data()) - self.assertArrayAllClose(cube.data, real_data_ft) - - -class Test__eq__data(tests.IrisTest): - """Partial cube equality testing, for data type only.""" - - def test_data_float_eq(self): - cube1 = Cube([1.0]) - cube2 = Cube([1.0]) - self.assertTrue(cube1 == cube2) - - def test_data_float_eqtol(self): - val1 = np.array(1.0, dtype=np.float32) - # NOTE: Since v2.3, Iris uses "allclose". Prior to that we used - # "rtol=1e-8", and this example would *fail*. - val2 = np.array(1.0 + 1.0e-6, dtype=np.float32) - cube1 = Cube([val1]) - cube2 = Cube([val2]) - self.assertNotEqual(val1, val2) - self.assertTrue(cube1 == cube2) - - def test_data_float_not_eq(self): - val1 = 1.0 - val2 = 1.0 + 1.0e-4 - cube1 = Cube([1.0, val1]) - cube2 = Cube([1.0, val2]) - self.assertFalse(cube1 == cube2) - - def test_data_int_eq(self): - cube1 = Cube([1, 2, 3]) - cube2 = Cube([1, 2, 3]) - self.assertTrue(cube1 == cube2) - - def test_data_int_not_eq(self): - cube1 = Cube([1, 2, 3]) - cube2 = Cube([1, 2, 0]) - self.assertFalse(cube1 == cube2) - - # NOTE: since numpy v1.18, boolean array subtract is deprecated. 
- def test_data_bool_eq(self): - cube1 = Cube([True, False]) - cube2 = Cube([True, False]) - self.assertTrue(cube1 == cube2) - - def test_data_bool_not_eq(self): - cube1 = Cube([True, False]) - cube2 = Cube([True, True]) - self.assertFalse(cube1 == cube2) - - -class Test__eq__meta(tests.IrisTest): - def test_ancillary_fail(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - avr = AncillaryVariable([2, 3], long_name="foo") - cube2.add_ancillary_variable(avr, 0) - self.assertFalse(cube1 == cube2) - - def test_ancillary_reorder(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - avr1 = AncillaryVariable([2, 3], long_name="foo") - avr2 = AncillaryVariable([4, 5], long_name="bar") - # Add the same ancillary variables to cube1 and cube2 in - # opposite orders. - cube1.add_ancillary_variable(avr1, 0) - cube1.add_ancillary_variable(avr2, 0) - cube2.add_ancillary_variable(avr2, 0) - cube2.add_ancillary_variable(avr1, 0) - self.assertTrue(cube1 == cube2) - - def test_ancillary_diff_data(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - avr1 = AncillaryVariable([2, 3], long_name="foo") - avr2 = AncillaryVariable([4, 5], long_name="foo") - cube1.add_ancillary_variable(avr1, 0) - cube2.add_ancillary_variable(avr2, 0) - self.assertFalse(cube1 == cube2) - - def test_cell_measure_fail(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - cms = CellMeasure([2, 3], long_name="foo") - cube2.add_cell_measure(cms, 0) - self.assertFalse(cube1 == cube2) - - def test_cell_measure_reorder(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - cms1 = CellMeasure([2, 3], long_name="foo") - cms2 = CellMeasure([4, 5], long_name="bar") - # Add the same cell measure to cube1 and cube2 in - # opposite orders. 
- cube1.add_cell_measure(cms1, 0) - cube1.add_cell_measure(cms2, 0) - cube2.add_cell_measure(cms2, 0) - cube2.add_cell_measure(cms1, 0) - self.assertTrue(cube1 == cube2) - - def test_cell_measure_diff_data(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - cms1 = CellMeasure([2, 3], long_name="foo") - cms2 = CellMeasure([4, 5], long_name="foo") - cube1.add_cell_measure(cms1, 0) - cube2.add_cell_measure(cms2, 0) - self.assertFalse(cube1 == cube2) - - def test_cell_method_fail(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - cmth = CellMethod("mean", "time", "6hr") - cube2.add_cell_method(cmth) - self.assertFalse(cube1 == cube2) - - # Unlike cell measures, cell methods are order sensitive. - def test_cell_method_reorder_fail(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - cmth1 = CellMethod("mean", "time", "6hr") - cmth2 = CellMethod("mean", "time", "12hr") - # Add the same cell method to cube1 and cube2 in - # opposite orders. - cube1.add_cell_method(cmth1) - cube1.add_cell_method(cmth2) - cube2.add_cell_method(cmth2) - cube2.add_cell_method(cmth1) - self.assertFalse(cube1 == cube2) - - def test_cell_method_correct_order(self): - cube1 = Cube([0, 1]) - cube2 = Cube([0, 1]) - cmth1 = CellMethod("mean", "time", "6hr") - cmth2 = CellMethod("mean", "time", "12hr") - # Add the same cell method to cube1 and cube2 in - # the same order. - cube1.add_cell_method(cmth1) - cube1.add_cell_method(cmth2) - cube2.add_cell_method(cmth1) - cube2.add_cell_method(cmth2) - self.assertTrue(cube1 == cube2) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/cube/test_CubeList.py b/lib/iris/tests/unit/cube/test_CubeList.py deleted file mode 100644 index eb4c6c4f3f..0000000000 --- a/lib/iris/tests/unit/cube/test_CubeList.py +++ /dev/null @@ -1,629 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.cube.CubeList` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import collections -from unittest import mock - -from cf_units import Unit -import numpy as np - -from iris import Constraint -import iris.coord_systems -from iris.coords import AuxCoord, DimCoord -from iris.cube import Cube, CubeList -import iris.exceptions -from iris.fileformats.pp import STASH -import iris.tests.stock - - -class Test_concatenate_cube(tests.IrisTest): - def setUp(self): - self.units = Unit( - "days since 1970-01-01 00:00:00", calendar="gregorian" - ) - self.cube1 = Cube([1, 2, 3], "air_temperature", units="K") - self.cube1.add_dim_coord( - DimCoord([0, 1, 2], "time", units=self.units), 0 - ) - - def test_pass(self): - self.cube2 = Cube([1, 2, 3], "air_temperature", units="K") - self.cube2.add_dim_coord( - DimCoord([3, 4, 5], "time", units=self.units), 0 - ) - result = CubeList([self.cube1, self.cube2]).concatenate_cube() - self.assertIsInstance(result, Cube) - - def test_fail(self): - units = Unit("days since 1970-01-02 00:00:00", calendar="gregorian") - cube2 = Cube([1, 2, 3], "air_temperature", units="K") - cube2.add_dim_coord(DimCoord([0, 1, 2], "time", units=units), 0) - with self.assertRaises(iris.exceptions.ConcatenateError): - CubeList([self.cube1, cube2]).concatenate_cube() - - def test_names_differ_fail(self): - self.cube2 = Cube([1, 2, 3], "air_temperature", units="K") - self.cube2.add_dim_coord( - DimCoord([3, 4, 5], "time", units=self.units), 0 - ) - self.cube3 = Cube([1, 2, 3], "air_pressure", units="Pa") - self.cube3.add_dim_coord( - DimCoord([3, 4, 5], "time", units=self.units), 0 - ) - exc_regexp = "Cube names differ: air_temperature != air_pressure" - with self.assertRaisesRegex( - iris.exceptions.ConcatenateError, exc_regexp - ): - 
CubeList([self.cube1, self.cube2, self.cube3]).concatenate_cube() - - def test_empty(self): - exc_regexp = "can't concatenate an empty CubeList" - with self.assertRaisesRegex(ValueError, exc_regexp): - CubeList([]).concatenate_cube() - - -class Test_extract_overlapping(tests.IrisTest): - def setUp(self): - shape = (6, 14, 19) - n_time, n_lat, n_lon = shape - n_data = n_time * n_lat * n_lon - cube = Cube(np.arange(n_data, dtype=np.int32).reshape(shape)) - coord = iris.coords.DimCoord( - points=np.arange(n_time), - standard_name="time", - units="hours since epoch", - ) - cube.add_dim_coord(coord, 0) - cs = iris.coord_systems.GeogCS(6371229) - coord = iris.coords.DimCoord( - points=np.linspace(-90, 90, n_lat), - standard_name="latitude", - units="degrees", - coord_system=cs, - ) - cube.add_dim_coord(coord, 1) - coord = iris.coords.DimCoord( - points=np.linspace(-180, 180, n_lon), - standard_name="longitude", - units="degrees", - coord_system=cs, - ) - cube.add_dim_coord(coord, 2) - self.cube = cube - - def test_extract_one_str_dim(self): - cubes = iris.cube.CubeList([self.cube[2:], self.cube[:4]]) - a, b = cubes.extract_overlapping("time") - self.assertEqual(a.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) - - def test_extract_one_list_dim(self): - cubes = iris.cube.CubeList([self.cube[2:], self.cube[:4]]) - a, b = cubes.extract_overlapping(["time"]) - self.assertEqual(a.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) - - def test_extract_two_dims(self): - cubes = iris.cube.CubeList([self.cube[2:, 5:], self.cube[:4, :10]]) - a, b = cubes.extract_overlapping(["time", "latitude"]) - self.assertEqual(a.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual( - a.coord("latitude"), self.cube.coord("latitude")[5:10] - ) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) - self.assertEqual( - b.coord("latitude"), 
self.cube.coord("latitude")[5:10] - ) - - def test_different_orders(self): - cubes = iris.cube.CubeList([self.cube[::-1][:4], self.cube[:4]]) - a, b = cubes.extract_overlapping("time") - self.assertEqual(a.coord("time"), self.cube[::-1].coord("time")[2:4]) - self.assertEqual(b.coord("time"), self.cube.coord("time")[2:4]) - - -class Test_merge_cube(tests.IrisTest): - def setUp(self): - self.cube1 = Cube([1, 2, 3], "air_temperature", units="K") - self.cube1.add_aux_coord(AuxCoord([0], "height", units="m")) - - def test_pass(self): - cube2 = self.cube1.copy() - cube2.coord("height").points = [1] - result = CubeList([self.cube1, cube2]).merge_cube() - self.assertIsInstance(result, Cube) - - def test_fail(self): - cube2 = self.cube1.copy() - cube2.rename("not air temperature") - with self.assertRaises(iris.exceptions.MergeError): - CubeList([self.cube1, cube2]).merge_cube() - - def test_empty(self): - with self.assertRaises(ValueError): - CubeList([]).merge_cube() - - def test_single_cube(self): - result = CubeList([self.cube1]).merge_cube() - self.assertEqual(result, self.cube1) - self.assertIsNot(result, self.cube1) - - def test_repeated_cube(self): - with self.assertRaises(iris.exceptions.MergeError): - CubeList([self.cube1, self.cube1]).merge_cube() - - -class Test_merge__time_triple(tests.IrisTest): - @staticmethod - def _make_cube(fp, rt, t, realization=None): - cube = Cube(np.arange(20).reshape(4, 5)) - cube.add_dim_coord(DimCoord(np.arange(5), long_name="x", units="1"), 1) - cube.add_dim_coord(DimCoord(np.arange(4), long_name="y", units="1"), 0) - cube.add_aux_coord( - DimCoord(fp, standard_name="forecast_period", units="1") - ) - cube.add_aux_coord( - DimCoord(rt, standard_name="forecast_reference_time", units="1") - ) - cube.add_aux_coord(DimCoord(t, standard_name="time", units="1")) - if realization is not None: - cube.add_aux_coord( - DimCoord(realization, standard_name="realization", units="1") - ) - return cube - - def 
test_orthogonal_with_realization(self): - # => fp: 2; rt: 2; t: 2; realization: 2 - triples = ( - (0, 10, 1), - (0, 10, 2), - (0, 11, 1), - (0, 11, 2), - (1, 10, 1), - (1, 10, 2), - (1, 11, 1), - (1, 11, 2), - ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] - en2_cubes = [ - self._make_cube(*triple, realization=2) for triple in triples - ] - cubes = CubeList(en1_cubes) + CubeList(en2_cubes) - (cube,) = cubes.merge() - self.assertCML(cube, checksum=False) - - def test_combination_with_realization(self): - # => fp, rt, t: 8; realization: 2 - triples = ( - (0, 10, 1), - (0, 10, 2), - (0, 11, 1), - (0, 11, 3), # This '3' breaks the pattern. - (1, 10, 1), - (1, 10, 2), - (1, 11, 1), - (1, 11, 2), - ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] - en2_cubes = [ - self._make_cube(*triple, realization=2) for triple in triples - ] - cubes = CubeList(en1_cubes) + CubeList(en2_cubes) - (cube,) = cubes.merge() - self.assertCML(cube, checksum=False) - - def test_combination_with_extra_realization(self): - # => fp, rt, t, realization: 17 - triples = ( - (0, 10, 1), - (0, 10, 2), - (0, 11, 1), - (0, 11, 2), - (1, 10, 1), - (1, 10, 2), - (1, 11, 1), - (1, 11, 2), - ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] - en2_cubes = [ - self._make_cube(*triple, realization=2) for triple in triples - ] - # Add extra that is a duplicate of one of the time triples - # but with a different realisation. 
- en3_cubes = [self._make_cube(0, 10, 2, realization=3)] - cubes = CubeList(en1_cubes) + CubeList(en2_cubes) + CubeList(en3_cubes) - (cube,) = cubes.merge() - self.assertCML(cube, checksum=False) - - def test_combination_with_extra_triple(self): - # => fp, rt, t, realization: 17 - triples = ( - (0, 10, 1), - (0, 10, 2), - (0, 11, 1), - (0, 11, 2), - (1, 10, 1), - (1, 10, 2), - (1, 11, 1), - (1, 11, 2), - ) - en1_cubes = [ - self._make_cube(*triple, realization=1) for triple in triples - ] - # Add extra time triple on the end. - en2_cubes = [ - self._make_cube(*triple, realization=2) - for triple in triples + ((1, 11, 3),) - ] - cubes = CubeList(en1_cubes) + CubeList(en2_cubes) - (cube,) = cubes.merge() - self.assertCML(cube, checksum=False) - - -class Test_xml(tests.IrisTest): - def setUp(self): - self.cubes = CubeList([Cube(np.arange(3)), Cube(np.arange(3))]) - - def test_byteorder_default(self): - self.assertIn("byteorder", self.cubes.xml()) - - def test_byteorder_false(self): - self.assertNotIn("byteorder", self.cubes.xml(byteorder=False)) - - def test_byteorder_true(self): - self.assertIn("byteorder", self.cubes.xml(byteorder=True)) - - -class Test_extract(tests.IrisTest): - def setUp(self): - self.scalar_cubes = CubeList() - for i in range(5): - for letter in "abcd": - self.scalar_cubes.append(Cube(i, long_name=letter)) - - def test_scalar_cube_name_constraint(self): - # Test the name based extraction of a CubeList containing scalar cubes. - res = self.scalar_cubes.extract("a") - expected = CubeList([Cube(i, long_name="a") for i in range(5)]) - self.assertEqual(res, expected) - - def test_scalar_cube_data_constraint(self): - # Test the extraction of a CubeList containing scalar cubes - # when using a cube_func. 
- val = 2 - constraint = iris.Constraint(cube_func=lambda c: c.data == val) - res = self.scalar_cubes.extract(constraint) - expected = CubeList([Cube(val, long_name=letter) for letter in "abcd"]) - self.assertEqual(res, expected) - - -class ExtractMixin: - # Choose "which" extract method to test. - # Effectively "abstract" -- inheritor must define this property : - # method_name = 'extract_cube' / 'extract_cubes' - - def setUp(self): - self.cube_x = Cube(0, long_name="x") - self.cube_y = Cube(0, long_name="y") - self.cons_x = Constraint("x") - self.cons_y = Constraint("y") - self.cons_any = Constraint(cube_func=lambda cube: True) - self.cons_none = Constraint(cube_func=lambda cube: False) - - def check_extract(self, cubes, constraints, expected): - # Check that extracting a cubelist with the given arguments has the - # expected result. - # 'expected' and the operation results can be: - # * None - # * a single cube - # * a list of cubes --> cubelist (with cubes matching) - # * string --> a ConstraintMatchException matching the string - cubelist = CubeList(cubes) - method = getattr(cubelist, self.method_name) - if isinstance(expected, str): - with self.assertRaisesRegex( - iris.exceptions.ConstraintMismatchError, expected - ): - method(constraints) - else: - result = method(constraints) - if expected is None: - self.assertIsNone(result) - elif isinstance(expected, Cube): - self.assertIsInstance(result, Cube) - self.assertEqual(result, expected) - elif isinstance(expected, list): - self.assertIsInstance(result, CubeList) - self.assertEqual(result, expected) - else: - msg = ( - 'Unhandled usage in "check_extract" call: ' - '"expected" arg has type {}, value {}.' 
- ) - raise ValueError(msg.format(type(expected), expected)) - - -class Test_extract_cube(ExtractMixin, tests.IrisTest): - method_name = "extract_cube" - - def test_empty(self): - self.check_extract([], self.cons_x, "Got 0 cubes .* expecting 1") - - def test_single_cube_ok(self): - self.check_extract([self.cube_x], self.cons_x, self.cube_x) - - def test_single_cube_fail__too_few(self): - self.check_extract( - [self.cube_x], self.cons_y, "Got 0 cubes .* expecting 1" - ) - - def test_single_cube_fail__too_many(self): - self.check_extract( - [self.cube_x, self.cube_y], - self.cons_any, - "Got 2 cubes .* expecting 1", - ) - - def test_string_as_constraint(self): - # Check that we can use a string, that converts to a constraint - # ( via "as_constraint" ). - self.check_extract([self.cube_x], "x", self.cube_x) - - def test_none_as_constraint(self): - # Check that we can use a None, that converts to a constraint - # ( via "as_constraint" ). - self.check_extract([self.cube_x], None, self.cube_x) - - def test_constraint_in_list__fail(self): - # Check that we *cannot* use [constraint] - msg = "cannot be cast to a constraint" - with self.assertRaisesRegex(TypeError, msg): - self.check_extract([], [self.cons_x], []) - - def test_multi_cube_ok(self): - self.check_extract( - [self.cube_x, self.cube_y], self.cons_x, self.cube_x - ) # NOTE: returns a cube - - def test_multi_cube_fail__too_few(self): - self.check_extract( - [self.cube_x, self.cube_y], - self.cons_none, - "Got 0 cubes .* expecting 1", - ) - - def test_multi_cube_fail__too_many(self): - self.check_extract( - [self.cube_x, self.cube_y], - self.cons_any, - "Got 2 cubes .* expecting 1", - ) - - -class ExtractCubesMixin(ExtractMixin): - method_name = "extract_cubes" - - -class Test_extract_cubes__noconstraint(ExtractCubesMixin, tests.IrisTest): - """Test with an empty list of constraints.""" - - def test_empty(self): - self.check_extract([], [], []) - - def test_single_cube(self): - self.check_extract([self.cube_x], [], 
[]) - - def test_multi_cubes(self): - self.check_extract([self.cube_x, self.cube_y], [], []) - - -class ExtractCubesSingleConstraintMixin(ExtractCubesMixin): - """ - Common code for testing extract_cubes with a single constraint. - Generalised, so that we can do the same tests for a "bare" constraint, - and a list containing a single [constraint]. - - """ - - # Effectively "abstract" -- inheritor must define this property : - # wrap_test_constraint_as_list_of_one = True / False - - def check_extract(self, cubes, constraint, result): - # Overload standard test operation. - if self.wrap_test_constraint_as_list_of_one: - constraint = [constraint] - super().check_extract(cubes, constraint, result) - - def test_empty(self): - self.check_extract([], self.cons_x, "Got 0 cubes .* expecting 1") - - def test_single_cube_ok(self): - self.check_extract( - [self.cube_x], self.cons_x, [self.cube_x] - ) # NOTE: always returns list NOT cube - - def test_single_cube__fail_mismatch(self): - self.check_extract( - [self.cube_x], self.cons_y, "Got 0 cubes .* expecting 1" - ) - - def test_multi_cube_ok(self): - self.check_extract( - [self.cube_x, self.cube_y], self.cons_x, [self.cube_x] - ) # NOTE: always returns list NOT cube - - def test_multi_cube__fail_too_few(self): - self.check_extract( - [self.cube_x, self.cube_y], - self.cons_none, - "Got 0 cubes .* expecting 1", - ) - - def test_multi_cube__fail_too_many(self): - self.check_extract( - [self.cube_x, self.cube_y], - self.cons_any, - "Got 2 cubes .* expecting 1", - ) - - -class Test_extract_cubes__bare_single_constraint( - ExtractCubesSingleConstraintMixin, tests.IrisTest -): - """Testing with a single constraint as the argument.""" - - wrap_test_constraint_as_list_of_one = False - - -class Test_extract_cubes__list_single_constraint( - ExtractCubesSingleConstraintMixin, tests.IrisTest -): - """Testing with a list of one constraint as the argument.""" - - wrap_test_constraint_as_list_of_one = True - - -class 
Test_extract_cubes__multi_constraints(ExtractCubesMixin, tests.IrisTest): - """ - Testing when the 'constraints' arg is a list of multiple constraints. - """ - - def test_empty(self): - # Always fails. - self.check_extract( - [], [self.cons_x, self.cons_any], "Got 0 cubes .* expecting 1" - ) - - def test_single_cube_ok(self): - # Possible if the one cube matches all the constraints. - self.check_extract( - [self.cube_x], - [self.cons_x, self.cons_any], - [self.cube_x, self.cube_x], - ) - - def test_single_cube__fail_too_few(self): - self.check_extract( - [self.cube_x], - [self.cons_x, self.cons_y], - "Got 0 cubes .* expecting 1", - ) - - def test_multi_cube_ok(self): - self.check_extract( - [self.cube_x, self.cube_y], - [self.cons_y, self.cons_x], # N.B. reverse order ! - [self.cube_y, self.cube_x], - ) - - def test_multi_cube_castable_constraint_args(self): - # Check with args that *aren't* constraints, but can be converted - # ( via "as_constraint" ). - self.check_extract( - [self.cube_x, self.cube_y], - ["y", "x", self.cons_y], - [self.cube_y, self.cube_x, self.cube_y], - ) - - # NOTE: not bothering to check we can cast a 'None', as it will anyway - # fail with multiple input cubes. 
- - def test_multi_cube__fail_too_few(self): - self.check_extract( - [self.cube_x, self.cube_y], - [self.cons_x, self.cons_y, self.cons_none], - "Got 0 cubes .* expecting 1", - ) - - def test_multi_cube__fail_too_many(self): - self.check_extract( - [self.cube_x, self.cube_y], - [self.cons_x, self.cons_y, self.cons_any], - "Got 2 cubes .* expecting 1", - ) - - -class Test_iteration(tests.IrisTest): - def setUp(self): - self.scalar_cubes = CubeList() - for i in range(5): - for letter in "abcd": - self.scalar_cubes.append(Cube(i, long_name=letter)) - - def test_iterable(self): - self.assertTrue(isinstance(self.scalar_cubes, collections.Iterable)) - - def test_iteration(self): - letters = "abcd" * 5 - for i, cube in enumerate(self.scalar_cubes): - self.assertEqual(cube.long_name, letters[i]) - - -class TestPrint(tests.IrisTest): - def setUp(self): - self.cubes = CubeList([iris.tests.stock.lat_lon_cube()]) - - def test_summary(self): - expected = ( - "0: unknown / (unknown) " - " (latitude: 3; longitude: 4)" - ) - self.assertEqual(str(self.cubes), expected) - - def test_summary_name_unit(self): - self.cubes[0].long_name = "aname" - self.cubes[0].units = "1" - expected = ( - "0: aname / (1) " - " (latitude: 3; longitude: 4)" - ) - self.assertEqual(str(self.cubes), expected) - - def test_summary_stash(self): - self.cubes[0].attributes["STASH"] = STASH.from_msi("m01s00i004") - expected = ( - "0: m01s00i004 / (unknown) " - " (latitude: 3; longitude: 4)" - ) - self.assertEqual(str(self.cubes), expected) - - -class TestRealiseData(tests.IrisTest): - def test_realise_data(self): - # Simply check that calling CubeList.realise_data is calling - # _lazy_data.co_realise_cubes. - mock_cubes_list = [mock.Mock(ident=count) for count in range(3)] - test_cubelist = CubeList(mock_cubes_list) - call_patch = self.patch("iris._lazy_data.co_realise_cubes") - test_cubelist.realise_data() - # Check it was called once, passing cubes as *args. 
- self.assertEqual( - call_patch.call_args_list, [mock.call(*mock_cubes_list)] - ) - - -class Test_CubeList_copy(tests.IrisTest): - def setUp(self): - self.cube_list = iris.cube.CubeList() - self.copied_cube_list = self.cube_list.copy() - - def test_copy(self): - self.assertIsInstance(self.copied_cube_list, iris.cube.CubeList) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/cube/test_Cube__operators.py b/lib/iris/tests/unit/cube/test_Cube__operators.py deleted file mode 100644 index e860c57636..0000000000 --- a/lib/iris/tests/unit/cube/test_Cube__operators.py +++ /dev/null @@ -1,252 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.cube.Cube` class operators.""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -import operator - -import dask.array as da -import numpy as np -import numpy.ma as ma - -import iris -from iris._lazy_data import as_lazy_data -from iris.coords import DimCoord - - -class Test_lazy_maths(tests.IrisTest): - def build_lazy_cube(self, points, dtype=np.float64, bounds=None, nx=10): - data = np.arange(len(points) * nx, dtype=dtype) + 1 # Just avoid 0. 
- data = data.reshape(len(points), nx) - data = as_lazy_data(data) - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - lat = DimCoord(points, "latitude", bounds=bounds) - lon = DimCoord(np.arange(nx), "longitude") - cube.add_dim_coord(lat, 0) - cube.add_dim_coord(lon, 1) - return cube - - def check_common(self, base_cube, result): - self.assertTrue(base_cube.has_lazy_data()) - self.assertTrue(result.has_lazy_data()) - self.assertIsInstance(result.lazy_data(), da.core.Array) - - def cube_cube_math_op(self, c1, math_op): - result = math_op(c1, c1) - self.check_common(c1, result) - expected = math_op(c1.data, c1.data) - self.assertArrayAlmostEqual(result.data, expected) - - def cube_scalar_math_op(self, c1, scalar, math_op, commutative=True): - result = math_op(c1, scalar) - if commutative: - self.assertEqual(math_op(c1, scalar), math_op(scalar, c1)) - self.check_common(c1, result) - expected = math_op(c1.data, scalar) - self.assertArrayAlmostEqual(result.data, expected) - - def test_add_cubes__float(self): - c1 = self.build_lazy_cube([1, 2]) - op = operator.add - self.cube_cube_math_op(c1, op) - - def test_add_scalar__float(self): - c1 = self.build_lazy_cube([1, 2]) - scalar = 5 - op = operator.add - self.cube_scalar_math_op(c1, scalar, op) - - def test_mul_cubes__float(self): - c1 = self.build_lazy_cube([1, 2]) - op = operator.mul - self.cube_cube_math_op(c1, op) - - def test_mul_scalar__float(self): - c1 = self.build_lazy_cube([1, 2]) - scalar = 5 - op = operator.mul - self.cube_scalar_math_op(c1, scalar, op) - - def test_sub_cubes__float(self): - c1 = self.build_lazy_cube([1, 2]) - op = operator.sub - self.cube_cube_math_op(c1, op) - - def test_sub_scalar__float(self): - c1 = self.build_lazy_cube([1, 2]) - scalar = 5 - op = operator.sub - self.cube_scalar_math_op(c1, scalar, op, commutative=False) - - def test_div_cubes__float(self): - c1 = self.build_lazy_cube([1, 2]) - op = operator.truediv - self.cube_cube_math_op(c1, op) - - def 
test_div_scalar__float(self): - c1 = self.build_lazy_cube([1, 2]) - scalar = 5 - op = operator.truediv - self.cube_scalar_math_op(c1, scalar, op, commutative=False) - - def test_add_cubes__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - op = operator.add - self.cube_cube_math_op(c1, op) - - def test_add_scalar__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - scalar = 5 - op = operator.add - self.cube_scalar_math_op(c1, scalar, op) - - def test_mul_cubes__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - op = operator.mul - self.cube_cube_math_op(c1, op) - - def test_mul_scalar__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - scalar = 5 - op = operator.mul - self.cube_scalar_math_op(c1, scalar, op) - - def test_sub_cubes__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - op = operator.sub - self.cube_cube_math_op(c1, op) - - def test_sub_scalar__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - scalar = 5 - op = operator.sub - self.cube_scalar_math_op(c1, scalar, op, commutative=False) - - def test_div_cubes__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - op = operator.truediv - self.cube_cube_math_op(c1, op) - - def test_div_scalar__int(self): - c1 = self.build_lazy_cube([1, 2], dtype=np.int64) - scalar = 5 - op = operator.truediv - self.cube_scalar_math_op(c1, scalar, op, commutative=False) - - -class Test_lazy_maths__scalar_cube(tests.IrisTest): - def build_lazy_cube(self, value, dtype=np.float64): - data = as_lazy_data(np.array(value, dtype=dtype)) - return iris.cube.Cube(data, standard_name="air_temperature", units="K") - - def setUp(self): - self.c1 = self.build_lazy_cube(3) - self.c2 = self.build_lazy_cube(4) - self.c3 = self.build_lazy_cube(3, dtype=np.int64) - self.c4 = self.build_lazy_cube(4, dtype=np.int64) - - def check_common(self, c1, c2, math_op): - cube = math_op(c1, c2) - data = cube.data - self.assertTrue(isinstance(data, np.ndarray)) - 
self.assertEqual(data.shape, ()) - - def test_add_scalar__int(self): - c3, c4, op = self.c3, 5, operator.add - self.check_common(c3, c4, op) - - def test_add_cubes__int(self): - c3, c4, op = self.c3, self.c4, operator.add - self.check_common(c3, c4, op) - - def test_mul_scalar__int(self): - c3, c4, op = self.c3, 5, operator.mul - self.check_common(c3, c4, op) - - def test_mul_cubes__int(self): - c3, c4, op = self.c3, self.c4, operator.mul - self.check_common(c3, c4, op) - - def test_sub_scalar__int(self): - c3, c4, op = self.c3, 5, operator.sub - self.check_common(c3, c4, op) - - def test_sub_cubes__int(self): - c3, c4, op = self.c3, self.c4, operator.sub - self.check_common(c3, c4, op) - - def test_div_scalar__int(self): - c3, c4, op = self.c3, 5, operator.truediv - self.check_common(c3, c4, op) - - def test_div_cubes__int(self): - c3, c4, op = self.c3, self.c4, operator.truediv - self.check_common(c3, c4, op) - - def test_add_scalar__float(self): - c1, c2, op = self.c1, 5, operator.add - self.check_common(c1, c2, op) - - def test_add_cubes__float(self): - c1, c2, op = self.c1, self.c2, operator.add - self.check_common(c1, c2, op) - - def test_mul_scalar__float(self): - c1, c2, op = self.c1, 5, operator.mul - self.check_common(c1, c2, op) - - def test_mul_cubes__float(self): - c1, c2, op = self.c1, self.c2, operator.mul - self.check_common(c1, c2, op) - - def test_sub_scalar__float(self): - c1, c2, op = self.c1, 5, operator.sub - self.check_common(c1, c2, op) - - def test_sub_cubes__float(self): - c1, c2, op = self.c1, self.c2, operator.sub - self.check_common(c1, c2, op) - - def test_div_scalar__float(self): - c1, c2, op = self.c1, 5, operator.truediv - self.check_common(c1, c2, op) - - def test_div_cubes__float(self): - c1, c2, op = self.c1, self.c2, operator.truediv - self.check_common(c1, c2, op) - - -class Test_lazy_maths__masked_data(tests.IrisTest): - def build_lazy_cube(self, dtype=np.float64): - data = ma.array( - [[1.0, 1.0], [1.0, 100000.0]], mask=[[0, 
0], [0, 1]], dtype=dtype - ) - data = as_lazy_data(data) - cube = iris.cube.Cube(data, standard_name="air_temperature", units="K") - lat = DimCoord([-10, 10], "latitude") - lon = DimCoord([10, 20], "longitude") - cube.add_dim_coord(lat, 0) - cube.add_dim_coord(lon, 1) - return cube - - def test_subtract__float(self): - cube_a = self.build_lazy_cube() - cube_b = self.build_lazy_cube() - cube_c = cube_a - cube_b - self.assertTrue(ma.isMaskedArray(cube_c.data)) - - def test_subtract__int(self): - cube_a = self.build_lazy_cube(dtype=np.int64) - cube_b = self.build_lazy_cube(dtype=np.int64) - cube_c = cube_a - cube_b - self.assertTrue(ma.isMaskedArray(cube_c.data)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/data_manager/__init__.py b/lib/iris/tests/unit/data_manager/__init__.py deleted file mode 100644 index 41dcc0adf3..0000000000 --- a/lib/iris/tests/unit/data_manager/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris._data_manager` module.""" diff --git a/lib/iris/tests/unit/data_manager/test_DataManager.py b/lib/iris/tests/unit/data_manager/test_DataManager.py deleted file mode 100644 index e73714730f..0000000000 --- a/lib/iris/tests/unit/data_manager/test_DataManager.py +++ /dev/null @@ -1,598 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris._data_manager.DataManager`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import copy -from unittest import mock - -import numpy as np -import numpy.ma as ma - -from iris._data_manager import DataManager -from iris._lazy_data import as_lazy_data - - -class Test___copy__(tests.IrisTest): - def test(self): - dm = DataManager(np.array(0)) - emsg = "Shallow-copy of {!r} is not permitted." - name = type(dm).__name__ - with self.assertRaisesRegex(copy.Error, emsg.format(name)): - copy.copy(dm) - - -class Test___deepcopy__(tests.IrisTest): - def test(self): - dm = DataManager(np.array(0)) - method = "iris._data_manager.DataManager._deepcopy" - return_value = mock.sentinel.return_value - with mock.patch(method) as mocker: - mocker.return_value = return_value - result = copy.deepcopy(dm) - self.assertEqual(mocker.call_count, 1) - [args], kwargs = mocker.call_args - self.assertEqual(kwargs, dict()) - self.assertEqual(len(args), 2) - expected = [return_value, [dm]] - for item in args.values(): - self.assertIn(item, expected) - self.assertIs(result, return_value) - - -class Test___eq__(tests.IrisTest): - def setUp(self): - self.shape = (2, 3, 4) - self.size = np.prod(self.shape) - self.real_array = np.arange(self.size, dtype=float).reshape(self.shape) - - def test_real_with_real(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(self.real_array.copy()) - self.assertEqual(dm1, dm2) - - def test_real_with_real_failure(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(np.ones(self.shape)) - self.assertFalse(dm1 == dm2) - - def test_real_with_real__dtype_failure(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(self.real_array.astype(int)) - self.assertFalse(dm1 == dm2) - - def test_real_with_lazy_failure(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(as_lazy_data(self.real_array)) - self.assertFalse(dm1 == dm2) - self.assertFalse(dm2 == dm1) - - def test_lazy_with_lazy(self): - dm1 = DataManager(as_lazy_data(self.real_array)) - dm2 = 
DataManager(as_lazy_data(self.real_array)) - self.assertEqual(dm1, dm2) - - def test_lazy_with_lazy_failure(self): - dm1 = DataManager(as_lazy_data(self.real_array)) - dm2 = DataManager(as_lazy_data(self.real_array) * 10) - self.assertFalse(dm1 == dm2) - - def test_lazy_with_lazy__dtype_failure(self): - dm1 = DataManager(as_lazy_data(self.real_array)) - dm2 = DataManager(as_lazy_data(self.real_array).astype(int)) - self.assertFalse(dm1 == dm2) - - def test_non_DataManager_failure(self): - dm = DataManager(np.array(0)) - self.assertFalse(dm == 0) - - -class Test___ne__(tests.IrisTest): - def setUp(self): - self.shape = (2, 3, 4) - self.size = np.prod(self.shape) - self.real_array = np.arange(self.size, dtype=float).reshape(self.shape) - - def test_real_with_real(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(np.ones(self.shape)) - self.assertNotEqual(dm1, dm2) - - def test_real_with_real_failure(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(self.real_array.copy()) - self.assertFalse(dm1 != dm2) - - def test_real_with_real__dtype(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(self.real_array.astype(int)) - self.assertNotEqual(dm1, dm2) - - def test_real_with_lazy(self): - dm1 = DataManager(self.real_array) - dm2 = DataManager(as_lazy_data(self.real_array)) - self.assertNotEqual(dm1, dm2) - self.assertNotEqual(dm2, dm1) - - def test_lazy_with_lazy(self): - dm1 = DataManager(as_lazy_data(self.real_array)) - dm2 = DataManager(as_lazy_data(self.real_array) * 10) - self.assertNotEqual(dm1, dm2) - - def test_lazy_with_lazy_failure(self): - dm1 = DataManager(as_lazy_data(self.real_array)) - dm2 = DataManager(as_lazy_data(self.real_array)) - self.assertFalse(dm1 != dm2) - - def test_lazy_with_lazy__dtype(self): - dm1 = DataManager(as_lazy_data(self.real_array)) - dm2 = DataManager(as_lazy_data(self.real_array).astype(int)) - self.assertNotEqual(dm1, dm2) - - def test_non_DataManager(self): - dm = DataManager(np.array(0)) - 
self.assertNotEqual(dm, 0) - - -class Test___repr__(tests.IrisTest): - def setUp(self): - self.real_array = np.array(123) - masked_array = ma.array([0, 1], mask=[0, 1]) - self.lazy_array = as_lazy_data(masked_array) - self.name = DataManager.__name__ - - def test_real(self): - dm = DataManager(self.real_array) - result = repr(dm) - expected = "{}({!r})".format(self.name, self.real_array) - self.assertEqual(result, expected) - - def test_lazy(self): - dm = DataManager(self.lazy_array) - result = repr(dm) - expected = "{}({!r})".format(self.name, self.lazy_array) - self.assertEqual(result, expected) - - -class Test__assert_axioms(tests.IrisTest): - def setUp(self): - self.real_array = np.array(0) - self.lazy_array = as_lazy_data(self.real_array) - self.dm = DataManager(self.real_array) - - def test_array_none(self): - self.dm._real_array = None - emsg = "Unexpected data state, got no lazy and no real data" - with self.assertRaisesRegex(AssertionError, emsg): - self.dm._assert_axioms() - - def test_array_all(self): - self.dm._lazy_array = self.lazy_array - emsg = "Unexpected data state, got lazy and real data" - with self.assertRaisesRegex(AssertionError, emsg): - self.dm._assert_axioms() - - -class Test__deepcopy(tests.IrisTest): - def setUp(self): - self.shape = (2, 3, 4) - self.size = np.prod(self.shape) - self.real_array = np.arange(self.size, dtype=float).reshape(self.shape) - self.memo = dict() - - def test_real(self): - dm = DataManager(self.real_array) - result = dm._deepcopy(self.memo) - self.assertEqual(dm, result) - - def test_lazy(self): - dm = DataManager(as_lazy_data(self.real_array)) - result = dm._deepcopy(self.memo) - self.assertEqual(dm, result) - - def test_real_with_real(self): - dm = DataManager(self.real_array) - data = self.real_array.copy() * 10 - result = dm._deepcopy(self.memo, data=data) - expected = DataManager(data) - self.assertEqual(result, expected) - self.assertIs(result._real_array, data) - - def test_real_with_lazy(self): - dm = 
DataManager(self.real_array) - data = as_lazy_data(self.real_array) * 10 - result = dm._deepcopy(self.memo, data=data) - expected = DataManager(data) - self.assertEqual(result, expected) - self.assertIs(result._lazy_array, data) - - def test_lazy_with_real(self): - dm = DataManager(as_lazy_data(self.real_array)) - data = self.real_array.copy() * 10 - result = dm._deepcopy(self.memo, data=data) - expected = DataManager(data) - self.assertEqual(result, expected) - self.assertIs(result._real_array, data) - - def test_lazy_with_lazy(self): - dm = DataManager(as_lazy_data(self.real_array)) - data = as_lazy_data(self.real_array) * 10 - result = dm._deepcopy(self.memo, data=data) - expected = DataManager(data) - self.assertEqual(result, expected) - self.assertIs(result._lazy_array, data) - - def test_real_with_real_failure(self): - dm = DataManager(self.real_array) - emsg = "Cannot copy" - with self.assertRaisesRegex(ValueError, emsg): - dm._deepcopy(self.memo, data=np.array(0)) - - def test_real_with_lazy_failure(self): - dm = DataManager(self.real_array) - emsg = "Cannot copy" - with self.assertRaisesRegex(ValueError, emsg): - dm._deepcopy(self.memo, data=as_lazy_data(np.array(0))) - - def test_lazy_with_real_failure(self): - dm = DataManager(as_lazy_data(self.real_array)) - emsg = "Cannot copy" - with self.assertRaisesRegex(ValueError, emsg): - dm._deepcopy(self.memo, data=np.array(0)) - - def test_lazy_with_lazy_failure(self): - dm = DataManager(as_lazy_data(self.real_array)) - emsg = "Cannot copy" - with self.assertRaisesRegex(ValueError, emsg): - dm._deepcopy(self.memo, data=as_lazy_data(np.array(0))) - - -class Test_data__getter(tests.IrisTest): - def setUp(self): - shape = (2, 3, 4) - size = np.prod(shape) - self.real_array = np.arange(size).reshape(shape) - self.lazy_array = as_lazy_data(self.real_array) - self.mask_array = ma.masked_array(self.real_array) - self.mask_array_masked = self.mask_array.copy() - self.mask_array_masked[0, 0, 0] = ma.masked - self.dtype 
= self.mask_array.dtype - self.fill_value = self.mask_array.fill_value - self.lazy_mask_array = as_lazy_data(self.mask_array) - self.lazy_mask_array_masked = as_lazy_data(self.mask_array_masked) - - def test_with_real_array(self): - dm = DataManager(self.real_array) - self.assertFalse(dm.has_lazy_data()) - result = dm.data - self.assertFalse(dm.has_lazy_data()) - self.assertIs(result, self.real_array) - - def test_with_lazy_array(self): - dm = DataManager(self.lazy_array) - self.assertTrue(dm.has_lazy_data()) - result = dm.data - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(result, self.real_array) - - def test_with_lazy_mask_array__not_masked(self): - dm = DataManager(self.lazy_mask_array) - self.assertTrue(dm.has_lazy_data()) - result = dm.data - self.assertFalse(dm.has_lazy_data()) - self.assertIsInstance(result, np.core.ndarray) - self.assertEqual(dm.dtype, self.dtype) - self.assertEqual(result.fill_value, self.fill_value) - self.assertArrayEqual(result, self.real_array) - - def test_with_lazy_mask_array__masked(self): - dm = DataManager(self.lazy_mask_array_masked) - self.assertTrue(dm.has_lazy_data()) - result = dm.data - self.assertFalse(dm.has_lazy_data()) - self.assertIsInstance(result, ma.MaskedArray) - self.assertEqual(dm.dtype, self.dtype) - self.assertEqual(result.fill_value, self.fill_value) - self.assertArrayEqual(result, self.mask_array_masked) - - def test_with_real_masked_constant(self): - masked_data = ma.masked_array([666], mask=True, dtype=np.dtype("f8")) - masked_constant = masked_data[0] - dm = DataManager(masked_constant) - result = dm.data - self.assertFalse(dm.has_lazy_data()) - self.assertIsInstance(result, ma.MaskedArray) - self.assertNotIsInstance(result, ma.core.MaskedConstant) - self.assertMaskedArrayEqual(result, masked_data) - - def test_with_lazy_masked_constant(self): - masked_data = ma.masked_array([666], mask=True) - masked_constant = masked_data[0] - lazy_masked_constant = as_lazy_data(masked_constant) - dm = 
DataManager(lazy_masked_constant) - result = dm.data - self.assertFalse(dm.has_lazy_data()) - self.assertIsInstance(result, ma.MaskedArray) - self.assertNotIsInstance(result, ma.core.MaskedConstant) - self.assertMaskedArrayEqual(result, masked_data) - - -class Test_data__setter(tests.IrisTest): - def test_zero_ndim_real_with_scalar_int(self): - value = 456 - dm = DataManager(np.array(123)) - self.assertFalse(dm.has_lazy_data()) - dm.data = value - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, np.array(value)) - - def test_zero_ndim_real_with_scalar_float(self): - value = 456.0 - dm = DataManager(np.array(123)) - self.assertFalse(dm.has_lazy_data()) - dm.data = value - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, np.array(value)) - - def test_zero_ndim_real_with_zero_ndim_real(self): - real_array = np.array(456) - dm = DataManager(np.array(123)) - self.assertFalse(dm.has_lazy_data()) - dm.data = real_array - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, real_array) - - def test_zero_ndim_real_with_zero_ndim_lazy(self): - lazy_array = as_lazy_data(np.array(456)) - dm = DataManager(np.array(123)) - self.assertFalse(dm.has_lazy_data()) - dm.data = lazy_array - self.assertTrue(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, lazy_array.compute()) - - def test_zero_ndim_lazy_with_zero_ndim_real(self): - real_array = np.array(456) - dm = DataManager(as_lazy_data(np.array(123))) - self.assertTrue(dm.has_lazy_data()) - dm.data = real_array - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, real_array) - - def test_zero_ndim_lazy_with_zero_ndim_lazy(self): - lazy_array = as_lazy_data(np.array(456)) - dm = DataManager(as_lazy_data(np.array(123))) - self.assertTrue(dm.has_lazy_data()) - dm.data = lazy_array - self.assertTrue(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, lazy_array.compute()) - - def test_zero_ndim_real_to_scalar_1d_real_promote(self): - real_array = 
np.array([456]) - dm = DataManager(np.array(123)) - self.assertFalse(dm.has_lazy_data()) - dm.data = real_array - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, real_array) - - def test_zero_ndim_real_to_scalar_1d_lazy_promote(self): - lazy_array = as_lazy_data(np.array([456])) - dm = DataManager(np.array(123)) - self.assertFalse(dm.has_lazy_data()) - dm.data = lazy_array - self.assertTrue(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, lazy_array.compute()) - - def test_zero_ndim_lazy_to_scalar_1d_real_promote(self): - real_array = np.array([456]) - dm = DataManager(as_lazy_data(np.array(123))) - self.assertTrue(dm.has_lazy_data()) - dm.data = real_array - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, real_array) - - def test_zero_ndim_lazy_to_scalar_1d_lazy_promote(self): - lazy_array = as_lazy_data(np.array([456])) - dm = DataManager(as_lazy_data(np.array(123))) - self.assertTrue(dm.has_lazy_data()) - dm.data = lazy_array - self.assertTrue(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, lazy_array.compute()) - - def test_scalar_1d_to_zero_ndim_fail(self): - dm = DataManager(np.array([123])) - emsg = r"Require data with shape \(1,\), got \(\)." 
- with self.assertRaisesRegex(ValueError, emsg): - dm.data = 456 - - def test_nd_real_to_nd_real(self): - shape = (2, 3, 4) - size = np.prod(shape) - real_array = np.arange(size).reshape(shape) - dm = DataManager(real_array * 10) - self.assertFalse(dm.has_lazy_data()) - dm.data = real_array - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, real_array) - - def test_nd_real_to_nd_lazy(self): - shape = (2, 3, 4) - size = np.prod(shape) - real_array = np.arange(size).reshape(shape) - lazy_array = as_lazy_data(real_array) * 10 - dm = DataManager(real_array) - self.assertFalse(dm.has_lazy_data()) - dm.data = lazy_array - self.assertTrue(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, lazy_array.compute()) - - def test_nd_lazy_to_nd_real(self): - shape = (2, 3, 4) - size = np.prod(shape) - real_array = np.arange(size).reshape(shape) - lazy_array = as_lazy_data(real_array) - dm = DataManager(lazy_array * 10) - self.assertTrue(dm.has_lazy_data()) - dm.data = real_array - self.assertFalse(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, real_array) - - def test_nd_lazy_to_nd_lazy(self): - shape = (2, 3, 4) - size = np.prod(shape) - real_array = np.arange(size).reshape(shape) - lazy_array = as_lazy_data(real_array) - dm = DataManager(lazy_array * 10) - self.assertTrue(dm.has_lazy_data()) - dm.data = lazy_array - self.assertTrue(dm.has_lazy_data()) - self.assertArrayEqual(dm.data, lazy_array.compute()) - - def test_coerce_to_ndarray(self): - shape = (2, 3) - size = np.prod(shape) - real_array = np.arange(size).reshape(shape) - matrix = np.matrix(real_array) - dm = DataManager(real_array) - dm.data = matrix - self.assertIsInstance(dm._real_array, np.core.ndarray) - self.assertIsInstance(dm.data, np.core.ndarray) - self.assertArrayEqual(dm.data, real_array) - - def test_real_masked_constant_to_array(self): - masked_data = ma.masked_array([666], mask=True, dtype=np.dtype("f8")) - masked_constant = masked_data[0] - dm = DataManager(masked_constant) - 
self.assertIsInstance(dm._real_array, ma.MaskedArray) - self.assertNotIsInstance(dm._real_array, ma.core.MaskedConstant) - self.assertIsInstance(dm.data, ma.MaskedArray) - self.assertNotIsInstance(dm.data, ma.core.MaskedConstant) - self.assertMaskedArrayEqual(dm.data, masked_data) - - -class Test_dtype(tests.IrisTest): - def setUp(self): - self.real_array = np.array(0, dtype=np.dtype("int64")) - self.lazy_array = as_lazy_data(np.array(0, dtype=np.dtype("float64"))) - - def test_real_array(self): - dm = DataManager(self.real_array) - self.assertEqual(dm.dtype, np.dtype("int64")) - - def test_lazy_array(self): - dm = DataManager(self.lazy_array) - self.assertEqual(dm.dtype, np.dtype("float64")) - - -class Test_ndim(tests.IrisTest): - def test_ndim_0(self): - real_array = np.array(0) - dm = DataManager(real_array) - self.assertEqual(dm.ndim, 0) - lazy_array = as_lazy_data(real_array) - dm = DataManager(lazy_array) - self.assertEqual(dm.ndim, 0) - - def test_ndim_nd(self): - shape = (2, 3, 4) - real_array = np.arange(24).reshape(shape) - dm = DataManager(real_array) - self.assertEqual(dm.ndim, len(shape)) - lazy_array = as_lazy_data(real_array) - dm = DataManager(lazy_array) - self.assertEqual(dm.ndim, len(shape)) - - -class Test_shape(tests.IrisTest): - def test_shape_scalar(self): - real_array = np.array(0) - dm = DataManager(real_array) - self.assertEqual(dm.shape, ()) - lazy_array = as_lazy_data(real_array) - dm = DataManager(lazy_array) - self.assertEqual(dm.shape, ()) - - def test_shape_nd(self): - shape = (2, 3, 4) - real_array = np.arange(24).reshape(shape) - dm = DataManager(real_array) - self.assertEqual(dm.shape, shape) - lazy_array = as_lazy_data(real_array) - dm = DataManager(lazy_array) - self.assertEqual(dm.shape, shape) - - -class Test_copy(tests.IrisTest): - def setUp(self): - self.method = "iris._data_manager.DataManager._deepcopy" - self.data = mock.sentinel.data - self.return_value = mock.sentinel.return_value - self.memo = {} - - def test(self): - 
dm = DataManager(np.array(0)) - kwargs = dict(data=self.data) - with mock.patch(self.method) as mocker: - mocker.return_value = self.return_value - result = dm.copy(data=self.data) - mocker.assert_called_once_with(self.memo, **kwargs) - self.assertIs(result, self.return_value) - - -class Test_core_data(tests.IrisTest): - def test_real_array(self): - real_array = np.array(0) - dm = DataManager(real_array) - self.assertIs(dm.core_data(), real_array) - - def test_lazy_array(self): - lazy_array = as_lazy_data(np.array(0)) - dm = DataManager(lazy_array) - self.assertIs(dm.core_data(), lazy_array) - - -class Test_has_lazy_data(tests.IrisTest): - def setUp(self): - self.real_array = np.array(0) - self.lazy_array = as_lazy_data(self.real_array) - - def test_with_lazy_array(self): - dm = DataManager(self.lazy_array) - self.assertTrue(dm.has_lazy_data()) - - def test_with_real_array(self): - dm = DataManager(self.real_array) - self.assertFalse(dm.has_lazy_data()) - - -class Test_lazy_data(tests.IrisTest): - def setUp(self): - self.real_array = np.array(0) - self.lazy_array = as_lazy_data(self.real_array) - - def test_with_real_array(self): - dm = DataManager(self.real_array) - self.assertFalse(dm.has_lazy_data()) - result = dm.lazy_data() - self.assertFalse(dm.has_lazy_data()) - self.assertEqual(result, self.lazy_array) - self.assertFalse(dm.has_lazy_data()) - - def test_with_lazy_array(self): - dm = DataManager(self.lazy_array) - self.assertTrue(dm.has_lazy_data()) - result = dm.lazy_data() - self.assertTrue(dm.has_lazy_data()) - self.assertIs(result, dm._lazy_array) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/__init__.py b/lib/iris/tests/unit/experimental/__init__.py deleted file mode 100644 index 438827bab2..0000000000 --- a/lib/iris/tests/unit/experimental/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental` package.""" diff --git a/lib/iris/tests/unit/experimental/raster/__init__.py b/lib/iris/tests/unit/experimental/raster/__init__.py deleted file mode 100644 index 5f85d810c9..0000000000 --- a/lib/iris/tests/unit/experimental/raster/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental.raster` module.""" diff --git a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py b/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py deleted file mode 100644 index a3b68ef761..0000000000 --- a/lib/iris/tests/unit/experimental/raster/test_export_geotiff.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.experimental.raster.export_geotiff` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import re - -import numpy as np - -try: - from osgeo import gdal - - from iris.experimental.raster import export_geotiff -except ImportError: - gdal = None - -from iris.coord_systems import GeogCS -from iris.coords import DimCoord -from iris.cube import Cube - - -@tests.skip_gdal -class TestDtypeAndValues(tests.IrisTest): - def _cube(self, dtype): - data = np.arange(12).reshape(3, 4).astype(dtype) + 20 - cube = Cube(data, "air_pressure_anomaly") - coord = DimCoord(np.arange(3), "latitude", units="degrees") - coord.guess_bounds() - cube.add_dim_coord(coord, 0) - coord = DimCoord(np.arange(4), "longitude", units="degrees") - coord.guess_bounds() - cube.add_dim_coord(coord, 1) - return cube - - def _check_dtype(self, dtype, gdal_dtype): - cube = self._cube(dtype) - with self.temp_filename(".tif") as temp_filename: - export_geotiff(cube, temp_filename) - dataset = gdal.Open(temp_filename, gdal.GA_ReadOnly) - band = dataset.GetRasterBand(1) - self.assertEqual(band.DataType, gdal_dtype) - self.assertEqual(band.ComputeRasterMinMax(1), (20, 31)) - - def test_int16(self): - self._check_dtype("i2", gdal.GDT_Int16) - - def test_int32(self): - self._check_dtype("i4", gdal.GDT_Int32) - - def test_uint8(self): - self._check_dtype("u1", gdal.GDT_Byte) - - def test_uint16(self): - self._check_dtype("u2", gdal.GDT_UInt16) - - def test_uint32(self): - self._check_dtype("u4", gdal.GDT_UInt32) - - def test_float32(self): - self._check_dtype("f4", gdal.GDT_Float32) - - def test_float64(self): - self._check_dtype("f8", gdal.GDT_Float64) - - def test_invalid(self): - cube = self._cube("i1") - with self.assertRaises(ValueError): - with self.temp_filename(".tif") as temp_filename: - export_geotiff(cube, temp_filename) - - -@tests.skip_gdal -class TestProjection(tests.IrisTest): - def _cube(self, ellipsoid=None): - data = np.arange(12).reshape(3, 4).astype("u1") - cube = Cube(data, "air_pressure_anomaly") - coord = DimCoord( - np.arange(3), 
"latitude", units="degrees", coord_system=ellipsoid - ) - coord.guess_bounds() - cube.add_dim_coord(coord, 0) - coord = DimCoord( - np.arange(4), "longitude", units="degrees", coord_system=ellipsoid - ) - coord.guess_bounds() - cube.add_dim_coord(coord, 1) - return cube - - def test_no_ellipsoid(self): - cube = self._cube() - with self.temp_filename(".tif") as temp_filename: - export_geotiff(cube, temp_filename) - dataset = gdal.Open(temp_filename, gdal.GA_ReadOnly) - self.assertEqual(dataset.GetProjection(), "") - - def test_sphere(self): - cube = self._cube(GeogCS(6377000)) - with self.temp_filename(".tif") as temp_filename: - export_geotiff(cube, temp_filename) - dataset = gdal.Open(temp_filename, gdal.GA_ReadOnly) - projection_string = dataset.GetProjection() - # String has embedded floating point values, - # Test with values to N decimal places, using a regular expression. - re_pattern = ( - r'GEOGCS\["unknown",DATUM\["unknown",' - r'SPHEROID\["unknown",637....,0\]\],PRIMEM\["Greenwich",0\],' - r'UNIT\["degree",0.01745[0-9]*,AUTHORITY\["EPSG","9122"\]\],' - r'AXIS\["Latitude",NORTH\],AXIS\["Longitude",EAST\]\]' - ) - re_exp = re.compile(re_pattern) - self.assertIsNotNone( - re_exp.match(projection_string), - "projection string {!r} does not match {!r}".format( - projection_string, re_pattern - ), - ) - - def test_ellipsoid(self): - cube = self._cube(GeogCS(6377000, 6360000)) - with self.temp_filename(".tif") as temp_filename: - export_geotiff(cube, temp_filename) - dataset = gdal.Open(temp_filename, gdal.GA_ReadOnly) - projection_string = dataset.GetProjection() - # String has embedded floating point values, - # Test with values to N decimal places, using a regular expression. 
- re_pattern = ( - r'GEOGCS\["unknown",DATUM\["unknown",' - r'SPHEROID\["unknown",637....,375.117[0-9]*\]\],' - r'PRIMEM\["Greenwich",0\],UNIT\["degree",0.01745[0-9]*,' - r'AUTHORITY\["EPSG","9122"\]\],AXIS\["Latitude",NORTH\],' - r'AXIS\["Longitude",EAST\]\]' - ) - re_exp = re.compile(re_pattern) - self.assertIsNotNone( - re_exp.match(projection_string), - "projection string {!r} does not match {!r}".format( - projection_string, re_pattern - ), - ) - - -@tests.skip_gdal -class TestGeoTransform(tests.IrisTest): - def test_(self): - data = np.arange(12).reshape(3, 4).astype(np.uint8) - cube = Cube(data, "air_pressure_anomaly") - coord = DimCoord([30, 40, 50], "latitude", units="degrees") - coord.guess_bounds() - cube.add_dim_coord(coord, 0) - coord = DimCoord([-10, -5, 0, 5], "longitude", units="degrees") - coord.guess_bounds() - cube.add_dim_coord(coord, 1) - with self.temp_filename(".tif") as temp_filename: - export_geotiff(cube, temp_filename) - dataset = gdal.Open(temp_filename, gdal.GA_ReadOnly) - self.assertEqual( - dataset.GetGeoTransform(), (-12.5, 5, 0, 55, 0, -10) - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/regrid/__init__.py b/lib/iris/tests/unit/experimental/regrid/__init__.py deleted file mode 100644 index 578c15f11c..0000000000 --- a/lib/iris/tests/unit/experimental/regrid/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.experimental.regrid` package.""" diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py deleted file mode 100644 index 5ec3c956b9..0000000000 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_area_weighted_rectilinear_src_and_grid.py +++ /dev/null @@ -1,192 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function -:func:`iris.experimental.regrid.regrid_area_weighted_rectilinear_src_and_grid`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.coord_systems import GeogCS -from iris.coords import DimCoord -from iris.cube import Cube -from iris.experimental.regrid import ( - regrid_area_weighted_rectilinear_src_and_grid as regrid, -) -from iris.tests.experimental.regrid.test_regrid_area_weighted_rectilinear_src_and_grid import ( - _resampled_grid, -) - - -class TestMdtol(tests.IrisTest): - # Tests to check the masking behaviour controlled by mdtol kwarg. - def setUp(self): - # A (3, 2, 4) cube with a masked element. 
- cube = Cube(np.ma.arange(24, dtype=np.int32).reshape((3, 2, 4))) - cs = GeogCS(6371229) - coord = DimCoord( - points=np.array([-1, 0, 1], dtype=np.int32), - standard_name="latitude", - units="degrees", - coord_system=cs, - ) - cube.add_dim_coord(coord, 0) - coord = DimCoord( - points=np.array([-1, 0, 1, 2], dtype=np.int32), - standard_name="longitude", - units="degrees", - coord_system=cs, - ) - cube.add_dim_coord(coord, 2) - cube.coord("latitude").guess_bounds() - cube.coord("longitude").guess_bounds() - cube.data[1, 1, 2] = ma.masked - self.src_cube = cube - # Create (7, 2, 9) grid cube. - self.grid_cube = _resampled_grid(cube, 2.3, 2.4) - - def test_default(self): - res = regrid(self.src_cube, self.grid_cube) - expected_mask = np.zeros((7, 2, 9), bool) - expected_mask[2:5, 1, 4:7] = True - self.assertArrayEqual(res.data.mask, expected_mask) - - def test_zero(self): - res = regrid(self.src_cube, self.grid_cube, mdtol=0) - expected_mask = np.zeros((7, 2, 9), bool) - expected_mask[2:5, 1, 4:7] = True - self.assertArrayEqual(res.data.mask, expected_mask) - - def test_one(self): - res = regrid(self.src_cube, self.grid_cube, mdtol=1) - expected_mask = np.zeros((7, 2, 9), bool) - # Only a single cell has all contributing cells masked. - expected_mask[3, 1, 5] = True - self.assertArrayEqual(res.data.mask, expected_mask) - - def test_fraction_below_min(self): - # Cells in target grid that overlap with the masked src cell - # have the following fractions (approx. due to spherical area). - # 4 5 6 7 - # 2 ---------------------- - # | 0.33 | 0.66 | 0.50 | - # 3 ---------------------- - # | 0.33 | 1.00 | 0.75 | - # 4 ---------------------- - # | 0.33 | 0.66 | 0.50 | - # 5 ---------------------- - # - - # Threshold less than minimum fraction. 
- mdtol = 0.2 - res = regrid(self.src_cube, self.grid_cube, mdtol=mdtol) - expected_mask = np.zeros((7, 2, 9), bool) - expected_mask[2:5, 1, 4:7] = True - self.assertArrayEqual(res.data.mask, expected_mask) - - def test_fraction_between_min_and_max(self): - # Threshold between min and max fraction. See - # test_fraction_below_min() comment for picture showing - # the fractions of masked data. - mdtol = 0.6 - res = regrid(self.src_cube, self.grid_cube, mdtol=mdtol) - expected_mask = np.zeros((7, 2, 9), bool) - expected_mask[2:5, 1, 5] = True - expected_mask[3, 1, 6] = True - self.assertArrayEqual(res.data.mask, expected_mask) - - def test_src_not_masked_array(self): - self.src_cube.data = self.src_cube.data.filled(1.0) - res = regrid(self.src_cube, self.grid_cube, mdtol=0.9) - self.assertFalse(ma.isMaskedArray(res.data)) - - def test_boolean_mask(self): - self.src_cube.data = np.ma.arange(24).reshape(3, 2, 4) - res = regrid(self.src_cube, self.grid_cube, mdtol=0.9) - self.assertEqual(ma.count_masked(res.data), 0) - - def test_scalar_no_overlap(self): - # Slice src so result collapses to a scalar. - src_cube = self.src_cube[:, 1, :] - # Regrid to a single cell with no overlap with masked src cells. - grid_cube = self.grid_cube[2, 1, 3] - res = regrid(src_cube, grid_cube, mdtol=0.8) - self.assertFalse(ma.isMaskedArray(res.data)) - - def test_scalar_with_overlap_below_mdtol(self): - # Slice src so result collapses to a scalar. - src_cube = self.src_cube[:, 1, :] - # Regrid to a single cell with 50% overlap with masked src cells. - grid_cube = self.grid_cube[3, 1, 4] - # Set threshold (mdtol) to greater than 0.5 (50%). - res = regrid(src_cube, grid_cube, mdtol=0.6) - self.assertEqual(ma.count_masked(res.data), 0) - - def test_scalar_with_overlap_above_mdtol(self): - # Slice src so result collapses to a scalar. - src_cube = self.src_cube[:, 1, :] - # Regrid to a single cell with 50% overlap with masked src cells. 
- grid_cube = self.grid_cube[3, 1, 4] - # Set threshold (mdtol) to less than 0.5 (50%). - res = regrid(src_cube, grid_cube, mdtol=0.4) - self.assertEqual(ma.count_masked(res.data), 1) - - -class TestWrapAround(tests.IrisTest): - def test_float_tolerant_equality(self): - # Ensure that floating point numbers are treated appropriately when - # introducing precision difference from wrap_around. - source = Cube([[1]]) - cs = GeogCS(6371229) - - bounds = np.array([[-91, 0]], dtype="float") - points = bounds.mean(axis=1) - lon_coord = DimCoord( - points, - bounds=bounds, - standard_name="longitude", - units="degrees", - coord_system=cs, - ) - source.add_aux_coord(lon_coord, 1) - - bounds = np.array([[-90, 90]], dtype="float") - points = bounds.mean(axis=1) - lat_coord = DimCoord( - points, - bounds=bounds, - standard_name="latitude", - units="degrees", - coord_system=cs, - ) - source.add_aux_coord(lat_coord, 0) - - grid = Cube([[0]]) - bounds = np.array([[270, 360]], dtype="float") - points = bounds.mean(axis=1) - lon_coord = DimCoord( - points, - bounds=bounds, - standard_name="longitude", - units="degrees", - coord_system=cs, - ) - grid.add_aux_coord(lon_coord, 1) - grid.add_aux_coord(lat_coord, 0) - - res = regrid(source, grid) - # The result should be equal to the source data and NOT be masked. - self.assertArrayEqual(res.data, np.array([1.0])) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py b/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py deleted file mode 100644 index b0908dd2e4..0000000000 --- a/lib/iris/tests/unit/experimental/regrid/test_regrid_weighted_curvilinear_to_rectilinear.py +++ /dev/null @@ -1,378 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Test function -:func:`iris.experimental.regrid.regrid_weighted_curvilinear_to_rectilinear`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import copy - -import numpy as np -import numpy.ma as ma - -import iris -from iris.coord_systems import GeogCS, LambertConformal -import iris.coords -from iris.coords import AuxCoord, DimCoord -import iris.cube -from iris.experimental.regrid import ( - regrid_weighted_curvilinear_to_rectilinear as regrid, -) -from iris.fileformats.pp import EARTH_RADIUS - -PLAIN_LATLON_CS = GeogCS(EARTH_RADIUS) - - -class Test(tests.IrisTest): - def setUp(self): - # Source cube. - self.test_src_name = "air_temperature" - self.test_src_units = "K" - self.test_src_data = ma.arange(1, 13, dtype=np.float64).reshape(3, 4) - self.test_src_attributes = dict(wibble="wobble") - self.test_scalar_coord = iris.coords.DimCoord( - [1], long_name="test_scalar_coord" - ) - self.src = iris.cube.Cube( - self.test_src_data, - standard_name=self.test_src_name, - units=self.test_src_units, - aux_coords_and_dims=[(self.test_scalar_coord, None)], - attributes=self.test_src_attributes, - ) - - # Source cube x-coordinates. - points = np.array( - [[10, 20, 200, 220], [110, 120, 180, 185], [190, 203, 211, 220]] - ) - self.src_x_positive = iris.coords.AuxCoord( - points, - standard_name="longitude", - units="degrees", - coord_system=PLAIN_LATLON_CS, - ) - self.src_x_transpose = iris.coords.AuxCoord( - points.T, - standard_name="longitude", - units="degrees", - coord_system=PLAIN_LATLON_CS, - ) - points = np.array( - [ - [-180, -176, -170, -150], - [-180, -179, -178, -177], - [-170, -168, -159, -140], - ] - ) - self.src_x_negative = iris.coords.AuxCoord( - points, - standard_name="longitude", - units="degrees", - coord_system=PLAIN_LATLON_CS, - ) - - # Source cube y-coordinates. 
- points = np.array([[0, 4, 3, 1], [5, 7, 10, 6], [12, 20, 15, 30]]) - self.src_y = iris.coords.AuxCoord( - points, - standard_name="latitude", - units="degrees", - coord_system=PLAIN_LATLON_CS, - ) - self.src_y_transpose = iris.coords.AuxCoord( - points.T, - standard_name="latitude", - units="degrees", - coord_system=PLAIN_LATLON_CS, - ) - - # Weights. - self.weight_factor = 10 - self.weights = np.asarray(self.test_src_data) * self.weight_factor - - # Target grid cube. - self.grid = iris.cube.Cube(np.zeros((2, 2))) - - # Target grid cube x-coordinates. - self.grid_x_inc = iris.coords.DimCoord( - [187, 200], - standard_name="longitude", - units="degrees", - bounds=[[180, 190], [190, 220]], - coord_system=PLAIN_LATLON_CS, - ) - self.grid_x_dec = iris.coords.DimCoord( - [200, 187], - standard_name="longitude", - units="degrees", - bounds=[[220, 190], [190, 180]], - coord_system=PLAIN_LATLON_CS, - ) - - # Target grid cube y-coordinates. - self.grid_y_inc = iris.coords.DimCoord( - [2, 10], - standard_name="latitude", - units="degrees", - bounds=[[0, 5], [5, 30]], - coord_system=PLAIN_LATLON_CS, - ) - self.grid_y_dec = iris.coords.DimCoord( - [10, 2], - standard_name="latitude", - units="degrees", - bounds=[[30, 5], [5, 0]], - coord_system=PLAIN_LATLON_CS, - ) - - def _weighted_mean(self, points): - points = np.asarray(points, dtype=np.float64) - weights = points * self.weight_factor - numerator = denominator = 0 - for point, weight in zip(points, weights): - numerator += point * weight - denominator += weight - return numerator / denominator - - def _expected_cube(self, data): - cube = iris.cube.Cube(data) - cube.metadata = copy.deepcopy(self.src.metadata) - grid_x = self.grid.coord(axis="x") - grid_y = self.grid.coord(axis="y") - cube.add_dim_coord(grid_x.copy(), self.grid.coord_dims(grid_x)) - cube.add_dim_coord(grid_y.copy(), self.grid.coord_dims(grid_y)) - src_x = self.src.coord(axis="x") - src_y = self.src.coord(axis="y") - for coord in self.src.aux_coords: - if 
coord is not src_x and coord is not src_y: - if not self.src.coord_dims(coord): - cube.add_aux_coord(coord) - return cube - - def test_aligned_src_x(self): - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_positive, (0, 1)) - self.grid.add_dim_coord(self.grid_y_inc, 0) - self.grid.add_dim_coord(self.grid_x_inc, 1) - result = regrid(self.src, self.weights, self.grid) - data = np.array( - [ - 0, - self._weighted_mean([3]), - self._weighted_mean([7, 8]), - self._weighted_mean([9, 10, 11]), - ] - ).reshape(2, 2) - expected = self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[True, False], [False, False]]) - self.assertArrayEqual(result.data.mask, mask) - - def test_non_latlon(self): - odd_coord_system = LambertConformal() - co_src_y = AuxCoord( - self.src_y.points, - standard_name="projection_y_coordinate", - units="km", - coord_system=odd_coord_system, - ) - co_src_x = AuxCoord( - self.src_x_positive.points, - standard_name="projection_x_coordinate", - units="km", - coord_system=odd_coord_system, - ) - co_grid_y = DimCoord( - self.grid_y_inc.points, - bounds=self.grid_y_inc.bounds, - standard_name="projection_y_coordinate", - units="km", - coord_system=odd_coord_system, - ) - co_grid_x = DimCoord( - self.grid_x_inc.points, - bounds=self.grid_x_inc.bounds, - standard_name="projection_x_coordinate", - units="km", - coord_system=odd_coord_system, - ) - self.src.add_aux_coord(co_src_y, (0, 1)) - self.src.add_aux_coord(co_src_x, (0, 1)) - self.grid.add_dim_coord(co_grid_y, 0) - self.grid.add_dim_coord(co_grid_x, 1) - result = regrid(self.src, self.weights, self.grid) - data = np.array( - [ - 0, - self._weighted_mean([3]), - self._weighted_mean([7, 8]), - self._weighted_mean([9, 10, 11]), - ] - ).reshape(2, 2) - expected = self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[True, False], [False, False]]) - self.assertArrayEqual(result.data.mask, mask) - - def 
test_src_xy_not_2d(self): - new_shape = (2, 2, 3) - # Reshape the source cube, including the X and Y coordinates, - # from (3, 4) to (2, 2, 3). - # This is really an "invalid" reshape, but should still work because - # the XY shape is actually irrelevant to the regrid operation. - src = iris.cube.Cube( - self.test_src_data.reshape(new_shape), - standard_name=self.test_src_name, - units=self.test_src_units, - aux_coords_and_dims=[(self.test_scalar_coord, None)], - attributes=self.test_src_attributes, - ) - co_src_y = self.src_y.copy(points=self.src_y.points.reshape(new_shape)) - co_src_x = self.src_x_positive.copy( - points=self.src_x_positive.points.reshape(new_shape) - ) - src.add_aux_coord(co_src_y, (0, 1, 2)) - src.add_aux_coord(co_src_x, (0, 1, 2)) - self.grid.add_dim_coord(self.grid_y_inc, 0) - self.grid.add_dim_coord(self.grid_x_inc, 1) - weights = self.weights.reshape(new_shape) - result = regrid(src, weights, self.grid) - # NOTE: set the grid of self.src to make '_expected_cube' work ... - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_positive, (0, 1)) - # ... given that, we expect exactly the same 'normal' result. 
- data = np.array( - [ - 0, - self._weighted_mean([3]), - self._weighted_mean([7, 8]), - self._weighted_mean([9, 10, 11]), - ] - ).reshape(2, 2) - expected = self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[True, False], [False, False]]) - self.assertArrayEqual(result.data.mask, mask) - - def test_aligned_src_x_mask(self): - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_positive, (0, 1)) - self.src.data[([1, 2, 2], [3, 0, 2])] = ma.masked - self.grid.add_dim_coord(self.grid_y_inc, 0) - self.grid.add_dim_coord(self.grid_x_inc, 1) - result = regrid(self.src, self.weights, self.grid) - data = np.array( - [ - 0, - self._weighted_mean([3]), - self._weighted_mean([7]), - self._weighted_mean([10]), - ] - ).reshape(2, 2) - expected = self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[True, False], [False, False]]) - self.assertArrayEqual(result.data.mask, mask) - - def test_aligned_src_x_zero_weights(self): - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_positive, (0, 1)) - self.grid.add_dim_coord(self.grid_y_inc, 0) - self.grid.add_dim_coord(self.grid_x_inc, 1) - self.weights[:, 2] = 0 - self.weights[1, :] = 0 - result = regrid(self.src, self.weights, self.grid) - data = np.array([0, 0, 0, self._weighted_mean([9, 10])]).reshape(2, 2) - expected = self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[True, True], [True, False]]) - self.assertArrayEqual(result.data.mask, mask) - - def test_aligned_tgt_dec(self): - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_positive, (0, 1)) - self.grid.add_dim_coord(self.grid_y_dec, 0) - self.grid.add_dim_coord(self.grid_x_dec, 1) - result = regrid(self.src, self.weights, self.grid) - data = np.array( - [ - self._weighted_mean([10, 11, 12]), - self._weighted_mean([8, 9]), - self._weighted_mean([3, 4]), - 0, - ] - ).reshape(2, 2) - expected = 
self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[False, False], [False, True]]) - self.assertArrayEqual(result.data.mask, mask) - - def test_misaligned_src_x_negative(self): - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_negative, (0, 1)) - self.grid.add_dim_coord(self.grid_y_inc, 0) - self.grid.add_dim_coord(self.grid_x_inc, 1) - result = regrid(self.src, self.weights, self.grid) - data = np.array( - [ - self._weighted_mean([1, 2]), - self._weighted_mean([3, 4]), - self._weighted_mean([5, 6, 7, 8]), - self._weighted_mean([9, 10, 11]), - ] - ).reshape(2, 2) - expected = self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[False, False], [False, False]]) - self.assertArrayEqual(result.data.mask, mask) - - def test_misaligned_src_x_negative_mask(self): - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_negative, (0, 1)) - self.src.data[([0, 0, 1, 1, 2, 2], [1, 3, 1, 3, 1, 3])] = ma.masked - self.grid.add_dim_coord(self.grid_y_inc, 0) - self.grid.add_dim_coord(self.grid_x_inc, 1) - result = regrid(self.src, self.weights, self.grid) - data = np.array( - [ - self._weighted_mean([1]), - self._weighted_mean([3]), - self._weighted_mean([5, 7]), - self._weighted_mean([9, 11]), - ] - ).reshape(2, 2) - expected = self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[False, False], [False, False]]) - self.assertArrayEqual(result.data.mask, mask) - - def test_misaligned_tgt_dec(self): - self.src.add_aux_coord(self.src_y, (0, 1)) - self.src.add_aux_coord(self.src_x_negative, (0, 1)) - self.grid.add_dim_coord(self.grid_y_dec, 0) - self.grid.add_dim_coord(self.grid_x_dec, 1) - result = regrid(self.src, self.weights, self.grid) - data = np.array( - [ - self._weighted_mean([10, 11, 12]), - self._weighted_mean([6, 7, 8, 9]), - self._weighted_mean([4]), - self._weighted_mean([2, 3]), - ] - ).reshape(2, 2) - expected = 
self._expected_cube(data) - self.assertEqual(result, expected) - mask = np.array([[False, False], [False, False]]) - self.assertArrayEqual(result.data.mask, mask) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/representation/__init__.py b/lib/iris/tests/unit/experimental/representation/__init__.py deleted file mode 100644 index c856263a5c..0000000000 --- a/lib/iris/tests/unit/experimental/representation/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental.representation` package.""" diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py deleted file mode 100644 index 8dc3cd7849..0000000000 --- a/lib/iris/tests/unit/experimental/representation/test_CubeListRepresentation.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.cube.CubeRepresentation` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from html import escape - -from iris.cube import CubeList -from iris.experimental.representation import CubeListRepresentation -import iris.tests.stock as stock - - -@tests.skip_data -class Test__instantiation(tests.IrisTest): - def setUp(self): - self.cubes = CubeList([stock.simple_3d()]) - self.representer = CubeListRepresentation(self.cubes) - - def test_ids(self): - self.assertEqual(id(self.cubes), self.representer.cubelist_id) - - -@tests.skip_data -class Test_make_content(tests.IrisTest): - def setUp(self): - self.cubes = CubeList([stock.simple_3d(), stock.lat_lon_cube()]) - self.cubes[0].rename("name & ") - self.representer = CubeListRepresentation(self.cubes) - self.content = self.representer.make_content() - - def test_repr_len(self): - self.assertEqual(len(self.cubes), len(self.content)) - - def test_summary_lines(self): - names = [c.name() for c in self.cubes] - for name, content in zip(names, self.content): - name = escape(name) - self.assertIn(name, content) - - def test__cube_name_summary_consistency(self): - # Just check the first cube in the CubeList. - single_cube_html = self.content[0] - # Get a "prettified" cube name, as it should be in the cubelist repr. - cube_name = self.cubes[0].name() - pretty_cube_name = cube_name.strip().replace("_", " ").title() - pretty_escaped_name = escape(pretty_cube_name) - self.assertIn(pretty_escaped_name, single_cube_html) - - -@tests.skip_data -class Test_repr_html(tests.IrisTest): - def setUp(self): - self.cubes = CubeList([stock.simple_3d(), stock.lat_lon_cube()]) - self.representer = CubeListRepresentation(self.cubes) - - def test_html_length(self): - html = self.representer.repr_html() - n_html_elems = html.count(" tag per cube. 
- self.assertEqual(len(self.cubes), n_html_elems) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py b/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py deleted file mode 100644 index eab3e7942d..0000000000 --- a/lib/iris/tests/unit/experimental/representation/test_CubeRepresentation.py +++ /dev/null @@ -1,413 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.cube.CubeRepresentation` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from html import escape - -import numpy as np - -from iris.coords import AncillaryVariable, CellMeasure, CellMethod -from iris.cube import Cube -from iris.experimental.representation import CubeRepresentation -import iris.tests.stock as stock -from iris.tests.stock.mesh import sample_mesh - - -@tests.skip_data -class Test__instantiation(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - self.representer = CubeRepresentation(self.cube) - - def test_cube_attributes(self): - self.assertEqual(id(self.cube), self.representer.cube_id) - self.assertStringEqual(str(self.cube), self.representer.cube_str) - - def test__heading_contents(self): - content = set(self.representer.str_headings.values()) - self.assertEqual(len(content), 1) - self.assertIsNone(list(content)[0]) - - -@tests.skip_data -class Test__get_dim_names(tests.IrisTest): - def setUp(self): - self.cube = stock.realistic_4d() - self.dim_names = [c.name() for c in self.cube.coords(dim_coords=True)] - self.representer = CubeRepresentation(self.cube) - - def test_basic(self): - result_names = self.representer._get_dim_names() - self.assertEqual(result_names, self.dim_names) - 
- def test_one_anonymous_dim(self): - self.cube.remove_coord("time") - expected_names = ["--"] - expected_names.extend(self.dim_names[1:]) - result_names = self.representer._get_dim_names() - self.assertEqual(result_names, expected_names) - - def test_anonymous_dims(self): - target_dims = [1, 3] - # Replicate this here as we're about to modify it. - expected_names = [c.name() for c in self.cube.coords(dim_coords=True)] - for dim in target_dims: - (this_dim_coord,) = self.cube.coords( - contains_dimension=dim, dim_coords=True - ) - self.cube.remove_coord(this_dim_coord) - expected_names[dim] = "--" - result_names = self.representer._get_dim_names() - self.assertEqual(result_names, expected_names) - - -@tests.skip_data -class Test__summary_content(tests.IrisTest): - def setUp(self): - self.cube = stock.lat_lon_cube() - # Check we're not tripped up by names containing spaces. - self.cube.rename("Electron density (&)") - self.cube.units = "1e11 e/m^3" - self.representer = CubeRepresentation(self.cube) - - def test_name(self): - # Check the cube name is being set and formatted correctly. - expected = escape(self.cube.name().replace("_", " ").title()) - result = self.representer.name - self.assertEqual(expected, result) - - def test_names(self): - # Check the dimension names used as column headings are split out and - # formatted correctly. - expected_coord_names = [ - c.name().replace("_", " ") - for c in self.cube.coords(dim_coords=True) - ] - result_coord_names = self.representer.names[1:] - for result in result_coord_names: - self.assertIn(result, expected_coord_names) - - def test_units(self): - # Check the units is being set correctly. - expected = self.cube.units - result = self.representer.units - self.assertEqual(expected, result) - - def test_shapes(self): - # Check cube dim lengths are split out correctly from the - # summary string. 
- expected = self.cube.shape - result = self.representer.shapes - self.assertEqual(expected, result) - - def test_ndims(self): - expected = self.cube.ndim - result = self.representer.ndims - self.assertEqual(expected, result) - - -@tests.skip_data -class Test__get_bits(tests.IrisTest): - def setUp(self): - self.cube = stock.realistic_4d() - cmth = CellMethod("mean", "time", "6hr") - self.cube.add_cell_method(cmth) - cms = CellMeasure([0, 1, 2, 3, 4, 5], long_name="foo") - self.cube.add_cell_measure(cms, 0) - avr = AncillaryVariable([0, 1, 2, 3, 4, 5], long_name="bar") - self.cube.add_ancillary_variable(avr, 0) - scms = CellMeasure([0], long_name="baz") - self.cube.add_cell_measure(scms) - self.representer = CubeRepresentation(self.cube) - self.representer._get_bits(self.representer._get_lines()) - - def test_population(self): - nonmesh_values = [ - value - for key, value in self.representer.str_headings.items() - if "Mesh" not in key - ] - for v in nonmesh_values: - self.assertIsNotNone(v) - - def test_headings__dimcoords(self): - contents = self.representer.str_headings["Dimension coordinates:"] - content_str = ",".join(content for content in contents) - dim_coords = [c.name() for c in self.cube.dim_coords] - for coord in dim_coords: - self.assertIn(coord, content_str) - - def test_headings__auxcoords(self): - contents = self.representer.str_headings["Auxiliary coordinates:"] - content_str = ",".join(content for content in contents) - aux_coords = [ - c.name() for c in self.cube.aux_coords if c.shape != (1,) - ] - for coord in aux_coords: - self.assertIn(coord, content_str) - - def test_headings__derivedcoords(self): - contents = self.representer.str_headings["Derived coordinates:"] - content_str = ",".join(content for content in contents) - derived_coords = [c.name() for c in self.cube.derived_coords] - for coord in derived_coords: - self.assertIn(coord, content_str) - - def test_headings__cellmeasures(self): - contents = self.representer.str_headings["Cell 
measures:"] - content_str = ",".join(content for content in contents) - cell_measures = [ - c.name() for c in self.cube.cell_measures() if c.shape != (1,) - ] - for coord in cell_measures: - self.assertIn(coord, content_str) - - def test_headings__ancillaryvars(self): - contents = self.representer.str_headings["Ancillary variables:"] - content_str = ",".join(content for content in contents) - ancillary_variables = [ - c.name() for c in self.cube.ancillary_variables() - ] - for coord in ancillary_variables: - self.assertIn(coord, content_str) - - def test_headings__scalarcellmeasures(self): - contents = self.representer.str_headings["Scalar cell measures:"] - content_str = ",".join(content for content in contents) - scalar_cell_measures = [ - c.name() for c in self.cube.cell_measures() if c.shape == (1,) - ] - for coord in scalar_cell_measures: - self.assertIn(coord, content_str) - - def test_headings__scalarcoords(self): - contents = self.representer.str_headings["Scalar coordinates:"] - content_str = ",".join(content for content in contents) - scalar_coords = [ - c.name() for c in self.cube.coords() if c.shape == (1,) - ] - for coord in scalar_coords: - self.assertIn(coord, content_str) - - def test_headings__attributes(self): - contents = self.representer.str_headings["Attributes:"] - content_str = ",".join(content for content in contents) - for attr_name, attr_value in self.cube.attributes.items(): - self.assertIn(attr_name, content_str) - self.assertIn(attr_value, content_str) - - def test_headings__cellmethods(self): - contents = self.representer.str_headings["Cell methods:"] - content_str = ",".join(content for content in contents) - for method in self.cube.cell_methods: - name = method.method - value = str(method)[len(name + ": ") :] - self.assertIn(name, content_str) - self.assertIn(value, content_str) - - -@tests.skip_data -class Test__make_header(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - self.representer = 
CubeRepresentation(self.cube) - self.representer._get_bits(self.representer._get_lines()) - self.header_emts = self.representer._make_header().split("\n") - - def test_name_and_units(self): - # Check the correct name and units are being written into the top-left - # table cell. - # This is found in the first cell after the `` is defined. - name_and_units_cell = self.header_emts[1] - expected = "{name} ({units})".format( - name=self.cube.name(), units=self.cube.units - ) - self.assertIn(expected.lower(), name_and_units_cell.lower()) - - def test_number_of_columns(self): - # There should be one headings column, plus a column per dimension. - # Ignore opening and closing tags. - result_cols = self.header_emts[1:-1] - expected = self.cube.ndim + 1 - self.assertEqual(len(result_cols), expected) - - def test_row_headings(self): - # Get only the dimension heading cells and not the headings column. - dim_coord_names = [c.name() for c in self.cube.coords(dim_coords=True)] - dim_col_headings = self.header_emts[2:-1] - for coord_name, col_heading in zip(dim_coord_names, dim_col_headings): - self.assertIn(coord_name, col_heading) - - -@tests.skip_data -class Test__make_shapes_row(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - self.representer = CubeRepresentation(self.cube) - self.representer._get_bits(self.representer._get_lines()) - self.result = self.representer._make_shapes_row().split("\n") - - def test_row_title(self): - title_cell = self.result[1] - self.assertIn("Shape", title_cell) - - def test_shapes(self): - expected_shapes = self.cube.shape - result_shapes = self.result[2:-1] - for expected, result in zip(expected_shapes, result_shapes): - self.assertIn(str(expected), result) - - -@tests.skip_data -class Test__make_row(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - cm = CellMethod("mean", "time", "6hr") - self.cube.add_cell_method(cm) - self.representer = CubeRepresentation(self.cube) - 
self.representer._get_bits(self.representer._get_lines()) - - def test__title_row(self): - title = "Wibble:" - row = self.representer._make_row(title) - # A cell for the title, an empty cell for each cube dimension, plus row - # opening and closing tags. - expected_len = self.cube.ndim + 3 - self.assertEqual(len(row), expected_len) - # Check for specific content. - row_str = "\n".join(element for element in row) - self.assertIn(title.strip(":"), row_str) - expected_html_class = "iris-title" - self.assertIn(expected_html_class, row_str) - - def test__inclusion_row(self): - # An inclusion row has x/- to indicate whether a coordinate describes - # a dimension. - title = "time" - body = ["x", "-", "-", "-"] - row = self.representer._make_row(title, body) - # A cell for the title, a cell for each cube dimension, plus row - # opening and closing tags. - expected_len = len(body) + 3 - self.assertEqual(len(row), expected_len) - # Check for specific content. - row_str = "\n".join(element for element in row) - self.assertIn(title, row_str) - self.assertIn("x", row_str) - self.assertIn("-", row_str) - expected_html_class_1 = "iris-word-cell" - expected_html_class_2 = "iris-inclusion-cell" - self.assertIn(expected_html_class_1, row_str) - self.assertIn(expected_html_class_2, row_str) - # We do not expect a colspan to be set. - self.assertNotIn("colspan", row_str) - - def test__attribute_row(self): - # An attribute row does not contain inclusion indicators. - title = "source" - body = "Iris test case" - colspan = 5 - row = self.representer._make_row(title, body, colspan) - # We only expect two cells here: the row title cell and one other cell - # that spans a number of columns. We also need to open and close the - # tr html element, giving 4 bits making up the row. - self.assertEqual(len(row), 4) - # Check for specific content. - row_str = "\n".join(element for element in row) - self.assertIn(title, row_str) - self.assertIn(body, row_str) - # We expect a colspan to be set. 
- colspan_str = 'colspan="{}"'.format(colspan) - self.assertIn(colspan_str, row_str) - - -@tests.skip_data -class Test__expand_last_cell(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - self.representer = CubeRepresentation(self.cube) - self.representer._get_bits(self.representer._get_lines()) - col_span = self.representer.ndims - self.row = self.representer._make_row( - "title", body="first", col_span=col_span - ) - - def test_add_line(self): - cell = self.representer._expand_last_cell(self.row[-2], "second") - self.assertIn("first
second", cell) - - -@tests.skip_data -class Test__make_content(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - self.representer = CubeRepresentation(self.cube) - self.representer._get_bits(self.representer._get_lines()) - self.result = self.representer._make_content() - - # Also provide an ultra-simple mesh cube, with only meshcoords. - mesh = sample_mesh() - meshco_x, meshco_y = mesh.to_MeshCoords("face") - mesh_cube = Cube(np.zeros(meshco_x.shape)) - mesh_cube.add_aux_coord(meshco_x, (0,)) - mesh_cube.add_aux_coord(meshco_y, (0,)) - self.mesh_cube = mesh_cube - self.mesh_representer = CubeRepresentation(self.mesh_cube) - self.mesh_representer._get_bits(self.mesh_representer._get_lines()) - self.mesh_result = self.mesh_representer._make_content() - - def test_included(self): - included = "Dimension coordinates" - self.assertIn(included, self.result) - dim_coord_names = [c.name() for c in self.cube.dim_coords] - for coord_name in dim_coord_names: - self.assertIn(coord_name, self.result) - - def test_not_included(self): - # `stock.simple_3d()` only contains the `Dimension coordinates` attr. - not_included = list(self.representer.str_headings.keys()) - not_included.pop(not_included.index("Dimension coordinates:")) - for heading in not_included: - self.assertNotIn(heading, self.result) - - def test_mesh_included(self): - # self.mesh_cube contains a `Mesh coordinates` section. - included = "Mesh coordinates" - self.assertIn(included, self.mesh_result) - mesh_coord_names = [ - c.name() for c in self.mesh_cube.coords(mesh_coords=True) - ] - for coord_name in mesh_coord_names: - self.assertIn(coord_name, self.result) - - def test_mesh_not_included(self): - # self.mesh_cube _only_ contains a `Mesh coordinates` section. 
- not_included = list(self.representer.str_headings.keys()) - not_included.pop(not_included.index("Mesh coordinates:")) - for heading in not_included: - self.assertNotIn(heading, self.result) - - -@tests.skip_data -class Test_repr_html(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_3d() - representer = CubeRepresentation(self.cube) - self.result = representer.repr_html() - - def test_contents_added(self): - included = "Dimension coordinates" - self.assertIn(included, self.result) - not_included = "Auxiliary coordinates" - self.assertNotIn(not_included, self.result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/stratify/__init__.py b/lib/iris/tests/unit/experimental/stratify/__init__.py deleted file mode 100644 index 7218455e76..0000000000 --- a/lib/iris/tests/unit/experimental/stratify/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental.stratify` package.""" diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py deleted file mode 100644 index 6958fa9a2f..0000000000 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.experimental.stratify.relevel` function. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from functools import partial - -import numpy as np -from numpy.testing import assert_array_equal - -from iris.coords import AuxCoord, DimCoord -import iris.tests.stock as stock - -try: - import stratify - - from iris.experimental.stratify import relevel -except ImportError: - stratify = None - - -@tests.skip_stratify -class Test(tests.IrisTest): - def setUp(self): - cube = stock.simple_3d()[:, :1, :1] - #: The data from which to get the levels. - self.src_levels = cube.copy() - #: The data to interpolate. - self.cube = cube.copy() - self.cube.rename("foobar") - self.cube *= 10 - self.coord = self.src_levels.coord("wibble") - self.axes = (self.coord, self.coord.name(), None, 0) - - def test_broadcast_fail_src_levels(self): - emsg = "Cannot broadcast the cube and src_levels" - data = np.arange(60).reshape(3, 4, 5) - with self.assertRaisesRegex(ValueError, emsg): - relevel(self.cube, AuxCoord(data), [1, 2, 3]) - - def test_broadcast_fail_tgt_levels(self): - emsg = "Cannot broadcast the cube and tgt_levels" - data = np.arange(60).reshape(3, 4, 5) - with self.assertRaisesRegex(ValueError, emsg): - relevel(self.cube, self.coord, data) - - def test_standard_input(self): - for axis in self.axes: - result = relevel( - self.cube, self.src_levels, [-1, 0, 5.5], axis=axis - ) - assert_array_equal( - result.data.flatten(), np.array([np.nan, 0, 55]) - ) - expected = DimCoord([-1, 0, 5.5], units=1, long_name="thingness") - self.assertEqual(expected, result.coord("thingness")) - - def test_non_monotonic(self): - for axis in self.axes: - result = relevel(self.cube, self.src_levels, [2, 3, 2], axis=axis) - assert_array_equal( - result.data.flatten(), np.array([20, 30, np.nan]) - ) - expected = AuxCoord([2, 3, 2], units=1, long_name="thingness") - self.assertEqual(result.coord("thingness"), expected) - - def test_static_level(self): - for axis in self.axes: - result = relevel(self.cube, self.src_levels, [2, 2], axis=axis) - 
assert_array_equal(result.data.flatten(), np.array([20, 20])) - - def test_coord_input(self): - source = AuxCoord(self.src_levels.data) - metadata = self.src_levels.metadata._asdict() - metadata["coord_system"] = None - metadata["climatological"] = None - source.metadata = metadata - - for axis in self.axes: - result = relevel(self.cube, source, [0, 12, 13], axis=axis) - self.assertEqual(result.shape, (3, 1, 1)) - assert_array_equal(result.data.flatten(), [0, 120, np.nan]) - - def test_custom_interpolator(self): - interpolator = partial(stratify.interpolate, interpolation="nearest") - - for axis in self.axes: - result = relevel( - self.cube, - self.src_levels, - [-1, 0, 6.5], - axis=axis, - interpolator=interpolator, - ) - assert_array_equal( - result.data.flatten(), np.array([np.nan, 0, 120]) - ) - - def test_multi_dim_target_levels(self): - interpolator = partial( - stratify.interpolate, - interpolation="linear", - extrapolation="linear", - ) - - for axis in self.axes: - result = relevel( - self.cube, - self.src_levels, - self.src_levels.data, - axis=axis, - interpolator=interpolator, - ) - assert_array_equal(result.data.flatten(), np.array([0, 120])) - self.assertCML(result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/__init__.py b/lib/iris/tests/unit/experimental/ugrid/__init__.py deleted file mode 100644 index 7f55678f06..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.experimental.ugrid` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py b/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py deleted file mode 100644 index 2e70f2cd5d..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/cf/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental.ugrid.cf` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py deleted file mode 100644 index bdf1d5e03b..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridAuxiliaryCoordinateVariable.py +++ /dev/null @@ -1,238 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridAuxiliaryCoordinateVariable` class. - -todo: fold these tests into cf tests when experimental.ugrid is folded into - standard behaviour. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.experimental.ugrid.cf import ( - CFUGridAuxiliaryCoordinateVariable, - logger, -) -from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( - netcdf_ugrid_variable, -) - - -def named_variable(name): - # Don't need to worry about dimensions or dtype for these tests. 
- return netcdf_ugrid_variable(name, "", int) - - -class TestIdentify(tests.IrisTest): - def setUp(self): - self.cf_identities = [ - "node_coordinates", - "edge_coordinates", - "face_coordinates", - "volume_coordinates", - ] - - def test_cf_identities(self): - subject_name = "ref_subject" - ref_subject = named_variable(subject_name) - vars_common = { - subject_name: ref_subject, - "ref_not_subject": named_variable("ref_not_subject"), - } - # ONLY expecting ref_subject, excluding ref_not_subject. - expected = { - subject_name: CFUGridAuxiliaryCoordinateVariable( - subject_name, ref_subject - ) - } - - for identity in self.cf_identities: - ref_source = named_variable("ref_source") - setattr(ref_source, identity, subject_name) - vars_all = dict({"ref_source": ref_source}, **vars_common) - result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_duplicate_refs(self): - subject_name = "ref_subject" - ref_subject = named_variable(subject_name) - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for var in ref_source_vars.values(): - setattr(var, self.cf_identities[0], subject_name) - vars_all = dict( - { - subject_name: ref_subject, - "ref_not_subject": named_variable("ref_not_subject"), - }, - **ref_source_vars, - ) - - # ONLY expecting ref_subject, excluding ref_not_subject. 
- expected = { - subject_name: CFUGridAuxiliaryCoordinateVariable( - subject_name, ref_subject - ) - } - result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_two_coords(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, self.cf_identities[ix], subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # Not expecting ref_not_subject. - expected = { - name: CFUGridAuxiliaryCoordinateVariable(name, var) - for name, var in ref_subject_vars.items() - } - result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_two_part_ref(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - ref_source = named_variable("ref_source") - setattr(ref_source, self.cf_identities[0], " ".join(subject_names)) - vars_all = { - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - **ref_subject_vars, - } - - expected = { - name: CFUGridAuxiliaryCoordinateVariable(name, var) - for name, var in ref_subject_vars.items() - } - result = CFUGridAuxiliaryCoordinateVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_string_type_ignored(self): - subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr(ref_source, self.cf_identities[0], subject_name) - vars_all = { - subject_name: netcdf_ugrid_variable(subject_name, "", np.bytes_), - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - result = 
CFUGridAuxiliaryCoordinateVariable.identify(vars_all) - self.assertDictEqual({}, result) - - def test_ignore(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, self.cf_identities[0], subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # ONLY expect the subject variable that hasn't been ignored. - expected_name = subject_names[0] - expected = { - expected_name: CFUGridAuxiliaryCoordinateVariable( - expected_name, ref_subject_vars[expected_name] - ) - } - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, ignore=subject_names[1] - ) - self.assertDictEqual(expected, result) - - def test_target(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - source_names = ("ref_source_1", "ref_source_2") - ref_source_vars = {name: named_variable(name) for name in source_names} - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, self.cf_identities[0], subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # ONLY expect the variable referenced by the named ref_source_var. 
- expected_name = subject_names[0] - expected = { - expected_name: CFUGridAuxiliaryCoordinateVariable( - expected_name, ref_subject_vars[expected_name] - ) - } - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, target=source_names[0] - ) - self.assertDictEqual(expected, result) - - def test_warn(self): - subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr(ref_source, self.cf_identities[0], subject_name) - vars_all = { - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - # The warn kwarg and expected corresponding log level. - warn_and_level = {True: "WARNING", False: "DEBUG"} - - # Missing warning. - log_regex = rf"Missing CF-netCDF auxiliary coordinate variable {subject_name}.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) - - # String variable warning. - log_regex = r".*is a CF-netCDF label variable.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - result = CFUGridAuxiliaryCoordinateVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py deleted file mode 100644 index 7d461b324a..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridConnectivityVariable.py +++ /dev/null @@ -1,224 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridConnectivityVariable` class. - -todo: fold these tests into cf tests when experimental.ugrid is folded into - standard behaviour. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.experimental.ugrid.cf import CFUGridConnectivityVariable, logger -from iris.experimental.ugrid.mesh import Connectivity -from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( - netcdf_ugrid_variable, -) - - -def named_variable(name): - # Don't need to worry about dimensions or dtype for these tests. - return netcdf_ugrid_variable(name, "", int) - - -class TestIdentify(tests.IrisTest): - def test_cf_identities(self): - subject_name = "ref_subject" - ref_subject = named_variable(subject_name) - vars_common = { - subject_name: ref_subject, - "ref_not_subject": named_variable("ref_not_subject"), - } - # ONLY expecting ref_subject, excluding ref_not_subject. - expected = { - subject_name: CFUGridConnectivityVariable( - subject_name, ref_subject - ) - } - - for identity in Connectivity.UGRID_CF_ROLES: - ref_source = named_variable("ref_source") - setattr(ref_source, identity, subject_name) - vars_all = dict({"ref_source": ref_source}, **vars_common) - result = CFUGridConnectivityVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_duplicate_refs(self): - subject_name = "ref_subject" - ref_subject = named_variable(subject_name) - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for var in ref_source_vars.values(): - setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_name) - vars_all = dict( - { - subject_name: ref_subject, - "ref_not_subject": named_variable("ref_not_subject"), - }, - **ref_source_vars, - ) - - # ONLY expecting ref_subject, excluding ref_not_subject. 
- expected = { - subject_name: CFUGridConnectivityVariable( - subject_name, ref_subject - ) - } - result = CFUGridConnectivityVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_two_cf_roles(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, Connectivity.UGRID_CF_ROLES[ix], subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # Not expecting ref_not_subject. - expected = { - name: CFUGridConnectivityVariable(name, var) - for name, var in ref_subject_vars.items() - } - result = CFUGridConnectivityVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_two_part_ref_ignored(self): - # Not expected to handle more than one variable for a connectivity - # cf role - invalid UGRID. 
- subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr( - ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name + " foo" - ) - vars_all = { - subject_name: named_variable(subject_name), - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - result = CFUGridConnectivityVariable.identify(vars_all) - self.assertDictEqual({}, result) - - def test_string_type_ignored(self): - subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr(ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name) - vars_all = { - subject_name: netcdf_ugrid_variable(subject_name, "", np.bytes_), - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - result = CFUGridConnectivityVariable.identify(vars_all) - self.assertDictEqual({}, result) - - def test_ignore(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # ONLY expect the subject variable that hasn't been ignored. 
- expected_name = subject_names[0] - expected = { - expected_name: CFUGridConnectivityVariable( - expected_name, ref_subject_vars[expected_name] - ) - } - result = CFUGridConnectivityVariable.identify( - vars_all, ignore=subject_names[1] - ) - self.assertDictEqual(expected, result) - - def test_target(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - source_names = ("ref_source_1", "ref_source_2") - ref_source_vars = {name: named_variable(name) for name in source_names} - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, Connectivity.UGRID_CF_ROLES[0], subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # ONLY expect the variable referenced by the named ref_source_var. - expected_name = subject_names[0] - expected = { - expected_name: CFUGridConnectivityVariable( - expected_name, ref_subject_vars[expected_name] - ) - } - result = CFUGridConnectivityVariable.identify( - vars_all, target=source_names[0] - ) - self.assertDictEqual(expected, result) - - def test_warn(self): - subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr(ref_source, Connectivity.UGRID_CF_ROLES[0], subject_name) - vars_all = { - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - # The warn kwarg and expected corresponding log level. - warn_and_level = {True: "WARNING", False: "DEBUG"} - - # Missing warning. - log_regex = rf"Missing CF-UGRID connectivity variable {subject_name}.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - result = CFUGridConnectivityVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) - - # String variable warning. 
- log_regex = r".*is a CF-netCDF label variable.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - result = CFUGridConnectivityVariable.identify( - vars_all, warn=warn - ) - self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py deleted file mode 100644 index a3a0e665bb..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridGroup.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. - -todo: fold these tests into cf tests when experimental.ugrid is folded into - standard behaviour. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest.mock import MagicMock - -from iris.experimental.ugrid.cf import ( - CFUGridAuxiliaryCoordinateVariable, - CFUGridConnectivityVariable, - CFUGridGroup, - CFUGridMeshVariable, -) -from iris.fileformats.cf import CFCoordinateVariable, CFDataVariable - - -class Tests(tests.IrisTest): - def setUp(self): - self.cf_group = CFUGridGroup() - - def test_inherited(self): - coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") - self.cf_group[coord_var.cf_name] = coord_var - self.assertEqual( - coord_var, self.cf_group.coordinates[coord_var.cf_name] - ) - - def test_connectivities(self): - conn_var = MagicMock( - spec=CFUGridConnectivityVariable, cf_name="conn_var" - ) - self.cf_group[conn_var.cf_name] = conn_var - self.assertEqual( - conn_var, self.cf_group.connectivities[conn_var.cf_name] - ) - - def test_ugrid_coords(self): - coord_var = MagicMock( - spec=CFUGridAuxiliaryCoordinateVariable, cf_name="coord_var" - ) - self.cf_group[coord_var.cf_name] = coord_var - self.assertEqual( - coord_var, self.cf_group.ugrid_coords[coord_var.cf_name] - ) - - def test_meshes(self): - mesh_var = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var") - self.cf_group[mesh_var.cf_name] = mesh_var - self.assertEqual(mesh_var, self.cf_group.meshes[mesh_var.cf_name]) - - def test_non_data_names(self): - data_var = MagicMock(spec=CFDataVariable, cf_name="data_var") - coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") - conn_var = MagicMock( - spec=CFUGridConnectivityVariable, cf_name="conn_var" - ) - ugrid_coord_var = MagicMock( - spec=CFUGridAuxiliaryCoordinateVariable, cf_name="ugrid_coord_var" - ) - mesh_var = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var") - mesh_var2 = MagicMock(spec=CFUGridMeshVariable, cf_name="mesh_var2") - duplicate_name_var = MagicMock( - spec=CFUGridMeshVariable, cf_name="coord_var" - ) - - for var in ( - data_var, - coord_var, - conn_var, - ugrid_coord_var, - 
mesh_var, - mesh_var2, - duplicate_name_var, - ): - self.cf_group[var.cf_name] = var - - expected_names = [ - var.cf_name - for var in ( - coord_var, - conn_var, - ugrid_coord_var, - mesh_var, - mesh_var2, - ) - ] - expected = set(expected_names) - self.assertEqual(expected, self.cf_group.non_data_variable_names) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py deleted file mode 100644 index 08915f7cff..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridMeshVariable.py +++ /dev/null @@ -1,263 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridMeshVariable` class. - -todo: fold these tests into cf tests when experimental.ugrid is folded into - standard behaviour. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.experimental.ugrid.cf import CFUGridMeshVariable, logger -from iris.tests.unit.experimental.ugrid.cf.test_CFUGridReader import ( - netcdf_ugrid_variable, -) - - -def named_variable(name): - # Don't need to worry about dimensions or dtype for these tests. - return netcdf_ugrid_variable(name, "", int) - - -class TestIdentify(tests.IrisTest): - def setUp(self): - self.cf_identity = "mesh" - - def test_cf_role(self): - # Test that mesh variables can be identified by having `cf_role="mesh_topology"`. 
- match_name = "match" - match = named_variable(match_name) - setattr(match, "cf_role", "mesh_topology") - - not_match_name = f"not_{match_name}" - not_match = named_variable(not_match_name) - setattr(not_match, "cf_role", "foo") - - vars_all = {match_name: match, not_match_name: not_match} - - # ONLY expecting match, excluding not_match. - expected = {match_name: CFUGridMeshVariable(match_name, match)} - result = CFUGridMeshVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_cf_identity(self): - # Test that mesh variables can be identified by being another variable's - # `mesh` attribute. - subject_name = "ref_subject" - ref_subject = named_variable(subject_name) - ref_source = named_variable("ref_source") - setattr(ref_source, self.cf_identity, subject_name) - vars_all = { - subject_name: ref_subject, - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - # ONLY expecting ref_subject, excluding ref_not_subject. - expected = { - subject_name: CFUGridMeshVariable(subject_name, ref_subject) - } - result = CFUGridMeshVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_cf_role_and_identity(self): - # Test that identification can successfully handle a combination of - # mesh variables having `cf_role="mesh_topology"` AND being referenced as - # another variable's `mesh` attribute. 
- role_match_name = "match" - role_match = named_variable(role_match_name) - setattr(role_match, "cf_role", "mesh_topology") - ref_source_1 = named_variable("ref_source_1") - setattr(ref_source_1, self.cf_identity, role_match_name) - - subject_name = "ref_subject" - ref_subject = named_variable(subject_name) - ref_source_2 = named_variable("ref_source_2") - setattr(ref_source_2, self.cf_identity, subject_name) - - vars_all = { - role_match_name: role_match, - subject_name: ref_subject, - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source_1": ref_source_1, - "ref_source_2": ref_source_2, - } - - # Expecting role_match and ref_subject but excluding other variables. - expected = { - role_match_name: CFUGridMeshVariable(role_match_name, role_match), - subject_name: CFUGridMeshVariable(subject_name, ref_subject), - } - result = CFUGridMeshVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_duplicate_refs(self): - subject_name = "ref_subject" - ref_subject = named_variable(subject_name) - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for var in ref_source_vars.values(): - setattr(var, self.cf_identity, subject_name) - vars_all = dict( - { - subject_name: ref_subject, - "ref_not_subject": named_variable("ref_not_subject"), - }, - **ref_source_vars, - ) - - # ONLY expecting ref_subject, excluding ref_not_subject. 
- expected = { - subject_name: CFUGridMeshVariable(subject_name, ref_subject) - } - result = CFUGridMeshVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_two_refs(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, self.cf_identity, subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # Not expecting ref_not_subject. - expected = { - name: CFUGridMeshVariable(name, var) - for name, var in ref_subject_vars.items() - } - result = CFUGridMeshVariable.identify(vars_all) - self.assertDictEqual(expected, result) - - def test_two_part_ref_ignored(self): - # Not expected to handle more than one variable for a mesh - # cf role - invalid UGRID. 
- subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr(ref_source, self.cf_identity, subject_name + " foo") - vars_all = { - subject_name: named_variable(subject_name), - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - result = CFUGridMeshVariable.identify(vars_all) - self.assertDictEqual({}, result) - - def test_string_type_ignored(self): - subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr(ref_source, self.cf_identity, subject_name) - vars_all = { - subject_name: netcdf_ugrid_variable(subject_name, "", np.bytes_), - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - result = CFUGridMeshVariable.identify(vars_all) - self.assertDictEqual({}, result) - - def test_ignore(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - ref_source_vars = { - name: named_variable(name) - for name in ("ref_source_1", "ref_source_2") - } - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, self.cf_identity, subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # ONLY expect the subject variable that hasn't been ignored. 
- expected_name = subject_names[0] - expected = { - expected_name: CFUGridMeshVariable( - expected_name, ref_subject_vars[expected_name] - ) - } - result = CFUGridMeshVariable.identify( - vars_all, ignore=subject_names[1] - ) - self.assertDictEqual(expected, result) - - def test_target(self): - subject_names = ("ref_subject_1", "ref_subject_2") - ref_subject_vars = { - name: named_variable(name) for name in subject_names - } - - source_names = ("ref_source_1", "ref_source_2") - ref_source_vars = {name: named_variable(name) for name in source_names} - for ix, var in enumerate(ref_source_vars.values()): - setattr(var, self.cf_identity, subject_names[ix]) - vars_all = dict( - {"ref_not_subject": named_variable("ref_not_subject")}, - **ref_subject_vars, - **ref_source_vars, - ) - - # ONLY expect the variable referenced by the named ref_source_var. - expected_name = subject_names[0] - expected = { - expected_name: CFUGridMeshVariable( - expected_name, ref_subject_vars[expected_name] - ) - } - result = CFUGridMeshVariable.identify(vars_all, target=source_names[0]) - self.assertDictEqual(expected, result) - - def test_warn(self): - subject_name = "ref_subject" - ref_source = named_variable("ref_source") - setattr(ref_source, self.cf_identity, subject_name) - vars_all = { - "ref_not_subject": named_variable("ref_not_subject"), - "ref_source": ref_source, - } - - # The warn kwarg and expected corresponding log level. - warn_and_level = {True: "WARNING", False: "DEBUG"} - - # Missing warning. - log_regex = rf"Missing CF-UGRID mesh variable {subject_name}.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - result = CFUGridMeshVariable.identify(vars_all, warn=warn) - self.assertDictEqual({}, result) - - # String variable warning. 
- log_regex = r".*is a CF-netCDF label variable.*" - for warn, level in warn_and_level.items(): - with self.assertLogs(logger, level=level, msg_regex=log_regex): - vars_all[subject_name] = netcdf_ugrid_variable( - subject_name, "", np.bytes_ - ) - result = CFUGridMeshVariable.identify(vars_all, warn=warn) - self.assertDictEqual({}, result) diff --git a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py b/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py deleted file mode 100644 index 8f029c1b7b..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/cf/test_CFUGridReader.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.cf.CFUGridGroup` class. - -todo: fold these tests into cf tests when experimental.ugrid is folded into - standard behaviour. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.experimental.ugrid.cf import ( - CFUGridAuxiliaryCoordinateVariable, - CFUGridConnectivityVariable, - CFUGridGroup, - CFUGridMeshVariable, - CFUGridReader, -) -from iris.fileformats.cf import CFCoordinateVariable, CFDataVariable -from iris.tests.unit.fileformats.cf.test_CFReader import netcdf_variable - - -def netcdf_ugrid_variable( - name, - dimensions, - dtype, - coordinates=None, -): - ncvar = netcdf_variable( - name=name, dimensions=dimensions, dtype=dtype, coordinates=coordinates - ) - - # Fill in all the extra UGRID attributes to prevent problems with getattr - # and Mock. Any attribute can be replaced in downstream setUp if present. 
- ugrid_attrs = ( - CFUGridAuxiliaryCoordinateVariable.cf_identities - + CFUGridConnectivityVariable.cf_identities - + [CFUGridMeshVariable.cf_identity] - ) - for attr in ugrid_attrs: - setattr(ncvar, attr, None) - - return ncvar - - -class Test_build_cf_groups(tests.IrisTest): - @classmethod - def setUpClass(cls): - # Replicating syntax from test_CFReader.Test_build_cf_groups__formula_terms. - cls.mesh = netcdf_ugrid_variable("mesh", "", np.int) - cls.node_x = netcdf_ugrid_variable("node_x", "node", np.float) - cls.node_y = netcdf_ugrid_variable("node_y", "node", np.float) - cls.face_x = netcdf_ugrid_variable("face_x", "face", np.float) - cls.face_y = netcdf_ugrid_variable("face_y", "face", np.float) - cls.face_nodes = netcdf_ugrid_variable( - "face_nodes", "face vertex", np.int - ) - cls.levels = netcdf_ugrid_variable("levels", "levels", np.int) - cls.data = netcdf_ugrid_variable( - "data", "levels face", np.float, coordinates="face_x face_y" - ) - - # Add necessary attributes for mesh recognition. - cls.mesh.cf_role = "mesh_topology" - cls.mesh.node_coordinates = "node_x node_y" - cls.mesh.face_coordinates = "face_x face_y" - cls.mesh.face_node_connectivity = "face_nodes" - cls.face_nodes.cf_role = "face_node_connectivity" - cls.data.mesh = "mesh" - - cls.variables = dict( - mesh=cls.mesh, - node_x=cls.node_x, - node_y=cls.node_y, - face_x=cls.face_x, - face_y=cls.face_y, - face_nodes=cls.face_nodes, - levels=cls.levels, - data=cls.data, - ) - ncattrs = mock.Mock(return_value=[]) - cls.dataset = mock.Mock( - file_format="NetCDF4", variables=cls.variables, ncattrs=ncattrs - ) - - def setUp(self): - # Restrict the CFUGridReader functionality to only performing - # translations and building first level cf-groups for variables. 
- self.patch("iris.experimental.ugrid.cf.CFUGridReader._reset") - self.patch("netCDF4.Dataset", return_value=self.dataset) - cf_reader = CFUGridReader("dummy") - self.cf_group = cf_reader.cf_group - - def test_inherited(self): - for expected_var, collection in ( - [CFCoordinateVariable("levels", self.levels), "coordinates"], - [CFDataVariable("data", self.data), "data_variables"], - ): - expected = {expected_var.cf_name: expected_var} - self.assertDictEqual(expected, getattr(self.cf_group, collection)) - - def test_connectivities(self): - expected_var = CFUGridConnectivityVariable( - "face_nodes", self.face_nodes - ) - expected = {expected_var.cf_name: expected_var} - self.assertDictEqual(expected, self.cf_group.connectivities) - - def test_mesh(self): - expected_var = CFUGridMeshVariable("mesh", self.mesh) - expected = {expected_var.cf_name: expected_var} - self.assertDictEqual(expected, self.cf_group.meshes) - - def test_ugrid_coords(self): - names = [ - f"{loc}_{ax}" for loc in ("node", "face") for ax in ("x", "y") - ] - expected = { - name: CFUGridAuxiliaryCoordinateVariable(name, getattr(self, name)) - for name in names - } - self.assertDictEqual(expected, self.cf_group.ugrid_coords) - - def test_is_cf_ugrid_group(self): - self.assertIsInstance(self.cf_group, CFUGridGroup) diff --git a/lib/iris/tests/unit/experimental/ugrid/load/__init__.py b/lib/iris/tests/unit/experimental/ugrid/load/__init__.py deleted file mode 100644 index 36c9108dc2..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/load/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.experimental.ugrid.load` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py b/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py deleted file mode 100644 index 1203633297..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_ParseUgridOnLoad.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.load.ParseUgridOnLoad` class. - -todo: remove this module when experimental.ugrid is folded into standard behaviour. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD, ParseUGridOnLoad - - -class TestClass(tests.IrisTest): - @classmethod - def setUpClass(cls): - cls.cls = ParseUGridOnLoad() - - def test_default(self): - self.assertFalse(self.cls) - - def test_context(self): - self.assertFalse(self.cls) - with self.cls.context(): - self.assertTrue(self.cls) - self.assertFalse(self.cls) - - -class TestConstant(tests.IrisTest): - @classmethod - def setUpClass(cls): - cls.constant = PARSE_UGRID_ON_LOAD - - def test_default(self): - self.assertFalse(self.constant) - - def test_context(self): - self.assertFalse(self.constant) - with self.constant.context(): - self.assertTrue(self.constant) - self.assertFalse(self.constant) diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py deleted file mode 100644 index 4de11d5610..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_mesh.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is 
released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.experimental.ugrid.load.load_mesh` function. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD, load_mesh - - -class Tests(tests.IrisTest): - # All 'real' tests have been done for load_meshes(). Here we just check - # that load_mesh() works with load_meshes() correctly, using mocking. - def setUp(self): - self.load_meshes_mock = self.patch( - "iris.experimental.ugrid.load.load_meshes" - ) - # The expected return from load_meshes - a dict of files, each with - # a list of meshes. - self.load_meshes_mock.return_value = {"file": ["mesh"]} - - def test_calls_load_meshes(self): - args = [("file_1", "file_2"), "my_var_name"] - with PARSE_UGRID_ON_LOAD.context(): - _ = load_mesh(args) - self.assertTrue(self.load_meshes_mock.called_with(args)) - - def test_returns_mesh(self): - with PARSE_UGRID_ON_LOAD.context(): - mesh = load_mesh([]) - self.assertEqual(mesh, "mesh") - - def test_single_mesh(self): - # Override the load_meshes_mock return values to provoke errors. - def common(ret_val): - self.load_meshes_mock.return_value = ret_val - with self.assertRaisesRegex(ValueError, "Expecting 1 mesh.*"): - with PARSE_UGRID_ON_LOAD.context(): - _ = load_mesh([]) - - # Too many. - common({"file": ["mesh1", "mesh2"]}) - # Too few. - common({"file": []}) diff --git a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py b/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py deleted file mode 100644 index f2175ef99a..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/load/test_load_meshes.py +++ /dev/null @@ -1,235 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.experimental.ugrid.load.load_meshes` function. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from pathlib import Path -from shutil import rmtree -from subprocess import check_call -import tempfile -from uuid import uuid4 - -from iris.experimental.ugrid.load import ( - PARSE_UGRID_ON_LOAD, - load_meshes, - logger, -) - - -def setUpModule(): - global TMP_DIR - TMP_DIR = Path(tempfile.mkdtemp()) - - -def tearDownModule(): - if TMP_DIR is not None: - rmtree(TMP_DIR) - - -def cdl_to_nc(cdl): - cdl_path = TMP_DIR / "tst.cdl" - nc_path = TMP_DIR / f"{uuid4()}.nc" - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) - return str(nc_path) - - -class TestsBasic(tests.IrisTest): - def setUp(self): - self.ref_cdl = """ - netcdf mesh_test { - dimensions: - node = 3 ; - face = 1 ; - vertex = 3 ; - levels = 2 ; - variables: - int mesh ; - mesh:cf_role = "mesh_topology" ; - mesh:topology_dimension = 2 ; - mesh:node_coordinates = "node_x node_y" ; - mesh:face_node_connectivity = "face_nodes" ; - float node_x(node) ; - node_x:standard_name = "longitude" ; - float node_y(node) ; - node_y:standard_name = "latitude" ; - int face_nodes(face, vertex) ; - face_nodes:cf_role = "face_node_connectivity" ; - face_nodes:start_index = 0 ; - int levels(levels) ; - float node_data(levels, node) ; - node_data:coordinates = "node_x node_y" ; - node_data:location = "node" ; - node_data:mesh = "mesh" ; - data: - mesh = 0; - node_x = 0., 2., 1.; - node_y = 0., 0., 1.; - face_nodes = 0, 1, 2; - levels = 1, 2; - node_data = 0., 0., 0.; - } - """ - self.nc_path = 
cdl_to_nc(self.ref_cdl) - - def add_second_mesh(self): - second_name = "mesh2" - cdl_extra = f""" - int {second_name} ; - {second_name}:cf_role = "mesh_topology" ; - {second_name}:topology_dimension = 2 ; - {second_name}:node_coordinates = "node_x node_y" ; - {second_name}:face_coordinates = "face_x face_y" ; - {second_name}:face_node_connectivity = "face_nodes" ; - """ - vars_string = "variables:" - vars_start = self.ref_cdl.index(vars_string) + len(vars_string) - new_cdl = ( - self.ref_cdl[:vars_start] + cdl_extra + self.ref_cdl[vars_start:] - ) - return new_cdl, second_name - - def test_with_data(self): - nc_path = cdl_to_nc(self.ref_cdl) - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes(nc_path) - - files = list(meshes.keys()) - self.assertEqual(1, len(files)) - file_meshes = meshes[files[0]] - self.assertEqual(1, len(file_meshes)) - mesh = file_meshes[0] - self.assertEqual("mesh", mesh.var_name) - - def test_no_data(self): - cdl_lines = self.ref_cdl.split("\n") - cdl_lines = filter( - lambda line: ':mesh = "mesh"' not in line, cdl_lines - ) - ref_cdl = "\n".join(cdl_lines) - - nc_path = cdl_to_nc(ref_cdl) - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes(nc_path) - - files = list(meshes.keys()) - self.assertEqual(1, len(files)) - file_meshes = meshes[files[0]] - self.assertEqual(1, len(file_meshes)) - mesh = file_meshes[0] - self.assertEqual("mesh", mesh.var_name) - - def test_no_mesh(self): - cdl_lines = self.ref_cdl.split("\n") - cdl_lines = filter( - lambda line: all( - [s not in line for s in (':mesh = "mesh"', "mesh_topology")] - ), - cdl_lines, - ) - ref_cdl = "\n".join(cdl_lines) - - nc_path = cdl_to_nc(ref_cdl) - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes(nc_path) - - self.assertDictEqual({}, meshes) - - def test_multi_files(self): - files_count = 3 - nc_paths = [cdl_to_nc(self.ref_cdl) for _ in range(files_count)] - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes(nc_paths) - 
self.assertEqual(files_count, len(meshes)) - - def test_multi_meshes(self): - ref_cdl, second_name = self.add_second_mesh() - nc_path = cdl_to_nc(ref_cdl) - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes(nc_path) - - files = list(meshes.keys()) - self.assertEqual(1, len(files)) - file_meshes = meshes[files[0]] - self.assertEqual(2, len(file_meshes)) - mesh_names = [mesh.var_name for mesh in file_meshes] - self.assertIn("mesh", mesh_names) - self.assertIn(second_name, mesh_names) - - def test_var_name(self): - second_cdl, second_name = self.add_second_mesh() - cdls = [self.ref_cdl, second_cdl] - nc_paths = [cdl_to_nc(cdl) for cdl in cdls] - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes(nc_paths, second_name) - - files = list(meshes.keys()) - self.assertEqual(1, len(files)) - file_meshes = meshes[files[0]] - self.assertEqual(1, len(file_meshes)) - self.assertEqual(second_name, file_meshes[0].var_name) - - def test_no_parsing(self): - nc_path = cdl_to_nc(self.ref_cdl) - with self.assertRaisesRegex( - ValueError, ".*Must be True to enable mesh loading." - ): - _ = load_meshes(nc_path) - - def test_invalid_scheme(self): - with self.assertRaisesRegex( - ValueError, "Iris cannot handle the URI scheme:.*" - ): - with PARSE_UGRID_ON_LOAD.context(): - _ = load_meshes("foo://bar") - - @tests.skip_data - def test_non_nc(self): - log_regex = r"Ignoring non-NetCDF file:.*" - with self.assertLogs(logger, level="INFO", msg_regex=log_regex): - with PARSE_UGRID_ON_LOAD.context(): - meshes = load_meshes( - tests.get_data_path(["PP", "simple_pp", "global.pp"]) - ) - self.assertDictEqual({}, meshes) - - -class TestsHttp(tests.IrisTest): - # Tests of HTTP (OpenDAP) loading need mocking since we can't have tests - # that rely on 3rd party servers. 
- def setUp(self): - self.format_agent_mock = self.patch( - "iris.fileformats.FORMAT_AGENT.get_spec" - ) - - def test_http(self): - url = "http://foo" - with PARSE_UGRID_ON_LOAD.context(): - _ = load_meshes(url) - self.format_agent_mock.assert_called_with(url, None) - - def test_mixed_sources(self): - url = "http://foo" - file = TMP_DIR / f"{uuid4()}.nc" - file.touch() - glob = f"{TMP_DIR}/*.nc" - - with PARSE_UGRID_ON_LOAD.context(): - _ = load_meshes([url, glob]) - file_uris = [ - call[0][0] for call in self.format_agent_mock.call_args_list - ] - for source in (url, Path(file).name): - self.assertIn(source, file_uris) diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py b/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py deleted file mode 100644 index 4ce979d845..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental.ugrid.mesh` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py deleted file mode 100644 index 9a81c79d44..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Connectivity.py +++ /dev/null @@ -1,368 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.experimental.ugrid.mesh.Connectivity` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from xml.dom import minidom - -import numpy as np -from numpy import ma - -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.experimental.ugrid.mesh import Connectivity - - -class TestStandard(tests.IrisTest): - def setUp(self): - # Crete an instance, with non-default arguments to allow testing of - # correct property setting. - self.kwargs = { - "indices": np.linspace(1, 12, 12, dtype=int).reshape((4, -1)), - "cf_role": "face_node_connectivity", - "long_name": "my_face_nodes", - "var_name": "face_nodes", - "attributes": {"notes": "this is a test"}, - "start_index": 1, - "location_axis": 1, - } - self.connectivity = Connectivity(**self.kwargs) - - def test_cf_role(self): - self.assertEqual(self.kwargs["cf_role"], self.connectivity.cf_role) - - def test_location(self): - expected = self.kwargs["cf_role"].split("_")[0] - self.assertEqual(expected, self.connectivity.location) - - def test_connected(self): - expected = self.kwargs["cf_role"].split("_")[1] - self.assertEqual(expected, self.connectivity.connected) - - def test_start_index(self): - self.assertEqual( - self.kwargs["start_index"], self.connectivity.start_index - ) - - def test_location_axis(self): - self.assertEqual( - self.kwargs["location_axis"], self.connectivity.location_axis - ) - - def test_indices(self): - self.assertArrayEqual( - self.kwargs["indices"], self.connectivity.indices - ) - - def test_read_only(self): - attributes = ("indices", "cf_role", "start_index", "location_axis") - for attribute in attributes: - self.assertRaisesRegex( - AttributeError, - "can't set attribute", - setattr, - self.connectivity, - attribute, - 1, - ) - - def test_transpose(self): - expected_dim = 1 - self.kwargs["location_axis"] - expected_indices = self.kwargs["indices"].transpose() - new_connectivity = self.connectivity.transpose() - self.assertEqual(expected_dim, new_connectivity.location_axis) - self.assertArrayEqual(expected_indices, 
new_connectivity.indices) - - def test_lazy_indices(self): - self.assertTrue(is_lazy_data(self.connectivity.lazy_indices())) - - def test_core_indices(self): - self.assertArrayEqual( - self.kwargs["indices"], self.connectivity.core_indices() - ) - - def test_has_lazy_indices(self): - self.assertFalse(self.connectivity.has_lazy_indices()) - - def test_lazy_location_lengths(self): - self.assertTrue( - is_lazy_data(self.connectivity.lazy_location_lengths()) - ) - - def test_location_lengths(self): - expected = [4, 4, 4] - self.assertArrayEqual(expected, self.connectivity.location_lengths()) - - def test___str__(self): - expected = "\n".join( - [ - "Connectivity : my_face_nodes / (unknown)", - " data: [", - " [ 1, 2, 3],", - " [ 4, 5, 6],", - " [ 7, 8, 9],", - " [10, 11, 12]]", - " shape: (4, 3)", - " dtype: int64", - " long_name: 'my_face_nodes'", - " var_name: 'face_nodes'", - " attributes:", - " notes 'this is a test'", - " cf_role: 'face_node_connectivity'", - " start_index: 1", - " location_axis: 1", - ] - ) - self.assertEqual(expected, self.connectivity.__str__()) - - def test___repr__(self): - expected = "" - self.assertEqual(expected, self.connectivity.__repr__()) - - def test_xml_element(self): - doc = minidom.Document() - connectivity_element = self.connectivity.xml_element(doc) - self.assertEqual(connectivity_element.tagName, "connectivity") - for attribute in ("cf_role", "start_index", "location_axis"): - self.assertIn(attribute, connectivity_element.attributes) - - def test___eq__(self): - equivalent_kwargs = self.kwargs - equivalent_kwargs["indices"] = self.kwargs["indices"].transpose() - equivalent_kwargs["location_axis"] = 1 - self.kwargs["location_axis"] - equivalent = Connectivity(**equivalent_kwargs) - self.assertFalse( - np.array_equal(equivalent.indices, self.connectivity.indices) - ) - self.assertEqual(equivalent, self.connectivity) - - def test_different(self): - different_kwargs = self.kwargs - different_kwargs["indices"] = 
self.kwargs["indices"].transpose() - different = Connectivity(**different_kwargs) - self.assertNotEqual(different, self.connectivity) - - def test_no_cube_dims(self): - self.assertRaises(NotImplementedError, self.connectivity.cube_dims, 1) - - def test_shape(self): - self.assertEqual(self.kwargs["indices"].shape, self.connectivity.shape) - - def test_ndim(self): - self.assertEqual(self.kwargs["indices"].ndim, self.connectivity.ndim) - - def test___getitem_(self): - subset = self.connectivity[:, 0:1] - self.assertArrayEqual(self.kwargs["indices"][:, 0:1], subset.indices) - - def test_copy(self): - new_indices = np.linspace(11, 16, 6, dtype=int).reshape((3, -1)) - copy_connectivity = self.connectivity.copy(new_indices) - self.assertArrayEqual(new_indices, copy_connectivity.indices) - - def test_indices_by_location(self): - expected = self.kwargs["indices"].transpose() - self.assertArrayEqual( - expected, self.connectivity.indices_by_location() - ) - - def test_indices_by_location_input(self): - expected = as_lazy_data(self.kwargs["indices"].transpose()) - by_location = self.connectivity.indices_by_location( - self.connectivity.lazy_indices() - ) - self.assertArrayEqual(expected, by_location) - - -class TestAltIndices(tests.IrisTest): - def setUp(self): - mask = ([0, 0, 0, 0, 1] * 2) + [0, 0, 0, 1, 1] - data = np.linspace(1, 15, 15, dtype=int).reshape((-1, 5)) - self.masked_indices = ma.array(data=data, mask=mask) - self.lazy_indices = as_lazy_data(data) - - def common(self, indices): - connectivity = Connectivity( - indices=indices, cf_role="face_node_connectivity" - ) - self.assertArrayEqual(indices, connectivity.indices) - - def test_int32(self): - indices = np.linspace(1, 9, 9, dtype=np.int32).reshape((-1, 3)) - self.common(indices) - - def test_uint32(self): - indices = np.linspace(1, 9, 9, dtype=np.uint32).reshape((-1, 3)) - self.common(indices) - - def test_lazy(self): - self.common(self.lazy_indices) - - def test_masked(self): - 
self.common(self.masked_indices) - - def test_masked_lazy(self): - self.common(as_lazy_data(self.masked_indices)) - - def test_has_lazy_indices(self): - connectivity = Connectivity( - indices=self.lazy_indices, cf_role="face_node_connectivity" - ) - self.assertTrue(connectivity.has_lazy_indices()) - - -class TestValidations(tests.IrisTest): - def test_start_index(self): - kwargs = { - "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), - "cf_role": "face_node_connectivity", - "start_index": 2, - } - self.assertRaisesRegex( - ValueError, "Invalid start_index .", Connectivity, **kwargs - ) - - def test_location_axis(self): - kwargs = { - "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), - "cf_role": "face_node_connectivity", - "location_axis": 2, - } - self.assertRaisesRegex( - ValueError, "Invalid location_axis .", Connectivity, **kwargs - ) - - def test_cf_role(self): - kwargs = { - "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), - "cf_role": "error", - } - self.assertRaisesRegex( - ValueError, "Invalid cf_role .", Connectivity, **kwargs - ) - - def test_indices_int(self): - kwargs = { - "indices": np.linspace(1, 9, 9).reshape((-1, 3)), - "cf_role": "face_node_connectivity", - } - self.assertRaisesRegex( - ValueError, - "dtype must be numpy integer subtype", - Connectivity, - **kwargs, - ) - - def test_indices_start_index(self): - kwargs = { - "indices": np.linspace(-9, -1, 9, dtype=int).reshape((-1, 3)), - "cf_role": "face_node_connectivity", - } - self.assertRaisesRegex( - ValueError, " < start_index", Connectivity, **kwargs - ) - - def test_indices_dims_low(self): - kwargs = { - "indices": np.linspace(1, 9, 9, dtype=int), - "cf_role": "face_node_connectivity", - } - self.assertRaisesRegex( - ValueError, "Expected 2-dimensional shape,", Connectivity, **kwargs - ) - - def test_indices_dims_high(self): - kwargs = { - "indices": np.linspace(1, 12, 12, dtype=int).reshape((-1, 3, 2)), - "cf_role": "face_node_connectivity", - } - 
self.assertRaisesRegex( - ValueError, "Expected 2-dimensional shape,", Connectivity, **kwargs - ) - - def test_indices_locations_edge(self): - kwargs = { - "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), - "cf_role": "edge_node_connectivity", - } - self.assertRaisesRegex( - ValueError, - "Not all edges meet requirement: len=2", - Connectivity, - **kwargs, - ) - - def test_indices_locations_face(self): - kwargs = { - "indices": np.linspace(1, 6, 6, dtype=int).reshape((-1, 2)), - "cf_role": "face_node_connectivity", - } - self.assertRaisesRegex( - ValueError, - "Not all faces meet requirement: len>=3", - Connectivity, - **kwargs, - ) - - def test_indices_locations_volume_face(self): - kwargs = { - "indices": np.linspace(1, 9, 9, dtype=int).reshape((-1, 3)), - "cf_role": "volume_face_connectivity", - } - self.assertRaisesRegex( - ValueError, - "Not all volumes meet requirement: len>=4", - Connectivity, - **kwargs, - ) - - def test_indices_locations_volume_edge(self): - kwargs = { - "indices": np.linspace(1, 12, 12, dtype=int).reshape((-1, 3)), - "cf_role": "volume_edge_connectivity", - } - self.assertRaisesRegex( - ValueError, - "Not all volumes meet requirement: len>=6", - Connectivity, - **kwargs, - ) - - def test_indices_locations_alt_dim(self): - """The transposed equivalent of `test_indices_locations_volume_face`.""" - kwargs = { - "indices": np.linspace(1, 9, 9, dtype=int).reshape((3, -1)), - "cf_role": "volume_face_connectivity", - "location_axis": 1, - } - self.assertRaisesRegex( - ValueError, - "Not all volumes meet requirement: len>=4", - Connectivity, - **kwargs, - ) - - def test_indices_locations_masked(self): - mask = ([0, 0, 0] * 2) + [0, 0, 1] - data = np.linspace(1, 9, 9, dtype=int).reshape((3, -1)) - kwargs = { - "indices": ma.array(data=data, mask=mask), - "cf_role": "face_node_connectivity", - } - # Validation of individual location sizes (denoted by masks) only - # available through explicit call of Connectivity.validate_indices(). 
- connectivity = Connectivity(**kwargs) - self.assertRaisesRegex( - ValueError, - "Not all faces meet requirement: len>=3", - connectivity.validate_indices, - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py deleted file mode 100644 index f39f3706ee..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh.py +++ /dev/null @@ -1,1348 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`mesh` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.coords import AuxCoord -from iris.exceptions import ConnectivityNotFoundError, CoordinateNotFoundError -from iris.experimental.ugrid import mesh, metadata -from iris.experimental.ugrid.mesh import logger - - -class TestMeshCommon(tests.IrisTest): - @classmethod - def setUpClass(cls): - # A collection of minimal coords and connectivities describing an - # equilateral triangle. 
- cls.NODE_LON = AuxCoord( - [0, 2, 1], - standard_name="longitude", - long_name="long_name", - var_name="node_lon", - attributes={"test": 1}, - ) - cls.NODE_LAT = AuxCoord( - [0, 0, 1], standard_name="latitude", var_name="node_lat" - ) - cls.EDGE_LON = AuxCoord( - [1, 1.5, 0.5], standard_name="longitude", var_name="edge_lon" - ) - cls.EDGE_LAT = AuxCoord( - [0, 0.5, 0.5], standard_name="latitude", var_name="edge_lat" - ) - cls.FACE_LON = AuxCoord( - [0.5], standard_name="longitude", var_name="face_lon" - ) - cls.FACE_LAT = AuxCoord( - [0.5], standard_name="latitude", var_name="face_lat" - ) - - cls.EDGE_NODE = mesh.Connectivity( - [[0, 1], [1, 2], [2, 0]], - cf_role="edge_node_connectivity", - long_name="long_name", - var_name="var_name", - attributes={"test": 1}, - ) - cls.FACE_NODE = mesh.Connectivity( - [[0, 1, 2]], cf_role="face_node_connectivity" - ) - cls.FACE_EDGE = mesh.Connectivity( - [[0, 1, 2]], cf_role="face_edge_connectivity" - ) - # (Actually meaningless:) - cls.FACE_FACE = mesh.Connectivity( - [[0, 0, 0]], cf_role="face_face_connectivity" - ) - # (Actually meaningless:) - cls.EDGE_FACE = mesh.Connectivity( - [[0, 0], [0, 0], [0, 0]], cf_role="edge_face_connectivity" - ) - cls.BOUNDARY_NODE = mesh.Connectivity( - [[0, 1], [1, 2], [2, 0]], cf_role="boundary_node_connectivity" - ) - - -class TestProperties1D(TestMeshCommon): - # Tests that can re-use a single instance for greater efficiency. - @classmethod - def setUpClass(cls): - super().setUpClass() - # Mesh kwargs with topology_dimension=1 and all applicable - # arguments populated - this tests correct property setting. 
- cls.kwargs = { - "topology_dimension": 1, - "node_coords_and_axes": ((cls.NODE_LON, "x"), (cls.NODE_LAT, "y")), - "connectivities": [cls.EDGE_NODE], - "long_name": "my_topology_mesh", - "var_name": "mesh", - "attributes": {"notes": "this is a test"}, - "node_dimension": "NodeDim", - "edge_dimension": "EdgeDim", - "edge_coords_and_axes": ((cls.EDGE_LON, "x"), (cls.EDGE_LAT, "y")), - } - cls.mesh = mesh.Mesh(**cls.kwargs) - - def test__metadata_manager(self): - self.assertEqual( - self.mesh._metadata_manager.cls.__name__, - metadata.MeshMetadata.__name__, - ) - - def test___getstate__(self): - expected = ( - self.mesh._metadata_manager, - self.mesh._coord_manager, - self.mesh._connectivity_manager, - ) - self.assertEqual(expected, self.mesh.__getstate__()) - - def test___repr__(self): - expected = "" - self.assertEqual(expected, repr(self.mesh)) - - def test___str__(self): - expected = [ - "Mesh : 'my_topology_mesh'", - " topology_dimension: 1", - " node", - " node_dimension: 'NodeDim'", - " node coordinates", - " ", - " ", - " edge", - " edge_dimension: 'EdgeDim'", - ( - " edge_node_connectivity: " - "" - ), - " edge coordinates", - " ", - " ", - " long_name: 'my_topology_mesh'", - " var_name: 'mesh'", - " attributes:", - " notes 'this is a test'", - ] - self.assertEqual(expected, str(self.mesh).split("\n")) - - def test___eq__(self): - # The dimension names do not participate in equality. 
- equivalent_kwargs = self.kwargs.copy() - equivalent_kwargs["node_dimension"] = "something_else" - equivalent = mesh.Mesh(**equivalent_kwargs) - self.assertEqual(equivalent, self.mesh) - - def test_different(self): - different_kwargs = self.kwargs.copy() - different_kwargs["long_name"] = "new_name" - different = mesh.Mesh(**different_kwargs) - self.assertNotEqual(different, self.mesh) - - different_kwargs = self.kwargs.copy() - ncaa = self.kwargs["node_coords_and_axes"] - new_lat = ncaa[1][0].copy(points=ncaa[1][0].points + 1) - new_ncaa = (ncaa[0], (new_lat, "y")) - different_kwargs["node_coords_and_axes"] = new_ncaa - different = mesh.Mesh(**different_kwargs) - self.assertNotEqual(different, self.mesh) - - different_kwargs = self.kwargs.copy() - conns = self.kwargs["connectivities"] - new_conn = conns[0].copy(conns[0].indices + 1) - different_kwargs["connectivities"] = new_conn - different = mesh.Mesh(**different_kwargs) - self.assertNotEqual(different, self.mesh) - - def test_all_connectivities(self): - expected = mesh.Mesh1DConnectivities(self.EDGE_NODE) - self.assertEqual(expected, self.mesh.all_connectivities) - - def test_all_coords(self): - expected = mesh.Mesh1DCoords( - self.NODE_LON, self.NODE_LAT, self.EDGE_LON, self.EDGE_LAT - ) - self.assertEqual(expected, self.mesh.all_coords) - - def test_boundary_node(self): - with self.assertRaises(AttributeError): - _ = self.mesh.boundary_node_connectivity - - def test_cf_role(self): - self.assertEqual("mesh_topology", self.mesh.cf_role) - # Read only. - self.assertRaises(AttributeError, setattr, self.mesh.cf_role, "foo", 1) - - def test_connectivities(self): - # General results. Method intended for inheritance. 
- positive_kwargs = ( - {"item": self.EDGE_NODE}, - {"item": "long_name"}, - {"long_name": "long_name"}, - {"var_name": "var_name"}, - {"attributes": {"test": 1}}, - {"cf_role": "edge_node_connectivity"}, - ) - - fake_connectivity = tests.mock.Mock( - __class__=mesh.Connectivity, cf_role="fake" - ) - negative_kwargs = ( - {"item": fake_connectivity}, - {"item": "foo"}, - {"standard_name": "air_temperature"}, - {"long_name": "foo"}, - {"var_name": "foo"}, - {"attributes": {"test": 2}}, - {"cf_role": "foo"}, - ) - - func = self.mesh.connectivities - for kwargs in positive_kwargs: - self.assertEqual([self.EDGE_NODE], func(**kwargs)) - for kwargs in negative_kwargs: - self.assertEqual([], func(**kwargs)) - - def test_connectivities_elements(self): - # topology_dimension-specific results. Method intended to be overridden. - positive_kwargs = ( - {"contains_node": True}, - {"contains_edge": True}, - {"contains_node": True, "contains_edge": True}, - ) - negative_kwargs = ( - {"contains_node": False}, - {"contains_edge": False}, - {"contains_edge": True, "contains_node": False}, - {"contains_edge": False, "contains_node": False}, - ) - - func = self.mesh.connectivities - for kwargs in positive_kwargs: - self.assertEqual([self.EDGE_NODE], func(**kwargs)) - for kwargs in negative_kwargs: - self.assertEqual([], func(**kwargs)) - - log_regex = r".*filter for non-existent.*" - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.assertEqual([], func(contains_face=True)) - - def test_coord(self): - # See Mesh.coords tests for thorough coverage of cases. - func = self.mesh.coord - exception = CoordinateNotFoundError - self.assertRaisesRegex( - exception, ".*but found 2", func, include_nodes=True - ) - self.assertRaisesRegex(exception, ".*but found none", func, axis="t") - - def test_coords(self): - # General results. Method intended for inheritance. 
- positive_kwargs = ( - {"item": self.NODE_LON}, - {"item": "longitude"}, - {"standard_name": "longitude"}, - {"long_name": "long_name"}, - {"var_name": "node_lon"}, - {"attributes": {"test": 1}}, - ) - - fake_coord = AuxCoord([0]) - negative_kwargs = ( - {"item": fake_coord}, - {"item": "foo"}, - {"standard_name": "air_temperature"}, - {"long_name": "foo"}, - {"var_name": "foo"}, - {"attributes": {"test": 2}}, - ) - - func = self.mesh.coords - for kwargs in positive_kwargs: - self.assertIn(self.NODE_LON, func(**kwargs)) - for kwargs in negative_kwargs: - self.assertNotIn(self.NODE_LON, func(**kwargs)) - - def test_coords_elements(self): - # topology_dimension-specific results. Method intended to be overridden. - all_expected = { - "node_x": self.NODE_LON, - "node_y": self.NODE_LAT, - "edge_x": self.EDGE_LON, - "edge_y": self.EDGE_LAT, - } - - kwargs_expected = ( - ({"axis": "x"}, ["node_x", "edge_x"]), - ({"axis": "y"}, ["node_y", "edge_y"]), - ({"include_nodes": True}, ["node_x", "node_y"]), - ({"include_edges": True}, ["edge_x", "edge_y"]), - ({"include_nodes": False}, ["edge_x", "edge_y"]), - ({"include_edges": False}, ["node_x", "node_y"]), - ( - {"include_nodes": True, "include_edges": True}, - ["node_x", "node_y", "edge_x", "edge_y"], - ), - ({"include_nodes": False, "include_edges": False}, []), - ( - {"include_nodes": False, "include_edges": True}, - ["edge_x", "edge_y"], - ), - ) - - func = self.mesh.coords - for kwargs, expected in kwargs_expected: - expected = [all_expected[k] for k in expected if k in all_expected] - self.assertEqual(expected, func(**kwargs)) - - log_regex = r".*filter non-existent.*" - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.assertEqual([], func(include_faces=True)) - - def test_edge_dimension(self): - self.assertEqual( - self.kwargs["edge_dimension"], self.mesh.edge_dimension - ) - - def test_edge_coords(self): - expected = mesh.MeshEdgeCoords(self.EDGE_LON, self.EDGE_LAT) - self.assertEqual(expected, 
self.mesh.edge_coords) - - def test_edge_face(self): - with self.assertRaises(AttributeError): - _ = self.mesh.edge_face_connectivity - - def test_edge_node(self): - self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) - - def test_face_coords(self): - with self.assertRaises(AttributeError): - _ = self.mesh.face_coords - - def test_face_dimension(self): - self.assertIsNone(self.mesh.face_dimension) - - def test_face_edge(self): - with self.assertRaises(AttributeError): - _ = self.mesh.face_edge_connectivity - - def test_face_face(self): - with self.assertRaises(AttributeError): - _ = self.mesh.face_face_connectivity - - def test_face_node(self): - with self.assertRaises(AttributeError): - _ = self.mesh.face_node_connectivity - - def test_node_coords(self): - expected = mesh.MeshNodeCoords(self.NODE_LON, self.NODE_LAT) - self.assertEqual(expected, self.mesh.node_coords) - - def test_node_dimension(self): - self.assertEqual( - self.kwargs["node_dimension"], self.mesh.node_dimension - ) - - def test_topology_dimension(self): - self.assertEqual( - self.kwargs["topology_dimension"], self.mesh.topology_dimension - ) - # Read only. - self.assertRaises( - AttributeError, setattr, self.mesh.topology_dimension, "foo", 1 - ) - - -class TestProperties2D(TestProperties1D): - # Additional/specialised tests for topology_dimension=2. 
- @classmethod - def setUpClass(cls): - super().setUpClass() - cls.kwargs["topology_dimension"] = 2 - cls.kwargs["connectivities"] = ( - cls.FACE_NODE, - cls.EDGE_NODE, - cls.FACE_EDGE, - cls.FACE_FACE, - cls.EDGE_FACE, - cls.BOUNDARY_NODE, - ) - cls.kwargs["face_dimension"] = "FaceDim" - cls.kwargs["face_coords_and_axes"] = ( - (cls.FACE_LON, "x"), - (cls.FACE_LAT, "y"), - ) - cls.mesh = mesh.Mesh(**cls.kwargs) - - def test___repr__(self): - expected = "" - self.assertEqual(expected, repr(self.mesh)) - - def test___str__(self): - expected = [ - "Mesh : 'my_topology_mesh'", - " topology_dimension: 2", - " node", - " node_dimension: 'NodeDim'", - " node coordinates", - " ", - " ", - " edge", - " edge_dimension: 'EdgeDim'", - ( - " edge_node_connectivity: " - "" - ), - " edge coordinates", - " ", - " ", - " face", - " face_dimension: 'FaceDim'", - ( - " face_node_connectivity: " - "" - ), - " face coordinates", - " ", - " ", - " optional connectivities", - ( - " face_face_connectivity: " - "" - ), - ( - " face_edge_connectivity: " - "" - ), - ( - " edge_face_connectivity: " - "" - ), - " long_name: 'my_topology_mesh'", - " var_name: 'mesh'", - " attributes:", - " notes 'this is a test'", - ] - self.assertEqual(expected, str(self.mesh).split("\n")) - - # Test some different options of the str() operation here. 
- def test___str__noedgecoords(self): - mesh_kwargs = self.kwargs.copy() - del mesh_kwargs["edge_coords_and_axes"] - alt_mesh = mesh.Mesh(**mesh_kwargs) - expected = [ - "Mesh : 'my_topology_mesh'", - " topology_dimension: 2", - " node", - " node_dimension: 'NodeDim'", - " node coordinates", - " ", - " ", - " edge", - " edge_dimension: 'EdgeDim'", - ( - " edge_node_connectivity: " - "" - ), - " face", - " face_dimension: 'FaceDim'", - ( - " face_node_connectivity: " - "" - ), - " face coordinates", - " ", - " ", - " optional connectivities", - ( - " face_face_connectivity: " - "" - ), - ( - " face_edge_connectivity: " - "" - ), - ( - " edge_face_connectivity: " - "" - ), - " long_name: 'my_topology_mesh'", - " var_name: 'mesh'", - " attributes:", - " notes 'this is a test'", - ] - self.assertEqual(expected, str(alt_mesh).split("\n")) - - def test_all_connectivities(self): - expected = mesh.Mesh2DConnectivities( - self.FACE_NODE, - self.EDGE_NODE, - self.FACE_EDGE, - self.FACE_FACE, - self.EDGE_FACE, - self.BOUNDARY_NODE, - ) - self.assertEqual(expected, self.mesh.all_connectivities) - - def test_all_coords(self): - expected = mesh.Mesh2DCoords( - self.NODE_LON, - self.NODE_LAT, - self.EDGE_LON, - self.EDGE_LAT, - self.FACE_LON, - self.FACE_LAT, - ) - self.assertEqual(expected, self.mesh.all_coords) - - def test_boundary_node(self): - self.assertEqual( - self.BOUNDARY_NODE, self.mesh.boundary_node_connectivity - ) - - def test_connectivity(self): - # See Mesh.connectivities tests for thorough coverage of cases. - # Can only test Mesh.connectivity for 2D since we need >1 connectivity. 
- func = self.mesh.connectivity - exception = ConnectivityNotFoundError - self.assertRaisesRegex( - exception, ".*but found 3", func, contains_node=True - ) - self.assertRaisesRegex( - exception, - ".*but found none", - func, - contains_node=False, - contains_edge=False, - contains_face=False, - ) - - def test_connectivities_elements(self): - kwargs_expected = ( - ( - {"contains_node": True}, - [self.EDGE_NODE, self.FACE_NODE, self.BOUNDARY_NODE], - ), - ( - {"contains_edge": True}, - [self.EDGE_NODE, self.FACE_EDGE, self.EDGE_FACE], - ), - ( - {"contains_face": True}, - [ - self.FACE_NODE, - self.FACE_EDGE, - self.FACE_FACE, - self.EDGE_FACE, - ], - ), - ( - {"contains_node": False}, - [self.FACE_EDGE, self.EDGE_FACE, self.FACE_FACE], - ), - ( - {"contains_edge": False}, - [self.FACE_NODE, self.BOUNDARY_NODE, self.FACE_FACE], - ), - ({"contains_face": False}, [self.EDGE_NODE, self.BOUNDARY_NODE]), - ( - {"contains_edge": True, "contains_face": True}, - [self.FACE_EDGE, self.EDGE_FACE], - ), - ( - {"contains_node": False, "contains_edge": False}, - [self.FACE_FACE], - ), - ( - {"contains_node": True, "contains_edge": False}, - [self.FACE_NODE, self.BOUNDARY_NODE], - ), - ( - { - "contains_node": False, - "contains_edge": False, - "contains_face": False, - }, - [], - ), - ) - func = self.mesh.connectivities - for kwargs, expected in kwargs_expected: - result = func(**kwargs) - self.assertEqual(len(expected), len(result)) - for item in expected: - self.assertIn(item, result) - - def test_coords_elements(self): - all_expected = { - "node_x": self.NODE_LON, - "node_y": self.NODE_LAT, - "edge_x": self.EDGE_LON, - "edge_y": self.EDGE_LAT, - "face_x": self.FACE_LON, - "face_y": self.FACE_LAT, - } - - kwargs_expected = ( - ({"axis": "x"}, ["node_x", "edge_x", "face_x"]), - ({"axis": "y"}, ["node_y", "edge_y", "face_y"]), - ({"include_nodes": True}, ["node_x", "node_y"]), - ({"include_edges": True}, ["edge_x", "edge_y"]), - ( - {"include_nodes": False}, - ["edge_x", 
"edge_y", "face_x", "face_y"], - ), - ( - {"include_edges": False}, - ["node_x", "node_y", "face_x", "face_y"], - ), - ( - {"include_faces": False}, - ["node_x", "node_y", "edge_x", "edge_y"], - ), - ( - {"include_faces": True, "include_edges": True}, - ["edge_x", "edge_y", "face_x", "face_y"], - ), - ( - {"include_faces": False, "include_edges": False}, - ["node_x", "node_y"], - ), - ( - {"include_faces": False, "include_edges": True}, - ["edge_x", "edge_y"], - ), - ) - - func = self.mesh.coords - for kwargs, expected in kwargs_expected: - expected = [all_expected[k] for k in expected if k in all_expected] - self.assertEqual(expected, func(**kwargs)) - - def test_edge_face(self): - self.assertEqual(self.EDGE_FACE, self.mesh.edge_face_connectivity) - - def test_face_coords(self): - expected = mesh.MeshFaceCoords(self.FACE_LON, self.FACE_LAT) - self.assertEqual(expected, self.mesh.face_coords) - - def test_face_dimension(self): - self.assertEqual( - self.kwargs["face_dimension"], self.mesh.face_dimension - ) - - def test_face_edge(self): - self.assertEqual(self.FACE_EDGE, self.mesh.face_edge_connectivity) - - def test_face_face(self): - self.assertEqual(self.FACE_FACE, self.mesh.face_face_connectivity) - - def test_face_node(self): - self.assertEqual(self.FACE_NODE, self.mesh.face_node_connectivity) - - -class Test__str__various(TestMeshCommon): - # Some extra testing for the str() operation : based on 1D meshes as simpler - def setUp(self): - # All the tests here want modified meshes, so use standard setUp to - # create afresh for each test, allowing them to modify it. - super().setUp() - # Mesh kwargs with topology_dimension=1 and all applicable - # arguments populated - this tests correct property setting. 
- self.kwargs = { - "topology_dimension": 1, - "node_coords_and_axes": ( - (self.NODE_LON, "x"), - (self.NODE_LAT, "y"), - ), - "connectivities": [self.EDGE_NODE], - "long_name": "my_topology_mesh", - "var_name": "mesh", - "attributes": {"notes": "this is a test"}, - "node_dimension": "NodeDim", - "edge_dimension": "EdgeDim", - "edge_coords_and_axes": ( - (self.EDGE_LON, "x"), - (self.EDGE_LAT, "y"), - ), - } - self.mesh = mesh.Mesh(**self.kwargs) - - def test___repr__basic(self): - expected = "" - self.assertEqual(expected, repr(self.mesh)) - - def test___repr__varname(self): - self.mesh.long_name = None - expected = "" - self.assertEqual(expected, repr(self.mesh)) - - def test___repr__noname(self): - self.mesh.long_name = None - self.mesh.var_name = None - expected = "" - self.assertRegex(repr(self.mesh), expected) - - def test___str__noattributes(self): - self.mesh.attributes = None - self.assertNotIn("attributes", str(self.mesh)) - - def test___str__emptyattributes(self): - self.mesh.attributes.clear() - self.assertNotIn("attributes", str(self.mesh)) - - def test__str__longstringattribute(self): - self.mesh.attributes["long_string"] = ( - "long_x_10_long_x_20_long_x_30_long_x_40_" - "long_x_50_long_x_60_long_x_70_long_x_80_" - ) - result = str(self.mesh) - # Note: initial single-quote, but no final one : this is correct ! - expected = ( - "'long_x_10_long_x_20_long_x_30_long_x_40_" - "long_x_50_long_x_60_long_x_70..." - ) - self.assertIn(expected + ":END", result + ":END") - - def test___str__units_stdname(self): - # These are usually missing, but they *can* be present. - mesh_kwargs = self.kwargs.copy() - mesh_kwargs["standard_name"] = "height" # Odd choice ! - mesh_kwargs["units"] = "m" - alt_mesh = mesh.Mesh(**mesh_kwargs) - result = str(alt_mesh) - # We expect these to appear at the end. 
- expected = "\n".join( - [ - " edge coordinates", - " ", - " ", - " standard_name: 'height'", - " long_name: 'my_topology_mesh'", - " var_name: 'mesh'", - " units: Unit('m')", - " attributes:", - " notes 'this is a test'", - ] - ) - self.assertTrue(result.endswith(expected)) - - -class TestOperations1D(TestMeshCommon): - # Tests that cannot re-use an existing Mesh instance, instead need a new - # one each time. - def setUp(self): - self.mesh = mesh.Mesh( - topology_dimension=1, - node_coords_and_axes=((self.NODE_LON, "x"), (self.NODE_LAT, "y")), - connectivities=self.EDGE_NODE, - ) - - @staticmethod - def new_connectivity(connectivity, new_len=False): - """Provide a new connectivity recognisably different from the original.""" - # NOTE: assumes non-transposed connectivity (location_axis=0). - if new_len: - shape = (connectivity.shape[0] + 1, connectivity.shape[1]) - else: - shape = connectivity.shape - return connectivity.copy(np.zeros(shape, dtype=int)) - - @staticmethod - def new_coord(coord, new_shape=False): - """Provide a new coordinate recognisably different from the original.""" - if new_shape: - shape = tuple([i + 1 for i in coord.shape]) - else: - shape = coord.shape - return coord.copy(np.zeros(shape)) - - def test___setstate__(self): - false_metadata_manager = "foo" - false_coord_manager = "bar" - false_connectivity_manager = "baz" - self.mesh.__setstate__( - ( - false_metadata_manager, - false_coord_manager, - false_connectivity_manager, - ) - ) - - self.assertEqual(false_metadata_manager, self.mesh._metadata_manager) - self.assertEqual(false_coord_manager, self.mesh._coord_manager) - self.assertEqual( - false_connectivity_manager, self.mesh._connectivity_manager - ) - - def test_add_connectivities(self): - # Cannot test ADD - 1D - nothing extra to add beyond minimum. - - for new_len in (False, True): - # REPLACE connectivities, first with one of the same length, then - # with one of different length. 
- edge_node = self.new_connectivity(self.EDGE_NODE, new_len) - self.mesh.add_connectivities(edge_node) - self.assertEqual( - mesh.Mesh1DConnectivities(edge_node), - self.mesh.all_connectivities, - ) - - def test_add_connectivities_duplicates(self): - edge_node_one = self.EDGE_NODE - edge_node_two = self.new_connectivity(self.EDGE_NODE) - self.mesh.add_connectivities(edge_node_one, edge_node_two) - self.assertEqual( - edge_node_two, - self.mesh.edge_node_connectivity, - ) - - def test_add_connectivities_invalid(self): - self.assertRaisesRegex( - TypeError, - "Expected Connectivity.*", - self.mesh.add_connectivities, - "foo", - ) - - face_node = self.FACE_NODE - log_regex = r"Not adding connectivity.*" - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.mesh.add_connectivities(face_node) - - def test_add_coords(self): - # ADD coords. - edge_kwargs = {"edge_x": self.EDGE_LON, "edge_y": self.EDGE_LAT} - self.mesh.add_coords(**edge_kwargs) - self.assertEqual( - mesh.MeshEdgeCoords(**edge_kwargs), - self.mesh.edge_coords, - ) - - for new_shape in (False, True): - # REPLACE coords, first with ones of the same shape, then with ones - # of different shape. 
- node_kwargs = { - "node_x": self.new_coord(self.NODE_LON, new_shape), - "node_y": self.new_coord(self.NODE_LAT, new_shape), - } - edge_kwargs = { - "edge_x": self.new_coord(self.EDGE_LON, new_shape), - "edge_y": self.new_coord(self.EDGE_LAT, new_shape), - } - self.mesh.add_coords(**node_kwargs, **edge_kwargs) - self.assertEqual( - mesh.MeshNodeCoords(**node_kwargs), - self.mesh.node_coords, - ) - self.assertEqual( - mesh.MeshEdgeCoords(**edge_kwargs), - self.mesh.edge_coords, - ) - - def test_add_coords_face(self): - self.assertRaises( - TypeError, - self.mesh.add_coords, - face_x=self.FACE_LON, - face_y=self.FACE_LAT, - ) - - def test_add_coords_invalid(self): - func = self.mesh.add_coords - self.assertRaisesRegex( - TypeError, ".*requires to be an 'AuxCoord'.*", func, node_x="foo" - ) - self.assertRaisesRegex( - TypeError, ".*requires a x-axis like.*", func, node_x=self.NODE_LAT - ) - climatological = AuxCoord( - [0], - bounds=[-1, 1], - standard_name="longitude", - climatological=True, - units="Days since 1970", - ) - self.assertRaisesRegex( - TypeError, - ".*cannot be a climatological.*", - func, - node_x=climatological, - ) - wrong_shape = self.NODE_LON.copy([0]) - self.assertRaisesRegex( - ValueError, ".*requires to have shape.*", func, node_x=wrong_shape - ) - - def test_add_coords_single(self): - # ADD coord. - edge_x = self.EDGE_LON - expected = mesh.MeshEdgeCoords(edge_x=edge_x, edge_y=None) - self.mesh.add_coords(edge_x=edge_x) - self.assertEqual(expected, self.mesh.edge_coords) - - # REPLACE coords. 
- node_x = self.new_coord(self.NODE_LON) - edge_x = self.new_coord(self.EDGE_LON) - expected_nodes = mesh.MeshNodeCoords( - node_x=node_x, node_y=self.mesh.node_coords.node_y - ) - expected_edges = mesh.MeshEdgeCoords(edge_x=edge_x, edge_y=None) - self.mesh.add_coords(node_x=node_x, edge_x=edge_x) - self.assertEqual(expected_nodes, self.mesh.node_coords) - self.assertEqual(expected_edges, self.mesh.edge_coords) - - # Attempt to REPLACE coords with those of DIFFERENT SHAPE. - node_x = self.new_coord(self.NODE_LON, new_shape=True) - edge_x = self.new_coord(self.EDGE_LON, new_shape=True) - node_kwarg = {"node_x": node_x} - edge_kwarg = {"edge_x": edge_x} - both_kwargs = dict(**node_kwarg, **edge_kwarg) - for kwargs in (node_kwarg, edge_kwarg, both_kwargs): - self.assertRaisesRegex( - ValueError, - ".*requires to have shape.*", - self.mesh.add_coords, - **kwargs, - ) - - def test_add_coords_single_face(self): - self.assertRaises( - TypeError, self.mesh.add_coords, face_x=self.FACE_LON - ) - - def test_dimension_names(self): - # Test defaults. - default = mesh.Mesh1DNames("Mesh1d_node", "Mesh1d_edge") - self.assertEqual(default, self.mesh.dimension_names()) - - log_regex = r"Not setting face_dimension.*" - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.mesh.dimension_names("foo", "bar", "baz") - self.assertEqual( - mesh.Mesh1DNames("foo", "bar"), - self.mesh.dimension_names(), - ) - - self.mesh.dimension_names_reset(True, True, True) - self.assertEqual(default, self.mesh.dimension_names()) - - # Single. 
- self.mesh.dimension_names(edge="foo") - self.assertEqual("foo", self.mesh.edge_dimension) - self.mesh.dimension_names_reset(edge=True) - self.assertEqual(default, self.mesh.dimension_names()) - - def test_edge_dimension_set(self): - self.mesh.edge_dimension = "foo" - self.assertEqual("foo", self.mesh.edge_dimension) - - def test_face_dimension_set(self): - log_regex = r"Not setting face_dimension.*" - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.mesh.face_dimension = "foo" - self.assertIsNone(self.mesh.face_dimension) - - def test_node_dimension_set(self): - self.mesh.node_dimension = "foo" - self.assertEqual("foo", self.mesh.node_dimension) - - def test_remove_connectivities(self): - """ - Test that remove() mimics the connectivities() method correctly, - and prevents removal of mandatory connectivities. - - """ - positive_kwargs = ( - {"item": self.EDGE_NODE}, - {"item": "long_name"}, - {"long_name": "long_name"}, - {"var_name": "var_name"}, - {"attributes": {"test": 1}}, - {"cf_role": "edge_node_connectivity"}, - {"contains_node": True}, - {"contains_edge": True}, - {"contains_edge": True, "contains_node": True}, - ) - - fake_connectivity = tests.mock.Mock( - __class__=mesh.Connectivity, cf_role="fake" - ) - negative_kwargs = ( - {"item": fake_connectivity}, - {"item": "foo"}, - {"standard_name": "air_temperature"}, - {"long_name": "foo"}, - {"var_name": "foo"}, - {"attributes": {"test": 2}}, - {"cf_role": "foo"}, - {"contains_node": False}, - {"contains_edge": False}, - {"contains_edge": True, "contains_node": False}, - {"contains_edge": False, "contains_node": False}, - ) - - log_regex = r"Ignoring request to remove.*" - for kwargs in positive_kwargs: - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.mesh.remove_connectivities(**kwargs) - self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) - for kwargs in negative_kwargs: - with self.assertLogs(logger, level="DEBUG") as log: - # Check that 
the only debug log is the one we inserted. - logger.debug("foo", extra=dict(cls=None)) - self.mesh.remove_connectivities(**kwargs) - self.assertEqual(1, len(log.records)) - self.assertEqual(self.EDGE_NODE, self.mesh.edge_node_connectivity) - - def test_remove_coords(self): - # Test that remove() mimics the coords() method correctly, - # and prevents removal of mandatory coords. - positive_kwargs = ( - {"item": self.NODE_LON}, - {"item": "longitude"}, - {"standard_name": "longitude"}, - {"long_name": "long_name"}, - {"var_name": "node_lon"}, - {"attributes": {"test": 1}}, - ) - - fake_coord = AuxCoord([0]) - negative_kwargs = ( - {"item": fake_coord}, - {"item": "foo"}, - {"standard_name": "air_temperature"}, - {"long_name": "foo"}, - {"var_name": "foo"}, - {"attributes": {"test": 2}}, - ) - - log_regex = r"Ignoring request to remove.*" - for kwargs in positive_kwargs: - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.mesh.remove_coords(**kwargs) - self.assertEqual(self.NODE_LON, self.mesh.node_coords.node_x) - for kwargs in negative_kwargs: - with self.assertLogs(logger, level="DEBUG") as log: - # Check that the only debug log is the one we inserted. - logger.debug("foo", extra=dict(cls=None)) - self.mesh.remove_coords(**kwargs) - self.assertEqual(1, len(log.records)) - self.assertEqual(self.NODE_LON, self.mesh.node_coords.node_x) - - # Test removal of optional connectivity. - self.mesh.add_coords(edge_x=self.EDGE_LON) - # Attempt to remove a non-existent coord. - self.mesh.remove_coords(self.EDGE_LAT) - # Confirm that EDGE_LON is still there. - self.assertEqual(self.EDGE_LON, self.mesh.edge_coords.edge_x) - # Remove EDGE_LON and confirm success. 
- self.mesh.remove_coords(self.EDGE_LON) - self.assertEqual(None, self.mesh.edge_coords.edge_x) - - def test_to_MeshCoord(self): - location = "node" - axis = "x" - result = self.mesh.to_MeshCoord(location, axis) - self.assertIsInstance(result, mesh.MeshCoord) - self.assertEqual(location, result.location) - self.assertEqual(axis, result.axis) - - def test_to_MeshCoord_face(self): - location = "face" - axis = "x" - self.assertRaises( - CoordinateNotFoundError, self.mesh.to_MeshCoord, location, axis - ) - - def test_to_MeshCoords(self): - location = "node" - result = self.mesh.to_MeshCoords(location) - self.assertEqual(len(self.mesh.AXES), len(result)) - for ix, axis in enumerate(self.mesh.AXES): - coord = result[ix] - self.assertIsInstance(coord, mesh.MeshCoord) - self.assertEqual(location, coord.location) - self.assertEqual(axis, coord.axis) - - def test_to_MeshCoords_face(self): - location = "face" - self.assertRaises( - CoordinateNotFoundError, self.mesh.to_MeshCoords, location - ) - - -class TestOperations2D(TestOperations1D): - # Additional/specialised tests for topology_dimension=2. - def setUp(self): - self.mesh = mesh.Mesh( - topology_dimension=2, - node_coords_and_axes=((self.NODE_LON, "x"), (self.NODE_LAT, "y")), - connectivities=(self.FACE_NODE), - ) - - def test_add_connectivities(self): - # ADD connectivities. - kwargs = { - "edge_node": self.EDGE_NODE, - "face_edge": self.FACE_EDGE, - "face_face": self.FACE_FACE, - "edge_face": self.EDGE_FACE, - "boundary_node": self.BOUNDARY_NODE, - } - expected = mesh.Mesh2DConnectivities( - face_node=self.mesh.face_node_connectivity, **kwargs - ) - self.mesh.add_connectivities(*kwargs.values()) - self.assertEqual(expected, self.mesh.all_connectivities) - - # REPLACE connectivities. - kwargs["face_node"] = self.FACE_NODE - for new_len in (False, True): - # First replace with ones of same length, then with ones of - # different length. 
- kwargs = { - k: self.new_connectivity(v, new_len) for k, v in kwargs.items() - } - self.mesh.add_connectivities(*kwargs.values()) - self.assertEqual( - mesh.Mesh2DConnectivities(**kwargs), - self.mesh.all_connectivities, - ) - - def test_add_connectivities_inconsistent(self): - # ADD Connectivities. - self.mesh.add_connectivities(self.EDGE_NODE) - face_edge = self.new_connectivity(self.FACE_EDGE, new_len=True) - edge_face = self.new_connectivity(self.EDGE_FACE, new_len=True) - for args in ([face_edge], [edge_face], [face_edge, edge_face]): - self.assertRaisesRegex( - ValueError, - "inconsistent .* counts.", - self.mesh.add_connectivities, - *args, - ) - - # REPLACE Connectivities - self.mesh.add_connectivities(self.FACE_EDGE, self.EDGE_FACE) - for args in ([face_edge], [edge_face], [face_edge, edge_face]): - self.assertRaisesRegex( - ValueError, - "inconsistent .* counts.", - self.mesh.add_connectivities, - *args, - ) - - def test_add_connectivities_invalid(self): - fake_cf_role = tests.mock.Mock( - __class__=mesh.Connectivity, cf_role="foo" - ) - log_regex = r"Not adding connectivity.*" - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - self.mesh.add_connectivities(fake_cf_role) - - def test_add_coords_face(self): - # ADD coords. - kwargs = {"face_x": self.FACE_LON, "face_y": self.FACE_LAT} - self.mesh.add_coords(**kwargs) - self.assertEqual( - mesh.MeshFaceCoords(**kwargs), - self.mesh.face_coords, - ) - - for new_shape in (False, True): - # REPLACE coords, first with ones of the same shape, then with ones - # of different shape. - kwargs = { - "face_x": self.new_coord(self.FACE_LON, new_shape), - "face_y": self.new_coord(self.FACE_LAT, new_shape), - } - self.mesh.add_coords(**kwargs) - self.assertEqual( - mesh.MeshFaceCoords(**kwargs), - self.mesh.face_coords, - ) - - def test_add_coords_single_face(self): - # ADD coord. 
- face_x = self.FACE_LON - expected = mesh.MeshFaceCoords(face_x=face_x, face_y=None) - self.mesh.add_coords(face_x=face_x) - self.assertEqual(expected, self.mesh.face_coords) - - # REPLACE coord. - face_x = self.new_coord(self.FACE_LON) - expected = mesh.MeshFaceCoords(face_x=face_x, face_y=None) - self.mesh.add_coords(face_x=face_x) - self.assertEqual(expected, self.mesh.face_coords) - - # Attempt to REPLACE coord with that of DIFFERENT SHAPE. - face_x = self.new_coord(self.FACE_LON, new_shape=True) - self.assertRaisesRegex( - ValueError, - ".*requires to have shape.*", - self.mesh.add_coords, - face_x=face_x, - ) - - def test_dimension_names(self): - # Test defaults. - default = mesh.Mesh2DNames("Mesh2d_node", "Mesh2d_edge", "Mesh2d_face") - self.assertEqual(default, self.mesh.dimension_names()) - - self.mesh.dimension_names("foo", "bar", "baz") - self.assertEqual( - mesh.Mesh2DNames("foo", "bar", "baz"), - self.mesh.dimension_names(), - ) - - self.mesh.dimension_names_reset(True, True, True) - self.assertEqual(default, self.mesh.dimension_names()) - - # Single. - self.mesh.dimension_names(face="foo") - self.assertEqual("foo", self.mesh.face_dimension) - self.mesh.dimension_names_reset(face=True) - self.assertEqual(default, self.mesh.dimension_names()) - - def test_face_dimension_set(self): - self.mesh.face_dimension = "foo" - self.assertEqual("foo", self.mesh.face_dimension) - - def test_remove_connectivities(self): - """Do what 1D test could not - test removal of optional connectivity.""" - - # Add an optional connectivity. - self.mesh.add_connectivities(self.FACE_FACE) - # Attempt to remove a non-existent connectivity. - self.mesh.remove_connectivities(self.EDGE_NODE) - # Confirm that FACE_FACE is still there. - self.assertEqual(self.FACE_FACE, self.mesh.face_face_connectivity) - # Remove FACE_FACE and confirm success. 
- self.mesh.remove_connectivities(contains_face=True) - self.assertEqual(None, self.mesh.face_face_connectivity) - - def test_remove_coords(self): - """Test the face argument.""" - super().test_remove_coords() - self.mesh.add_coords(face_x=self.FACE_LON) - self.assertEqual(self.FACE_LON, self.mesh.face_coords.face_x) - self.mesh.remove_coords(include_faces=True) - self.assertEqual(None, self.mesh.face_coords.face_x) - - def test_to_MeshCoord_face(self): - self.mesh.add_coords(face_x=self.FACE_LON) - location = "face" - axis = "x" - result = self.mesh.to_MeshCoord(location, axis) - self.assertIsInstance(result, mesh.MeshCoord) - self.assertEqual(location, result.location) - self.assertEqual(axis, result.axis) - - def test_to_MeshCoords_face(self): - self.mesh.add_coords(face_x=self.FACE_LON, face_y=self.FACE_LAT) - location = "face" - result = self.mesh.to_MeshCoords(location) - self.assertEqual(len(self.mesh.AXES), len(result)) - for ix, axis in enumerate(self.mesh.AXES): - coord = result[ix] - self.assertIsInstance(coord, mesh.MeshCoord) - self.assertEqual(location, coord.location) - self.assertEqual(axis, coord.axis) - - -class InitValidation(TestMeshCommon): - def test_invalid_topology(self): - kwargs = { - "topology_dimension": 0, - "node_coords_and_axes": ( - (self.NODE_LON, "x"), - (self.NODE_LAT, "y"), - ), - "connectivities": self.EDGE_NODE, - } - self.assertRaisesRegex( - ValueError, - "Expected 'topology_dimension'.*", - mesh.Mesh, - **kwargs, - ) - - def test_invalid_axes(self): - kwargs = { - "topology_dimension": 2, - "connectivities": self.FACE_NODE, - } - self.assertRaisesRegex( - ValueError, - "Invalid axis specified for node.*", - mesh.Mesh, - node_coords_and_axes=( - (self.NODE_LON, "foo"), - (self.NODE_LAT, "y"), - ), - **kwargs, - ) - kwargs["node_coords_and_axes"] = ( - (self.NODE_LON, "x"), - (self.NODE_LAT, "y"), - ) - self.assertRaisesRegex( - ValueError, - "Invalid axis specified for edge.*", - mesh.Mesh, - 
edge_coords_and_axes=((self.EDGE_LON, "foo"),), - **kwargs, - ) - self.assertRaisesRegex( - ValueError, - "Invalid axis specified for face.*", - mesh.Mesh, - face_coords_and_axes=((self.FACE_LON, "foo"),), - **kwargs, - ) - - # Several arg safety checks in __init__ currently unreachable given earlier checks. - - def test_minimum_connectivities(self): - # Further validations are tested in add_connectivity tests. - kwargs = { - "topology_dimension": 1, - "node_coords_and_axes": ( - (self.NODE_LON, "x"), - (self.NODE_LAT, "y"), - ), - "connectivities": (self.FACE_NODE,), - } - self.assertRaisesRegex( - ValueError, - ".*requires a edge_node_connectivity.*", - mesh.Mesh, - **kwargs, - ) - - def test_minimum_coords(self): - # Further validations are tested in add_coord tests. - kwargs = { - "topology_dimension": 1, - "node_coords_and_axes": ((self.NODE_LON, "x"), (None, "y")), - "connectivities": (self.FACE_NODE,), - } - self.assertRaisesRegex( - ValueError, - ".*is a required coordinate.*", - mesh.Mesh, - **kwargs, - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py deleted file mode 100644 index ce99a8b4be..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_MeshCoord.py +++ /dev/null @@ -1,800 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.mesh.MeshCoord`. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import re -import unittest.mock as mock - -import dask.array as da -import numpy as np - -from iris._lazy_data import as_lazy_data, is_lazy_data -from iris.common.metadata import BaseMetadata -from iris.coords import AuxCoord, Coord -from iris.cube import Cube -from iris.experimental.ugrid.mesh import Connectivity, Mesh, MeshCoord -import iris.tests.stock.mesh -from iris.tests.stock.mesh import sample_mesh, sample_meshcoord - - -class Test___init__(tests.IrisTest): - def setUp(self): - mesh = sample_mesh() - self.mesh = mesh - self.meshcoord = sample_meshcoord(mesh=mesh) - - def test_basic(self): - meshcoord = self.meshcoord - self.assertEqual(meshcoord.mesh, self.mesh) - self.assertEqual(meshcoord.location, "face") - self.assertEqual(meshcoord.axis, "x") - self.assertIsInstance(meshcoord, MeshCoord) - self.assertIsInstance(meshcoord, Coord) - - def test_derived_properties(self): - # Check the derived properties of the meshcoord against the correct - # underlying mesh coordinate. - for axis in Mesh.AXES: - meshcoord = sample_meshcoord(axis=axis) - # N.B. - node_x_coord = meshcoord.mesh.coord(include_nodes=True, axis=axis) - for key in node_x_coord.metadata._fields: - meshval = getattr(meshcoord, key) - if key == "var_name": - # var_name is unused. - self.assertIsNone(meshval) - else: - # names, units and attributes are derived from the node coord. 
- self.assertEqual(meshval, getattr(node_x_coord, key)) - - def test_fail_bad_mesh(self): - with self.assertRaisesRegex(TypeError, "must be a.*Mesh"): - sample_meshcoord(mesh=mock.sentinel.odd) - - def test_valid_locations(self): - for loc in Mesh.ELEMENTS: - meshcoord = sample_meshcoord(location=loc) - self.assertEqual(meshcoord.location, loc) - - def test_fail_bad_location(self): - with self.assertRaisesRegex(ValueError, "not a valid Mesh location"): - sample_meshcoord(location="bad") - - def test_fail_bad_axis(self): - with self.assertRaisesRegex(ValueError, "not a valid Mesh axis"): - sample_meshcoord(axis="q") - - -class Test__readonly_properties(tests.IrisTest): - def setUp(self): - self.meshcoord = sample_meshcoord() - - def test_fixed_metadata(self): - # Check that you cannot set any of these on an existing MeshCoord. - meshcoord = self.meshcoord - for prop in ("mesh", "location", "axis"): - with self.assertRaisesRegex(AttributeError, "can't set"): - setattr(meshcoord, prop, mock.sentinel.odd) - - def test_coord_system(self): - # The property exists, =None, can set to None, can not set otherwise. - self.assertTrue(hasattr(self.meshcoord, "coord_system")) - self.assertIsNone(self.meshcoord.coord_system) - self.meshcoord.coord_system = None - with self.assertRaisesRegex(ValueError, "Cannot set.* MeshCoord"): - self.meshcoord.coord_system = 1 - - def test_set_climatological(self): - # The property exists, =False, can set to False, can not set otherwise. - self.assertTrue(hasattr(self.meshcoord, "climatological")) - self.assertFalse(self.meshcoord.climatological) - self.meshcoord.climatological = False - with self.assertRaisesRegex(ValueError, "Cannot set.* MeshCoord"): - self.meshcoord.climatological = True - - -class Test__inherited_properties(tests.IrisTest): - """ - Check the settability and effect on equality of the common BaseMetadata - properties inherited from Coord : i.e. names/units/attributes. 
- - Though copied from the mesh at creation, they are also changeable. - - """ - - def setUp(self): - self.meshcoord = sample_meshcoord() - - def test_inherited_properties(self): - # Check that these are settable, and affect equality. - meshcoord = self.meshcoord - # Add an existing attribute, so we can change it. - meshcoord.attributes["thing"] = 7 - for prop in BaseMetadata._fields: - meshcoord2 = meshcoord.copy() - if "name" in prop: - # Use a standard-name, can do for any of them. - setattr(meshcoord2, prop, "height") - elif prop == "units": - meshcoord2.units = "Pa" - elif prop == "attributes": - meshcoord2.attributes["thing"] = 77 - self.assertNotEqual(meshcoord2, meshcoord) - - -class Test__points_and_bounds(tests.IrisTest): - # Basic method testing only, for 3 locations with simple array values. - # See Test_MeshCoord__dataviews for more detailed checks. - def test_node(self): - meshcoord = sample_meshcoord(location="node") - n_nodes = ( - iris.tests.stock.mesh._TEST_N_NODES - ) # n-nodes default for sample mesh - self.assertIsNone(meshcoord.core_bounds()) - self.assertArrayAllClose(meshcoord.points, 1100 + np.arange(n_nodes)) - - def test_edge(self): - meshcoord = sample_meshcoord(location="edge") - points, bounds = meshcoord.core_points(), meshcoord.core_bounds() - self.assertEqual(points.shape, meshcoord.shape) - self.assertEqual(bounds.shape, meshcoord.shape + (2,)) - self.assertArrayAllClose( - meshcoord.points, [2100, 2101, 2102, 2103, 2104] - ) - self.assertArrayAllClose( - meshcoord.bounds, - [ - (1105, 1106), - (1107, 1108), - (1109, 1110), - (1111, 1112), - (1113, 1114), - ], - ) - - def test_face(self): - meshcoord = sample_meshcoord(location="face") - points, bounds = meshcoord.core_points(), meshcoord.core_bounds() - self.assertEqual(points.shape, meshcoord.shape) - self.assertEqual(bounds.shape, meshcoord.shape + (4,)) - self.assertArrayAllClose(meshcoord.points, [3100, 3101, 3102]) - self.assertArrayAllClose( - meshcoord.bounds, - [ - (1100, 
1101, 1102, 1103), - (1104, 1105, 1106, 1107), - (1108, 1109, 1110, 1111), - ], - ) - - -class Test___eq__(tests.IrisTest): - def setUp(self): - self.mesh = sample_mesh() - - def _create_common_mesh(self, **kwargs): - return sample_meshcoord(mesh=self.mesh, **kwargs) - - def test_identical_mesh(self): - meshcoord1 = self._create_common_mesh() - meshcoord2 = self._create_common_mesh() - self.assertEqual(meshcoord2, meshcoord1) - - def test_equal_mesh(self): - mesh1 = sample_mesh() - mesh2 = sample_mesh() - meshcoord1 = sample_meshcoord(mesh=mesh1) - meshcoord2 = sample_meshcoord(mesh=mesh2) - self.assertEqual(meshcoord2, meshcoord1) - - def test_different_mesh(self): - mesh1 = sample_mesh() - mesh2 = sample_mesh() - mesh2.long_name = "new_name" - meshcoord1 = sample_meshcoord(mesh=mesh1) - meshcoord2 = sample_meshcoord(mesh=mesh2) - self.assertNotEqual(meshcoord2, meshcoord1) - - def test_different_location(self): - meshcoord = self._create_common_mesh() - meshcoord2 = self._create_common_mesh(location="node") - self.assertNotEqual(meshcoord2, meshcoord) - - def test_different_axis(self): - meshcoord = self._create_common_mesh() - meshcoord2 = self._create_common_mesh(axis="y") - self.assertNotEqual(meshcoord2, meshcoord) - - -class Test__copy(tests.IrisTest): - def test_basic(self): - meshcoord = sample_meshcoord() - meshcoord2 = meshcoord.copy() - self.assertIsNot(meshcoord2, meshcoord) - self.assertEqual(meshcoord2, meshcoord) - # In this case, they should share *NOT* copy the Mesh object. 
- self.assertIs(meshcoord2.mesh, meshcoord.mesh) - - def test_fail_copy_newpoints(self): - meshcoord = sample_meshcoord() - with self.assertRaisesRegex(ValueError, "Cannot change the content"): - meshcoord.copy(points=meshcoord.points) - - def test_fail_copy_newbounds(self): - meshcoord = sample_meshcoord() - with self.assertRaisesRegex(ValueError, "Cannot change the content"): - meshcoord.copy(bounds=meshcoord.bounds) - - -class Test__getitem__(tests.IrisTest): - def test_slice_wholeslice_1tuple(self): - # The only slicing case that we support, to enable cube slicing. - meshcoord = sample_meshcoord() - meshcoord2 = meshcoord[ - :, - ] - self.assertIsNot(meshcoord2, meshcoord) - self.assertEqual(meshcoord2, meshcoord) - # In this case, we should *NOT* copy the linked Mesh object. - self.assertIs(meshcoord2.mesh, meshcoord.mesh) - - def test_slice_whole_slice_singlekey(self): - # A slice(None) also fails, if not presented in a 1-tuple. - meshcoord = sample_meshcoord() - with self.assertRaisesRegex(ValueError, "Cannot index"): - meshcoord[:] - - def test_fail_slice_part(self): - meshcoord = sample_meshcoord() - with self.assertRaisesRegex(ValueError, "Cannot index"): - meshcoord[:1] - - -class Test__str_repr(tests.IrisTest): - def setUp(self): - mesh = sample_mesh() - self.mesh = mesh - # Give mesh itself a name: makes a difference between str and repr. 
- self.mesh.rename("test_mesh") - self.meshcoord = sample_meshcoord(mesh=mesh) - - def _expected_elements_regexp( - self, - standard_name="longitude", - long_name="long-name", - attributes=True, - location="face", - axis="x", - ): - # Printed name is standard or long -- we don't have a case with neither - coord_name = standard_name or long_name - # Construct regexp in 'sections' - # NB each consumes upto first non-space in the next line - regexp = f"MeshCoord : {coord_name} / [^\n]+\n *" - regexp += r"mesh: \\n *" - regexp += f"location: '{location}'\n *" - # Now some optional sections : whichever comes first will match - # arbitrary content leading up to it. - matched_any_upto = False - if standard_name: - regexp += ".*" - matched_any_upto = True - regexp += f"standard_name: '{standard_name}'\n *" - if long_name: - if not matched_any_upto: - regexp += ".*" - matched_any_upto = True - regexp += f"long_name: '{long_name}'\n *" - if attributes: - # if we expected attributes, they should come next - # TODO: change this when each attribute goes on a new line - if not matched_any_upto: - regexp += ".*" - matched_any_upto = True - # match 'attributes:' followed by N*lines with larger indent - regexp += "attributes:(\n [^ \n]+ +[^ \n]+)+\n " - # After those items, expect 'axis' next - # N.B. this FAILS if we had attributes when we didn't expect them - regexp += f"axis: '{axis}'$" # N.B. this is always the end - - # Compile regexp, also allowing matches across newlines - regexp = re.compile(regexp, flags=re.DOTALL) - return regexp - - def test_repr(self): - # A simple check for the condensed form. - result = repr(self.meshcoord) - expected = ( - "" - ) - self.assertEqual(expected, result) - - def test_repr_lazy(self): - # Displays lazy content (and does not realise!). 
- self.meshcoord.points = as_lazy_data(self.meshcoord.points) - self.meshcoord.bounds = as_lazy_data(self.meshcoord.bounds) - self.assertTrue(self.meshcoord.has_lazy_points()) - self.assertTrue(self.meshcoord.has_lazy_bounds()) - - result = repr(self.meshcoord) - self.assertTrue(self.meshcoord.has_lazy_points()) - self.assertTrue(self.meshcoord.has_lazy_bounds()) - - expected = ( - "+bounds shape(3,)>" - ) - self.assertEqual(expected, result) - - def test_repr__nameless_mesh(self): - # Check what it does when the Mesh doesn't have a name. - self.mesh.long_name = None - assert self.mesh.name() == "unknown" - result = repr(self.meshcoord) - re_expected = ( - r".MeshCoord: longitude / \(degrees_east\) " - r"mesh\(.Mesh object at 0x[^>]+.\) location\(face\) " - ) - self.assertRegex(result, re_expected) - - def test__str__(self): - # Basic output contains mesh, location, standard_name, long_name, - # attributes, mesh, location and axis - result = str(self.meshcoord) - re_expected = self._expected_elements_regexp() - self.assertRegex(result, re_expected) - - def test__str__lazy(self): - # Displays lazy content (and does not realise!). 
- self.meshcoord.points = as_lazy_data(self.meshcoord.points) - self.meshcoord.bounds = as_lazy_data(self.meshcoord.bounds) - - result = str(self.meshcoord) - self.assertTrue(self.meshcoord.has_lazy_points()) - self.assertTrue(self.meshcoord.has_lazy_bounds()) - - self.assertIn("points: ", result) - self.assertIn("bounds: ", result) - re_expected = self._expected_elements_regexp() - self.assertRegex(result, re_expected) - - def test_alternative_location_and_axis(self): - meshcoord = sample_meshcoord(mesh=self.mesh, location="edge", axis="y") - result = str(meshcoord) - re_expected = self._expected_elements_regexp( - standard_name="latitude", - long_name=None, - location="edge", - axis="y", - attributes=None, - ) - self.assertRegex(result, re_expected) - # Basic output contains standard_name, long_name, attributes - - def test_str_no_long_name(self): - mesh = self.mesh - # Remove the long_name of the node coord in the mesh. - node_coord = mesh.coord(include_nodes=True, axis="x") - node_coord.long_name = None - # Make a new meshcoord, based on the modified mesh. - meshcoord = sample_meshcoord(mesh=self.mesh) - result = str(meshcoord) - re_expected = self._expected_elements_regexp(long_name=False) - self.assertRegex(result, re_expected) - - def test_str_no_standard_name(self): - mesh = self.mesh - # Remove the standard_name of the node coord in the mesh. - node_coord = mesh.coord(include_nodes=True, axis="x") - node_coord.standard_name = None - node_coord.axis = "x" # This is required : but it's a kludge !! - # Make a new meshcoord, based on the modified mesh. - meshcoord = sample_meshcoord(mesh=self.mesh) - result = str(meshcoord) - re_expected = self._expected_elements_regexp(standard_name=False) - self.assertRegex(result, re_expected) - - def test_str_no_attributes(self): - mesh = self.mesh - # No attributes on the node coord in the mesh. 
- node_coord = mesh.coord(include_nodes=True, axis="x") - node_coord.attributes = None - # Make a new meshcoord, based on the modified mesh. - meshcoord = sample_meshcoord(mesh=self.mesh) - result = str(meshcoord) - re_expected = self._expected_elements_regexp(attributes=False) - self.assertRegex(result, re_expected) - - def test_str_empty_attributes(self): - mesh = self.mesh - # Empty attributes dict on the node coord in the mesh. - node_coord = mesh.coord(include_nodes=True, axis="x") - node_coord.attributes.clear() - # Make a new meshcoord, based on the modified mesh. - meshcoord = sample_meshcoord(mesh=self.mesh) - result = str(meshcoord) - re_expected = self._expected_elements_regexp(attributes=False) - self.assertRegex(result, re_expected) - - -class Test_cube_containment(tests.IrisTest): - # Check that we can put a MeshCoord into a cube, and have it behave just - # like a regular AuxCoord. - def setUp(self): - meshcoord = sample_meshcoord() - data_shape = (2,) + meshcoord.shape - cube = Cube(np.zeros(data_shape)) - cube.add_aux_coord(meshcoord, 1) - self.meshcoord = meshcoord - self.cube = cube - - def test_added_to_cube(self): - meshcoord = self.meshcoord - cube = self.cube - self.assertIn(meshcoord, cube.coords()) - - def test_cube_dims(self): - meshcoord = self.meshcoord - cube = self.cube - self.assertEqual(meshcoord.cube_dims(cube), (1,)) - self.assertEqual(cube.coord_dims(meshcoord), (1,)) - - def test_find_by_name(self): - meshcoord = self.meshcoord - cube = self.cube - self.assertIs(cube.coord(standard_name="longitude"), meshcoord) - self.assertIs(cube.coord(long_name="long-name"), meshcoord) - - def test_find_by_axis(self): - meshcoord = self.meshcoord - cube = self.cube - self.assertIs(cube.coord(axis="x"), meshcoord) - self.assertEqual(cube.coords(axis="y"), []) - - # NOTE: the meshcoord.axis takes precedence over the older - # "guessed axis" approach. So the standard_name does not control it. 
- meshcoord.rename("latitude") - self.assertIs(cube.coord(axis="x"), meshcoord) - self.assertEqual(cube.coords(axis="y"), []) - - def test_cube_copy(self): - # Check that we can copy a cube, and get a MeshCoord == the original. - # Note: currently must have the *same* mesh, as for MeshCoord.copy(). - meshcoord = self.meshcoord - cube = self.cube - cube2 = cube.copy() - meshco2 = cube2.coord(meshcoord) - self.assertIsNot(meshco2, meshcoord) - self.assertEqual(meshco2, meshcoord) - - def test_cube_nonmesh_slice(self): - # Check that we can slice a cube on a non-mesh dimension, and get a - # meshcoord == original. - # Note: currently this must have the *same* mesh, as for .copy(). - meshcoord = self.meshcoord - cube = self.cube - cube2 = cube[:1] # Make a reduced copy, slicing the non-mesh dim - meshco2 = cube2.coord(meshcoord) - self.assertIsNot(meshco2, meshcoord) - self.assertEqual(meshco2, meshcoord) - - def test_cube_mesh_partslice(self): - # Check that we can *not* get a partial MeshCoord slice, as the - # MeshCoord refuses to be sliced. - # Instead, you get an AuxCoord created from the MeshCoord. - meshcoord = self.meshcoord - cube = self.cube - cube2 = cube[:, :1] # Make a reduced copy, slicing the mesh dim - - # The resulting coord can not be identified with the original. - # (i.e. metadata does not match) - co_matches = cube2.coords(meshcoord) - self.assertEqual(co_matches, []) - - # The resulting coord is an AuxCoord instead of a MeshCoord, but the - # values match. 
- co2 = cube2.coord(meshcoord.name()) - self.assertFalse(isinstance(co2, MeshCoord)) - self.assertIsInstance(co2, AuxCoord) - self.assertArrayAllClose(co2.points, meshcoord.points[:1]) - self.assertArrayAllClose(co2.bounds, meshcoord.bounds[:1]) - - -class Test_auxcoord_conversion(tests.IrisTest): - def test_basic(self): - meshcoord = sample_meshcoord() - auxcoord = AuxCoord.from_coord(meshcoord) - for propname, auxval in auxcoord.metadata._asdict().items(): - meshval = getattr(meshcoord, propname) - self.assertEqual(auxval, meshval) - # Also check array content. - self.assertArrayAllClose(auxcoord.points, meshcoord.points) - self.assertArrayAllClose(auxcoord.bounds, meshcoord.bounds) - - -class Test_MeshCoord__dataviews(tests.IrisTest): - """ - Fuller testing of points and bounds calculations and behaviour. - Including connectivity missing-points (non-square faces). - - """ - - def setUp(self): - self._make_test_meshcoord() - - def _make_test_meshcoord( - self, - lazy_sources=False, - location="face", - inds_start_index=0, - inds_location_axis=0, - facenodes_changes=None, - ): - # Construct a miniature face-nodes mesh for testing. - # NOTE: we will make our connectivity arrays with standard - # start_index=0 and location_axis=0 : We only adjust that (if required) when - # creating the actual connectivities. - face_nodes_array = np.array( - [ - [0, 2, 1, 3], - [1, 3, 10, 13], - [2, 7, 9, 19], - [ - 3, - 4, - 7, - -1, - ], # This one has a "missing" point (it's a triangle) - [8, 1, 7, 2], - ] - ) - # Connectivity uses *masked* for missing points. - face_nodes_array = np.ma.masked_less(face_nodes_array, 0) - if facenodes_changes: - facenodes_changes = facenodes_changes.copy() - facenodes_changes.pop("n_extra_bad_points") - for indices, value in facenodes_changes.items(): - face_nodes_array[indices] = value - - # Construct a miniature edge-nodes mesh for testing. 
- edge_nodes_array = np.array([[0, 2], [1, 3], [1, 4], [3, 7]]) - # Connectivity uses *masked* for missing points. - edge_nodes_array = np.ma.masked_less(edge_nodes_array, 0) - - n_faces = face_nodes_array.shape[0] - n_edges = edge_nodes_array.shape[0] - n_nodes = int(face_nodes_array.max() + 1) - self.NODECOORDS_BASENUM = 1100.0 - self.EDGECOORDS_BASENUM = 1200.0 - self.FACECOORDS_BASENUM = 1300.0 - node_xs = self.NODECOORDS_BASENUM + np.arange(n_nodes) - edge_xs = self.EDGECOORDS_BASENUM + np.arange(n_edges) - face_xs = self.FACECOORDS_BASENUM + np.arange(n_faces) - - # Record all these for re-use in tests - self.n_faces = n_faces - self.n_nodes = n_nodes - self.face_xs = face_xs - self.node_xs = node_xs - self.edge_xs = edge_xs - self.face_nodes_array = face_nodes_array - self.edge_nodes_array = edge_nodes_array - - # convert source data to Dask arrays if asked. - if lazy_sources: - - def lazify(arr): - return da.from_array(arr, chunks=-1, meta=np.ndarray) - - node_xs = lazify(node_xs) - face_xs = lazify(face_xs) - edge_xs = lazify(edge_xs) - face_nodes_array = lazify(face_nodes_array) - edge_nodes_array = lazify(edge_nodes_array) - - # Build a mesh with this info stored in it. - co_nodex = AuxCoord( - node_xs, standard_name="longitude", long_name="node_x", units=1 - ) - co_facex = AuxCoord( - face_xs, standard_name="longitude", long_name="face_x", units=1 - ) - co_edgex = AuxCoord( - edge_xs, standard_name="longitude", long_name="edge_x", units=1 - ) - # N.B. the Mesh requires 'Y's as well. 
- co_nodey = co_nodex.copy() - co_nodey.rename("latitude") - co_nodey.long_name = "node_y" - co_facey = co_facex.copy() - co_facey.rename("latitude") - co_facey.long_name = "face_y" - co_edgey = co_edgex.copy() - co_edgey.rename("edge_y") - co_edgey.long_name = "edge_y" - - face_node_conn = Connectivity( - inds_start_index - + ( - face_nodes_array.transpose() - if inds_location_axis == 1 - else face_nodes_array - ), - cf_role="face_node_connectivity", - long_name="face_nodes", - start_index=inds_start_index, - location_axis=inds_location_axis, - ) - - edge_node_conn = Connectivity( - inds_start_index - + ( - edge_nodes_array.transpose() - if inds_location_axis == 1 - else edge_nodes_array - ), - cf_role="edge_node_connectivity", - long_name="edge_nodes", - start_index=inds_start_index, - location_axis=inds_location_axis, - ) - - self.mesh = Mesh( - topology_dimension=2, - node_coords_and_axes=[(co_nodex, "x"), (co_nodey, "y")], - connectivities=[face_node_conn, edge_node_conn], - face_coords_and_axes=[(co_facex, "x"), (co_facey, "y")], - edge_coords_and_axes=[(co_edgex, "x"), (co_edgey, "y")], - ) - - # Construct a test meshcoord. - meshcoord = MeshCoord(mesh=self.mesh, location=location, axis="x") - self.meshcoord = meshcoord - return meshcoord - - def _check_expected_points_values(self): - # The points are just the face_x-s - meshcoord = self.meshcoord - self.assertArrayAllClose(meshcoord.points, self.face_xs) - - def _check_expected_bounds_values(self, facenodes_changes=None): - mesh_coord = self.meshcoord - # The bounds are selected node_x-s, ==> node_number + coords-offset - result = mesh_coord.bounds - # N.B. result should be masked where the masked indices are. - expected = self.NODECOORDS_BASENUM + self.face_nodes_array - if facenodes_changes: - # ALSO include any "bad" values in that calculation. 
- bad_values = (self.face_nodes_array < 0) | ( - self.face_nodes_array >= self.n_nodes - ) - expected[bad_values] = np.ma.masked - # Check there are *some* masked points. - n_missing_expected = 1 - if facenodes_changes: - n_missing_expected += facenodes_changes["n_extra_bad_points"] - self.assertEqual(np.count_nonzero(expected.mask), n_missing_expected) - # Check results match, *including* location of masked points. - self.assertMaskedArrayAlmostEqual(result, expected) - - def test_points_values(self): - """Basic points content check, on real data.""" - meshcoord = self.meshcoord - self.assertFalse(meshcoord.has_lazy_points()) - self.assertFalse(meshcoord.has_lazy_bounds()) - self._check_expected_points_values() - - def test_bounds_values(self): - """Basic bounds contents check.""" - meshcoord = self.meshcoord - self.assertFalse(meshcoord.has_lazy_points()) - self.assertFalse(meshcoord.has_lazy_bounds()) - self._check_expected_bounds_values() - - def test_lazy_points_values(self): - """Check lazy points calculation on lazy inputs.""" - # Remake the test data with lazy source coords. - meshcoord = self._make_test_meshcoord(lazy_sources=True) - self.assertTrue(meshcoord.has_lazy_points()) - self.assertTrue(meshcoord.has_lazy_bounds()) - # Check values, as previous. - self._check_expected_points_values() - - def test_lazy_bounds_values(self): - meshcoord = self._make_test_meshcoord(lazy_sources=True) - self.assertTrue(meshcoord.has_lazy_points()) - self.assertTrue(meshcoord.has_lazy_bounds()) - # Check values, as previous. 
- self._check_expected_bounds_values() - - def test_edge_points(self): - meshcoord = self._make_test_meshcoord(location="edge") - result = meshcoord.points - self.assertArrayAllClose(result, self.edge_xs) - - def test_edge_bounds(self): - meshcoord = self._make_test_meshcoord(location="edge") - result = meshcoord.bounds - # The bounds are selected node_x-s : all == node_number + 100.0 - expected = self.NODECOORDS_BASENUM + self.edge_nodes_array - # NB simpler than faces : no possibility of missing points - self.assertArrayAlmostEqual(result, expected) - - def test_bounds_connectivity__location_axis_1(self): - # Test with a transposed indices array. - self._make_test_meshcoord(inds_location_axis=1) - self._check_expected_bounds_values() - - def test_bounds_connectivity__start_index_1(self): - # Test 1-based indices. - self._make_test_meshcoord(inds_start_index=1) - self._check_expected_bounds_values() - - def test_meshcoord_leaves_originals_lazy(self): - self._make_test_meshcoord(lazy_sources=True) - mesh = self.mesh - meshcoord = self.meshcoord - - # Fetch the relevant source objects from the mesh. - def fetch_sources_from_mesh(): - return ( - mesh.coord(include_nodes=True, axis="x"), - mesh.coord(include_faces=True, axis="x"), - mesh.face_node_connectivity, - ) - - # Check all the source coords are lazy. - for coord in fetch_sources_from_mesh(): - # Note: not all are actual Coords, so can't use 'has_lazy_points'. - self.assertTrue(is_lazy_data(coord._core_values())) - - # Calculate both points + bounds of the meshcoord - self.assertTrue(meshcoord.has_lazy_points()) - self.assertTrue(meshcoord.has_lazy_bounds()) - meshcoord.points - meshcoord.bounds - self.assertFalse(meshcoord.has_lazy_points()) - self.assertFalse(meshcoord.has_lazy_bounds()) - - # Check all the source coords are still lazy. - for coord in fetch_sources_from_mesh(): - # Note: not all are actual Coords, so can't use 'has_lazy_points'. 
- self.assertTrue(is_lazy_data(coord._core_values())) - - def _check_bounds_bad_index_values(self, lazy): - facenodes_modify = { - # nothing wrong with this one - (2, 1): 1, - # extra missing point, normal "missing" indicator - (3, 3): np.ma.masked, - # bad index > n_nodes - (4, 2): 100, - # NOTE: **can't** set an index < 0, as it is rejected by the - # Connectivity validity check. - # Indicate how many "extra" missing results this should cause. - "n_extra_bad_points": 2, - } - self._make_test_meshcoord( - facenodes_changes=facenodes_modify, lazy_sources=lazy - ) - self._check_expected_bounds_values() - - def test_bounds_badvalues__real(self): - self._check_bounds_bad_index_values(lazy=False) - - def test_bounds_badvalues__lazy(self): - self._check_bounds_bad_index_values(lazy=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py b/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py deleted file mode 100644 index edd34f94a1..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/mesh/test_Mesh__from_coords.py +++ /dev/null @@ -1,253 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :meth:`iris.experimental.ugrid.mesh.Mesh.from_coords`. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.coords import AuxCoord, DimCoord -from iris.experimental.ugrid import logger -from iris.experimental.ugrid.mesh import Connectivity, Mesh -from iris.tests.stock import simple_2d_w_multidim_coords - - -class Test1Dim(tests.IrisTest): - def setUp(self): - self.lon = DimCoord( - points=[0.5, 1.5, 2.5], - bounds=[[0, 1], [1, 2], [2, 3]], - standard_name="longitude", - long_name="edge longitudes", - var_name="lon", - units="degrees", - attributes={"test": 1}, - ) - # Should be fine with either a DimCoord or an AuxCoord. - self.lat = AuxCoord( - points=[0.5, 2.5, 1.5], - bounds=[[0, 1], [2, 3], [1, 2]], - standard_name="latitude", - long_name="edge_latitudes", - var_name="lat", - units="degrees", - attributes={"test": 1}, - ) - - def create(self): - return Mesh.from_coords(self.lon, self.lat) - - def test_dimensionality(self): - mesh = self.create() - self.assertEqual(1, mesh.topology_dimension) - - self.assertArrayEqual( - [0, 1, 1, 2, 2, 3], mesh.node_coords.node_x.points - ) - self.assertArrayEqual( - [0, 1, 2, 3, 1, 2], mesh.node_coords.node_y.points - ) - self.assertArrayEqual([0.5, 1.5, 2.5], mesh.edge_coords.edge_x.points) - self.assertArrayEqual([0.5, 2.5, 1.5], mesh.edge_coords.edge_y.points) - self.assertIsNone(getattr(mesh, "face_coords", None)) - - for conn_name in Connectivity.UGRID_CF_ROLES: - conn = getattr(mesh, conn_name, None) - if conn_name == "edge_node_connectivity": - self.assertArrayEqual([[0, 1], [2, 3], [4, 5]], conn.indices) - else: - self.assertIsNone(conn) - - def test_node_metadata(self): - mesh = self.create() - pairs = [ - (self.lon, mesh.node_coords.node_x), - (self.lat, mesh.node_coords.node_y), - ] - for expected_coord, actual_coord in pairs: - for attr in ("standard_name", "long_name", "units", "attributes"): - expected = getattr(expected_coord, attr) - actual = getattr(actual_coord, attr) - self.assertEqual(expected, actual) - 
self.assertIsNone(actual_coord.var_name) - - def test_centre_metadata(self): - mesh = self.create() - pairs = [ - (self.lon, mesh.edge_coords.edge_x), - (self.lat, mesh.edge_coords.edge_y), - ] - for expected_coord, actual_coord in pairs: - for attr in ("standard_name", "long_name", "units", "attributes"): - expected = getattr(expected_coord, attr) - actual = getattr(actual_coord, attr) - self.assertEqual(expected, actual) - self.assertIsNone(actual_coord.var_name) - - def test_mesh_metadata(self): - # Inappropriate to guess these values from the input coords. - mesh = self.create() - for attr in ( - "standard_name", - "long_name", - "var_name", - ): - self.assertIsNone(getattr(mesh, attr)) - self.assertTrue(mesh.units.is_unknown()) - self.assertDictEqual({}, mesh.attributes) - - def test_lazy(self): - self.lon = AuxCoord.from_coord(self.lon) - self.lon = self.lon.copy( - self.lon.lazy_points(), self.lon.lazy_bounds() - ) - self.lat = self.lat.copy( - self.lat.lazy_points(), self.lat.lazy_bounds() - ) - - mesh = self.create() - for coord in list(mesh.all_coords): - if coord is not None: - self.assertTrue(coord.has_lazy_points()) - for conn in list(mesh.all_connectivities): - if conn is not None: - self.assertTrue(conn.has_lazy_indices()) - - def test_coord_shape_mismatch(self): - lat_orig = self.lat.copy(self.lat.points, self.lat.bounds) - self.lat = lat_orig.copy( - points=lat_orig.points, bounds=np.tile(lat_orig.bounds, 2) - ) - with self.assertRaisesRegex( - ValueError, "bounds shapes are not identical" - ): - _ = self.create() - - self.lat = lat_orig.copy( - points=lat_orig.points[-1], bounds=lat_orig.bounds[-1] - ) - with self.assertRaisesRegex( - ValueError, "points shapes are not identical" - ): - _ = self.create() - - def test_reorder(self): - # Swap the coords. - self.lat, self.lon = self.lon, self.lat - mesh = self.create() - # Confirm that the coords have been swapped back to the 'correct' order. 
- self.assertEqual("longitude", mesh.node_coords.node_x.standard_name) - self.assertEqual("latitude", mesh.node_coords.node_y.standard_name) - - def test_non_xy(self): - for coord in self.lon, self.lat: - coord.standard_name = None - lon_name, lat_name = [ - coord.long_name for coord in (self.lon, self.lat) - ] - # Swap the coords. - self.lat, self.lon = self.lon, self.lat - with self.assertLogs(logger, "INFO", "Unable to find 'X' and 'Y'"): - mesh = self.create() - # Confirm that the coords have not been swapped back. - self.assertEqual(lat_name, mesh.node_coords.node_x.long_name) - self.assertEqual(lon_name, mesh.node_coords.node_y.long_name) - - -class Test2Dim(Test1Dim): - def setUp(self): - super().setUp() - - self.lon.bounds = [[0, 0.5, 1], [1, 1.5, 2], [2, 2.5, 3]] - self.lon.long_name = "triangle longitudes" - self.lat.bounds = [[0, 1, 0], [2, 3, 2], [1, 2, 1]] - self.lat.long_name = "triangle latitudes" - - def test_dimensionality(self): - mesh = self.create() - self.assertEqual(2, mesh.topology_dimension) - - self.assertArrayEqual( - [0, 0.5, 1, 1, 1.5, 2, 2, 2.5, 3], mesh.node_coords.node_x.points - ) - self.assertArrayEqual( - [0, 1, 0, 2, 3, 2, 1, 2, 1], mesh.node_coords.node_y.points - ) - self.assertIsNone(mesh.edge_coords.edge_x) - self.assertIsNone(mesh.edge_coords.edge_y) - self.assertArrayEqual([0.5, 1.5, 2.5], mesh.face_coords.face_x.points) - self.assertArrayEqual([0.5, 2.5, 1.5], mesh.face_coords.face_y.points) - - for conn_name in Connectivity.UGRID_CF_ROLES: - conn = getattr(mesh, conn_name, None) - if conn_name == "face_node_connectivity": - self.assertArrayEqual( - [[0, 1, 2], [3, 4, 5], [6, 7, 8]], conn.indices - ) - else: - self.assertIsNone(conn) - - def test_centre_metadata(self): - mesh = self.create() - pairs = [ - (self.lon, mesh.face_coords.face_x), - (self.lat, mesh.face_coords.face_y), - ] - for expected_coord, actual_coord in pairs: - for attr in ("standard_name", "long_name", "units", "attributes"): - expected = 
getattr(expected_coord, attr) - actual = getattr(actual_coord, attr) - self.assertEqual(expected, actual) - self.assertIsNone(actual_coord.var_name) - - def test_mixed_shapes(self): - self.lon = AuxCoord.from_coord(self.lon) - lon_bounds = np.array([[0, 0, 1, 1], [1, 1, 2, 2], [2, 3, 2.5, 999]]) - self.lon.bounds = np.ma.masked_equal(lon_bounds, 999) - - lat_bounds = np.array([[0, 1, 1, 0], [1, 2, 2, 1], [2, 2, 3, 999]]) - self.lat.bounds = np.ma.masked_equal(lat_bounds, 999) - - mesh = self.create() - self.assertArrayEqual( - mesh.face_node_connectivity.location_lengths(), [4, 4, 3] - ) - self.assertEqual(mesh.node_coords.node_x.points[-1], 0.0) - self.assertEqual(mesh.node_coords.node_y.points[-1], 0.0) - - -class TestInvalidBounds(tests.IrisTest): - """Invalid bounds not supported.""" - - def test_no_bounds(self): - lon = AuxCoord(points=[0.5, 1.5, 2.5]) - lat = AuxCoord(points=[0, 1, 2]) - with self.assertRaisesRegex(ValueError, "bounds missing from"): - _ = Mesh.from_coords(lon, lat) - - def test_1_bound(self): - lon = AuxCoord(points=[0.5, 1.5, 2.5], bounds=[[0], [1], [2]]) - lat = AuxCoord(points=[0, 1, 2], bounds=[[0.5], [1.5], [2.5]]) - with self.assertRaisesRegex( - ValueError, r"Expected coordinate bounds.shape \(n, >=2\)" - ): - _ = Mesh.from_coords(lon, lat) - - -class TestInvalidPoints(tests.IrisTest): - """Only 1D coords supported.""" - - def test_2d_coord(self): - cube = simple_2d_w_multidim_coords()[:3, :3] - coord_1, coord_2 = cube.coords() - with self.assertRaisesRegex( - ValueError, "Expected coordinate ndim == 1" - ): - _ = Mesh.from_coords(coord_1, coord_2) diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py b/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py deleted file mode 100644 index 2d2d040c1d..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental.ugrid.metadata` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py deleted file mode 100644 index af92e69b08..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_ConnectivityMetadata.py +++ /dev/null @@ -1,774 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.metadata.ConnectivityMetadata`. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import BaseMetadata -from iris.experimental.ugrid.metadata import ConnectivityMetadata - - -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.cf_role = mock.sentinel.cf_role - self.start_index = mock.sentinel.start_index - self.location_axis = mock.sentinel.location_axis - self.cls = ConnectivityMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - cf_role=self.cf_role, - start_index=self.start_index, - location_axis=self.location_axis, - ) - fmt = ( - "ConnectivityMetadata(standard_name={!r}, long_name={!r}, 
" - "var_name={!r}, units={!r}, attributes={!r}, cf_role={!r}, " - "start_index={!r}, location_axis={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.cf_role, - self.start_index, - self.location_axis, - ) - self.assertEqual(expected, repr(metadata)) - - def test__fields(self): - expected = ( - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "cf_role", - "start_index", - "location_axis", - ) - self.assertEqual(self.cls._fields, expected) - - def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) - - -class Test__eq__(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - cf_role=sentinel.cf_role, - start_index=sentinel.start_index, - location_axis=sentinel.location_axis, - ) - self.dummy = sentinel.dummy - self.cls = ConnectivityMetadata - # The "location_axis" member is stateful only, and does not participate in - # lenient/strict equivalence. 
- self.members_no_location_axis = filter( - lambda member: member != "location_axis", self.cls._members - ) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value - metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_members_none(self): - for member in self.members_no_location_axis: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_location_axis_none(self): - lmetadata = self.cls(**self.values) - right = 
self.values.copy() - right["location_axis"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_members(self): - for member in self.members_no_location_axis: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_location_axis(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["location_axis"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def 
test_op_strict_different_members(self): - for member in self.members_no_location_axis: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_location_axis(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["location_axis"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_members_none(self): - for member in self.members_no_location_axis: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_location_axis_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["location_axis"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = ConnectivityMetadata - self.one = self.cls(1, 1, 1, 1, 1, 1, 1, 1) - self.two = 
self.cls(1, 1, 1, 2, 1, 1, 1, 1) - self.none = self.cls(1, 1, 1, None, 1, 1, 1, 1) - self.attributes = self.cls(1, 1, 1, 1, 10, 1, 1, 1) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes(self): - result = self.one < self.attributes - self.assertFalse(result) - result = self.attributes < self.one - self.assertFalse(result) - - -class Test_combine(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - cf_role=sentinel.cf_role, - start_index=sentinel.start_index, - location_axis=sentinel.location_axis, - ) - self.dummy = sentinel.dummy - self.cls = ConnectivityMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) - - def test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = 
sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = right.copy() - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["units"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - 
self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - 
right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - cf_role=sentinel.cf_role, - start_index=sentinel.start_index, - location_axis=sentinel.location_axis, - ) - self.dummy = sentinel.dummy - self.cls = ConnectivityMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) - - def test_lenient_service(self): - qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), 
kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - member_value = getattr(lmetadata, member) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (member_value, None) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = (None, member_value) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = 
self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["units"] = (left["units"], right["units"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["units"] = lexpected["units"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, 
lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - 
self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - -class Test_equal(tests.IrisTest): - def setUp(self): - self.cls = ConnectivityMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py deleted file mode 100644 index 5c96fb7856..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshCoordMetadata.py +++ /dev/null @@ -1,732 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshCoordMetadata`. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import BaseMetadata -from iris.experimental.ugrid.metadata import MeshCoordMetadata - - -class Test__identity(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.location = mock.sentinel.location - self.axis = mock.sentinel.axis - self.cls = MeshCoordMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - location=self.location, - axis=self.axis, - ) - fmt = ( - "MeshCoordMetadata(standard_name={!r}, long_name={!r}, " - "var_name={!r}, units={!r}, attributes={!r}, " - "location={!r}, axis={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.location, - self.axis, - ) - self.assertEqual(expected, repr(metadata)) - - def test__fields(self): - expected = ( - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "location", - "axis", - ) - self.assertEqual(self.cls._fields, expected) - - def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) - - -class Test__eq__(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - 
var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - location=sentinel.location, - axis=sentinel.axis, - ) - self.dummy = sentinel.dummy - self.cls = MeshCoordMetadata - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value - metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_none_nonmember(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def 
test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - 
self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = MeshCoordMetadata - values = [1] * len(self.cls._fields) - self.one = self.cls(*values) - - values_two = values[:] - values_two[2] = 2 - self.two = self.cls(*values_two) - - values_none = values[:] - values_none[2] = None - self.none = self.cls(*values_none) - - values_attrs = values[:] - values_attrs[4] = 10 - self.attributes = self.cls(*values_attrs) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes(self): - result = self.one < self.attributes - self.assertFalse(result) - result = self.attributes < self.one - self.assertFalse(result) - - -class Test_combine(tests.IrisTest): - def setUp(self): - self.cls = MeshCoordMetadata - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - location=sentinel.location, - axis=sentinel.axis, - ) - self.dummy = sentinel.dummy - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) - - def 
test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = 
self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = right.copy() - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, 
lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.cls = MeshCoordMetadata - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - location=sentinel.location, - axis=sentinel.axis, - ) - self.dummy = sentinel.dummy - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def 
test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) - - def test_lenient_service(self): - qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = 
self.cls(**self.values) - member_value = getattr(lmetadata, member) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (member_value, None) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = (None, member_value) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with 
mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, 
lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - -class Test_equal(tests.IrisTest): - def setUp(self): - self.cls = MeshCoordMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, 
mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py b/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py deleted file mode 100644 index a8b25dc2e7..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/metadata/test_MeshMetadata.py +++ /dev/null @@ -1,783 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.experimental.ugrid.metadata.MeshMetadata`. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from copy import deepcopy -import unittest.mock as mock -from unittest.mock import sentinel - -from iris.common.lenient import _LENIENT, _qualname -from iris.common.metadata import BaseMetadata -from iris.experimental.ugrid.metadata import MeshMetadata - - -class Test(tests.IrisTest): - def setUp(self): - self.standard_name = mock.sentinel.standard_name - self.long_name = mock.sentinel.long_name - self.var_name = mock.sentinel.var_name - self.units = mock.sentinel.units - self.attributes = mock.sentinel.attributes - self.topology_dimension = mock.sentinel.topology_dimension - self.node_dimension = mock.sentinel.node_dimension - self.edge_dimension = mock.sentinel.edge_dimension - self.face_dimension = mock.sentinel.face_dimension - self.cls = MeshMetadata - - def test_repr(self): - metadata = self.cls( - standard_name=self.standard_name, - long_name=self.long_name, - var_name=self.var_name, - units=self.units, - attributes=self.attributes, - topology_dimension=self.topology_dimension, - node_dimension=self.node_dimension, - 
edge_dimension=self.edge_dimension, - face_dimension=self.face_dimension, - ) - fmt = ( - "MeshMetadata(standard_name={!r}, long_name={!r}, " - "var_name={!r}, units={!r}, attributes={!r}, " - "topology_dimension={!r}, node_dimension={!r}, " - "edge_dimension={!r}, face_dimension={!r})" - ) - expected = fmt.format( - self.standard_name, - self.long_name, - self.var_name, - self.units, - self.attributes, - self.topology_dimension, - self.node_dimension, - self.edge_dimension, - self.face_dimension, - ) - self.assertEqual(expected, repr(metadata)) - - def test__fields(self): - expected = ( - "standard_name", - "long_name", - "var_name", - "units", - "attributes", - "topology_dimension", - "node_dimension", - "edge_dimension", - "face_dimension", - ) - self.assertEqual(self.cls._fields, expected) - - def test_bases(self): - self.assertTrue(issubclass(self.cls, BaseMetadata)) - - -class Test__eq__(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - topology_dimension=sentinel.topology_dimension, - node_dimension=sentinel.node_dimension, - edge_dimension=sentinel.edge_dimension, - face_dimension=sentinel.face_dimension, - ) - self.dummy = sentinel.dummy - self.cls = MeshMetadata - # The "node_dimension", "edge_dimension" and "face_dimension" members - # are stateful only; they do not participate in lenient/strict equivalence. 
- self.members_dim_names = filter( - lambda member: member - in ("node_dimension", "edge_dimension", "face_dimension"), - self.cls._members, - ) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.__eq__.__doc__, self.cls.__eq__.__doc__) - - def test_lenient_service(self): - qualname___eq__ = _qualname(self.cls.__eq__) - self.assertIn(qualname___eq__, _LENIENT) - self.assertTrue(_LENIENT[qualname___eq__]) - self.assertTrue(_LENIENT[self.cls.__eq__]) - - def test_call(self): - other = sentinel.other - return_value = sentinel.return_value - metadata = self.cls(*(None,) * len(self.cls._fields)) - with mock.patch.object( - BaseMetadata, "__eq__", return_value=return_value - ) as mocker: - result = metadata.__eq__(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_topology_dim_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["topology_dimension"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_same_dim_names_none(self): - for member in self.members_dim_names: - 
lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_topology_dim(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["topology_dimension"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_lenient_different_dim_names(self): - for member in self.members_dim_names: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - 
self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_topology_dim(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["topology_dimension"] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_dim_names(self): - for member in self.members_dim_names: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_topology_dim_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["topology_dimension"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertFalse(lmetadata.__eq__(rmetadata)) - self.assertFalse(rmetadata.__eq__(lmetadata)) - - def test_op_strict_different_dim_names_none(self): - for member in self.members_dim_names: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertTrue(lmetadata.__eq__(rmetadata)) - self.assertTrue(rmetadata.__eq__(lmetadata)) - - -class Test___lt__(tests.IrisTest): - def setUp(self): - self.cls = MeshMetadata - self.one = 
self.cls(1, 1, 1, 1, 1, 1, 1, 1, 1) - self.two = self.cls(1, 1, 1, 2, 1, 1, 1, 1, 1) - self.none = self.cls(1, 1, 1, None, 1, 1, 1, 1, 1) - self.attributes = self.cls(1, 1, 1, 1, 10, 1, 1, 1, 1) - - def test__ascending_lt(self): - result = self.one < self.two - self.assertTrue(result) - - def test__descending_lt(self): - result = self.two < self.one - self.assertFalse(result) - - def test__none_rhs_operand(self): - result = self.one < self.none - self.assertFalse(result) - - def test__none_lhs_operand(self): - result = self.none < self.one - self.assertTrue(result) - - def test__ignore_attributes(self): - result = self.one < self.attributes - self.assertFalse(result) - result = self.attributes < self.one - self.assertFalse(result) - - -class Test_combine(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - topology_dimension=sentinel.topology_dimension, - node_dimension=sentinel.node_dimension, - edge_dimension=sentinel.edge_dimension, - face_dimension=sentinel.face_dimension, - ) - self.dummy = sentinel.dummy - self.cls = MeshMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.combine.__doc__, self.cls.combine.__doc__ - ) - - def test_lenient_service(self): - qualname_combine = _qualname(self.cls.combine) - self.assertIn(qualname_combine, _LENIENT) - self.assertTrue(_LENIENT[qualname_combine]) - self.assertTrue(_LENIENT[self.cls.combine]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - 
self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "combine", return_value=return_value - ) as mocker: - result = self.none.combine(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - expected = self.values - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = right.copy() - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["units"] = None - - with 
mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - expected = self.values.copy() - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() 
- ) - - def test_op_strict_different_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected["long_name"] = None - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual(expected, lmetadata.combine(rmetadata)._asdict()) - self.assertEqual(expected, rmetadata.combine(lmetadata)._asdict()) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - expected = self.values.copy() - expected[member] = None - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - expected, lmetadata.combine(rmetadata)._asdict() - ) - self.assertEqual( - expected, rmetadata.combine(lmetadata)._asdict() - ) - - -class Test_difference(tests.IrisTest): - def setUp(self): - self.values = dict( - standard_name=sentinel.standard_name, - long_name=sentinel.long_name, - var_name=sentinel.var_name, - units=sentinel.units, - attributes=sentinel.attributes, - topology_dimension=sentinel.topology_dimension, - node_dimension=sentinel.node_dimension, - edge_dimension=sentinel.edge_dimension, - face_dimension=sentinel.face_dimension, - ) - self.dummy = sentinel.dummy - self.cls = MeshMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual( - BaseMetadata.difference.__doc__, self.cls.difference.__doc__ - ) - - def test_lenient_service(self): - qualname_difference = _qualname(self.cls.difference) - self.assertIn(qualname_difference, _LENIENT) - self.assertTrue(_LENIENT[qualname_difference]) - self.assertTrue(_LENIENT[self.cls.difference]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, 
"difference", return_value=return_value - ) as mocker: - result = self.none.difference(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "difference", return_value=return_value - ) as mocker: - result = self.none.difference(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - def test_op_lenient_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_none(self): - lmetadata = self.cls(**self.values) - right = self.values.copy() - right["var_name"] = None - rmetadata = self.cls(**right) - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_lenient_same_members_none(self): - for member in self.cls._members: - lmetadata = self.cls(**self.values) - member_value = getattr(lmetadata, member) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (member_value, None) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = (None, member_value) - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - 
self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["units"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["units"] = (left["units"], right["units"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["units"] = lexpected["units"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=True): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_lenient_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=True - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_same(self): - lmetadata = self.cls(**self.values) - rmetadata = self.cls(**self.values) - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertIsNone(lmetadata.difference(rmetadata)) - self.assertIsNone(rmetadata.difference(lmetadata)) - - def test_op_strict_different(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - 
rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = self.dummy - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_none(self): - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right["long_name"] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected["long_name"] = (left["long_name"], right["long_name"]) - rexpected = deepcopy(self.none)._asdict() - rexpected["long_name"] = lexpected["long_name"][::-1] - - with mock.patch("iris.common.metadata._LENIENT", return_value=False): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - def test_op_strict_different_members_none(self): - for member in self.cls._members: - left = self.values.copy() - lmetadata = self.cls(**left) - right = self.values.copy() - right[member] = None - rmetadata = self.cls(**right) - lexpected = deepcopy(self.none)._asdict() - lexpected[member] = (left[member], right[member]) - rexpected = deepcopy(self.none)._asdict() - rexpected[member] = lexpected[member][::-1] - - 
with mock.patch( - "iris.common.metadata._LENIENT", return_value=False - ): - self.assertEqual( - lexpected, lmetadata.difference(rmetadata)._asdict() - ) - self.assertEqual( - rexpected, rmetadata.difference(lmetadata)._asdict() - ) - - -class Test_equal(tests.IrisTest): - def setUp(self): - self.cls = MeshMetadata - self.none = self.cls(*(None,) * len(self.cls._fields)) - - def test_wraps_docstring(self): - self.assertEqual(BaseMetadata.equal.__doc__, self.cls.equal.__doc__) - - def test_lenient_service(self): - qualname_equal = _qualname(self.cls.equal) - self.assertIn(qualname_equal, _LENIENT) - self.assertTrue(_LENIENT[qualname_equal]) - self.assertTrue(_LENIENT[self.cls.equal]) - - def test_lenient_default(self): - other = sentinel.other - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=None), kwargs) - - def test_lenient(self): - other = sentinel.other - lenient = sentinel.lenient - return_value = sentinel.return_value - with mock.patch.object( - BaseMetadata, "equal", return_value=return_value - ) as mocker: - result = self.none.equal(other, lenient=lenient) - - self.assertEqual(return_value, result) - self.assertEqual(1, mocker.call_count) - (arg,), kwargs = mocker.call_args - self.assertEqual(other, arg) - self.assertEqual(dict(lenient=lenient), kwargs) - - if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py b/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py deleted file mode 100644 index 135d7ee49c..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/utils/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL 
license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.experimental.ugrid.utils` package.""" diff --git a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py b/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py deleted file mode 100644 index 4face700ad..0000000000 --- a/lib/iris/tests/unit/experimental/ugrid/utils/test_recombine_submeshes.py +++ /dev/null @@ -1,437 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.experimental.ugrid.utils.recombine_submeshes`. - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import dask.array as da -import numpy as np - -from iris.coords import AuxCoord -from iris.cube import CubeList -from iris.experimental.ugrid.utils import recombine_submeshes -from iris.tests.stock.mesh import sample_mesh, sample_mesh_cube - - -def common_test_setup(self, shape_3d=(0, 2), data_chunks=None): - # Construct a basic testcase with all-lazy mesh_cube and submesh_cubes - # full-mesh cube shape is 'shape_3d' - # data_chunks sets chunking of source cube, (else all-1-chunk) - n_outer, n_z = shape_3d - n_mesh = 20 - mesh = sample_mesh(n_nodes=20, n_edges=0, n_faces=n_mesh) - mesh_cube = sample_mesh_cube(n_z=n_z, mesh=mesh) - # Fix index-coord name to the expected default for recombine_submeshes. - mesh_cube.coord("i_mesh_face").rename("i_mesh_index") - if n_outer: - # Crudely merge a set of copies to build an outer dimension. 
- mesh_cube.add_aux_coord(AuxCoord([0], long_name="outer")) - meshcubes_2d = [] - for i_outer in range(n_outer): - cube = mesh_cube.copy() - cube.coord("outer").points = np.array([i_outer]) - meshcubes_2d.append(cube) - mesh_cube = CubeList(meshcubes_2d).merge_cube() - - if not data_chunks: - data_chunks = mesh_cube.shape[:-1] + (-1,) - mesh_cube.data = da.zeros(mesh_cube.shape, chunks=data_chunks) - - n_regions = 4 # it doesn't divide neatly - region_len = n_mesh // n_regions - i_points = np.arange(n_mesh) - region_inds = [ - np.where((i_points // region_len) == i_region) - for i_region in range(n_regions) - ] - # Disturb slightly to ensure some gaps + some overlaps - region_inds = [list(indarr[0]) for indarr in region_inds] - region_inds[2] = region_inds[2][:-2] # missing points - region_inds[3] += region_inds[1][:2] # duplicates - self.mesh_cube = mesh_cube - self.region_inds = region_inds - self.region_cubes = [mesh_cube[..., inds] for inds in region_inds] - for i_cube, cube in enumerate(self.region_cubes): - for i_z in range(n_z): - # Set data='z' ; don't vary over other dimensions. - cube.data[..., i_z, :] = i_cube + 1000 * i_z + 1 - cube.data = cube.lazy_data() - - # Also construct an array to match the expected result (2d cases only). - # basic layer showing region allocation (large -ve values for missing) - expected = np.array( - [1.0, 1, 1, 1, 1] - + [4, 4] # points in #1 overlapped by #3 - + [2, 2, 2] - + [3, 3, 3] - + [-99999, -99999] # missing points - + [4, 4, 4, 4, 4] - ) - # second layer should be same but +1000. - # NOTE: only correct if shape_3d=None; no current need to generalise this. - expected = np.stack([expected, expected + 1000]) - # convert to masked array with missing points. 
- expected = np.ma.masked_less(expected, 0) - self.expected_result = expected - - -class TestRecombine__data(tests.IrisTest): - def setUp(self): - common_test_setup(self) - - def test_basic(self): - # Just confirm that all source data is lazy (by default) - for cube in self.region_cubes + [self.mesh_cube]: - self.assertTrue(cube.has_lazy_data()) - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertTrue(result.has_lazy_data()) - self.assertMaskedArrayEqual(result.data, self.expected_result) - - def test_chunking(self): - # Make non-standard testcube with higher dimensions + specific chunking - common_test_setup(self, shape_3d=(10, 3), data_chunks=(3, 2, -1)) - self.assertEqual(self.mesh_cube.lazy_data().chunksize, (3, 2, 20)) - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - # Check that the result chunking matches the input. - self.assertEqual(result.lazy_data().chunksize, (3, 2, 20)) - - def test_single_region(self): - region = self.region_cubes[1] - result = recombine_submeshes(self.mesh_cube, [region]) - # Construct a snapshot of the expected result. - # basic layer showing region allocation (large -ve values for missing) - expected = np.ma.masked_array(np.zeros(self.mesh_cube.shape), True) - inds = region.coord("i_mesh_index").points - expected[..., inds] = region.data - self.assertMaskedArrayEqual(result.data, expected) - - def test_region_overlaps(self): - # generate two identical regions with different values. - region1 = self.region_cubes[2] - region1.data[:] = 101.0 - inds = region1.coord("i_mesh_index").points - region2 = region1.copy() - region2.data[:] = 202.0 - # check that result values all come from the second. - result1 = recombine_submeshes(self.mesh_cube, [region1, region2]) - result1 = result1[..., inds].data - self.assertArrayEqual(result1, 202.0) - # swap the region order, and it should resolve the other way. 
- result2 = recombine_submeshes(self.mesh_cube, [region2, region1]) - result2 = result2[..., inds].data - self.assertArrayEqual(result2, 101.0) - - def test_missing_points(self): - # check results with and without a specific region included. - region2 = self.region_cubes[2] - inds = region2.coord("i_mesh_index").points - # With all regions, no points in reg1 are masked - result_all = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertTrue(np.all(~result_all[..., inds].data.mask)) - # Without region1, all points in reg1 are masked - regions_not2 = [ - cube for cube in self.region_cubes if cube is not region2 - ] - result_not2 = recombine_submeshes(self.mesh_cube, regions_not2) - self.assertTrue(np.all(result_not2[..., inds].data.mask)) - - def test_transposed(self): - # Check function when mesh-dim is NOT the last dim. - self.mesh_cube.transpose() - self.assertEqual(self.mesh_cube.mesh_dim(), 0) - for cube in self.region_cubes: - cube.transpose() - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertTrue(result.has_lazy_data()) - self.assertEqual(result.mesh_dim(), 0) - self.assertMaskedArrayEqual( - result.data.transpose(), self.expected_result - ) - - def test_dtype(self): - # Check that result dtype comes from submeshes, not mesh_cube. - self.assertEqual(self.mesh_cube.dtype, np.float64) - self.assertTrue( - all(cube.dtype == np.float64 for cube in self.region_cubes) - ) - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertEqual(result.dtype, np.float64) - region_cubes2 = [ - cube.copy(data=cube.lazy_data().astype(np.int16)) - for cube in self.region_cubes - ] - result2 = recombine_submeshes(self.mesh_cube, region_cubes2) - self.assertEqual(result2.dtype, np.int16) - - def test_meshcube_real(self): - # Real data in reference 'mesh_cube' makes no difference. 
- self.mesh_cube.data - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertTrue(result.has_lazy_data()) - self.assertMaskedArrayEqual(result.data, self.expected_result) - - def test_regions_real(self): - # Real data in submesh cubes makes no difference. - for cube in self.region_cubes: - cube.data - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertTrue(result.has_lazy_data()) - self.assertMaskedArrayEqual(result.data, self.expected_result) - - def test_allinput_real(self): - # Real data in reference AND regions still makes no difference. - self.mesh_cube.data - for cube in self.region_cubes: - cube.data - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertTrue(result.has_lazy_data()) - self.assertMaskedArrayEqual(result.data, self.expected_result) - - def test_meshcube_masking(self): - # Masked points in the reference 'mesh_cube' should make no difference. - # get real data : copy as default is not writeable - data = self.mesh_cube.data.copy() - # mask all - data[:] = np.ma.masked # all masked - # put back - self.mesh_cube.data = data # put back real array - # recast as lazy - self.mesh_cube.data = self.mesh_cube.lazy_data() # remake as lazy - # result should show no difference - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertMaskedArrayEqual(result.data, self.expected_result) - - def test_no_missing_results(self): - # For a result with no missing points, result array is still masked - # get real data : copy as default is not writeable - data = self.mesh_cube.data.copy() - # set all - data[:] = 7.777 - # put back - self.mesh_cube.data = data # put back real array - # recast as lazy - self.mesh_cube.data = self.mesh_cube.lazy_data() # remake as lazy - - # get result including original full-mesh - region_cubes = [self.mesh_cube] + self.region_cubes - result = recombine_submeshes(self.mesh_cube, region_cubes) - result = result.data - # result is as "normal" 
expected, except at the usually-missing points. - expected = self.expected_result - expected[expected.mask] = 7.777 - self.assertArrayEqual(result, expected) - # the actual result array is still masked, though with no masked points - self.assertIsInstance(result, np.ma.MaskedArray) - self.assertIsInstance(result.mask, np.ndarray) - self.assertArrayEqual(result.mask, False) - - def test_maskeddata(self): - # Check that masked points within regions behave like ordinary values. - # NB use overlap points - # reg[1][0:2] == reg[3][5:7], but points in reg[3] dominate - for cube in self.region_cubes: - cube.data = np.ma.masked_array(cube.data) # ensure masked arrays - self.region_cubes[0].data[:, 0] = np.ma.masked # result-index =5 - self.region_cubes[1].data[:, 0] = np.ma.masked # result-index =5 - self.region_cubes[3].data[:, 6] = np.ma.masked # result-index =6 - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - result = result.data - expected = self.expected_result - expected[:, 0] = np.ma.masked - expected[:, 6] = np.ma.masked - self.assertArrayEqual(result.mask, expected.mask) - - def test_nandata(self): - # Check that NaN points within regions behave like ordinary values. 
- # Duplicate of previous test, replacing masks with NaNs - self.region_cubes[0].data[:, 0] = np.nan - self.region_cubes[1].data[:, 0] = np.nan - self.region_cubes[3].data[:, 6] = np.nan - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - result = result.data - expected = self.expected_result - expected[:, 0] = np.nan - expected[:, 6] = np.nan - self.assertArrayEqual(np.isnan(result), np.isnan(expected)) - - -class TestRecombine__api(tests.IrisTest): - def setUp(self): - common_test_setup(self) - - def test_fail_no_mesh(self): - self.mesh_cube = self.mesh_cube[..., 0:] - with self.assertRaisesRegex(ValueError, 'mesh_cube.*has no ".mesh"'): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_single_region(self): - # Check that a single region-cube can replace a list. - single_region = self.region_cubes[0] - result1 = recombine_submeshes(self.mesh_cube, single_region) - result2 = recombine_submeshes(self.mesh_cube, [single_region]) - self.assertEqual(result1, result2) - - def test_fail_no_regions(self): - with self.assertRaisesRegex( - ValueError, "'submesh_cubes' must be non-empty" - ): - recombine_submeshes(self.mesh_cube, []) - - def test_fail_dims_mismatch_mesh_regions(self): - self.mesh_cube = self.mesh_cube[0] - with self.assertRaisesRegex( - ValueError, "Submesh cube.*has 2 dimensions, but 'mesh_cube' has 1" - ): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_dims_mismatch_region_regions(self): - self.region_cubes[1] = self.region_cubes[1][1] - with self.assertRaisesRegex( - ValueError, "Submesh cube.*has 1 dimensions, but 'mesh_cube' has 2" - ): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_metdata_mismatch_region_regions(self): - reg_cube = self.region_cubes[1] - modded_cube = reg_cube.copy() - modded_cube.long_name = "qq" - self.region_cubes[1] = modded_cube - msg = ( - 'Submesh cube #2/4, "qq" has metadata.*long_name=qq.*' - "does not match that of the other 
region_cubes,.*" - "long_name=mesh_phenom" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - # Also check units - modded_cube = reg_cube.copy() - modded_cube.units = "m" - self.region_cubes[1] = modded_cube - msg = ( - "metadata.*units=m.*" - "does not match that of the other region_cubes,.*" - "units=unknown" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - # Also check attributes - modded_cube = reg_cube.copy() - modded_cube.attributes["tag"] = "x" - self.region_cubes[1] = modded_cube - msg = ( - "units=unknown, attributes={'tag': 'x'}, cell_methods=.*" - "does not match that of the other region_cubes,.*" - "units=unknown, cell_methods=" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_dtype_mismatch_region_regions(self): - reg_cube = self.region_cubes[1] - reg_cube.data = reg_cube.data.astype(np.int16) - msg = ( - "Submesh cube #2/4.*has a dtype of int16, " - "which does not match that of the other region_cubes, " - "which is float64" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_dimcoord_sub_no_mesh(self): - self.mesh_cube.remove_coord("level") - msg = ( - 'has a dim-coord "level" for dimension 0, ' - "but 'mesh_cube' has none." 
- ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_dimcoord_mesh_no_sub(self): - self.region_cubes[2].remove_coord("level") - msg = ( - "has no dim-coord for dimension 0, " - "to match the 'mesh_cube' dimension \"level\"" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_dimcoord_mesh_sub_differ(self): - dimco = self.mesh_cube.coord("level") - dimco.points = dimco.points[::-1] - msg = ( - 'has a dim-coord "level" for dimension 0, ' - "which does not match that of 'mesh_cube', \"level\"" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_index_coordname(self): - # Check that we can use different index coord names. - for cube in self.region_cubes: - cube.coord("i_mesh_index").rename("ii") - result = recombine_submeshes( - self.mesh_cube, self.region_cubes, index_coord_name="ii" - ) - self.assertArrayEqual(result.data, self.expected_result) - - def test_fail_bad_indexcoord_name(self): - self.region_cubes[2].coord("i_mesh_index").rename("ii") - msg = ( - 'Submesh cube #3/4, "mesh_phenom" has no "i_mesh_index" coord ' - r"on the mesh dimension \(dimension 1\)." - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_missing_indexcoord(self): - self.region_cubes[1].remove_coord("i_mesh_index") - msg = ( - 'Submesh cube #2/4, "mesh_phenom" has no "i_mesh_index" coord ' - r"on the mesh dimension \(dimension 1\)." - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_no_mesh_indexcoord(self): - # It is ok for the mesh-cube to NOT have an index-coord. 
- self.mesh_cube.remove_coord("i_mesh_index") - result = recombine_submeshes(self.mesh_cube, self.region_cubes) - self.assertArrayEqual(result.data, self.expected_result) - - def test_fail_indexcoord_mismatch_mesh_region(self): - self.mesh_cube.coord("i_mesh_index").units = "m" - msg = ( - 'Submesh cube #1/4, "mesh_phenom" has an index coord ' - '"i_mesh_index" whose ".metadata" does not match that of ' - "the same name in 'mesh_cube'" - ".*units=1.* != .*units=m" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - def test_fail_indexcoord_mismatch_region_region(self): - self.mesh_cube.remove_coord("i_mesh_index") - self.region_cubes[2].coord("i_mesh_index").attributes["x"] = 3 - msg = ( - 'Submesh cube #3/4, "mesh_phenom" has an index coord ' - '"i_mesh_index" whose ".metadata" does not match ' - "that of the other submesh-cubes" - ".*units=1, attributes={'x': 3}, climatological.*" - " != .*units=1, climatological" - ) - with self.assertRaisesRegex(ValueError, msg): - recombine_submeshes(self.mesh_cube, self.region_cubes) - - -if __name__ == "__main__": - # Make it runnable in its own right. - tests.main() diff --git a/lib/iris/tests/unit/fileformats/__init__.py b/lib/iris/tests/unit/fileformats/__init__.py deleted file mode 100644 index fa31283c87..0000000000 --- a/lib/iris/tests/unit/fileformats/__init__.py +++ /dev/null @@ -1,69 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.fileformats` package.""" - -import iris.tests as tests # isort:skip - - -class TestField(tests.IrisTest): - def _test_for_coord( - self, field, convert, coord_predicate, expected_points, expected_bounds - ): - ( - factories, - references, - standard_name, - long_name, - units, - attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) = convert(field) - - # Check for one and only one matching coordinate. - coords_and_dims = dim_coords_and_dims + aux_coords_and_dims - matching_coords = [ - coord for coord, _ in coords_and_dims if coord_predicate(coord) - ] - self.assertEqual(len(matching_coords), 1, str(matching_coords)) - coord = matching_coords[0] - - # Check points and bounds. - if expected_points is not None: - self.assertArrayEqual(coord.points, expected_points) - - if expected_bounds is None: - self.assertIsNone(coord.bounds) - else: - self.assertArrayEqual(coord.bounds, expected_bounds) - - def assertCoordsAndDimsListsMatch( - self, coords_and_dims_got, coords_and_dims_expected - ): - """ - Check that coords_and_dims lists are equivalent. - - The arguments are lists of pairs of (coordinate, dimensions). - The elements are compared one-to-one, by coordinate name (so the order - of the lists is _not_ significant). - It also checks that the coordinate types (DimCoord/AuxCoord) match. - - """ - - def sorted_by_coordname(list): - return sorted(list, key=lambda item: item[0].name()) - - coords_and_dims_got = sorted_by_coordname(coords_and_dims_got) - coords_and_dims_expected = sorted_by_coordname( - coords_and_dims_expected - ) - self.assertEqual(coords_and_dims_got, coords_and_dims_expected) - # Also check coordinate type equivalences (as Coord.__eq__ does not). 
- self.assertEqual( - [type(coord) for coord, dims in coords_and_dims_got], - [type(coord) for coord, dims in coords_and_dims_expected], - ) diff --git a/lib/iris/tests/unit/fileformats/abf/__init__.py b/lib/iris/tests/unit/fileformats/abf/__init__.py deleted file mode 100644 index aaddf427c5..0000000000 --- a/lib/iris/tests/unit/fileformats/abf/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.fileformats.abf` module.""" diff --git a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py b/lib/iris/tests/unit/fileformats/abf/test_ABFField.py deleted file mode 100644 index 98db52d3e9..0000000000 --- a/lib/iris/tests/unit/fileformats/abf/test_ABFField.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.abf.ABFField` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats.abf import ABFField - - -class MethodCounter: - def __init__(self, method_name): - self.method_name = method_name - self.count = 0 - - def __enter__(self): - self.orig_method = getattr(ABFField, self.method_name) - - def new_method(*args, **kwargs): - self.count += 1 - self.orig_method(*args, **kwargs) - - setattr(ABFField, self.method_name, new_method) - return self - - def __exit__(self, exc_type, exc_value, traceback): - setattr(ABFField, self.method_name, self.orig_method) - return False - - -class Test_data(tests.IrisTest): - def test_single_read(self): - path = "0000000000000000jan00000" - field = ABFField(path) - - with mock.patch("iris.fileformats.abf.np.fromfile") as fromfile: - with MethodCounter("__getattr__") as getattr: - with MethodCounter("_read") as read: - field.data - - fromfile.assert_called_once_with(path, dtype=">u1") - self.assertEqual(getattr.count, 1) - self.assertEqual(read.count, 1) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/cf/__init__.py b/lib/iris/tests/unit/fileformats/cf/__init__.py deleted file mode 100644 index 1bff79368b..0000000000 --- a/lib/iris/tests/unit/fileformats/cf/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.fileformats.cf` module.""" diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py b/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py deleted file mode 100644 index bfc2d586ef..0000000000 --- a/lib/iris/tests/unit/fileformats/cf/test_CFGroup.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.fileformats.cf.CFGroup` class.""" - -from unittest.mock import MagicMock - -from iris.fileformats.cf import ( - CFAuxiliaryCoordinateVariable, - CFCoordinateVariable, - CFDataVariable, - CFGroup, -) - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - - -class Tests(tests.IrisTest): - # TODO: unit tests for existing functionality pre 2021-03-11. - def setUp(self): - self.cf_group = CFGroup() - - def test_non_data_names(self): - data_var = MagicMock(spec=CFDataVariable, cf_name="data_var") - aux_var = MagicMock( - spec=CFAuxiliaryCoordinateVariable, cf_name="aux_var" - ) - coord_var = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var") - coord_var2 = MagicMock(spec=CFCoordinateVariable, cf_name="coord_var2") - duplicate_name_var = MagicMock( - spec=CFCoordinateVariable, cf_name="aux_var" - ) - - for var in ( - data_var, - aux_var, - coord_var, - coord_var2, - duplicate_name_var, - ): - self.cf_group[var.cf_name] = var - - expected_names = [ - var.cf_name for var in (aux_var, coord_var, coord_var2) - ] - expected = set(expected_names) - self.assertEqual(expected, self.cf_group.non_data_variable_names) diff --git a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py b/lib/iris/tests/unit/fileformats/cf/test_CFReader.py deleted file mode 100644 index 70173bb2ac..0000000000 --- a/lib/iris/tests/unit/fileformats/cf/test_CFReader.py +++ /dev/null @@ -1,356 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the `iris.fileformats.cf.CFReader` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.fileformats.cf import CFReader - - -def netcdf_variable( - name, - dimensions, - dtype, - ancillary_variables=None, - coordinates="", - bounds=None, - climatology=None, - formula_terms=None, - grid_mapping=None, - cell_measures=None, - standard_name=None, -): - """Return a mock NetCDF4 variable.""" - ndim = 0 - if dimensions is not None: - dimensions = dimensions.split() - ndim = len(dimensions) - else: - dimensions = [] - ncvar = mock.Mock( - name=name, - dimensions=dimensions, - ncattrs=mock.Mock(return_value=[]), - ndim=ndim, - dtype=dtype, - ancillary_variables=ancillary_variables, - coordinates=coordinates, - bounds=bounds, - climatology=climatology, - formula_terms=formula_terms, - grid_mapping=grid_mapping, - cell_measures=cell_measures, - standard_name=standard_name, - ) - return ncvar - - -class Test_translate__global_attributes(tests.IrisTest): - def setUp(self): - ncvar = netcdf_variable("ncvar", "height", np.float64) - ncattrs = mock.Mock(return_value=["dimensions"]) - getncattr = mock.Mock(return_value="something something_else") - self.dataset = mock.Mock( - file_format="NetCDF4", - variables={"ncvar": ncvar}, - ncattrs=ncattrs, - getncattr=getncattr, - ) - - def test_create_global_attributes(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): - global_attrs = CFReader("dummy").cf_group.global_attributes - self.assertEqual( - global_attrs["dimensions"], "something something_else" - ) - - -class Test_translate__formula_terms(tests.IrisTest): - def setUp(self): - self.delta = netcdf_variable( - "delta", "height", np.float64, bounds="delta_bnds" - ) - self.delta_bnds = netcdf_variable( - "delta_bnds", "height bnds", np.float - ) - self.sigma = netcdf_variable( - "sigma", "height", np.float64, bounds="sigma_bnds" - ) - self.sigma_bnds = netcdf_variable( - "sigma_bnds", "height bnds", np.float - ) - self.orography = 
netcdf_variable("orography", "lat lon", np.float64) - formula_terms = "a: delta b: sigma orog: orography" - standard_name = "atmosphere_hybrid_height_coordinate" - self.height = netcdf_variable( - "height", - "height", - np.float64, - formula_terms=formula_terms, - bounds="height_bnds", - standard_name=standard_name, - ) - # Over-specify the formula terms on the bounds variable, - # which will be ignored by the cf loader. - formula_terms = "a: delta_bnds b: sigma_bnds orog: orography" - self.height_bnds = netcdf_variable( - "height_bnds", - "height bnds", - np.float64, - formula_terms=formula_terms, - ) - self.lat = netcdf_variable("lat", "lat", np.float64) - self.lon = netcdf_variable("lon", "lon", np.float64) - # Note that, only lat and lon are explicitly associated as coordinates. - self.temp = netcdf_variable( - "temp", "height lat lon", np.float64, coordinates="lat lon" - ) - - self.variables = dict( - delta=self.delta, - sigma=self.sigma, - orography=self.orography, - height=self.height, - lat=self.lat, - lon=self.lon, - temp=self.temp, - delta_bnds=self.delta_bnds, - sigma_bnds=self.sigma_bnds, - height_bnds=self.height_bnds, - ) - ncattrs = mock.Mock(return_value=[]) - self.dataset = mock.Mock( - file_format="NetCDF4", variables=self.variables, ncattrs=ncattrs - ) - # Restrict the CFReader functionality to only performing translations. - build_patch = mock.patch( - "iris.fileformats.cf.CFReader._build_cf_groups" - ) - reset_patch = mock.patch("iris.fileformats.cf.CFReader._reset") - build_patch.start() - reset_patch.start() - self.addCleanup(build_patch.stop) - self.addCleanup(reset_patch.stop) - - def test_create_formula_terms(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): - cf_group = CFReader("dummy").cf_group - self.assertEqual(len(cf_group), len(self.variables)) - # Check there is a singular data variable. 
- group = cf_group.data_variables - self.assertEqual(len(group), 1) - self.assertEqual(list(group.keys()), ["temp"]) - self.assertIs(group["temp"].cf_data, self.temp) - # Check there are three coordinates. - group = cf_group.coordinates - self.assertEqual(len(group), 3) - coordinates = ["height", "lat", "lon"] - self.assertEqual(set(group.keys()), set(coordinates)) - for name in coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check there are three auxiliary coordinates. - group = cf_group.auxiliary_coordinates - self.assertEqual(len(group), 3) - aux_coordinates = ["delta", "sigma", "orography"] - self.assertEqual(set(group.keys()), set(aux_coordinates)) - for name in aux_coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check all the auxiliary coordinates are formula terms. - formula_terms = cf_group.formula_terms - self.assertEqual(set(group.items()), set(formula_terms.items())) - # Check there are three bounds. - group = cf_group.bounds - self.assertEqual(len(group), 3) - bounds = ["height_bnds", "delta_bnds", "sigma_bnds"] - self.assertEqual(set(group.keys()), set(bounds)) - for name in bounds: - self.assertEqual(group[name].cf_data, getattr(self, name)) - - -class Test_build_cf_groups__formula_terms(tests.IrisTest): - def setUp(self): - self.delta = netcdf_variable( - "delta", "height", np.float64, bounds="delta_bnds" - ) - self.delta_bnds = netcdf_variable( - "delta_bnds", "height bnds", np.float - ) - self.sigma = netcdf_variable( - "sigma", "height", np.float64, bounds="sigma_bnds" - ) - self.sigma_bnds = netcdf_variable( - "sigma_bnds", "height bnds", np.float - ) - self.orography = netcdf_variable("orography", "lat lon", np.float64) - formula_terms = "a: delta b: sigma orog: orography" - standard_name = "atmosphere_hybrid_height_coordinate" - self.height = netcdf_variable( - "height", - "height", - np.float64, - formula_terms=formula_terms, - bounds="height_bnds", - standard_name=standard_name, - ) - # 
Over-specify the formula terms on the bounds variable, - # which will be ignored by the cf loader. - formula_terms = "a: delta_bnds b: sigma_bnds orog: orography" - self.height_bnds = netcdf_variable( - "height_bnds", - "height bnds", - np.float64, - formula_terms=formula_terms, - ) - self.lat = netcdf_variable("lat", "lat", np.float64) - self.lon = netcdf_variable("lon", "lon", np.float64) - self.x = netcdf_variable("x", "lat lon", np.float64) - self.y = netcdf_variable("y", "lat lon", np.float64) - # Note that, only lat and lon are explicitly associated as coordinates. - self.temp = netcdf_variable( - "temp", "height lat lon", np.float64, coordinates="x y" - ) - - self.variables = dict( - delta=self.delta, - sigma=self.sigma, - orography=self.orography, - height=self.height, - lat=self.lat, - lon=self.lon, - temp=self.temp, - delta_bnds=self.delta_bnds, - sigma_bnds=self.sigma_bnds, - height_bnds=self.height_bnds, - x=self.x, - y=self.y, - ) - ncattrs = mock.Mock(return_value=[]) - self.dataset = mock.Mock( - file_format="NetCDF4", variables=self.variables, ncattrs=ncattrs - ) - # Restrict the CFReader functionality to only performing translations - # and building first level cf-groups for variables. - patcher = mock.patch("iris.fileformats.cf.CFReader._reset") - patcher.start() - self.addCleanup(patcher.stop) - - def test_associate_formula_terms_with_data_variable(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): - cf_group = CFReader("dummy").cf_group - self.assertEqual(len(cf_group), len(self.variables)) - # Check the cf-group associated with the data variable. - temp_cf_group = cf_group["temp"].cf_group - # Check the data variable is associated with eight variables. - self.assertEqual(len(temp_cf_group), 8) - # Check there are three coordinates. 
- group = temp_cf_group.coordinates - self.assertEqual(len(group), 3) - coordinates = ["height", "lat", "lon"] - self.assertEqual(set(group.keys()), set(coordinates)) - for name in coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check the height coordinate is bounded. - group = group["height"].cf_group - self.assertEqual(len(group.bounds), 1) - self.assertIn("height_bnds", group.bounds) - self.assertIs(group["height_bnds"].cf_data, self.height_bnds) - # Check there are five auxiliary coordinates. - group = temp_cf_group.auxiliary_coordinates - self.assertEqual(len(group), 5) - aux_coordinates = ["delta", "sigma", "orography", "x", "y"] - self.assertEqual(set(group.keys()), set(aux_coordinates)) - for name in aux_coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - # Check all the auxiliary coordinates are formula terms. - formula_terms = cf_group.formula_terms - self.assertTrue( - set(formula_terms.items()).issubset(list(group.items())) - ) - # Check the terms by root. - for name, term in zip(aux_coordinates, ["a", "b", "orog"]): - self.assertEqual( - formula_terms[name].cf_terms_by_root, dict(height=term) - ) - # Check the bounded auxiliary coordinates. - for name, name_bnds in zip( - ["delta", "sigma"], ["delta_bnds", "sigma_bnds"] - ): - aux_coord_group = group[name].cf_group - self.assertEqual(len(aux_coord_group.bounds), 1) - self.assertIn(name_bnds, aux_coord_group.bounds) - self.assertIs( - aux_coord_group[name_bnds].cf_data, - getattr(self, name_bnds), - ) - - def test_promote_reference(self): - with mock.patch("netCDF4.Dataset", return_value=self.dataset): - cf_group = CFReader("dummy").cf_group - self.assertEqual(len(cf_group), len(self.variables)) - # Check the number of data variables. - self.assertEqual(len(cf_group.data_variables), 1) - self.assertEqual(list(cf_group.data_variables.keys()), ["temp"]) - # Check the number of promoted variables. 
- self.assertEqual(len(cf_group.promoted), 1) - self.assertEqual(list(cf_group.promoted.keys()), ["orography"]) - # Check the promoted variable dependencies. - group = cf_group.promoted["orography"].cf_group.coordinates - self.assertEqual(len(group), 2) - coordinates = ("lat", "lon") - self.assertEqual(set(group.keys()), set(coordinates)) - for name in coordinates: - self.assertIs(group[name].cf_data, getattr(self, name)) - - def test_formula_terms_ignore(self): - self.orography.dimensions = ["lat", "wibble"] - with mock.patch( - "netCDF4.Dataset", return_value=self.dataset - ), mock.patch("warnings.warn") as warn: - cf_group = CFReader("dummy").cf_group - group = cf_group.promoted - self.assertEqual(list(group.keys()), ["orography"]) - self.assertIs(group["orography"].cf_data, self.orography) - self.assertEqual(warn.call_count, 1) - - def test_auxiliary_ignore(self): - self.x.dimensions = ["lat", "wibble"] - with mock.patch( - "netCDF4.Dataset", return_value=self.dataset - ), mock.patch("warnings.warn") as warn: - cf_group = CFReader("dummy").cf_group - promoted = ["x", "orography"] - group = cf_group.promoted - self.assertEqual(set(group.keys()), set(promoted)) - for name in promoted: - self.assertIs(group[name].cf_data, getattr(self, name)) - self.assertEqual(warn.call_count, 1) - - def test_promoted_auxiliary_ignore(self): - self.wibble = netcdf_variable("wibble", "lat wibble", np.float64) - self.variables["wibble"] = self.wibble - self.orography.coordinates = "wibble" - with mock.patch( - "netCDF4.Dataset", return_value=self.dataset - ), mock.patch("warnings.warn") as warn: - cf_group = CFReader("dummy").cf_group.promoted - promoted = ["wibble", "orography"] - self.assertEqual(set(cf_group.keys()), set(promoted)) - for name in promoted: - self.assertIs(cf_group[name].cf_data, getattr(self, name)) - self.assertEqual(warn.call_count, 2) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/dot/__init__.py 
b/lib/iris/tests/unit/fileformats/dot/__init__.py deleted file mode 100644 index 0dbc3ad4c6..0000000000 --- a/lib/iris/tests/unit/fileformats/dot/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :mod:`iris.fileformats.dot`.""" diff --git a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py b/lib/iris/tests/unit/fileformats/dot/test__dot_path.py deleted file mode 100644 index 1111e8bc83..0000000000 --- a/lib/iris/tests/unit/fileformats/dot/test__dot_path.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :func:`iris.fileformats.dot._dot_path`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import os.path -import subprocess -from unittest import mock - -from iris.fileformats.dot import _DOT_EXECUTABLE_PATH, _dot_path - - -class Test(tests.IrisTest): - def setUp(self): - # Because _dot_path is triggered by the initial import we - # reset the caching status to allow us to see what happens - # under different circumstances. - self.patch("iris.fileformats.dot._DOT_CHECKED", new=False) - # Also patch the private path variable to the existing value (i.e. no - # change), and restore it after each test: As these tests modify it, - # that can potentially break subsequent 'normal' behaviour. 
- self.patch( - "iris.fileformats.dot._DOT_EXECUTABLE_PATH", _DOT_EXECUTABLE_PATH - ) - - def test_valid_absolute_path(self): - # Override the configuration value for System.dot_path - real_path = os.path.abspath(__file__) - assert os.path.exists(real_path) and os.path.isabs(real_path) - with mock.patch("iris.config.get_option", return_value=real_path): - result = _dot_path() - self.assertEqual(result, real_path) - - def test_invalid_absolute_path(self): - # Override the configuration value for System.dot_path - dummy_path = "/not_a_real_path" * 10 - assert not os.path.exists(dummy_path) - with mock.patch("iris.config.get_option", return_value=dummy_path): - result = _dot_path() - self.assertIsNone(result) - - def test_valid_relative_path(self): - # Override the configuration value for System.dot_path - dummy_path = "not_a_real_path" * 10 - assert not os.path.exists(dummy_path) - with mock.patch("iris.config.get_option", return_value=dummy_path): - # Pretend we have a valid installation of dot - with mock.patch("subprocess.check_output"): - result = _dot_path() - self.assertEqual(result, dummy_path) - - def test_valid_relative_path_broken_install(self): - # Override the configuration value for System.dot_path - dummy_path = "not_a_real_path" * 10 - assert not os.path.exists(dummy_path) - with mock.patch("iris.config.get_option", return_value=dummy_path): - # Pretend we have a broken installation of dot - error = subprocess.CalledProcessError(-5, "foo", "bar") - with mock.patch("subprocess.check_output", side_effect=error): - result = _dot_path() - self.assertIsNone(result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/__init__.py b/lib/iris/tests/unit/fileformats/ff/__init__.py deleted file mode 100644 index 4d13a18520..0000000000 --- a/lib/iris/tests/unit/fileformats/ff/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.fileformats.ff` module.""" diff --git a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py b/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py deleted file mode 100644 index d37b854405..0000000000 --- a/lib/iris/tests/unit/fileformats/ff/test_ArakawaC.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.fileformat.ff.ArakawaC`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats._ff import ArakawaC - - -class Test__x_vectors(tests.IrisTest): - def _test(self, column, horiz_grid_type, xp, xu): - reals = np.arange(6) + 100 - grid = ArakawaC(column, None, reals, horiz_grid_type) - result_xp, result_xu = grid._x_vectors() - self.assertArrayEqual(result_xp, xp) - self.assertArrayEqual(result_xu, xu) - - def test_none(self): - self._test(column=None, horiz_grid_type=None, xp=None, xu=None) - - def test_1d(self): - self._test( - column=np.array([[0], [1], [2], [3]]), - horiz_grid_type=None, - xp=np.array([0, 1, 2, 3]), - xu=None, - ) - - def test_2d_no_wrap(self): - self._test( - column=np.array([[0, 0], [1, 10], [2, 20], [3, 30]]), - horiz_grid_type=1, - xp=np.array([0, 1, 2, 3]), - xu=np.array([0, 10, 20, 30]), - ) - - def test_2d_with_wrap(self): - self._test( - column=np.array([[0, 0], [1, 10], [2, 20], [3, 30]]), - horiz_grid_type=0, - xp=np.array([0, 1, 2, 3]), - xu=np.array([0, 10, 20]), - ) - - -class Test_regular_x(tests.IrisTest): - def _test(self, subgrid, bzx, bdx): - grid = ArakawaC(None, None, [4.0, None, None, -5.0, None, None], None) - result_bzx, result_bdx = 
grid.regular_x(subgrid) - self.assertEqual(result_bzx, bzx) - self.assertEqual(result_bdx, bdx) - - def test_theta_subgrid(self): - self._test(1, -9.0, 4.0) - - def test_u_subgrid(self): - self._test(11, -7.0, 4.0) - - -class Test_regular_y(tests.IrisTest): - def _test(self, v_offset, subgrid, bzy, bdy): - grid = ArakawaC(None, None, [None, 4.0, 45.0, None, None, None], None) - grid._v_offset = v_offset - result_bzy, result_bdy = grid.regular_y(subgrid) - self.assertEqual(result_bzy, bzy) - self.assertEqual(result_bdy, bdy) - - def test_theta_subgrid_NewDynamics(self): - self._test(0.5, 1, 41.0, 4.0) - - def test_v_subgrid_NewDynamics(self): - self._test(0.5, 11, 43.0, 4.0) - - def test_theta_subgrid_ENDGame(self): - self._test(-0.5, 1, 41.0, 4.0) - - def test_v_subgrid_ENDGame(self): - self._test(-0.5, 11, 39.0, 4.0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py b/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py deleted file mode 100644 index 696dacd672..0000000000 --- a/lib/iris/tests/unit/fileformats/ff/test_ENDGame.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.fileformat.ff.ENDGame`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats._ff import ENDGame - - -class Test(tests.IrisTest): - def test_class_attributes(self): - reals = np.arange(6) + 100 - grid = ENDGame(None, None, reals, None) - self.assertEqual(grid._v_offset, -0.5) - - -class Test__y_vectors(tests.IrisTest): - def _test(self, row, yp, yv): - reals = np.arange(6) + 100 - grid = ENDGame(None, row, reals, None) - result_yp, result_yv = grid._y_vectors() - self.assertArrayEqual(result_yp, yp) - self.assertArrayEqual(result_yv, yv) - - def test_none(self): - self._test(row=None, yp=None, yv=None) - - def test_1d(self): - self._test( - row=np.array([[0], [1], [2], [3]]), yp=np.array([0, 1, 2]), yv=None - ) - - def test_2d(self): - self._test( - row=np.array([[0, 0], [1, 10], [2, 20], [3, 30]]), - yp=np.array([0, 1, 2]), - yv=np.array([0, 10, 20, 30]), - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py b/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py deleted file mode 100644 index cec4f53bc3..0000000000 --- a/lib/iris/tests/unit/fileformats/ff/test_FF2PP.py +++ /dev/null @@ -1,600 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :class:`iris.fileformat.ff.FF2PP` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import collections -import contextlib -from unittest import mock - -import numpy as np - -from iris.exceptions import NotYetImplementedError -import iris.fileformats._ff as ff -from iris.fileformats._ff import FF2PP -import iris.fileformats.pp as pp - -# PP-field: LBPACK N1 values. -_UNPACKED = 0 -_WGDOS = 1 -_CRAY = 2 - -# PP-field: LBUSER(1) values. 
-_REAL = 1 -_INTEGER = 2 - - -_DummyField = collections.namedtuple( - "_DummyField", "lbext lblrec lbnrec raw_lbpack " "lbuser boundary_packing" -) -_DummyFieldWithSize = collections.namedtuple( - "_DummyFieldWithSize", - "lbext lblrec lbnrec raw_lbpack " "lbuser boundary_packing " "lbnpt lbrow", -) -_DummyBoundaryPacking = collections.namedtuple( - "_DummyBoundaryPacking", "x_halo y_halo rim_width" -) - - -class Test____iter__(tests.IrisTest): - @mock.patch("iris.fileformats._ff.FFHeader") - def test_call_structure(self, _FFHeader): - # Check that the iter method calls the two necessary utility - # functions - extract_result = mock.Mock() - interpret_patch = mock.patch( - "iris.fileformats.pp._interpret_fields", - autospec=True, - return_value=iter([]), - ) - extract_patch = mock.patch( - "iris.fileformats._ff.FF2PP._extract_field", - autospec=True, - return_value=extract_result, - ) - - FF2PP_instance = ff.FF2PP("mock") - with interpret_patch as interpret, extract_patch as extract: - list(iter(FF2PP_instance)) - - interpret.assert_called_once_with(extract_result) - extract.assert_called_once_with(FF2PP_instance) - - -class Test__extract_field__LBC_format(tests.IrisTest): - @contextlib.contextmanager - def mock_for_extract_field(self, fields, x=None, y=None): - """ - A context manager to ensure FF2PP._extract_field gets a field - instance looking like the next one in the "fields" iterable from - the "make_pp_field" call. - - """ - with mock.patch("iris.fileformats._ff.FFHeader"): - ff2pp = ff.FF2PP("mock") - ff2pp._ff_header.lookup_table = [0, 0, len(fields)] - # Fake level constants, with shape specifying just one model-level. 
- ff2pp._ff_header.level_dependent_constants = np.zeros(1) - grid = mock.Mock() - grid.vectors = mock.Mock(return_value=(x, y)) - ff2pp._ff_header.grid = mock.Mock(return_value=grid) - - open_func = "builtins.open" - with mock.patch( - "iris.fileformats._ff._parse_binary_stream", return_value=[0] - ), mock.patch(open_func), mock.patch( - "struct.unpack_from", return_value=[4] - ), mock.patch( - "iris.fileformats.pp.make_pp_field", side_effect=fields - ), mock.patch( - "iris.fileformats._ff.FF2PP._payload", return_value=(0, 0) - ): - yield ff2pp - - def _mock_lbc(self, **kwargs): - """Return a Mock object representing an LBC field.""" - # Default kwargs for a valid LBC field mapping just 1 model-level. - field_kwargs = dict(lbtim=0, lblev=7777, lbvc=0, lbhem=101) - # Apply provided args (replacing any defaults if specified). - field_kwargs.update(kwargs) - # Return a mock with just those properties pre-defined. - return mock.Mock(**field_kwargs) - - def test_LBC_header(self): - bzx, bzy = -10, 15 - # stash m01s00i001 - lbuser = [None, None, 121416, 1, None, None, 1] - field = self._mock_lbc( - lbegin=0, - lbrow=10, - lbnpt=12, - bdx=1, - bdy=1, - bzx=bzx, - bzy=bzy, - lbuser=lbuser, - ) - with self.mock_for_extract_field([field]) as ff2pp: - ff2pp._ff_header.dataset_type = 5 - result = list(ff2pp._extract_field()) - - self.assertEqual([field], result) - self.assertEqual(field.lbrow, 10 + 14 * 2) - self.assertEqual(field.lbnpt, 12 + 16 * 2) - - name_mapping_dict = dict( - rim_width=slice(4, 6), y_halo=slice(2, 4), x_halo=slice(0, 2) - ) - boundary_packing = pp.SplittableInt(121416, name_mapping_dict) - - self.assertEqual(field.boundary_packing, boundary_packing) - self.assertEqual(field.bzy, bzy - boundary_packing.y_halo * field.bdy) - self.assertEqual(field.bzx, bzx - boundary_packing.x_halo * field.bdx) - - def check_non_trivial_coordinate_warning(self, field): - field.lbegin = 0 - field.lbrow = 10 - field.lbnpt = 12 - # stash m01s31i020 - field.lbuser = [None, 
None, 121416, 20, None, None, 1] - orig_bdx, orig_bdy = field.bdx, field.bdy - - x = np.array([1, 2, 6]) - y = np.array([1, 2, 6]) - with self.mock_for_extract_field([field], x, y) as ff2pp: - ff2pp._ff_header.dataset_type = 5 - with mock.patch("warnings.warn") as warn: - list(ff2pp._extract_field()) - - # Check the values are unchanged. - self.assertEqual(field.bdy, orig_bdy) - self.assertEqual(field.bdx, orig_bdx) - - # Check a warning was raised with a suitable message. - warn_error_tmplt = "Unexpected warning message: {}" - non_trivial_coord_warn_msg = warn.call_args[0][0] - msg = ( - "The x or y coordinates of your boundary condition field may " - "be incorrect, not having taken into account the boundary " - "size." - ) - self.assertTrue( - non_trivial_coord_warn_msg.startswith(msg), - warn_error_tmplt.format(non_trivial_coord_warn_msg), - ) - - def test_LBC_header_non_trivial_coords_both(self): - # Check a warning is raised when both bdx and bdy are bad. - field = self._mock_lbc(bdx=0, bdy=0, bzx=10, bzy=10) - self.check_non_trivial_coordinate_warning(field) - - field.bdy = field.bdx = field.bmdi - self.check_non_trivial_coordinate_warning(field) - - def test_LBC_header_non_trivial_coords_x(self): - # Check a warning is raised when bdx is bad. - field = self._mock_lbc(bdx=0, bdy=10, bzx=10, bzy=10) - self.check_non_trivial_coordinate_warning(field) - - field.bdx = field.bmdi - self.check_non_trivial_coordinate_warning(field) - - def test_LBC_header_non_trivial_coords_y(self): - # Check a warning is raised when bdy is bad. - field = self._mock_lbc(bdx=10, bdy=0, bzx=10, bzy=10) - self.check_non_trivial_coordinate_warning(field) - - field.bdy = field.bmdi - self.check_non_trivial_coordinate_warning(field) - - def test_negative_bdy(self): - # Check a warning is raised when bdy is negative, - # we don't yet know what "north" means in this case. 
- field = self._mock_lbc( - bdx=10, - bdy=-10, - bzx=10, - bzy=10, - lbegin=0, - lbuser=[0, 0, 121416, 0, None, None, 0], - lbrow=10, - lbnpt=12, - ) - with self.mock_for_extract_field([field]) as ff2pp: - ff2pp._ff_header.dataset_type = 5 - with mock.patch("warnings.warn") as warn: - list(ff2pp._extract_field()) - msg = "The LBC has a bdy less than 0." - self.assertTrue( - warn.call_args[0][0].startswith(msg), - "Northwards bdy warning not correctly raised.", - ) - - -class Test__payload(tests.IrisTest): - def setUp(self): - # Create a mock LBC type PPField. - self.mock_field = mock.Mock() - field = self.mock_field - field.raw_lbpack = _UNPACKED - field.lbuser = [_REAL] - field.lblrec = 777 - field.lbext = 222 - field.lbnrec = 50 - field.boundary_packing = None - - def _test( - self, mock_field, expected_depth, expected_dtype, word_depth=None - ): - with mock.patch("iris.fileformats._ff.FFHeader", return_value=None): - kwargs = {} - if word_depth is not None: - kwargs["word_depth"] = word_depth - ff2pp = FF2PP("dummy_filename", **kwargs) - data_depth, data_dtype = ff2pp._payload(mock_field) - self.assertEqual(data_depth, expected_depth) - self.assertEqual(data_dtype, expected_dtype) - - def test_unpacked_real(self): - mock_field = _DummyField( - lbext=0, - lblrec=100, - lbnrec=-1, - raw_lbpack=_UNPACKED, - lbuser=[_REAL], - boundary_packing=None, - ) - self._test(mock_field, 800, ">f8") - - def test_unpacked_real_ext(self): - mock_field = _DummyField( - lbext=5, - lblrec=100, - lbnrec=-1, - raw_lbpack=_UNPACKED, - lbuser=[_REAL], - boundary_packing=None, - ) - self._test(mock_field, 760, ">f8") - - def test_unpacked_integer(self): - mock_field = _DummyField( - lbext=0, - lblrec=200, - lbnrec=-1, - raw_lbpack=_UNPACKED, - lbuser=[_INTEGER], - boundary_packing=None, - ) - self._test(mock_field, 1600, ">i8") - - def test_unpacked_integer_ext(self): - mock_field = _DummyField( - lbext=10, - lblrec=200, - lbnrec=-1, - raw_lbpack=_UNPACKED, - lbuser=[_INTEGER], - 
boundary_packing=None, - ) - self._test(mock_field, 1520, ">i8") - - def test_unpacked_real_ext_different_word_depth(self): - mock_field = _DummyField( - lbext=5, - lblrec=100, - lbnrec=-1, - raw_lbpack=_UNPACKED, - lbuser=[_REAL], - boundary_packing=None, - ) - self._test(mock_field, 380, ">f4", word_depth=4) - - def test_wgdos_real(self): - mock_field = _DummyField( - lbext=0, - lblrec=-1, - lbnrec=100, - raw_lbpack=_WGDOS, - lbuser=[_REAL], - boundary_packing=None, - ) - self._test(mock_field, 800, ">f4") - - def test_wgdos_real_ext(self): - mock_field = _DummyField( - lbext=5, - lblrec=-1, - lbnrec=100, - raw_lbpack=_WGDOS, - lbuser=[_REAL], - boundary_packing=None, - ) - self._test(mock_field, 800, ">f4") - - def test_wgdos_integer(self): - mock_field = _DummyField( - lbext=0, - lblrec=-1, - lbnrec=200, - raw_lbpack=_WGDOS, - lbuser=[_INTEGER], - boundary_packing=None, - ) - self._test(mock_field, 1600, ">i4") - - def test_wgdos_integer_ext(self): - mock_field = _DummyField( - lbext=10, - lblrec=-1, - lbnrec=200, - raw_lbpack=_WGDOS, - lbuser=[_INTEGER], - boundary_packing=None, - ) - self._test(mock_field, 1600, ">i4") - - def test_cray_real(self): - mock_field = _DummyField( - lbext=0, - lblrec=100, - lbnrec=-1, - raw_lbpack=_CRAY, - lbuser=[_REAL], - boundary_packing=None, - ) - self._test(mock_field, 400, ">f4") - - def test_cray_real_ext(self): - mock_field = _DummyField( - lbext=5, - lblrec=100, - lbnrec=-1, - raw_lbpack=_CRAY, - lbuser=[_REAL], - boundary_packing=None, - ) - self._test(mock_field, 380, ">f4") - - def test_cray_integer(self): - mock_field = _DummyField( - lbext=0, - lblrec=200, - lbnrec=-1, - raw_lbpack=_CRAY, - lbuser=[_INTEGER], - boundary_packing=None, - ) - self._test(mock_field, 800, ">i4") - - def test_cray_integer_ext(self): - mock_field = _DummyField( - lbext=10, - lblrec=200, - lbnrec=-1, - raw_lbpack=_CRAY, - lbuser=[_INTEGER], - boundary_packing=None, - ) - self._test(mock_field, 760, ">i4") - - def 
test_lbpack_unsupported(self): - mock_field = _DummyField( - lbext=10, - lblrec=200, - lbnrec=-1, - raw_lbpack=1239, - lbuser=[_INTEGER], - boundary_packing=None, - ) - with self.assertRaisesRegex( - NotYetImplementedError, - "PP fields with LBPACK of 1239 are not supported.", - ): - self._test(mock_field, None, None) - - def test_lbc_unpacked(self): - boundary_packing = _DummyBoundaryPacking( - x_halo=11, y_halo=7, rim_width=3 - ) - mock_field = _DummyFieldWithSize( - lbext=10, - lblrec=200, - lbnrec=-1, - raw_lbpack=_UNPACKED, - lbuser=[_REAL], - boundary_packing=boundary_packing, - lbnpt=47, - lbrow=34, - ) - self._test(mock_field, ((47 * 34) - (19 * 14)) * 8, ">f8") - - def test_lbc_wgdos_unsupported(self): - mock_field = _DummyField( - lbext=5, - lblrec=-1, - lbnrec=100, - raw_lbpack=_WGDOS, - lbuser=[_REAL], - # Anything not None will do here. - boundary_packing=0, - ) - with self.assertRaisesRegex( - ValueError, "packed LBC data is not supported" - ): - self._test(mock_field, None, None) - - def test_lbc_cray(self): - boundary_packing = _DummyBoundaryPacking( - x_halo=11, y_halo=7, rim_width=3 - ) - mock_field = _DummyFieldWithSize( - lbext=10, - lblrec=200, - lbnrec=-1, - raw_lbpack=_CRAY, - lbuser=[_REAL], - boundary_packing=boundary_packing, - lbnpt=47, - lbrow=34, - ) - self._test(mock_field, ((47 * 34) - (19 * 14)) * 4, ">f4") - - -class Test__det_border(tests.IrisTest): - def setUp(self): - _FFH_patch = mock.patch("iris.fileformats._ff.FFHeader") - _FFH_patch.start() - self.addCleanup(_FFH_patch.stop) - - def test_unequal_spacing_eitherside(self): - # Ensure that we do not interpret the case where there is not the same - # spacing on the lower edge as the upper edge. - ff2pp = FF2PP("dummy") - field_x = np.array([1, 2, 10]) - - msg = ( - "The x or y coordinates of your boundary condition field may " - "be incorrect, not having taken into account the boundary " - "size." 
- ) - - with mock.patch("warnings.warn") as warn: - result = ff2pp._det_border(field_x, None) - warn.assert_called_with(msg) - self.assertIs(result, field_x) - - def test_increasing_field_values(self): - # Field where its values a increasing. - ff2pp = FF2PP("dummy") - field_x = np.array([1, 2, 3]) - com = np.array([0, 1, 2, 3, 4]) - result = ff2pp._det_border(field_x, 1) - self.assertArrayEqual(result, com) - - def test_decreasing_field_values(self): - # Field where its values a decreasing. - ff2pp = FF2PP("dummy") - field_x = np.array([3, 2, 1]) - com = np.array([4, 3, 2, 1, 0]) - result = ff2pp._det_border(field_x, 1) - self.assertArrayEqual(result, com) - - -class Test__adjust_field_for_lbc(tests.IrisTest): - def setUp(self): - # Patch FFHeader to produce a mock header instead of opening a file. - self.mock_ff_header = mock.Mock() - self.mock_ff_header.dataset_type = 5 - self.mock_ff = self.patch( - "iris.fileformats._ff.FFHeader", return_value=self.mock_ff_header - ) - - # Create a mock LBC type PPField. 
- self.mock_field = mock.Mock() - field = self.mock_field - field.lbtim = 0 - field.lblev = 7777 - field.lbvc = 0 - field.lbnpt = 1001 - field.lbrow = 2001 - field.lbuser = (None, None, 80504) - field.lbpack = pp.SplittableInt(0) - field.boundary_packing = None - field.bdx = 1.0 - field.bzx = 0.0 - field.bdy = 1.0 - field.bzy = 0.0 - - def test__basic(self): - ff2pp = FF2PP("dummy_filename") - field = self.mock_field - ff2pp._adjust_field_for_lbc(field) - self.assertEqual(field.lbtim, 11) - self.assertEqual(field.lbvc, 65) - self.assertEqual(field.boundary_packing.rim_width, 8) - self.assertEqual(field.boundary_packing.y_halo, 5) - self.assertEqual(field.boundary_packing.x_halo, 4) - self.assertEqual(field.lbnpt, 1009) - self.assertEqual(field.lbrow, 2011) - - def test__bad_lbtim(self): - self.mock_field.lbtim = 717 - ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex( - ValueError, "LBTIM of 717, expected only 0 or 11" - ): - ff2pp._adjust_field_for_lbc(self.mock_field) - - def test__bad_lbvc(self): - self.mock_field.lbvc = 312 - ff2pp = FF2PP("dummy_filename") - with self.assertRaisesRegex( - ValueError, "LBVC of 312, expected only 0 or 65" - ): - ff2pp._adjust_field_for_lbc(self.mock_field) - - -class Test__fields_over_all_levels(tests.IrisTest): - def setUp(self): - # Patch FFHeader to produce a mock header instead of opening a file. - self.mock_ff_header = mock.Mock() - self.mock_ff_header.dataset_type = 5 - - # Fake the level constants to look like 3 model levels. - self.n_all_levels = 3 - self.mock_ff_header.level_dependent_constants = np.zeros( - (self.n_all_levels) - ) - self.mock_ff = self.patch( - "iris.fileformats._ff.FFHeader", return_value=self.mock_ff_header - ) - - # Create a simple mock for a test field. 
- self.mock_field = mock.Mock() - field = self.mock_field - field.lbhem = 103 - self.original_lblev = mock.sentinel.untouched_lbev - field.lblev = self.original_lblev - - def _check_expected_levels(self, results, n_levels): - if n_levels == 0: - self.assertEqual(len(results), 1) - self.assertEqual(results[0].lblev, self.original_lblev) - else: - self.assertEqual(len(results), n_levels) - self.assertEqual( - [fld.lblev for fld in results], list(range(n_levels)) - ) - - def test__is_lbc(self): - ff2pp = FF2PP("dummy_filename") - field = self.mock_field - results = list(ff2pp._fields_over_all_levels(field)) - self._check_expected_levels(results, 3) - - def test__lbhem_too_small(self): - ff2pp = FF2PP("dummy_filename") - field = self.mock_field - field.lbhem = 100 - with self.assertRaisesRegex(ValueError, "hence >= 101"): - _ = list(ff2pp._fields_over_all_levels(field)) - - def test__lbhem_too_large(self): - ff2pp = FF2PP("dummy_filename") - field = self.mock_field - field.lbhem = 105 - with self.assertRaisesRegex( - ValueError, "more than the total number of levels in the file = 3" - ): - _ = list(ff2pp._fields_over_all_levels(field)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py b/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py deleted file mode 100644 index 6a65397086..0000000000 --- a/lib/iris/tests/unit/fileformats/ff/test_FFHeader.py +++ /dev/null @@ -1,77 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.fileformat.ff.FFHeader`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import collections -from unittest import mock - -import numpy as np - -from iris.fileformats._ff import FFHeader - -MyGrid = collections.namedtuple("MyGrid", "column row real horiz_grid_type") - - -class Test_grid(tests.IrisTest): - def _header(self, grid_staggering): - with mock.patch.object( - FFHeader, "__init__", mock.Mock(return_value=None) - ): - header = FFHeader() - header.grid_staggering = grid_staggering - header.column_dependent_constants = mock.sentinel.column - header.row_dependent_constants = mock.sentinel.row - header.real_constants = mock.sentinel.real - header.horiz_grid_type = mock.sentinel.horiz_grid_type - return header - - def _test_grid_staggering(self, grid_staggering): - header = self._header(grid_staggering) - with mock.patch.dict( - FFHeader.GRID_STAGGERING_CLASS, {grid_staggering: MyGrid} - ): - grid = header.grid() - self.assertIsInstance(grid, MyGrid) - self.assertIs(grid.column, mock.sentinel.column) - self.assertIs(grid.row, mock.sentinel.row) - self.assertIs(grid.real, mock.sentinel.real) - self.assertIs(grid.horiz_grid_type, mock.sentinel.horiz_grid_type) - - def test_new_dynamics(self): - self._test_grid_staggering(3) - - def test_end_game(self): - self._test_grid_staggering(6) - - def test_unknown(self): - header = self._header(0) - with mock.patch( - "iris.fileformats._ff.NewDynamics", - mock.Mock(return_value=mock.sentinel.grid), - ): - with mock.patch("warnings.warn") as warn: - grid = header.grid() - warn.assert_called_with( - "Staggered grid type: 0 not currently" - " interpreted, assuming standard C-grid" - ) - self.assertIs(grid, mock.sentinel.grid) - - -@tests.skip_data -class Test_integer_constants(tests.IrisTest): - def test_read_ints(self): - test_file_path = tests.get_data_path(("FF", "structured", "small")) - ff_header = FFHeader(test_file_path) - self.assertEqual(ff_header.integer_constants.dtype, np.dtype(">i8")) - - -if __name__ == "__main__": - tests.main() diff --git 
a/lib/iris/tests/unit/fileformats/ff/test_Grid.py b/lib/iris/tests/unit/fileformats/ff/test_Grid.py deleted file mode 100644 index b20c85b9a8..0000000000 --- a/lib/iris/tests/unit/fileformats/ff/test_Grid.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.fileformat.ff.Grid`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats._ff import Grid - - -class Test___init__(tests.IrisTest): - def test_attributes(self): - # Ensure the constructor initialises all the grid's attributes - # correctly, including unpacking values from the REAL constants. - reals = ( - mock.sentinel.ew, - mock.sentinel.ns, - mock.sentinel.first_lat, - mock.sentinel.first_lon, - mock.sentinel.pole_lat, - mock.sentinel.pole_lon, - ) - grid = Grid( - mock.sentinel.column, - mock.sentinel.row, - reals, - mock.sentinel.horiz_grid_type, - ) - self.assertIs(grid.column_dependent_constants, mock.sentinel.column) - self.assertIs(grid.row_dependent_constants, mock.sentinel.row) - self.assertIs(grid.ew_spacing, mock.sentinel.ew) - self.assertIs(grid.ns_spacing, mock.sentinel.ns) - self.assertIs(grid.first_lat, mock.sentinel.first_lat) - self.assertIs(grid.first_lon, mock.sentinel.first_lon) - self.assertIs(grid.pole_lat, mock.sentinel.pole_lat) - self.assertIs(grid.pole_lon, mock.sentinel.pole_lon) - self.assertIs(grid.horiz_grid_type, mock.sentinel.horiz_grid_type) - - -class Test_vectors(tests.IrisTest): - def setUp(self): - self.xp = mock.sentinel.xp - self.xu = mock.sentinel.xu - self.yp = mock.sentinel.yp - self.yv = mock.sentinel.yv - - def _test_subgrid_vectors(self, subgrid, expected): - grid = Grid(None, None, (None,) * 6, None) - 
grid._x_vectors = mock.Mock(return_value=(self.xp, self.xu)) - grid._y_vectors = mock.Mock(return_value=(self.yp, self.yv)) - result = grid.vectors(subgrid) - self.assertEqual(result, expected) - - def test_1(self): - # Data on atmospheric theta points. - self._test_subgrid_vectors(1, (self.xp, self.yp)) - - def test_2(self): - # Data on atmospheric theta points, values over land only. - self._test_subgrid_vectors(2, (self.xp, self.yp)) - - def test_3(self): - # Data on atmospheric theta points, values over sea only. - self._test_subgrid_vectors(3, (self.xp, self.yp)) - - def test_4(self): - # Data on atmospheric zonal theta points. - self._test_subgrid_vectors(4, (self.xp, self.yp)) - - def test_5(self): - # Data on atmospheric meridional theta points. - self._test_subgrid_vectors(5, (self.xp, self.yp)) - - def test_11(self): - # Data on atmospheric uv points. - self._test_subgrid_vectors(11, (self.xu, self.yv)) - - def test_18(self): - # Data on atmospheric u points on the 'c' grid. - self._test_subgrid_vectors(18, (self.xu, self.yp)) - - def test_19(self): - # Data on atmospheric v points on the 'c' grid. - self._test_subgrid_vectors(19, (self.xp, self.yv)) - - def test_26(self): - # Lateral boundary data at atmospheric theta points. - self._test_subgrid_vectors(26, (self.xp, self.yp)) - - def test_27(self): - # Lateral boundary data at atmospheric u points. - self._test_subgrid_vectors(27, (self.xu, self.yp)) - - def test_28(self): - # Lateral boundary data at atmospheric v points. - self._test_subgrid_vectors(28, (self.xp, self.yv)) - - def test_29(self): - # Orography field for atmospheric LBCs. 
- self._test_subgrid_vectors(29, (self.xp, self.yp)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py b/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py deleted file mode 100644 index 5f0d64da71..0000000000 --- a/lib/iris/tests/unit/fileformats/ff/test_NewDynamics.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.fileformat.ff.NewDynamics`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats._ff import NewDynamics - - -class Test(tests.IrisTest): - def test_class_attributes(self): - reals = np.arange(6) + 100 - grid = NewDynamics(None, None, reals, None) - self.assertEqual(grid._v_offset, 0.5) - - -class Test__y_vectors(tests.IrisTest): - def _test(self, row, yp, yv): - reals = np.arange(6) + 100 - grid = NewDynamics(None, row, reals, None) - result_yp, result_yv = grid._y_vectors() - self.assertArrayEqual(result_yp, yp) - self.assertArrayEqual(result_yv, yv) - - def test_none(self): - self._test(row=None, yp=None, yv=None) - - def test_1d(self): - self._test( - row=np.array([[0], [1], [2], [3]]), - yp=np.array([0, 1, 2, 3]), - yv=None, - ) - - def test_2d(self): - self._test( - row=np.array([[0, 0], [1, 10], [2, 20], [3, 30]]), - yp=np.array([0, 1, 2, 3]), - yv=np.array([0, 10, 20]), - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/name_loaders/__init__.py b/lib/iris/tests/unit/fileformats/name_loaders/__init__.py deleted file mode 100644 index 751801a176..0000000000 --- a/lib/iris/tests/unit/fileformats/name_loaders/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# 
This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.fileformats.name_loaders` package.""" diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py deleted file mode 100644 index ded635984c..0000000000 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_cell_methods.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.fileformats.name_loaders._build_cell_methods`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris.coords -from iris.fileformats.name_loaders import _build_cell_methods - - -class Tests(tests.IrisTest): - def test_nameII_average(self): - av_or_int = ["something average ob bla"] - coord_name = "foo" - res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [iris.coords.CellMethod("mean", "foo")]) - - def test_nameIII_averaged(self): - av_or_int = ["something averaged ob bla"] - coord_name = "bar" - res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [iris.coords.CellMethod("mean", "bar")]) - - def test_nameII_integral(self): - av_or_int = ["something integral ob bla"] - coord_name = "ensemble" - res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [iris.coords.CellMethod("sum", "ensemble")]) - - def test_nameIII_integrated(self): - av_or_int = ["something integrated ob bla"] - coord_name = "time" - res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, 
[iris.coords.CellMethod("sum", "time")]) - - def test_no_averaging(self): - av_or_int = [ - "No foo averaging", - "No bar averaging", - "no", - "", - "no averaging", - "no anything at all averaging", - ] - coord_name = "time" - res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual(res, [None] * len(av_or_int)) - - def test_nameII_mixed(self): - av_or_int = [ - "something integral ob bla", - "no averaging", - "other average", - ] - coord_name = "ensemble" - res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual( - res, - [ - iris.coords.CellMethod("sum", "ensemble"), - None, - iris.coords.CellMethod("mean", "ensemble"), - ], - ) - - def test_nameIII_mixed(self): - av_or_int = [ - "something integrated ob bla", - "no averaging", - "other averaged", - ] - coord_name = "ensemble" - res = _build_cell_methods(av_or_int, coord_name) - self.assertEqual( - res, - [ - iris.coords.CellMethod("sum", "ensemble"), - None, - iris.coords.CellMethod("mean", "ensemble"), - ], - ) - - def test_unrecognised(self): - unrecognised_heading = "bla else" - av_or_int = [ - "something average", - unrecognised_heading, - "something integral", - ] - coord_name = "foo" - with mock.patch("warnings.warn") as warn: - _ = _build_cell_methods(av_or_int, coord_name) - expected_msg = ( - "Unknown {} statistic: {!r}. Unable to " - "create cell method.".format(coord_name, unrecognised_heading) - ) - warn.assert_called_with(expected_msg) - - def test_unrecognised_similar_to_no_averaging(self): - unrecognised_headings = [ - "not averaging", - "this is not a valid no", - "nope", - "no daveraging", - "no averagingg", - "no something", - "noaveraging", - ] - for unrecognised_heading in unrecognised_headings: - av_or_int = [ - "something average", - unrecognised_heading, - "something integral", - ] - coord_name = "foo" - with mock.patch("warnings.warn") as warn: - _ = _build_cell_methods(av_or_int, coord_name) - expected_msg = ( - "Unknown {} statistic: {!r}. 
Unable to " - "create cell method.".format(coord_name, unrecognised_heading) - ) - warn.assert_called_with(expected_msg) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py b/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py deleted file mode 100644 index 5954823c54..0000000000 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__build_lat_lon_for_NAME_timeseries.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.analysis.name_loaders._build_lat_lon_for_NAME_timeseries`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.fileformats.name_loaders import ( - NAMECoord, - _build_lat_lon_for_NAME_timeseries, -) - - -class TestCellMethods(tests.IrisTest): - def test_float(self): - column_headings = { - "X": ["X = -.100 Lat-Long", "X = -1.600 Lat-Long"], - "Y": ["Y = 52.450 Lat-Long", "Y = 51. 
Lat-Long"], - } - lat, lon = _build_lat_lon_for_NAME_timeseries(column_headings) - self.assertIsInstance(lat, NAMECoord) - self.assertIsInstance(lon, NAMECoord) - self.assertEqual(lat.name, "latitude") - self.assertEqual(lon.name, "longitude") - self.assertIsNone(lat.dimension) - self.assertIsNone(lon.dimension) - self.assertArrayEqual(lat.values, [52.45, 51.0]) - self.assertArrayEqual(lon.values, [-0.1, -1.6]) - - def test_int(self): - column_headings = { - "X": ["X = -1 Lat-Long", "X = -2 Lat-Long"], - "Y": ["Y = 52 Lat-Long", "Y = 51 Lat-Long"], - } - lat, lon = _build_lat_lon_for_NAME_timeseries(column_headings) - self.assertIsInstance(lat, NAMECoord) - self.assertIsInstance(lon, NAMECoord) - self.assertEqual(lat.name, "latitude") - self.assertEqual(lon.name, "longitude") - self.assertIsNone(lat.dimension) - self.assertIsNone(lon.dimension) - self.assertArrayEqual(lat.values, [52.0, 51.0]) - self.assertArrayEqual(lon.values, [-1.0, -2.0]) - self.assertIsInstance(lat.values[0], float) - self.assertIsInstance(lon.values[0], float) diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py b/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py deleted file mode 100644 index c4cbde8c14..0000000000 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__calc_integration_period.py +++ /dev/null @@ -1,65 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.fileformats.name_loaders.__calc_integration_period`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import datetime - -from iris.fileformats.name_loaders import _calc_integration_period - - -class Test(tests.IrisTest): - def test_30_min_av(self): - time_avgs = [" 30min average"] - result = _calc_integration_period(time_avgs) - expected = [datetime.timedelta(0, (30 * 60))] - self.assertEqual(result, expected) - - def test_30_min_av_rspace(self): - time_avgs = [" 30min average "] - result = _calc_integration_period(time_avgs) - expected = [datetime.timedelta(0, (30 * 60))] - self.assertEqual(result, expected) - - def test_30_min_av_lstrip(self): - time_avgs = [" 30min average".lstrip()] - result = _calc_integration_period(time_avgs) - expected = [datetime.timedelta(0, (30 * 60))] - self.assertEqual(result, expected) - - def test_3_hour_av(self): - time_avgs = [" 3hr 0min average"] - result = _calc_integration_period(time_avgs) - expected = [datetime.timedelta(0, (3 * 60 * 60))] - self.assertEqual(result, expected) - - def test_3_hour_int(self): - time_avgs = [" 3hr 0min integral"] - result = _calc_integration_period(time_avgs) - expected = [datetime.timedelta(0, (3 * 60 * 60))] - self.assertEqual(result, expected) - - def test_12_hour_av(self): - time_avgs = [" 12hr 0min average"] - result = _calc_integration_period(time_avgs) - expected = [datetime.timedelta(0, (12 * 60 * 60))] - self.assertEqual(result, expected) - - def test_5_day_av(self): - time_avgs = [" 5day 0hr 0min integral"] - result = _calc_integration_period(time_avgs) - expected = [datetime.timedelta(0, (5 * 24 * 60 * 60))] - self.assertEqual(result, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py b/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py deleted file mode 100644 index 7ce66c3fef..0000000000 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__cf_height_from_name.py +++ /dev/null @@ -1,317 +0,0 @@ -# Copyright Iris contributors 
-# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.analysis.name_loaders._cf_height_from_name` -function. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.coords import AuxCoord -from iris.fileformats.name_loaders import _cf_height_from_name - - -class TestAll(tests.IrisTest): - def _default_coord(self, data): - # This private method returns a coordinate with values expected - # when no interpretation is made of the field header string. - return AuxCoord( - units="no-unit", - points=data, - bounds=None, - standard_name=None, - long_name="z", - attributes={"positive": "up"}, - ) - - -class TestAll_NAMEII(TestAll): - # NAMEII formats are defined by bounds, not points - def test_bounded_height_above_ground(self): - data = "From 0 - 100m agl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=np.array([0.0, 100.0]), - standard_name="height", - long_name="height above ground level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_bounded_flight_level(self): - data = "From FL0 - FL100" - res = _cf_height_from_name(data) - com = AuxCoord( - units="unknown", - points=50.0, - bounds=np.array([0.0, 100.0]), - standard_name=None, - long_name="flight_level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_bounded_height_above_sea_level(self): - data = "From 0 - 100m asl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=np.array([0.0, 100.0]), - standard_name="altitude", - long_name="altitude above sea level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_malformed_height_above_ground(self): - # Parse height above ground 
level with additional stuff on the end of - # the string (agl). - data = "From 0 - 100m agl and stuff" - res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) - - def test_malformed_height_above_sea_level(self): - # Parse height above ground level with additional stuff on the end of - # the string (agl). - data = "From 0 - 100m asl and stuff" - res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) - - def test_malformed_flight_level(self): - # Parse height above ground level with additional stuff on the end of - # the string (agl). - data = "From FL0 - FL100 and stuff" - res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) - - def test_float_bounded_height_above_ground(self): - # Parse height above ground level when its a float. - data = "From 0.0 - 100.0m agl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=np.array([0.0, 100.0]), - standard_name="height", - long_name="height above ground level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_float_bounded_height_flight_level(self): - # Parse height above ground level, as a float (agl). - data = "From FL0.0 - FL100.0" - res = _cf_height_from_name(data) - com = AuxCoord( - units="unknown", - points=50.0, - bounds=np.array([0.0, 100.0]), - standard_name=None, - long_name="flight_level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_float_bounded_height_above_sea_level(self): - # Parse height above ground level as a float (agl). - data = "From 0.0 - 100.0m asl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=np.array([0.0, 100.0]), - standard_name="altitude", - long_name="altitude above sea level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_no_match(self): - # Parse height information when there is no match. 
- # No interpretation, just returns default values. - data = "Vertical integral" - res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) - - def test_pressure(self): - # Parse air_pressure string. - data = "From 0 - 100 Pa" - res = _cf_height_from_name(data) - com = AuxCoord( - units="Pa", - points=50.0, - bounds=np.array([0.0, 100.0]), - standard_name="air_pressure", - long_name=None, - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - -class TestAll_NAMEIII(TestAll): - # NAMEIII formats are defined by points, not bounds. - def test_height_above_ground(self): - data = "Z = 50.00000 m agl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=None, - standard_name="height", - long_name="height above ground level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_height_flight_level(self): - data = "Z = 50.00000 FL" - res = _cf_height_from_name(data) - com = AuxCoord( - units="unknown", - points=50.0, - bounds=None, - standard_name=None, - long_name="flight_level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_height_above_sea_level(self): - data = "Z = 50.00000 m asl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=None, - standard_name="altitude", - long_name="altitude above sea level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_malformed_height_above_ground(self): - # Parse height above ground level, with additonal stuff at the string - # end (agl). - data = "Z = 50.00000 m agl and stuff" - res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) - - def test_malformed_height_above_sea_level(self): - # Parse height above ground level, with additional stuff at string - # end (agl). 
- data = "Z = 50.00000 m asl and stuff" - res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) - - def test_malformed_flight_level(self): - # Parse height above ground level (agl), with additional stuff at - # string end. - data = "Z = 50.00000 FL and stuff" - res = _cf_height_from_name(data) - com = self._default_coord(data) - self.assertEqual(com, res) - - def test_integer_height_above_ground(self): - # Parse height above ground level when its an integer. - data = "Z = 50 m agl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=None, - standard_name="height", - long_name="height above ground level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_integer_height_flight_level(self): - # Parse flight level when its an integer. - data = "Z = 50 FL" - res = _cf_height_from_name(data) - com = AuxCoord( - units="unknown", - points=50.0, - bounds=None, - standard_name=None, - long_name="flight_level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_integer_height_above_sea_level(self): - # Parse height above sea level (asl) when its an integer. 
- data = "Z = 50 m asl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=50.0, - bounds=None, - standard_name="altitude", - long_name="altitude above sea level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_enotation_height_above_ground(self): - # Parse height above ground expressed in scientific notation - data = "Z = 0.0000000E+00 m agl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=0.0, - bounds=None, - standard_name="height", - long_name="height above ground level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_enotation_height_above_sea_level(self): - # Parse height above sea level expressed in scientific notation - data = "Z = 0.0000000E+00 m asl" - res = _cf_height_from_name(data) - com = AuxCoord( - units="m", - points=0.0, - bounds=None, - standard_name="altitude", - long_name="altitude above sea level", - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - def test_pressure(self): - # Parse pressure. - data = "Z = 50.00000 Pa" - res = _cf_height_from_name(data) - com = AuxCoord( - units="Pa", - points=50.0, - bounds=None, - standard_name="air_pressure", - long_name=None, - attributes={"positive": "up"}, - ) - self.assertEqual(com, res) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py b/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py deleted file mode 100644 index d50a7fdad1..0000000000 --- a/lib/iris/tests/unit/fileformats/name_loaders/test__generate_cubes.py +++ /dev/null @@ -1,168 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.analysis.name_loaders._generate_cubes`. 
- -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from datetime import datetime, timedelta -from unittest import mock - -import numpy as np - -from iris.fileformats.name_loaders import NAMECoord, _generate_cubes - - -class TestCellMethods(tests.IrisTest): - def test_cell_methods(self): - header = mock.MagicMock() - column_headings = { - "Species": [1, 2, 3], - "Quantity": [4, 5, 6], - "Units": ["m", "m", "m"], - "Z": [1, 2, 3], - } - coords = mock.MagicMock() - data_arrays = [mock.Mock(), mock.Mock()] - cell_methods = ["cell_method_1", "cell_method_2"] - - self.patch("iris.fileformats.name_loaders._cf_height_from_name") - self.patch("iris.cube.Cube") - cubes = list( - _generate_cubes( - header, column_headings, coords, data_arrays, cell_methods - ) - ) - - cubes[0].assert_has_calls([mock.call.add_cell_method("cell_method_1")]) - cubes[1].assert_has_calls([mock.call.add_cell_method("cell_method_2")]) - - -class TestCircularLongitudes(tests.IrisTest): - def _simulate_with_coords(self, names, values, dimensions): - header = mock.MagicMock() - column_headings = { - "Species": [1, 2, 3], - "Quantity": [4, 5, 6], - "Units": ["m", "m", "m"], - "Z": [1, 2, 3], - } - coords = [ - NAMECoord(name, dim, vals) - for name, vals, dim in zip(names, values, dimensions) - ] - data_arrays = [mock.Mock()] - - self.patch("iris.fileformats.name_loaders._cf_height_from_name") - self.patch("iris.cube.Cube") - cubes = list( - _generate_cubes(header, column_headings, coords, data_arrays) - ) - return cubes - - def test_non_circular(self): - results = self._simulate_with_coords( - names=["longitude"], values=[[1, 7, 23]], dimensions=[0] - ) - self.assertEqual(len(results), 1) - add_coord_calls = results[0].add_dim_coord.call_args_list - self.assertEqual(len(add_coord_calls), 1) - coord = add_coord_calls[0][0][0] - self.assertEqual(coord.circular, False) - - def test_circular(self): - 
results = self._simulate_with_coords( - names=["longitude"], - values=[[5.0, 95.0, 185.0, 275.0]], - dimensions=[0], - ) - self.assertEqual(len(results), 1) - add_coord_calls = results[0].add_dim_coord.call_args_list - self.assertEqual(len(add_coord_calls), 1) - coord = add_coord_calls[0][0][0] - self.assertEqual(coord.circular, True) - - def test_lat_lon_byname(self): - results = self._simulate_with_coords( - names=["longitude", "latitude"], - values=[[5.0, 95.0, 185.0, 275.0], [5.0, 95.0, 185.0, 275.0]], - dimensions=[0, 1], - ) - self.assertEqual(len(results), 1) - add_coord_calls = results[0].add_dim_coord.call_args_list - self.assertEqual(len(add_coord_calls), 2) - lon_coord = add_coord_calls[0][0][0] - lat_coord = add_coord_calls[1][0][0] - self.assertEqual(lon_coord.circular, True) - self.assertEqual(lat_coord.circular, False) - - -class TestTimeCoord(tests.IrisTest): - def _simulate_with_coords(self, names, values, dimensions): - header = mock.MagicMock() - column_headings = { - "Species": [1, 2, 3], - "Quantity": [4, 5, 6], - "Units": ["m", "m", "m"], - "Av or Int period": [timedelta(hours=24)], - } - coords = [ - NAMECoord(name, dim, np.array(vals)) - for name, vals, dim in zip(names, values, dimensions) - ] - data_arrays = [mock.Mock()] - - self.patch("iris.fileformats.name_loaders._cf_height_from_name") - self.patch("iris.cube.Cube") - cubes = list( - _generate_cubes(header, column_headings, coords, data_arrays) - ) - return cubes - - def test_time_dim(self): - results = self._simulate_with_coords( - names=["longitude", "latitude", "time"], - values=[ - [10, 20], - [30, 40], - [datetime(2015, 6, 7), datetime(2015, 6, 8)], - ], - dimensions=[0, 1, 2], - ) - self.assertEqual(len(results), 1) - result = results[0] - dim_coord_calls = result.add_dim_coord.call_args_list - self.assertEqual(len(dim_coord_calls), 3) # lon, lat, time - t_coord = dim_coord_calls[2][0][0] - self.assertEqual(t_coord.standard_name, "time") - self.assertArrayEqual(t_coord.points, 
[398232, 398256]) - self.assertArrayEqual(t_coord.bounds[0], [398208, 398232]) - self.assertArrayEqual(t_coord.bounds[-1], [398232, 398256]) - - def test_time_scalar(self): - results = self._simulate_with_coords( - names=["longitude", "latitude", "time"], - values=[[10, 20], [30, 40], [datetime(2015, 6, 7)]], - dimensions=[0, 1, None], - ) - self.assertEqual(len(results), 1) - result = results[0] - dim_coord_calls = result.add_dim_coord.call_args_list - self.assertEqual(len(dim_coord_calls), 2) - aux_coord_calls = result.add_aux_coord.call_args_list - self.assertEqual(len(aux_coord_calls), 1) - t_coord = aux_coord_calls[0][0][0] - self.assertEqual(t_coord.standard_name, "time") - self.assertArrayEqual(t_coord.points, [398232]) - self.assertArrayEqual(t_coord.bounds, [[398208, 398232]]) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py deleted file mode 100644 index 2ea22c420b..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the module -:mod:`iris.fileformats.netcdf._nc_load_rules` . - -""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py deleted file mode 100644 index 0d3ed932e8..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. 
- -""" -from pathlib import Path -import shutil -import subprocess -import tempfile - -import iris.fileformats._nc_load_rules.engine -from iris.fileformats.cf import CFReader -import iris.fileformats.netcdf -from iris.fileformats.netcdf import _load_cube - -""" -Notes on testing method. - -IN cf : "def _load_cube(engine, cf, cf_var, filename)" -WHERE: - - engine is a :class:`iris.fileformats._nc_load_rules.engine.Engine` - - cf is a :class:`iris.fileformats.cf.CFReader` - - cf_var is a :class:`iris.fileformats.cf.CFDataVariable` - -As it's hard to construct a suitable CFReader from scratch, it would seem -simpler (for now) to use an ACTUAL FILE. -Likewise, the easiest approach to that is with CDL and "ncgen". -To do this, we need a test "fixture" that can create suitable test files in a -temporary directory. - -""" - - -class Mixin__nc_load_actions: - """ - Class to make testcases for rules or actions code, and check results. - - Defines standard setUpClass/tearDownClass methods, to create a temporary - directory for intermediate files. - NOTE: owing to peculiarities of unittest, these must be explicitly called - from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the - actual Test_XXX class which also inherits unittest.TestCase. - - Testcases are manufactured by the '_make_testcase_cdl' method. - The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes - a result cube (by: producing cdl, converting to netcdf, and loading the - 'phenom' variable only). - Likewise, a generalised 'check_result' method will be used to perform result - checking. - Both '_make_testcase_cdl' and 'check_result' are not defined here : They - are to be variously implemented by the inheritors. - - """ - - # "global" test setting : whether to output various debug info - debug = False - - @classmethod - def setUpClass(cls): - # Create a temp directory for temp files. 
- cls.temp_dirpath = Path(tempfile.mkdtemp()) - - @classmethod - def tearDownClass(cls): - # Destroy a temp directory for temp files. - shutil.rmtree(cls.temp_dirpath) - - def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): - """ - Load the 'phenom' data variable in a CDL testcase, as a cube. - - Using ncgen, CFReader and the _load_cube call. - - """ - # Write the CDL to a file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl_string) - - # Create a netCDF file from the CDL file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - subprocess.check_call(command, shell=True) - - # Simulate the inner part of the file reading process. - cf = CFReader(nc_path) - # Grab a data variable : FOR NOW always grab the 'phenom' variable. - cf_var = cf.cf_group.data_variables["phenom"] - - engine = iris.fileformats.netcdf._actions_engine() - - # If debug enabled, switch on the activation summary debug output. - # Use 'patch' so it is restored after the test. - self.patch("iris.fileformats.netcdf.DEBUG", self.debug) - - # Call the main translation function to load a single cube. - # _load_cube establishes per-cube facts, activates rules and - # produces an actual cube. - cube = _load_cube(engine, cf, cf_var, nc_path) - - # Also Record, on the cubes, which hybrid coord elements were identified - # by the rules operation. - # Unlike the other translations, _load_cube does *not* convert this - # information into actual cube elements. That is instead done by - # `iris.fileformats.netcdf._load_aux_factory`. - # For rules testing, it is anyway more convenient to deal with the raw - # data, as each factory type has different validity requirements to - # build it, and none of that is relevant to the rules operation. - cube._formula_type_name = engine.requires.get("formula_type") - cube._formula_terms_byname = engine.requires.get("formula_terms") - - # Always returns a single cube. 
- return cube - - def run_testcase(self, warning=None, **testcase_kwargs): - """ - Run a testcase with chosen options, returning a test cube. - - The kwargs apply to the '_make_testcase_cdl' method. - - """ - cdl_path = str(self.temp_dirpath / "test.cdl") - nc_path = cdl_path.replace(".cdl", ".nc") - - cdl_string = self._make_testcase_cdl(**testcase_kwargs) - if self.debug: - print("CDL file content:") - print(cdl_string) - print("------\n") - - if warning is None: - context = self.assertNoWarningsRegexp() - else: - context = self.assertWarnsRegexp(warning) - with context: - cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) - - if self.debug: - print("\nCube:") - print(cube) - print("") - return cube - - def _make_testcase_cdl(self, **kwargs): - """Make a testcase CDL string.""" - # Override for specific uses... - raise NotImplementedError() - - def check_result(self, cube, **kwargs): - """Test a result cube.""" - # Override for specific uses... - raise NotImplementedError() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py deleted file mode 100644 index a2ecdf1490..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ /dev/null @@ -1,908 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the engine.activate() call within the -`iris.fileformats.netcdf._load_cube` function. - -Here, *specifically* testcases relating to grid-mappings and dim-coords. 
- -""" -import iris.tests as tests # isort: skip - -import iris.coord_systems as ics -import iris.fileformats._nc_load_rules.helpers as hh -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) - - -class Mixin__grid_mapping(Mixin__nc_load_actions): - # Testcase support routines for testing translation of grid-mappings - def _make_testcase_cdl( - self, - latitude_units=None, - gridmapvar_name=None, - gridmapvar_mappropertyname=None, - mapping_missingradius=False, - mapping_type_name=None, - mapping_scalefactor=None, - yco_values=None, - xco_name=None, - yco_name=None, - xco_units=None, - yco_units=None, - xco_is_dim=True, - yco_is_dim=True, - ): - """ - Create a CDL string for a testcase. - - This is the "master" routine for creating all our testcases. - Kwarg options modify a simple default testcase with a latlon grid. - The routine handles the various testcase options and their possible - interactions. This includes knowing what extra changes are required - to support different grid-mapping types (for example). - - """ - # The grid-mapping options are standard-latlon, rotated, or non-latlon. - # This affects names+units of the X and Y coords. - # We don't have an option to *not* include a grid-mapping variable, but - # we can mimic a missing grid-mapping by changing the varname from that - # which the data-variable refers to, with "gridmapvar_name=xxx". - # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by - # selecting an unkown 'grid_mapping_name' property, with - # "gridmapvar_mappropertyname=xxx". - if mapping_type_name is None: - # Default grid-mapping and coords are standard lat-lon. - mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON - xco_name_default = hh.CF_VALUE_STD_NAME_LON - yco_name_default = hh.CF_VALUE_STD_NAME_LAT - xco_units_default = "degrees_east" - # Special kwarg overrides some of the values. 
- if latitude_units is None: - yco_units_default = "degrees_north" - else: - # Override the latitude units (to invalidate). - yco_units_default = latitude_units - - elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: - # Rotated lat-lon coordinates. - xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON - yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT - xco_units_default = "degrees" - yco_units_default = "degrees" - - else: - # General non-latlon coordinates - # Exactly which depends on the grid_mapping name. - xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X - yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y - xco_units_default = "m" - yco_units_default = "m" - - # Options can override coord (standard) names and units. - if xco_name is None: - xco_name = xco_name_default - if yco_name is None: - yco_name = yco_name_default - if xco_units is None: - xco_units = xco_units_default - if yco_units is None: - yco_units = yco_units_default - - phenom_auxcoord_names = [] - if xco_is_dim: - # xdim has same name as xco, making xco a dim-coord - xdim_name = "xco" - else: - # use alternate dim-name, and put xco on the 'coords' list - # This makes the X coord an aux-coord - xdim_name = "xdim_altname" - phenom_auxcoord_names.append("xco") - if yco_is_dim: - # ydim has same name as yco, making yco a dim-coord - ydim_name = "yco" # This makes the Y coord a dim-coord - else: - # use alternate dim-name, and put yco on the 'coords' list - # This makes the Y coord an aux-coord - ydim_name = "ydim_altname" - phenom_auxcoord_names.append("yco") - - # Build a 'phenom:coords' string if needed. - if phenom_auxcoord_names: - phenom_coords_string = " ".join(phenom_auxcoord_names) - phenom_coords_string = f""" - phenom:coordinates = "{phenom_coords_string}" ; -""" - else: - phenom_coords_string = "" - - grid_mapping_name = "grid" - # Options can override the gridvar name and properties. 
- g_varname = gridmapvar_name - g_mapname = gridmapvar_mappropertyname - if g_varname is None: - g_varname = grid_mapping_name - if g_mapname is None: - # If you change this, it is no longer a valid grid-mapping var. - g_mapname = "grid_mapping_name" - - # Omit the earth radius, if requested. - if mapping_missingradius: - g_radius_string = "" - else: - g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" - g_string = f""" - int {g_varname} ; - {g_varname}:{g_mapname} = "{mapping_type_name}"; - {g_radius_string} - """ - - # Add a specified scale-factor, if requested. - if mapping_scalefactor is not None: - # Add a specific scale-factor term to the grid mapping. - # (Non-unity scale is not supported for Mercator/Stereographic). - sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN - g_string += f""" - {g_varname}:{sfapo_name} = {mapping_scalefactor} ; - """ - - # - # Add various additional (minimal) required properties for different - # grid mapping types. - # - - # Those which require 'latitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_TRANSVERSE, - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{latpo_name} = 0.0 ; - """ - # Those which require 'longitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{lonpo_name} = 0.0 ; - """ - # Those which require 'longitude of central meridian' - if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): - latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN - g_string += f""" - {g_varname}:{latcm_name} = 0.0 ; - """ - # Those which require 'perspective point height' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_VERTICAL, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - ): - pph_name = 
hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT - g_string += f""" - {g_varname}:{pph_name} = 600000.0 ; - """ - # Those which require 'sweep angle axis' - if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): - saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS - g_string += f""" - {g_varname}:{saa_name} = "y" ; - """ - - # y-coord values - if yco_values is None: - yco_values = [10.0, 20.0] - yco_value_strings = [str(val) for val in yco_values] - yco_values_string = ", ".join(yco_value_strings) - - # Construct the total CDL string - cdl_string = f""" - netcdf test {{ - dimensions: - {ydim_name} = 2 ; - {xdim_name} = 3 ; - variables: - double phenom({ydim_name}, {xdim_name}) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - phenom:grid_mapping = "grid" ; -{phenom_coords_string} - double yco({ydim_name}) ; - yco:axis = "Y" ; - yco:units = "{yco_units}" ; - yco:standard_name = "{yco_name}" ; - double xco({xdim_name}) ; - xco:axis = "X" ; - xco:units = "{xco_units}" ; - xco:standard_name = "{xco_name}" ; - {g_string} - data: - yco = {yco_values_string} ; - xco = 100., 110., 120. ; - }} - """ - return cdl_string - - def check_result( - self, - cube, - cube_cstype=None, - cube_no_cs=False, - cube_no_xycoords=False, - xco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_no_cs=False, # N.B. no effect if cube_no_cs is True - xco_is_aux=False, - yco_is_aux=False, - xco_stdname=True, - yco_stdname=True, - ): - """ - Check key properties of a result cube. - - Various options control the expected things which are tested. 
- """ - self.assertEqual(cube.standard_name, "air_temperature") - self.assertEqual(cube.var_name, "phenom") - - x_coords = cube.coords(dimensions=(1,)) - y_coords = cube.coords(dimensions=(0,)) - expected_dim_coords = [] - expected_aux_coords = [] - if yco_is_aux: - expected_aux_coords += y_coords - else: - expected_dim_coords += y_coords - if xco_is_aux: - expected_aux_coords += x_coords - else: - expected_dim_coords += x_coords - - self.assertEqual( - set(expected_dim_coords), set(cube.coords(dim_coords=True)) - ) - if cube_no_xycoords: - self.assertEqual(expected_dim_coords, []) - x_coord = None - y_coord = None - else: - self.assertEqual(len(x_coords), 1) - (x_coord,) = x_coords - self.assertEqual(len(y_coords), 1) - (y_coord,) = y_coords - - self.assertEqual( - set(expected_aux_coords), set(cube.coords(dim_coords=False)) - ) - - if x_coord: - if xco_stdname is None: - # no check - pass - elif xco_stdname is True: - self.assertIsNotNone(x_coord.standard_name) - elif xco_stdname is False: - self.assertIsNone(x_coord.standard_name) - else: - self.assertEqual(x_coord.standard_name, xco_stdname) - - if y_coord: - if yco_stdname is None: - # no check - pass - if yco_stdname is True: - self.assertIsNotNone(y_coord.standard_name) - elif yco_stdname is False: - self.assertIsNone(y_coord.standard_name) - else: - self.assertEqual(y_coord.standard_name, yco_stdname) - - cube_cs = cube.coord_system() - if cube_no_xycoords: - yco_cs = None - xco_cs = None - else: - yco_cs = y_coord.coord_system - xco_cs = x_coord.coord_system - if cube_no_cs: - self.assertIsNone(cube_cs) - self.assertIsNone(yco_cs) - self.assertIsNone(xco_cs) - else: - if cube_cstype is not None: - self.assertIsInstance(cube_cs, cube_cstype) - if xco_no_cs: - self.assertIsNone(xco_cs) - else: - self.assertEqual(xco_cs, cube_cs) - if yco_no_cs: - self.assertIsNone(yco_cs) - else: - self.assertEqual(yco_cs, cube_cs) - - -class Test__grid_mapping(Mixin__grid_mapping, tests.IrisTest): - # Various testcases for 
translation of grid-mappings - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def test_basic_latlon(self): - # A basic reference example with a lat-long grid. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude) - # 006 : fc_build_coordinate_(longitude) - # Notes: - # * grid-mapping identified : regular latlon - # * dim-coords identified : lat+lon - # * coords built : standard latlon (with latlon coord-system) - result = self.run_testcase() - self.check_result(result) - - def test_missing_latlon_radius(self): - # Lat-long with a missing earth-radius causes an error. - # One of very few cases where activation may encounter an error. - # N.B. doesn't really test rules-activation, but maybe worth doing. - # (no rules trigger) - with self.assertRaisesRegex(ValueError, "No ellipsoid"): - self.run_testcase(mapping_missingradius=True) - - def test_bad_gridmapping_nameproperty(self): - # Fix the 'grid' var so it does not register as a grid-mapping. - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping --FAILED(no grid-mapping attr) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude)(no-cs) - # 006 : fc_build_coordinate_(longitude)(no-cs) - # Notes: - # * grid-mapping identified : NONE (thus, no coord-system) - # * dim-coords identified : lat+lon - # * coords built : lat+lon coords, with NO coord-system - result = self.run_testcase(gridmapvar_mappropertyname="mappy") - self.check_result(result, cube_no_cs=True) - - def test_latlon_bad_gridmapping_varname(self): - # rename the grid-mapping variable so it is effectively 'missing' - # (I.E. the var named in "data-variable:grid_mapping" does not exist). 
- # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(latitude) - # 003 : fc_provides_coordinate_(longitude) - # 004 : fc_build_coordinate_(latitude)(no-cs) - # 005 : fc_build_coordinate_(longitude)(no-cs) - # Notes: - # * behaviours all the same as 'test_bad_gridmapping_nameproperty' - warning = "Missing.*grid mapping variable 'grid'" - result = self.run_testcase(warning=warning, gridmapvar_name="grid_2") - self.check_result(result, cube_no_cs=True) - - def test_latlon_bad_latlon_unit(self): - # Check with bad latitude units : 'degrees' in place of 'degrees_north'. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_default_coordinate_(provide-phase) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(miscellaneous) - # 006 : fc_build_coordinate_(longitude) - # Notes: - # * grid-mapping identified : regular latlon - # * dim-coords identified : - # x is regular longitude dim-coord - # y is 'default' coord ==> builds as an 'extra' dim-coord - # * coords built : - # x(lon) is regular latlon with coord-system - # y(lat) is a dim-coord, but with NO coord-system - # * additional : - # "fc_provides_coordinate_latitude" did not trigger, - # because it is not a valid latitude coordinate. - result = self.run_testcase(latitude_units="degrees") - self.check_result(result, yco_no_cs=True) - - def test_mapping_rotated(self): - # Test with rotated-latlon grid-mapping - # Distinct from both regular-latlon and non-latlon cases, as the - # coordinate standard names and units are different. - # ('_make_testcase_cdl' and 'check_result' know how to handle that). 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) - # 003 : fc_provides_coordinate_(rotated_latitude) - # 004 : fc_provides_coordinate_(rotated_longitude) - # 005 : fc_build_coordinate_(rotated_latitude)(rotated) - # 006 : fc_build_coordinate_(rotated_longitude)(rotated) - # Notes: - # * grid-mapping identified : rotated lat-lon - # * dim-coords identified : lat+lon - # * coords built: lat+lon coords ROTATED, with coord-system - # - "rotated" means that they have a different name + units - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON - ) - self.check_result(result, cube_cstype=ics.RotatedGeogCS) - - # - # All non-latlon coordinate systems ... - # These all have projection-x/y coordinates with units of metres. - # They all work the same way, except that Mercator/Stereographic have - # parameter checking routines that can fail. - # NOTE: various mapping types *require* certain addtional properties - # - without which an error will occur during translation. 
- # - run_testcase/_make_testcase_cdl know how to provide these - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_() - # 003 : fc_provides_coordinate_(projection_y) - # 004 : fc_provides_coordinate_(projection_x) - # 005 : fc_build_coordinate_(projection_y) - # 006 : fc_build_coordinate_(projection_x) - # Notes: - # * grid-mapping identified : - # * dim-coords identified : projection__coordinate - # * coords built : projection__coordinate, with coord-system - def test_mapping_albers(self): - result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_ALBERS) - self.check_result(result, cube_cstype=ics.AlbersEqualArea) - - def test_mapping_geostationary(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_GEOSTATIONARY - ) - self.check_result(result, cube_cstype=ics.Geostationary) - - def test_mapping_lambert_azimuthal(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL - ) - self.check_result(result, cube_cstype=ics.LambertAzimuthalEqualArea) - - def test_mapping_lambert_conformal(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL - ) - self.check_result(result, cube_cstype=ics.LambertConformal) - - def test_mapping_mercator(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR - ) - self.check_result(result, cube_cstype=ics.Mercator) - - def test_mapping_mercator__fail_unsupported(self): - # Provide a mercator grid-mapping with a non-unity scale factor, which - # we cannot handle. - # Result : fails to convert into a coord-system, and emits a warning. 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(mercator) --(FAILED check has_supported_mercator_parameters) - # 003 : fc_provides_coordinate_(projection_y) - # 004 : fc_provides_coordinate_(projection_x) - # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) - # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) - # Notes: - # * grid-mapping identified : NONE - # * dim-coords identified : proj-x and -y - # * coords built : NONE (no dim or aux coords: cube has no coords) - warning = "not yet supported for Mercator" - result = self.run_testcase( - warning=warning, - mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR, - mapping_scalefactor=2.0, - ) - self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) - - def test_mapping_stereographic(self): - result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_STEREO) - self.check_result(result, cube_cstype=ics.Stereographic) - - def test_mapping_stereographic__fail_unsupported(self): - # As for 'test_mapping_mercator__fail_unsupported', provide a non-unity - # scale factor, which we cannot handle. - # Result : fails to convert into a coord-system, and emits a warning. 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(stereographic) --(FAILED check has_supported_stereographic_parameters) - # 003 : fc_provides_coordinate_(projection_y) - # 004 : fc_provides_coordinate_(projection_x) - # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) - # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) - # Notes: - # as for 'mercator__fail_unsupported', above - warning = "not yet supported for stereographic" - result = self.run_testcase( - warning=warning, - mapping_type_name=hh.CF_GRID_MAPPING_STEREO, - mapping_scalefactor=2.0, - ) - self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) - - def test_mapping_transverse_mercator(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_TRANSVERSE - ) - self.check_result(result, cube_cstype=ics.TransverseMercator) - - def test_mapping_vertical_perspective(self): - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_VERTICAL - ) - self.check_result(result, cube_cstype=ics.VerticalPerspective) - - def test_mapping_unsupported(self): - # Use azimuthal, which is a real thing but we don't yet support it. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping --FAILED(unhandled type azimuthal_equidistant) - # 003 : fc_provides_coordinate_(projection_y) - # 004 : fc_provides_coordinate_(projection_x) - # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) - # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) - # Notes: - # * NO grid-mapping is identified (or coord-system built) - # * There is no warning for this : it fails silently. - # TODO: perhaps there _should_ be a warning in such cases ? 
- result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_AZIMUTHAL - ) - self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) - - def test_mapping_undefined(self): - # Use a random, unknown "mapping type". - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping --FAILED(unhandled type unknown) - # 003 : fc_provides_coordinate_(projection_y) - # 004 : fc_provides_coordinate_(projection_x) - # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) - # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) - # Notes: - # * There is no warning for this : it fails silently. - # TODO: perhaps there _should_ be a warning in such cases ? - result = self.run_testcase(mapping_type_name="unknown") - self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) - - # - # Cases where names(+units) of coords don't match the grid-mapping type. - # Effectively, there are 9 possibilities for (latlon/rotated/projected) - # coords mismatched to (latlon/rotated/projected/missing) coord-systems. - # - # N.B. the results are not all the same : - # - # 1. when a coord and the grid-mapping have the same 'type', - # i.e. plain-latlon, rotated-latlon or non-latlon, then dim-coords are - # built with a coord-system (as seen previously). - # 2. when there is no grid-mapping, we can build coords of any type, - # but with no coord-system. - # 3. when one of (coord + grid-mapping) is plain-latlon or rotated-latlon, - # and the other is non-latlon (i.e. any other type), - # then we build coords *without* a coord-system - # 4. when one of (coord + grid-mapping) is plain-latlon, and the other is - # rotated-latlon, we don't build coords at all. - # TODO: it's not clear why this needs to behave differently from case - # (3.) : possibly, these two should be made consistent. 
- # - # TODO: *all* these 'mismatch' cases should probably generate warnings, - # except for plain-latlon coords with no grid-mapping. - # At present, we _only_ warn when an expected grid-mapping is absent. - # - - def test_mapping__mismatch__latlon_coords_rotated_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude)(FAILED : latlon coord with rotated cs) - # 006 : fc_build_coordinate_(longitude)(FAILED : latlon coord with rotated cs) - # Notes: - # * coords built : NONE (see above) - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, - xco_name="longitude", - xco_units="degrees_east", - yco_name="latitude", - yco_units="degrees_north", - ) - self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) - - def test_mapping__mismatch__latlon_coords_nonll_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(albers_conical_equal_area) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude)(no-cs : discarded projected cs) - # 006 : fc_build_coordinate_(longitude)(no-cs : discarded projected cs) - # Notes: - # * coords built : lat + lon, with no coord-system (see above) - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, - xco_name="longitude", - xco_units="degrees_east", - yco_name="latitude", - yco_units="degrees_north", - ) - self.check_result(result, cube_no_cs=True) - - def test_mapping__mismatch__latlon_coords_missing_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(latitude) - # 003 : fc_provides_coordinate_(longitude) - # 004 : fc_build_coordinate_(latitude)(no-cs) - # 005 : fc_build_coordinate_(longitude)(no-cs) - # Notes: - # * coords built : lat + lon, 
with no coord-system (see above) - warning = "Missing.*grid mapping variable 'grid'" - result = self.run_testcase( - warning=warning, - gridmapvar_name="moved", - xco_name="longitude", - xco_units="degrees_east", - yco_name="latitude", - yco_units="degrees_north", - ) - self.check_result(result, cube_no_cs=True) - - def test_mapping__mismatch__rotated_coords_latlon_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(rotated_latitude) - # 004 : fc_provides_coordinate_(rotated_longitude) - # 005 : fc_build_coordinate_(rotated_latitude)(FAILED rotated coord with latlon cs) - # 006 : fc_build_coordinate_(rotated_longitude)(FAILED rotated coord with latlon cs) - # Notes: - # * coords built : NONE (see above) - result = self.run_testcase( - xco_name="grid_longitude", - xco_units="degrees", - yco_name="grid_latitude", - yco_units="degrees", - ) - self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) - - def test_mapping__mismatch__rotated_coords_nonll_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(albers_conical_equal_area) - # 003 : fc_provides_coordinate_(rotated_latitude) - # 004 : fc_provides_coordinate_(rotated_longitude) - # 005 : fc_build_coordinate_(rotated_latitude)(rotated no-cs : discarded projected cs) - # 006 : fc_build_coordinate_(rotated_longitude)(rotated no-cs : discarded projected cs) - # Notes: - # * coords built : rotated-lat + lon, with no coord-system (see above) - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, - xco_name="grid_longitude", - xco_units="degrees", - yco_name="grid_latitude", - yco_units="degrees", - ) - self.check_result(result, cube_no_cs=True) - - def test_mapping__mismatch__rotated_coords_missing_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(rotated_latitude) - # 003 : fc_provides_coordinate_(rotated_longitude) - 
# 004 : fc_build_coordinate_(rotated_latitude)(rotated no-cs) - # 005 : fc_build_coordinate_(rotated_longitude)(rotated no-cs) - # Notes: - # * coords built : rotated lat + lon, with no coord-system (see above) - warning = "Missing.*grid mapping variable 'grid'" - result = self.run_testcase( - warning=warning, - gridmapvar_name="moved", - xco_name="grid_longitude", - xco_units="degrees", - yco_name="grid_latitude", - yco_units="degrees", - ) - self.check_result(result, cube_no_cs=True) - - def test_mapping__mismatch__nonll_coords_latlon_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_default_coordinate_(provide-phase) - # 004 : fc_default_coordinate_(provide-phase) - # 005 : fc_build_coordinate_(miscellaneous) - # 006 : fc_build_coordinate_(miscellaneous) - # Notes: - # * coords built : projection x + y, with no coord-system (see above) - # * the coords build as "default" type : they have no standard-name - result = self.run_testcase( - xco_name="projection_x", - xco_units="m", - yco_name="projection_y", - yco_units="m", - ) - self.check_result( - result, cube_no_cs=True, xco_stdname=False, yco_stdname=False - ) - - def test_mapping__mismatch__nonll_coords_rotated_system(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) - # 003 : fc_default_coordinate_(provide-phase) - # 004 : fc_default_coordinate_(provide-phase) - # 005 : fc_build_coordinate_(miscellaneous) - # 006 : fc_build_coordinate_(miscellaneous) - # Notes: - # * as previous case '__mismatch__nonll_' - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, - xco_name="projection_x", - xco_units="m", - yco_name="projection_y", - yco_units="m", - ) - self.check_result( - result, cube_no_cs=True, xco_stdname=False, yco_stdname=False - ) - - def test_mapping__mismatch__nonll_coords_missing_system(self): - # Rules Triggered: - # 001 : fc_default 
- # 002 : fc_default_coordinate_(provide-phase) - # 003 : fc_default_coordinate_(provide-phase) - # 004 : fc_build_coordinate_(miscellaneous) - # 005 : fc_build_coordinate_(miscellaneous) - # Notes: - # * effectively, just like previous 2 cases - warning = "Missing.*grid mapping variable 'grid'" - result = self.run_testcase( - warning=warning, - gridmapvar_name="moved", - xco_name="projection_x", - xco_units="m", - yco_name="projection_y", - yco_units="m", - ) - self.check_result( - result, cube_no_cs=True, xco_stdname=False, yco_stdname=False - ) - - -class Test__aux_latlons(Mixin__grid_mapping, tests.IrisTest): - # Testcases for translating auxiliary latitude+longitude variables - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def test_aux_lon(self): - # Change the name of xdim, and put xco on the coords list. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_build_coordinate_(latitude) - # 005 : fc_build_auxiliary_coordinate_longitude - result = self.run_testcase(xco_is_dim=False) - self.check_result(result, xco_is_aux=True, xco_no_cs=True) - - def test_aux_lat(self): - # As previous, but with the Y coord. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(longitude) - # 004 : fc_build_coordinate_(longitude) - # 005 : fc_build_auxiliary_coordinate_latitude - result = self.run_testcase(yco_is_dim=False) - self.check_result(result, yco_is_aux=True, yco_no_cs=True) - - def test_aux_lat_and_lon(self): - # Make *both* X and Y coords into aux-coords. 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_build_auxiliary_coordinate_longitude - # 004 : fc_build_auxiliary_coordinate_latitude - # Notes: - # * a grid-mapping is recognised, but discarded, as in this case - # there are no dim-coords to reference it. - result = self.run_testcase(xco_is_dim=False, yco_is_dim=False) - self.check_result( - result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True - ) - - def test_aux_lon_rotated(self): - # Rotated-style lat + lon coords, X is an aux-coord. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) - # 003 : fc_provides_coordinate_(rotated_latitude) - # 004 : fc_build_coordinate_(rotated_latitude)(rotated) - # 005 : fc_build_auxiliary_coordinate_longitude_rotated - # Notes: - # * as the plain-latlon case 'test_aux_lon'. - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, - xco_is_dim=False, - ) - self.check_result(result, xco_is_aux=True, xco_no_cs=True) - - def test_aux_lat_rotated(self): - # Rotated-style lat + lon coords, Y is an aux-coord. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) - # 003 : fc_provides_coordinate_(rotated_longitude) - # 004 : fc_build_coordinate_(rotated_longitude)(rotated) - # 005 : fc_build_auxiliary_coordinate_latitude_rotated - # Notes: - # * as the plain-latlon case 'test_aux_lat'. - result = self.run_testcase( - mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, - yco_is_dim=False, - ) - self.check_result(result, yco_is_aux=True, yco_no_cs=True) - - -class Test__nondimcoords(Mixin__grid_mapping, tests.IrisTest): - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def test_nondim_lats(self): - # Fix a coord's values so it cannot be a dim-coord. 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude) - # 006 : fc_build_coordinate_(longitude) - # Notes: - # * in terms of rule triggering, this is not distinct from the - # "normal" case : but latitude is now created as an aux-coord. - warning = "must be.* monotonic" - result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) - self.check_result(result, yco_is_aux=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py deleted file mode 100644 index 3413090a3d..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ /dev/null @@ -1,313 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the engine.activate() call within the -`iris.fileformats.netcdf._load_cube` function. - -Test rules activation relating to hybrid vertical coordinates. 
- -""" -import iris.tests as tests # isort: skip - -import iris.fileformats._nc_load_rules.helpers as hh -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) - - -class Test__formulae_tests(Mixin__nc_load_actions, tests.IrisTest): - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def _make_testcase_cdl( - self, formula_root_name=None, term_names=None, extra_formula_type=None - ): - """Construct a testcase CDL for data with hybrid vertical coords.""" - if formula_root_name is None: - formula_root_name = "atmosphere_hybrid_height_coordinate" - if term_names is None: - term_names = hh.CF_COORD_VERTICAL.get(formula_root_name) - if term_names is None: - # unsupported type : just make something up - term_names = ["term1"] - - # Arrange to create additional term variables for an 'extra' hybrid - # formula, if requested. - if extra_formula_type is None: - term_names_extra = [] - phenom_coord_names = ["vert"] # always include the root variable - else: - phenom_coord_names = ["vert", "vert_2"] # two formula coords - term_names_extra = hh.CF_COORD_VERTICAL.get(extra_formula_type) - - # Build strings to define term variables. - formula_term_strings = [] - extra_formula_term_strings = [] - terms_string = "" - for term_name in term_names + term_names_extra: - term_varname = "v_" + term_name - # Include in the phenom coordinates list. - phenom_coord_names.append(term_varname) - term_string = f"{term_name}: {term_varname}" - if term_name in term_names: - # Include in the 'main' terms list. - formula_term_strings.append(term_string) - else: - # Include in the 'extra' terms list. - extra_formula_term_strings.append(term_string) - terms_string += f""" - double {term_varname}(h) ; - {term_varname}:long_name = "{term_name}_long_name" ; - {term_varname}:units = "m" ; -""" - - # Construct the reference strings. 
- phenom_coords_string = " ".join(phenom_coord_names) - formula_terms_string = " ".join(formula_term_strings) - extra_formula_terms_string = " ".join(extra_formula_term_strings) - - # Construct the 'extra' hybrid coord if requested. - if extra_formula_type is None: - extra_formula_string = "" - else: - # Create the lines to add an 'extra' formula. - # For now, put this on the same dim : makes no difference. - extra_formula_string = f""" - double vert_2(h) ; - vert_2:standard_name = "{extra_formula_type}" ; - vert_2:units = "m" ; - vert_2:formula_terms = "{extra_formula_terms_string}" ; -""" - - # Create the main result string. - cdl_str = f""" -netcdf test {{ -dimensions: - h = 2 ; -variables: - double phenom(h) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - phenom:coordinates = "{phenom_coords_string}" ; - double vert(h) ; - vert:standard_name = "{formula_root_name}" ; - vert:long_name = "hybrid_vertical" ; - vert:units = "m" ; - vert:formula_terms = "{formula_terms_string}" ; -{terms_string} -{extra_formula_string} -}} -""" - return cdl_str - - def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): - """Check the result of a cube load with a hybrid vertical coord.""" - if factory_type == "_auto": - # replace with our 'default', which is hybrid-height. - # N.B. 'None' is different: it means expect *no* factory. - factory_type = "atmosphere_hybrid_height_coordinate" - self.assertEqual(cube._formula_type_name, factory_type) - - if formula_terms == "_auto": - # Set default terms-expected, according to the expected factory - # type. - if factory_type is None: - # If no factory, expect no identified terms. - formula_terms = [] - else: - # Expect the correct ones defined for the factory type. - formula_terms = hh.CF_COORD_VERTICAL[factory_type] - - # Compare the formula_terms list with the 'expected' ones. - # N.B. 
first make the 'expected' list lower case, as the lists in - # hh.CF_COORD_VERTICAL include uppercase, but rules outputs don't. - formula_terms = [term.lower() for term in formula_terms] - - # N.B. the terms dictionary can be missing, if there were none - actual_terms = cube._formula_terms_byname or {} - self.assertEqual(sorted(formula_terms), sorted(actual_terms.keys())) - - # Check that there is an aux-coord of the expected name for each term - for var_name in actual_terms.values(): - coords = cube.coords(var_name=var_name, dim_coords=False) - self.assertEqual(len(coords), 1) - - # - # Actual testcase routines - # - - def test_basic_hybridheight(self): - # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate - # 003 : fc_build_auxiliary_coordinate - # 004 : fc_build_auxiliary_coordinate - # 005 : fc_build_auxiliary_coordinate - # 008 : fc_formula_type_atmosphere_hybrid_height_coordinate - # 009 : fc_formula_term(a) - # 010 : fc_formula_term(b) - # 011 : fc_formula_term(orog) - result = self.run_testcase() - self.check_result(result) - - def test_missing_term(self): - # Check behaviour when a term is missing. - # For the test, omit "orography", which is common in practice. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate - # 003 : fc_build_auxiliary_coordinate - # 004 : fc_build_auxiliary_coordinate - # 007 : fc_formula_type_atmosphere_hybrid_height_coordinate - # 008 : fc_formula_term(a) - # 009 : fc_formula_term(b) - result = self.run_testcase( - term_names=["a", "b"] # missing the 'orog' term - ) - self.check_result(result, formula_terms=["a", "b"]) - - def test_no_terms(self): - # Check behaviour when *all* terms are missing. - # N.B. for any _actual_ type, this is probably invalid and would fail? 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate - result = self.run_testcase( - formula_root_name="atmosphere_hybrid_height_coordinate", - term_names=[], - ) - # This does *not* trigger - # 'fc_formula_type_atmosphere_hybrid_height_coordinate' - # This is because, within the 'assert_case_specific_facts' routine, - # formula_roots are only recognised by scanning the identified - # formula_terms. - self.check_result(result, factory_type=None) - - def test_unrecognised_verticaltype(self): - # Set the root variable name to something NOT a recognised hybrid type. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate - # 003 : fc_build_auxiliary_coordinate - # 004 : fc_build_auxiliary_coordinate - # 007 : fc_formula_type(FAILED - unrecognised formula type = 'unknown') - # 008 : fc_formula_term(a) - # 009 : fc_formula_term(b) - result = self.run_testcase( - formula_root_name="unknown", - term_names=["a", "b"], - warning="Ignored formula of unrecognised type: 'unknown'.", - ) - # Check that it picks up the terms, but *not* the factory root coord, - # which is simply discarded. - self.check_result(result, factory_type=None, formula_terms=["a", "b"]) - - def test_two_formulae(self): - # Construct an example with TWO hybrid coords. - # This is not errored, but we don't correctly support it. - # - # NOTE: the original Pyke implementation does not detect this problem - # By design, the new mechanism does + will raise a warning. - warning = ( - "Omitting factories for some hybrid coordinates.*" - "multiple hybrid coordinates.* not supported" - ) - - extra_type = "ocean_sigma_coordinate" - result = self.run_testcase( - extra_formula_type=extra_type, warning=warning - ) - # NOTE: FOR NOW, check expected behaviour : only one factory will be - # built, but there are coordinates (terms) for both types. 
- # TODO: this is a bug and needs fixing : translation should handle - # multiple hybrid coordinates in a sensible way. - self.check_result( - result, - factory_type=extra_type, - formula_terms=["a", "b", "depth", "eta", "orog", "sigma"], - ) - - def test_atmosphere_sigma_coordinate(self): - hybrid_type = "atmosphere_sigma_coordinate" - term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) - - def test_atmosphere_hybrid_sigma_pressure_coordinate(self): - hybrid_type = "atmosphere_hybrid_sigma_pressure_coordinate" - term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) - - def test_ocean_sigma_z_coordinate(self): - hybrid_type = "ocean_sigma_z_coordinate" - term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) - - def test_ocean_sigma_coordinate(self): - hybrid_type = "ocean_sigma_coordinate" - term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) - - def test_ocean_s_coordinate(self): - hybrid_type = "ocean_s_coordinate" - term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) - - def test_ocean_s_coordinate_g1(self): - hybrid_type = "ocean_s_coordinate_g1" - term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - 
formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) - - def test_ocean_s_coordinate_g2(self): - hybrid_type = "ocean_s_coordinate_g2" - term_names = hh.CF_COORD_VERTICAL[hybrid_type] - result = self.run_testcase( - formula_root_name=hybrid_type, term_names=term_names - ) - self.check_result( - result, factory_type=hybrid_type, formula_terms=term_names - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py deleted file mode 100644 index dfa862c4d1..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__latlon_dimcoords.py +++ /dev/null @@ -1,337 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the engine.activate() call within the -`iris.fileformats.netcdf._load_cube` function. - -Tests for rules behaviour in identifying latitude/longitude dim-coords, both -rotated and non-rotated. - -""" -import iris.tests as tests # isort: skip - -from iris.coord_systems import GeogCS, RotatedGeogCS -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) - - -class Mixin_latlon_dimcoords(Mixin__nc_load_actions): - # Tests for the recognition and construction of latitude/longitude coords. - - # Control to test either longitude or latitude coords. - # Set by inheritor classes, which are actual TestCases. - lat_1_or_lon_0 = None - - def setUp(self): - super().setUp() - # Generate some useful settings : just to generalise operation over - # both latitude and longitude. 
- islat = self.lat_1_or_lon_0 - assert islat in (0, 1) - self.unrotated_name = "latitude" if islat else "longitude" - self.rotated_name = "grid_latitude" if islat else "grid_longitude" - self.unrotated_units = "degrees_north" if islat else "degrees_east" - # Note: there are many alternative valid forms for the rotated units, - # but we are not testing that here. - self.rotated_units = "degrees" # NB this one is actually constant - self.axis = "y" if islat else "x" - - def _make_testcase_cdl( - self, - standard_name=None, - long_name=None, - var_name=None, - units=None, - axis=None, - grid_mapping=None, - ): - # Inner routine called by 'run_testcase' (in Mixin__nc_load_actions), - # to generate CDL which is then translated into a testfile and loaded. - if var_name is None: - # Can't have *no* var-name - # N.B. it is also the name of the dimension. - var_name = "dim" - - def attribute_str(name, value): - if value is None or value == "": - result = "" - else: - result = f'{var_name}:{name} = "{value}" ;' - - return result - - standard_name_str = attribute_str("standard_name", standard_name) - long_name_str = attribute_str("long_name", long_name) - units_str = attribute_str("units", units) - axis_str = attribute_str("axis", axis) - if grid_mapping: - grid_mapping_str = 'phenom:grid_mapping = "crs" ;' - else: - grid_mapping_str = "" - - assert grid_mapping in (None, "latlon", "rotated") - if grid_mapping is None: - crs_str = "" - elif grid_mapping == "latlon": - crs_str = """ - int crs ; - crs:grid_mapping_name = "latitude_longitude" ; - crs:semi_major_axis = 6371000.0 ; - crs:inverse_flattening = 1000. ; -""" - elif grid_mapping == "rotated": - crs_str = """ - int crs ; - crs:grid_mapping_name = "rotated_latitude_longitude" ; - crs:grid_north_pole_latitude = 32.5 ; - crs:grid_north_pole_longitude = 170. 
; -""" - - cdl_string = f""" -netcdf test {{ - dimensions: - {var_name} = 2 ; - variables: - double {var_name}({var_name}) ; - {standard_name_str} - {units_str} - {long_name_str} - {axis_str} - double phenom({var_name}) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - {grid_mapping_str} - {crs_str} - data: - {var_name} = 0., 1. ; -}} -""" - return cdl_string - - def check_result( - self, - cube, - standard_name, - long_name, - units, - crs=None, - context_message="", - ): - # Check the existence, standard-name, long-name, units and coord-system - # of the resulting coord. Also that it is always a dim-coord. - # NOTE: there is no "axis" arg, as this information does *not* appear - # as a separate property (or attribute) of the resulting coord. - # However, whether the file variable has an axis attribute *does* - # affect the results here, in some cases. - coords = cube.coords() - # There should be one and only one coord. - self.assertEqual(1, len(coords)) - # It should also be a dim-coord - self.assertEqual(1, len(cube.coords(dim_coords=True))) - (coord,) = coords - if self.debug: - print("") - print("DEBUG : result coord =", coord) - print("") - - coord_stdname, coord_longname, coord_units, coord_crs = [ - getattr(coord, name) - for name in ("standard_name", "long_name", "units", "coord_system") - ] - self.assertEqual(standard_name, coord_stdname, context_message) - self.assertEqual(long_name, coord_longname, context_message) - self.assertEqual(units, coord_units, context_message) - assert crs in (None, "latlon", "rotated") - if crs is None: - self.assertEqual(None, coord_crs, context_message) - elif crs == "latlon": - self.assertIsInstance(coord_crs, GeogCS, context_message) - elif crs == "rotated": - self.assertIsInstance(coord_crs, RotatedGeogCS, context_message) - - # - # Testcase routines - # - # NOTE: all these testcases have been verified against the older behaviour - # in v3.0.4, based on Pyke rules. 
- # - - def test_minimal(self): - # Nothing but a var-name --> unrecognised dim-coord. - result = self.run_testcase() - self.check_result(result, None, None, "unknown") - - def test_fullinfo_unrotated(self): - # Check behaviour with all normal info elements for 'unrotated' case. - # Includes a grid-mapping, but no axis (should not be needed). - result = self.run_testcase( - standard_name=self.unrotated_name, - units=self.unrotated_units, - grid_mapping="latlon", - ) - self.check_result( - result, self.unrotated_name, None, "degrees", "latlon" - ) - - def test_fullinfo_rotated(self): - # Check behaviour with all normal info elements for 'rotated' case. - # Includes a grid-mapping, but no axis (should not be needed). - result = self.run_testcase( - standard_name=self.rotated_name, - units=self.rotated_units, - grid_mapping="rotated", - ) - self.check_result( - result, self.rotated_name, None, "degrees", "rotated" - ) - - def test_axis(self): - # A suitable axis --> unrotated lat/lon coord, but unknown units. - result = self.run_testcase(axis=self.axis) - self.check_result(result, self.unrotated_name, None, "unknown") - - def test_units_unrotated(self): - # With a unit like 'degrees_east', we automatically identify this as a - # latlon coord, *and* convert units to plain 'degrees' on loading. - result = self.run_testcase(units=self.unrotated_units) - self.check_result(result, self.unrotated_name, None, "degrees") - - def test_units_rotated(self): - # With no info except a "degrees" unit, we **don't** identify a latlon, - # i.e. we do not set the standard-name - result = self.run_testcase(units="degrees") - self.check_result(result, None, None, "degrees") - - def test_units_unrotated_gridmapping(self): - # With an unrotated unit *AND* a suitable grid-mapping, we identify a - # rotated latlon coordinate + assign it the coord-system. 
- result = self.run_testcase( - units=self.unrotated_units, grid_mapping="latlon" - ) - self.check_result( - result, self.unrotated_name, None, "degrees", "latlon" - ) - - def test_units_rotated_gridmapping_noname(self): - # Rotated units and grid-mapping, but *without* the expected name. - # Does not translate, no coord-system (i.e. grid-mapping is discarded). - result = self.run_testcase( - units="degrees", - grid_mapping="rotated", - ) - self.check_result(result, None, None, "degrees", None) - - def test_units_rotated_gridmapping_withname(self): - # With a "degrees" unit, a rotated grid-mapping *AND* a suitable - # standard-name, it recognises a rotated dimcoord. - result = self.run_testcase( - standard_name=self.rotated_name, - units="degrees", - grid_mapping="rotated", - ) - self.check_result( - result, self.rotated_name, None, "degrees", "rotated" - ) - - def test_units_rotated_gridmapping_varname(self): - # Same but with var-name containing the standard-name : in this case we - # get NO COORDINATE-SYSTEM (which is a bit weird). - result = self.run_testcase( - var_name=self.rotated_name, - units="degrees", - grid_mapping="rotated", - ) - self.check_result(result, self.rotated_name, None, "degrees", None) - - def test_varname_unrotated(self): - # With a recognised name in the var-name, we set standard-name. - # But units are left undetermined. - result = self.run_testcase(var_name=self.unrotated_name) - self.check_result(result, self.unrotated_name, None, "unknown") - - def test_varname_rotated(self): - # With a *rotated* name in the var-name, we set standard-name. - # But units are left undetermined. - result = self.run_testcase(var_name=self.rotated_name) - self.check_result(result, self.rotated_name, None, "unknown") - - def test_varname_unrotated_units_rotated(self): - # With a "degrees" unit and a suitable var-name, we do identify - # (= set standard-name). - # N.B. 
this accepts "degrees" as a generic term, and so does *not* - # interpret it as a rotated coordinate. - result = self.run_testcase( - var_name=self.unrotated_name, units="degrees" - ) - self.check_result(result, self.unrotated_name, None, "degrees") - - def test_longname(self): - # A recognised form in long-name is *not* translated into standard-name. - result = self.run_testcase(long_name=self.unrotated_name) - self.check_result(result, None, self.unrotated_name, "unknown") - - def test_stdname_unrotated(self): - # Only an (unrotated) standard name : units is not specified - result = self.run_testcase(standard_name=self.unrotated_name) - self.check_result(result, self.unrotated_name, None, None) - - def test_stdname_rotated(self): - # Only a (rotated) standard name : units is not specified - result = self.run_testcase(standard_name=self.rotated_name) - self.check_result(result, self.rotated_name, None, None) - - def test_stdname_unrotated_gridmapping(self): - # An unrotated standard-name and grid-mapping, translates into a - # coordinate system. - result = self.run_testcase( - standard_name=self.unrotated_name, grid_mapping="latlon" - ) - self.check_result( - result, self.unrotated_name, None, "unknown", "latlon" - ) - - def test_stdname_rotated_gridmapping(self): - # An *rotated* standard-name and grid-mapping, translates into a - # coordinate system. 
- result = self.run_testcase( - standard_name=self.rotated_name, grid_mapping="rotated" - ) - self.check_result(result, self.rotated_name, None, None, "rotated") - - -class Test__longitude_coords(Mixin_latlon_dimcoords, tests.IrisTest): - lat_1_or_lon_0 = 0 - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def setUp(self): - super().setUp() - - -class Test__latitude_coords(Mixin_latlon_dimcoords, tests.IrisTest): - lat_1_or_lon_0 = 1 - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def setUp(self): - super().setUp() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py deleted file mode 100644 index a8e44747dd..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py +++ /dev/null @@ -1,223 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the engine.activate() call within the -`iris.fileformats.netcdf._load_cube` function. - -Tests for rules activation relating to some isolated aspects : - * UKMO um-specific metadata - * label coordinates - * cell measures - * ancillary variables - -""" -import iris.tests as tests # isort: skip - -from iris.coords import AncillaryVariable, AuxCoord, CellMeasure -from iris.fileformats.pp import STASH -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) - - -class Test__ukmo_attributes(Mixin__nc_load_actions, tests.IrisTest): - # Tests for handling of the special UM-specific data-var attributes. 
- @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def _make_testcase_cdl(self, **add_attrs): - phenom_attrs_string = "" - for key, value in add_attrs.items(): - phenom_attrs_string += f""" - phenom:{key} = "{value}" ; -""" - - cdl_string = f""" -netcdf test {{ - dimensions: - xdim = 2 ; - variables: - double phenom(xdim) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; -{phenom_attrs_string} -}} -""" - return cdl_string - - def check_result(self, cube, stashcode=None, processflags=None): - cube_stashattr = cube.attributes.get("STASH") - cube_processflags = cube.attributes.get("ukmo__process_flags") - - if stashcode is not None: - self.assertIsInstance(cube_stashattr, STASH) - self.assertEqual(str(stashcode), str(cube_stashattr)) - else: - self.assertIsNone(cube_stashattr) - - if processflags is not None: - self.assertIsInstance(cube_processflags, tuple) - self.assertEqual(set(cube_processflags), set(processflags)) - else: - self.assertIsNone(cube_processflags) - - # - # Testcase routines - # - stashcode = "m01s02i034" # Just one valid STASH msi string for testing - - def test_stash(self): - cube = self.run_testcase(um_stash_source=self.stashcode) - self.check_result(cube, stashcode=self.stashcode) - - def test_stash_altname(self): - cube = self.run_testcase(ukmo__um_stash_source=self.stashcode) - self.check_result(cube, stashcode=self.stashcode) - - def test_stash_empty(self): - value = "" - cube = self.run_testcase(ukmo__um_stash_source=value) - self.assertNotIn("STASH", cube.attributes) - self.assertEqual(cube.attributes["ukmo__um_stash_source"], value) - - def test_stash_invalid(self): - value = "XXX" - cube = self.run_testcase(ukmo__um_stash_source="XXX") - self.assertNotIn("STASH", cube.attributes) - self.assertEqual(cube.attributes["ukmo__um_stash_source"], value) - - def test_processflags_single(self): - cube = 
self.run_testcase(ukmo__process_flags="this") - self.check_result(cube, processflags=["this"]) - - def test_processflags_multi_with_underscores(self): - flags_testinput = "this that_1 the_other_one x" - flags_expectresult = ["this", "that 1", "the other one", "x"] - cube = self.run_testcase(ukmo__process_flags=flags_testinput) - self.check_result(cube, processflags=flags_expectresult) - - def test_processflags_empty(self): - cube = self.run_testcase(ukmo__process_flags="") - expected_result = [""] # May seem odd, but that's what it does. - self.check_result(cube, processflags=expected_result) - - -class Test__labels_cellmeasures_ancils(Mixin__nc_load_actions, tests.IrisTest): - # Tests for some simple rules that translate facts directly into cube data, - # with no alternative actions, complications or failure modes to test. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def _make_testcase_cdl( - self, - include_label=False, - include_cellmeasure=False, - include_ancil=False, - ): - - phenom_extra_attrs_string = "" - extra_vars_string = "" - - if include_label: - phenom_extra_attrs_string += """ - phenom:coordinates = "v_label" ; -""" - extra_vars_string += """ - char v_label(xdim, strdim) ; - v_label:long_name = "string data" ; -""" - - if include_cellmeasure: - # One simple case : a valid link + a variable definition. - phenom_extra_attrs_string += """ - phenom:cell_measures = "area: v_cellm" ; -""" - extra_vars_string += """ - double v_cellm(xdim) ; - v_cellm:long_name = "cell areas" ; -""" - - if include_ancil: - # One simple case : a valid link + a variable definition. 
- phenom_extra_attrs_string += """ - phenom:ancillary_variables = "v_ancil" ; -""" - extra_vars_string += """ - double v_ancil(xdim) ; - v_ancil:long_name = "ancillary values" ; -""" - cdl_string = f""" - netcdf test {{ - dimensions: - xdim = 2 ; - strdim = 5 ; - variables: - double phenom(xdim) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; -{phenom_extra_attrs_string} -{extra_vars_string} - }} - """ - return cdl_string - - def check_result( - self, - cube, - expect_label=False, - expect_cellmeasure=False, - expect_ancil=False, - ): - label_coords = cube.coords(var_name="v_label") - if expect_label: - self.assertEqual(len(label_coords), 1) - (coord,) = label_coords - self.assertIsInstance(coord, AuxCoord) - self.assertEqual(coord.dtype.kind, "U") - else: - self.assertEqual(len(label_coords), 0) - - cell_measures = cube.cell_measures() - if expect_cellmeasure: - self.assertEqual(len(cell_measures), 1) - (cellm,) = cell_measures - self.assertIsInstance(cellm, CellMeasure) - else: - self.assertEqual(len(cell_measures), 0) - - ancils = cube.ancillary_variables() - if expect_ancil: - self.assertEqual(len(ancils), 1) - (ancil,) = ancils - self.assertIsInstance(ancil, AncillaryVariable) - else: - self.assertEqual(len(ancils), 0) - - def test_label(self): - cube = self.run_testcase(include_label=True) - self.check_result(cube, expect_label=True) - - def test_ancil(self): - cube = self.run_testcase(include_ancil=True) - self.check_result(cube, expect_ancil=True) - - def test_cellmeasure(self): - cube = self.run_testcase(include_cellmeasure=True) - self.check_result(cube, expect_cellmeasure=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py deleted file mode 100644 index 47760aadcb..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ /dev/null 
@@ -1,462 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the engine.activate() call within the -`iris.fileformats.netcdf._load_cube` function. - -Tests for rules activation relating to 'time' and 'time_period' coords. - -""" -import iris.tests as tests # isort: skip - -from iris.coords import AuxCoord, DimCoord -from iris.tests.unit.fileformats.nc_load_rules.actions import ( - Mixin__nc_load_actions, -) - - -class Opts(dict): - # A dict-like thing which provides '.' access in place of indexing. - def __init__(self, **kwargs): - # Init like a dict - super().__init__(**kwargs) - # Alias contents "self['key']", as properties "self.key" - # See: https://stackoverflow.com/a/14620633/2615050 - self.__dict__ = self - - -# Per-coord options settings for testcase definitions. -_COORD_OPTIONS_TEMPLATE = { - "which": "", # set to "something" - "stdname": "_auto_which", # default = time / time_period - "varname": "_as_which", # default = time / period - "dimname": "_as_which", - "in_phenomvar_dims": True, - "in_phenomvar_coords": False, # set for an aux-coord - "values_all_zero": False, # set to block CFDimensionVariable identity - "units": "_auto_which", # specific to time/period -} - - -class Mixin__timecoords__common(Mixin__nc_load_actions): - def _make_testcase_cdl( - self, - phenom_dims="_auto", # =get from time+period opts - phenom_coords="_auto", # =get from time+period opts - time_opts=None, - period_opts=None, - timedim_name="time", - perioddim_name="period", - ): - opt_t = None - opt_p = None - if time_opts is not None: - # Convert a non-null kwarg into an options dict for 'time' options - opt_t = Opts(**_COORD_OPTIONS_TEMPLATE) - opt_t.update(which="time", **time_opts) - if period_opts is not None: - # Convert a non-null kwarg into an options dict for 'period' options - opt_p = 
Opts(**_COORD_OPTIONS_TEMPLATE) - opt_p.update(which="period", **period_opts) - - # Define the 'standard' dimensions which we will create - # NB we don't necessarily *use* either of these - dims_and_lens = {timedim_name: 2, perioddim_name: 3} - dims_string = "\n".join( - [ - f" {name} = {length} ;" - for name, length in dims_and_lens.items() - ] - ) - - phenom_auto_dims = [] - phenom_auto_coords = [] - coord_variables_string = "" - data_string = "" - for opt in (opt_t, opt_p): - # Handle computed defaults and common info for both coord options. - if opt: - if opt.which not in ("time", "period"): - raise ValueError(f"unrecognised opt.which={opt.which}") - - # Do computed defaults. - if opt.stdname == "_auto_which": - if opt.which == "time": - opt.stdname = "time" - else: - assert opt.which == "period" - opt.stdname = "forecast_period" - if opt.varname == "_as_which": - opt.varname = opt.which - if opt.dimname == "_as_which": - opt.dimname = opt.which - if opt.units == "_auto_which": - if opt.which == "time": - opt.units = "hours since 2000-01-01" - else: - assert opt.which == "period" - opt.units = "hours" - - # Build 'auto' lists of phenom dims and (aux) coordinates. - if opt.in_phenomvar_dims: - phenom_auto_dims.append(opt.dimname) - if opt.in_phenomvar_coords: - phenom_auto_coords.append(opt.varname) - - # Add a definition of the coord variable. - coord_variables_string += f""" - double {opt.varname}({opt.dimname}) ; - {opt.varname}:standard_name = "{opt.stdname}" ; - {opt.varname}:units = "{opt.units}" ; -""" - # NOTE: we don't bother with an 'axis' property. - # We can probe the behaviour we need without that, because we - # are *not* testing the cf.py categorisation code, or the - # helper "build_xxx" routines. - - # Define coord-var data values (so it can be a dimension). - varname = opt.varname - if opt.values_all_zero: - # Use 'values_all_zero' to prevent a dim-var from - # identifying as a CFDimensionCoordinate (as it is - # non-monotonic). 
- dim_vals = [0.0] * dims_and_lens[opt.dimname] - else: - # "otherwise", assign an ascending sequence. - dim_vals = range(dims_and_lens[opt.dimname]) - dimvals_string = ", ".join(f"{val:0.1f}" for val in dim_vals) - data_string += f"\n {varname} = {dimvals_string} ;" - - if phenom_dims == "_auto": - phenom_dims = phenom_auto_dims - if not phenom_dims: - phenom_dims_string = "" - else: - phenom_dims_string = ", ".join(phenom_dims) - - if phenom_coords == "_auto": - phenom_coords = phenom_auto_coords - if not phenom_coords: - phenom_coords_string = "" - else: - phenom_coords_string = " ".join(phenom_coords) - phenom_coords_string = ( - " " - f'phenom:coordinates = "{phenom_coords_string}" ; ' - ) - - # Create a testcase with time dims + coords. - cdl_string = f""" -netcdf test {{ - dimensions: -{dims_string} - variables: - double phenom({phenom_dims_string}) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; -{phenom_coords_string} - -{coord_variables_string} - data: -{data_string} -}} -""" - return cdl_string - - def check_result(self, cube, time_is="dim", period_is="missing"): - """ - Check presence of expected dim/aux-coords in the result cube. - - Both of 'time_is' and 'period_is' can take values 'dim', 'aux' or - 'missing'. - - """ - options = ("dim", "aux", "missing") - msg = f'Invalid "{{name}}" = {{opt}} : Not one of {options!r}.' 
- if time_is not in options: - raise ValueError(msg.format(name="time_is", opt=time_is)) - if period_is not in options: - raise ValueError(msg.format(name="period_is", opt=period_is)) - - # Get the facts we want to check - time_name = "time" - period_name = "forecast_period" - time_dimcos = cube.coords(time_name, dim_coords=True) - time_auxcos = cube.coords(time_name, dim_coords=False) - period_dimcos = cube.coords(period_name, dim_coords=True) - period_auxcos = cube.coords(period_name, dim_coords=False) - - if time_is == "dim": - self.assertEqual(len(time_dimcos), 1) - self.assertEqual(len(time_auxcos), 0) - elif time_is == "aux": - self.assertEqual(len(time_dimcos), 0) - self.assertEqual(len(time_auxcos), 1) - else: - self.assertEqual(len(time_dimcos), 0) - self.assertEqual(len(time_auxcos), 0) - - if period_is == "dim": - self.assertEqual(len(period_dimcos), 1) - self.assertEqual(len(period_auxcos), 0) - elif period_is == "aux": - self.assertEqual(len(period_dimcos), 0) - self.assertEqual(len(period_auxcos), 1) - else: - self.assertEqual(len(period_dimcos), 0) - self.assertEqual(len(period_auxcos), 0) - - # Also check expected built Coord types. - if time_is == "dim": - self.assertIsInstance(time_dimcos[0], DimCoord) - elif time_is == "aux": - self.assertIsInstance(time_auxcos[0], AuxCoord) - - if period_is == "dim": - self.assertIsInstance(period_dimcos[0], DimCoord) - elif period_is == "aux": - self.assertIsInstance(period_auxcos[0], AuxCoord) - - -class Mixin__singlecoord__tests(Mixin__timecoords__common): - # Coordinate tests to be run for both 'time' and 'period' coordinate vars. - # Set (in inheritors) to select time/period testing. - which = None - - def run_testcase(self, coord_dim_name=None, **opts): - """ - Specialise 'run_testcase' for single-coord 'time' or 'period' testing. 
- """ - which = self.which - assert which in ("time", "period") - - # Separate the 'Opt' keywords from "others" : others are passed - # directly to the parent routine, whereas 'Opt' ones are passed to - # 'time_opts' / 'period_opts' keys accordingly. - general_opts = {} - for key, value in list(opts.items()): - if key not in _COORD_OPTIONS_TEMPLATE.keys(): - del opts[key] - general_opts[key] = value - - if coord_dim_name is not None: - # Translate this into one of timedim_name/perioddim_name - general_opts[f"{which}dim_name"] = coord_dim_name - - period_opts = None - time_opts = None - if which == "time": - time_opts = opts - else: - period_opts = opts - - result = super().run_testcase( - time_opts=time_opts, period_opts=period_opts, **general_opts - ) - - return result - - def check_result(self, cube, coord_is="dim"): - """ - Specialise 'check_result' for single-coord 'time' or 'period' testing. - """ - # Pass generic 'coord_is' option to parent as time/period options. - which = self.which - assert which in ("time", "period") - - if which == "time": - time_is = coord_is - period_is = "missing" - else: - period_is = coord_is - time_is = "missing" - - super().check_result(cube, time_is=time_is, period_is=period_is) - - # - # Generic single-coordinate testcases. - # ( these are repeated for both 'time' and 'time_period' ) - # - - def test_dimension(self): - # Coord is a normal dimension --> dimcoord - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(time[[_period]]) - # 003 : fc_build_coordinate_(time[[_period]]) - result = self.run_testcase() - self.check_result(result, "dim") - - def test_dimension_in_phenom_coords(self): - # Dimension coord also present in phenom:coords. - # Strictly wrong but a common error in datafiles : must tolerate. 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(time[[_period]]) - # 003 : fc_build_coordinate_(time[[_period]]) - result = self.run_testcase(in_phenomvar_coords=True) - self.check_result(result, "dim") - - def test_dim_nonmonotonic(self): - # Coord has all-zero values, which prevents it being a dimcoord. - # The rule has a special way of treating it as an aux coord - # -- even though it doesn't appear in the phenom coords. - # ( Done by the build_coord routine, so not really a rules issue). - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(time[[_period]]) - # 003 : fc_build_coordinate_(time[[_period]]) - msg = "Failed to create.* dimension coordinate" - result = self.run_testcase(values_all_zero=True, warning=msg) - self.check_result(result, "aux") - - def test_dim_fails_typeident(self): - # Provide a coord variable, identified as a CFDimensionCoordinate by - # cf.py, but with the "wrong" units for a time or period coord. - # This causes it to fail both 'is_time' and 'is_period' tests and so, - # within the 'action_provides_coordinate' routine, does not trigger as - # a 'provides_coord_(time[[_period]])' rule, but instead as a - # 'default_coordinate_(provide-phase)'. - # As a result, it is built as a 'miscellaneous' dim-coord. - # N.B. this makes *no* practical difference, because a 'misc' dim - # coord is still a dim coord (albeit one with incorrect units). - # N.B.#2 that is different from lat/lon coords, where the coord-specific - # 'build' rules have the extra effect of setting a fixed standard-name. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_default_coordinate_(provide-phase) - # 003 : fc_build_coordinate_(miscellaneous) - result = self.run_testcase(units="1") - self.check_result(result, "dim") - - def test_aux(self): - # time/period is installed as an auxiliary coord. - # For this, rename both DIMENSIONS, so that the generated coords are - # not actually CF coordinates. 
- # For a valid case, we must *also* have a ref in phenom:coordinates - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate_time[[_period]] - result = self.run_testcase( - coord_dim_name="dim_renamed", - dimname="dim_renamed", - in_phenomvar_coords=True, - ) - self.check_result(result, "aux") - - def test_aux_not_in_phenom_coords(self): - # time/period is installed as an auxiliary coord, - # but we DIDN'T list it in phenom:coords -- otherwise as previous. - # Should have no result at all. - # - # Rules Triggered: - # 001 : fc_default - result = self.run_testcase( - coord_dim_name="dim_renamed", - dimname="dim_renamed", - in_phenomvar_coords=False, - ) # "should" be True for an aux-coord - self.check_result(result, "missing") - - def test_aux_fails_typeident(self): - # We provide a non-dimension coord variable, identified as a - # CFAuxiliaryCoordinate by cf.py, but we also give it "wrong" units, - # unsuitable for a time or period coord. - # Because it fails both 'is_time' and 'is_period' tests, it then does - # not trigger 'fc_build_auxiliary_coordinate_time[[_period]]'. - # As in the above testcase 'test_dim_fails_typeident', the routine - # 'action_build_auxiliary_coordinate' therefore builds this as a - # 'miscellaneous' rather than a specific coord type (time or period). - # However, also as in that other case, this makes absolutely no - # practical difference -- unlike for latitude or longitutude coords, - # where it may affect the standard-name. 
- # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate - result = self.run_testcase( - coord_dim_name="dim_renamed", - dimname="dim_renamed", - in_phenomvar_coords=True, - units="1", - ) - self.check_result(result, "aux") - - -class Test__time(Mixin__singlecoord__tests, tests.IrisTest): - # Run 'time' coord tests - which = "time" - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - -class Test__period(Mixin__singlecoord__tests, tests.IrisTest): - # Run 'time_period' coord tests - which = "period" - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - -class Test__dualcoord(Mixin__timecoords__common, tests.IrisTest): - # Coordinate tests for a combination of 'time' and 'time_period'. - # Not strictly necessary, as handling is independent, but a handy check - # on typical usage. - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def test_time_and_period(self): - # Test case with both 'time' and 'period', with separate dims. - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(time) - # 003 : fc_provides_coordinate_(time_period) - # 004 : fc_build_coordinate_(time) - # 005 : fc_build_coordinate_(time_period) - result = self.run_testcase(time_opts={}, period_opts={}) - self.check_result(result, time_is="dim", period_is="dim") - - def test_time_dim_period_aux(self): - # Test case with both 'time' and 'period' sharing a dim. 
- # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_coordinate_(time) - # 003 : fc_build_coordinate_(time) - # 004 : fc_build_auxiliary_coordinate_time_period - result = self.run_testcase( - time_opts={}, - period_opts=dict( - dimname="time", - in_phenomvar_dims=False, - in_phenomvar_coords=True, - ), - ) - self.check_result(result, time_is="dim", period_is="aux") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py deleted file mode 100644 index e6508bea85..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the module -:mod:`iris.fileformats.netcdf._nc_load_rules.engine` . - -""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py deleted file mode 100644 index df5fbd4922..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py +++ /dev/null @@ -1,103 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. 
- -""" -from unittest import mock - -from iris.fileformats._nc_load_rules.engine import Engine, FactEntity -import iris.tests as tests - - -class Test_Engine(tests.IrisTest): - def setUp(self): - self.empty_engine = Engine() - engine = Engine() - engine.add_fact("this", ("that", "other")) - self.nonempty_engine = engine - - def test__init(self): - # Check that init creates an empty Engine. - engine = Engine() - self.assertIsInstance(engine, Engine) - self.assertIsInstance(engine.facts, FactEntity) - self.assertEqual(list(engine.facts.entity_lists.keys()), []) - - def test_reset(self): - # Check that calling reset() causes a non-empty engine to be emptied. - engine = self.nonempty_engine - fact_names = list(engine.facts.entity_lists.keys()) - self.assertNotEqual(len(fact_names), 0) - engine.reset() - fact_names = list(engine.facts.entity_lists.keys()) - self.assertEqual(len(fact_names), 0) - - def test_activate(self): - # Check that calling engine.activate() --> actions.run_actions(engine) - engine = self.empty_engine - target = "iris.fileformats._nc_load_rules.engine.run_actions" - run_call = self.patch(target) - engine.activate() - self.assertEqual(run_call.call_args_list, [mock.call(engine)]) - - def test_add_case_specific_fact__newname(self): - # Adding a new fact to a new fact-name records as expected. - engine = self.nonempty_engine - engine.add_case_specific_fact("new_fact", ("a1", "a2")) - self.assertEqual(engine.fact_list("new_fact"), [("a1", "a2")]) - - def test_add_case_specific_fact__existingname(self): - # Adding a new fact to an existing fact-name records as expected. - engine = self.nonempty_engine - name = "this" - self.assertEqual(engine.fact_list(name), [("that", "other")]) - engine.add_case_specific_fact(name, ("yetanother",)) - self.assertEqual( - engine.fact_list(name), [("that", "other"), ("yetanother",)] - ) - - def test_add_case_specific_fact__emptyargs(self): - # Check that empty args work ok, and will create a new fact. 
- engine = self.empty_engine - engine.add_case_specific_fact("new_fact", ()) - self.assertIn("new_fact", engine.facts.entity_lists) - self.assertEqual(engine.fact_list("new_fact"), [()]) - - def test_add_fact(self): - # Check that 'add_fact' is equivalent to (short for) a call to - # 'add_case_specific_fact'. - engine = self.empty_engine - target = ( - "iris.fileformats._nc_load_rules.engine.Engine" - ".add_case_specific_fact" - ) - acsf_call = self.patch(target) - engine.add_fact("extra", ()) - self.assertEqual(acsf_call.call_count, 1) - self.assertEqual( - acsf_call.call_args_list, - [mock.call(fact_name="extra", fact_arglist=())], - ) - - def test_get_kb(self): - # Check that this stub just returns the facts database. - engine = self.nonempty_engine - kb = engine.get_kb() - self.assertIsInstance(kb, FactEntity) - self.assertIs(kb, engine.facts) - - def test_fact_list__existing(self): - self.assertEqual( - self.nonempty_engine.fact_list("this"), [("that", "other")] - ) - - def test_fact_list__nonexisting(self): - self.assertEqual(self.empty_engine.fact_list("odd-unknown"), []) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py deleted file mode 100644 index 69a536b9ae..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the module -:mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . 
- -""" diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py deleted file mode 100644 index c040d43ca0..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_albers_equal_area_coordinate_system`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import AlbersEqualArea -from iris.fileformats._nc_load_rules.helpers import ( - build_albers_equal_area_coordinate_system, -) - - -class TestBuildAlbersEqualAreaCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, no_optionals=False): - if no_optionals: - # Most properties are optional for this system. - gridvar_props = {} - # Setup all the expected default values - test_lat = 0 - test_lon = 0 - test_easting = 0 - test_northing = 0 - test_parallels = (20, 50) - else: - # Choose test values and setup corresponding named properties. - test_lat = -35 - test_lon = 175 - test_easting = -100 - test_northing = 200 - test_parallels = (-27, 3) - gridvar_props = dict( - latitude_of_projection_origin=test_lat, - longitude_of_central_meridian=test_lon, - false_easting=test_easting, - false_northing=test_northing, - standard_parallel=test_parallels, - ) - - # Add ellipsoid args. 
- gridvar_props["semi_major_axis"] = 6377563.396 - if inverse_flattening: - gridvar_props["inverse_flattening"] = 299.3249646 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, inverse_flattening=299.3249646 - ) - else: - gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) - - cf_grid_var = mock.Mock(spec=[], **gridvar_props) - - cs = build_albers_equal_area_coordinate_system(None, cf_grid_var) - - expected = AlbersEqualArea( - latitude_of_projection_origin=test_lat, - longitude_of_central_meridian=test_lon, - false_easting=test_easting, - false_northing=test_northing, - standard_parallels=test_parallels, - ellipsoid=expected_ellipsoid, - ) - - self.assertEqual(cs, expected) - - def test_basic(self): - self._test() - - def test_inverse_flattening(self): - # Check when inverse_flattening is provided instead of semi_minor_axis. - self._test(inverse_flattening=True) - - def test_no_optionals(self): - # Check defaults, when all optional attributes are absent. - self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py deleted file mode 100644 index 95f892454b..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py +++ /dev/null @@ -1,310 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_auxilliary_coordinate`. 
- -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.coords import AuxCoord -from iris.fileformats._nc_load_rules.helpers import build_auxiliary_coordinate -from iris.fileformats.cf import CFVariable - - -class TestBoundsVertexDim(tests.IrisTest): - # Lookup for various tests (which change the dimension order). - dim_names_lens = { - "foo": 2, - "bar": 3, - "nv": 4, - # 'x' and 'y' used as aliases for 'foo' and 'bar' - "x": 2, - "y": 3, - } - - def setUp(self): - # Create coordinate cf variables and pyke engine. - dimension_names = ("foo", "bar") - points, cf_data = self._make_array_and_cf_data(dimension_names) - self.cf_coord_var = mock.Mock( - spec=CFVariable, - dimensions=dimension_names, - cf_name="wibble", - cf_data=cf_data, - standard_name=None, - long_name="wibble", - units="m", - shape=points.shape, - dtype=points.dtype, - __getitem__=lambda self, key: points[key], - ) - - expected_bounds, _ = self._make_array_and_cf_data( - dimension_names=("foo", "bar", "nv") - ) - self.expected_coord = AuxCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=expected_bounds, - ) - - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar"), cf_data=cf_data), - filename="DUMMY", - cube_parts=dict(coordinates=[]), - ) - - # Patch the deferred loading that prevents attempted file access. - # This assumes that self.cf_bounds_var is defined in the test case. 
- def patched__getitem__(proxy_self, keys): - for var in (self.cf_coord_var, self.cf_bounds_var): - if proxy_self.variable_name == var.cf_name: - return var[keys] - raise RuntimeError() - - self.patch( - "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", - new=patched__getitem__, - ) - - # Patch the helper function that retrieves the bounds cf variable, - # and a False flag for climatological. - # This avoids the need for setting up further mocking of cf objects. - def _get_per_test_bounds_var(_coord_unused): - # Return the 'cf_bounds_var' created by the current test. - return (self.cf_bounds_var, False) - - self.patch( - "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", - new=_get_per_test_bounds_var, - ) - - @classmethod - def _make_array_and_cf_data(cls, dimension_names): - shape = tuple(cls.dim_names_lens[name] for name in dimension_names) - cf_data = mock.MagicMock(_FillValue=None, spec=[]) - cf_data.chunking = mock.MagicMock(return_value=shape) - return np.zeros(shape), cf_data - - def _make_cf_bounds_var(self, dimension_names): - # Create the bounds cf variable. - bounds, cf_data = self._make_array_and_cf_data(dimension_names) - cf_bounds_var = mock.Mock( - spec=CFVariable, - dimensions=dimension_names, - cf_name="wibble_bnds", - cf_data=cf_data, - shape=bounds.shape, - dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key], - ) - - return bounds, cf_bounds_var - - def _check_case(self, dimension_names): - bounds, self.cf_bounds_var = self._make_cf_bounds_var( - dimension_names=dimension_names - ) - - # Asserts must lie within context manager because of deferred loading. - build_auxiliary_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with( - self.expected_coord, [0, 1] - ) - - # Test that engine.cube_parts container is correctly populated. 
- expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) - - def test_fastest_varying_vertex_dim(self): - # The usual order. - self._check_case(dimension_names=("foo", "bar", "nv")) - - def test_slowest_varying_vertex_dim(self): - # Bounds in the first (slowest varying) dimension. - self._check_case(dimension_names=("nv", "foo", "bar")) - - def test_fastest_with_different_dim_names(self): - # Despite the dimension names ('x', and 'y') differing from the coord's - # which are 'foo' and 'bar' (as permitted by the cf spec), - # this should still work because the vertex dim is the fastest varying. - self._check_case(dimension_names=("x", "y", "nv")) - - -class TestDtype(tests.IrisTest): - def setUp(self): - # Create coordinate cf variables and pyke engine. - points = np.arange(6).reshape(2, 3) - cf_data = mock.MagicMock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=points.shape) - - self.cf_coord_var = mock.Mock( - spec=CFVariable, - dimensions=("foo", "bar"), - cf_name="wibble", - cf_data=cf_data, - standard_name=None, - long_name="wibble", - units="m", - shape=points.shape, - dtype=points.dtype, - __getitem__=lambda self, key: points[key], - ) - - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), - filename="DUMMY", - cube_parts=dict(coordinates=[]), - ) - - def patched__getitem__(proxy_self, keys): - if proxy_self.variable_name == self.cf_coord_var.cf_name: - return self.cf_coord_var[keys] - raise RuntimeError() - - self.deferred_load_patch = mock.patch( - "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", - new=patched__getitem__, - ) - - def test_scale_factor_add_offset_int(self): - self.cf_coord_var.scale_factor = 3 - self.cf_coord_var.add_offset = 5 - - with self.deferred_load_patch: - build_auxiliary_coordinate(self.engine, self.cf_coord_var) - - coord, _ = self.engine.cube_parts["coordinates"][0] - 
self.assertEqual(coord.dtype.kind, "i") - - def test_scale_factor_float(self): - self.cf_coord_var.scale_factor = 3.0 - - with self.deferred_load_patch: - build_auxiliary_coordinate(self.engine, self.cf_coord_var) - - coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "f") - - def test_add_offset_float(self): - self.cf_coord_var.add_offset = 5.0 - - with self.deferred_load_patch: - build_auxiliary_coordinate(self.engine, self.cf_coord_var) - - coord, _ = self.engine.cube_parts["coordinates"][0] - self.assertEqual(coord.dtype.kind, "f") - - -class TestCoordConstruction(tests.IrisTest): - def setUp(self): - # Create dummy pyke engine. - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), - filename="DUMMY", - cube_parts=dict(coordinates=[]), - ) - - points = np.arange(6) - self.cf_coord_var = mock.Mock( - dimensions=("foo",), - scale_factor=1, - add_offset=0, - cf_name="wibble", - cf_data=mock.MagicMock( - chunking=mock.Mock(return_value=None), spec=[] - ), - standard_name=None, - long_name="wibble", - units="days since 1970-01-01", - calendar=None, - shape=points.shape, - dtype=points.dtype, - __getitem__=lambda self, key: points[key], - ) - - bounds = np.arange(12).reshape(6, 2) - self.cf_bounds_var = mock.Mock( - dimensions=("x", "nv"), - scale_factor=1, - add_offset=0, - cf_name="wibble_bnds", - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), - shape=bounds.shape, - dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key], - ) - self.bounds = bounds - - # Create patch for deferred loading that prevents attempted - # file access. This assumes that self.cf_coord_var and - # self.cf_bounds_var are defined in the test case. 
- def patched__getitem__(proxy_self, keys): - for var in (self.cf_coord_var, self.cf_bounds_var): - if proxy_self.variable_name == var.cf_name: - return var[keys] - raise RuntimeError() - - self.patch( - "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", - new=patched__getitem__, - ) - - # Patch the helper function that retrieves the bounds cf variable. - # This avoids the need for setting up further mocking of cf objects. - self.use_climatology_bounds = False # Set this when you need to. - - def get_cf_bounds_var(coord_var): - return self.cf_bounds_var, self.use_climatology_bounds - - self.patch( - "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", - new=get_cf_bounds_var, - ) - - def check_case_aux_coord_construction(self, climatology=False): - # Test a generic auxiliary coordinate, with or without - # a climatological coord. - self.use_climatology_bounds = climatology - - expected_coord = AuxCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=self.bounds, - climatological=climatology, - ) - - build_auxiliary_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. 
- self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) - - def test_aux_coord_construction(self): - self.check_case_aux_coord_construction(climatology=False) - - def test_aux_coord_construction__climatology(self): - self.check_case_aux_coord_construction(climatology=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py deleted file mode 100644 index a13fa6cca0..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers\ -build_cube_metadata`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.cube import Cube -from iris.fileformats._nc_load_rules.helpers import build_cube_metadata - - -def _make_engine(global_attributes=None, standard_name=None, long_name=None): - if global_attributes is None: - global_attributes = {} - - cf_group = mock.Mock(global_attributes=global_attributes) - - cf_var = mock.MagicMock( - cf_name="wibble", - standard_name=standard_name, - long_name=long_name, - units="m", - dtype=np.float64, - cell_methods=None, - cf_group=cf_group, - ) - - engine = mock.Mock(cube=Cube([23]), cf_var=cf_var) - - return engine - - -class TestInvalidGlobalAttributes(tests.IrisTest): - def test_valid(self): - global_attributes = { - "Conventions": "CF-1.5", - "comment": "Mocked test object", - } - engine = _make_engine(global_attributes) - build_cube_metadata(engine) - expected = 
global_attributes - self.assertEqual(engine.cube.attributes, expected) - - def test_invalid(self): - global_attributes = { - "Conventions": "CF-1.5", - "comment": "Mocked test object", - "calendar": "standard", - } - engine = _make_engine(global_attributes) - with mock.patch("warnings.warn") as warn: - build_cube_metadata(engine) - # Check for a warning. - self.assertEqual(warn.call_count, 1) - self.assertIn( - "Skipping global attribute 'calendar'", warn.call_args[0][0] - ) - # Check resulting attributes. The invalid entry 'calendar' - # should be filtered out. - global_attributes.pop("calendar") - expected = global_attributes - self.assertEqual(engine.cube.attributes, expected) - - -class TestCubeName(tests.IrisTest): - def check_cube_names(self, inputs, expected): - # Inputs - attributes on the fake CF Variable. - standard_name, long_name = inputs - # Expected - The expected cube attributes. - exp_standard_name, exp_long_name = expected - - engine = _make_engine(standard_name=standard_name, long_name=long_name) - build_cube_metadata(engine) - - # Check the cube's standard name and long name are as expected. 
- self.assertEqual(engine.cube.standard_name, exp_standard_name) - self.assertEqual(engine.cube.long_name, exp_long_name) - - def test_standard_name_none_long_name_none(self): - inputs = (None, None) - expected = (None, None) - self.check_cube_names(inputs, expected) - - def test_standard_name_none_long_name_set(self): - inputs = (None, "ice_thickness_long_name") - expected = (None, "ice_thickness_long_name") - self.check_cube_names(inputs, expected) - - def test_standard_name_valid_long_name_none(self): - inputs = ("sea_ice_thickness", None) - expected = ("sea_ice_thickness", None) - self.check_cube_names(inputs, expected) - - def test_standard_name_valid_long_name_set(self): - inputs = ("sea_ice_thickness", "ice_thickness_long_name") - expected = ("sea_ice_thickness", "ice_thickness_long_name") - self.check_cube_names(inputs, expected) - - def test_standard_name_invalid_long_name_none(self): - inputs = ("not_a_standard_name", None) - expected = ( - None, - "not_a_standard_name", - ) - self.check_cube_names(inputs, expected) - - def test_standard_name_invalid_long_name_set(self): - inputs = ("not_a_standard_name", "ice_thickness_long_name") - expected = (None, "ice_thickness_long_name") - self.check_cube_names(inputs, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py deleted file mode 100644 index a75678d923..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py +++ /dev/null @@ -1,523 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_dimension_coordinate`. 
- -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock -import warnings - -import numpy as np - -from iris.coords import AuxCoord, DimCoord -from iris.fileformats._nc_load_rules.helpers import build_dimension_coordinate - - -class RulesTestMixin: - def setUp(self): - # Create dummy pyke engine. - self.engine = mock.Mock( - cube=mock.Mock(), - cf_var=mock.Mock(dimensions=("foo", "bar")), - filename="DUMMY", - cube_parts=dict(coordinates=[]), - ) - - # Create patch for deferred loading that prevents attempted - # file access. This assumes that self.cf_coord_var and - # self.cf_bounds_var are defined in the test case. - def patched__getitem__(proxy_self, keys): - for var in (self.cf_coord_var, self.cf_bounds_var): - if proxy_self.variable_name == var.cf_name: - return var[keys] - raise RuntimeError() - - self.deferred_load_patch = mock.patch( - "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", - new=patched__getitem__, - ) - - # Patch the helper function that retrieves the bounds cf variable. - # This avoids the need for setting up further mocking of cf objects. - self.use_climatology_bounds = False # Set this when you need to. - - def get_cf_bounds_var(coord_var): - return self.cf_bounds_var, self.use_climatology_bounds - - self.get_cf_bounds_var_patch = mock.patch( - "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", - new=get_cf_bounds_var, - ) - - -class TestCoordConstruction(tests.IrisTest, RulesTestMixin): - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. 
- RulesTestMixin.setUp(self) - - bounds = np.arange(12).reshape(6, 2) - self.cf_bounds_var = mock.Mock( - dimensions=("x", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) - self.bounds = bounds - - def _set_cf_coord_var(self, points): - self.cf_coord_var = mock.Mock( - dimensions=("foo",), - cf_name="wibble", - cf_data=mock.Mock(spec=[]), - standard_name=None, - long_name="wibble", - units="days since 1970-01-01", - calendar=None, - shape=points.shape, - dtype=points.dtype, - __getitem__=lambda self, key: points[key], - ) - - def check_case_dim_coord_construction(self, climatology=False): - # Test a generic dimension coordinate, with or without - # a climatological coord. - self.use_climatology_bounds = climatology - self._set_cf_coord_var(np.arange(6)) - - expected_coord = DimCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=self.bounds, - climatological=climatology, - ) - - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. 
- self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) - - def test_dim_coord_construction(self): - self.check_case_dim_coord_construction(climatology=False) - - def test_dim_coord_construction__climatology(self): - self.check_case_dim_coord_construction(climatology=True) - - def test_dim_coord_construction_masked_array(self): - self._set_cf_coord_var( - np.ma.array( - np.arange(6), - mask=[True, False, False, False, False, False], - fill_value=-999, - ) - ) - - expected_coord = DimCoord( - np.array([-999, 1, 2, 3, 4, 5]), - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=self.bounds, - ) - - with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) - - # Assert warning is raised - assert len(w) == 1 - assert "Gracefully filling" in w[0].message.args[0] - - def test_dim_coord_construction_masked_array_mask_does_nothing(self): - self._set_cf_coord_var( - np.ma.array( - np.arange(6), - mask=False, - ) - ) - - expected_coord = DimCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=self.bounds, - ) - - with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. 
- self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) - - # Assert no warning is raised - assert len(w) == 0 - - def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): - self.bounds = np.ma.array(np.arange(12).reshape(6, 2), mask=False) - self._set_cf_coord_var(np.arange(6)) - - expected_coord = DimCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=self.bounds, - ) - - with warnings.catch_warnings(record=True) as w: - # Asserts must lie within context manager because of deferred - # loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) - - # Assert no warning is raised - assert len(w) == 0 - - def test_aux_coord_construction(self): - # Use non monotonically increasing coordinates to force aux coord - # construction. - self._set_cf_coord_var(np.array([1, 3, 2, 4, 6, 5])) - - expected_coord = AuxCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=self.bounds, - ) - - warning_patch = mock.patch("warnings.warn") - - # Asserts must lie within context manager because of deferred loading. - with warning_patch, self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0] - ) - self.assertIn( - "creating 'wibble' auxiliary coordinate instead", - warnings.warn.call_args[0][0], - ) - - -class TestBoundsVertexDim(tests.IrisTest, RulesTestMixin): - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. 
- RulesTestMixin.setUp(self) - # Create test coordinate cf variable. - points = np.arange(6) - self.cf_coord_var = mock.Mock( - dimensions=("foo",), - cf_name="wibble", - standard_name=None, - long_name="wibble", - cf_data=mock.Mock(spec=[]), - units="m", - shape=points.shape, - dtype=points.dtype, - __getitem__=lambda self, key: points[key], - ) - - def test_slowest_varying_vertex_dim(self): - # Create the bounds cf variable. - bounds = np.arange(12).reshape(2, 6) - self.cf_bounds_var = mock.Mock( - dimensions=("nv", "foo"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) - - # Expected bounds on the resulting coordinate should be rolled so that - # the vertex dimension is at the end. - expected_bounds = bounds.transpose() - expected_coord = DimCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=expected_bounds, - ) - - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) - - # Test that engine.cube_parts container is correctly populated. 
- expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual( - self.engine.cube_parts["coordinates"], expected_list - ) - - def test_fastest_varying_vertex_dim(self): - bounds = np.arange(12).reshape(6, 2) - self.cf_bounds_var = mock.Mock( - dimensions=("foo", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) - - expected_coord = DimCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=bounds, - ) - - # Asserts must lie within context manager because of deferred loading. - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) - - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual( - self.engine.cube_parts["coordinates"], expected_list - ) - - def test_fastest_with_different_dim_names(self): - # Despite the dimension names 'x' differing from the coord's - # which is 'foo' (as permitted by the cf spec), - # this should still work because the vertex dim is the fastest varying. - bounds = np.arange(12).reshape(6, 2) - self.cf_bounds_var = mock.Mock( - dimensions=("x", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) - - expected_coord = DimCoord( - self.cf_coord_var[:], - long_name=self.cf_coord_var.long_name, - var_name=self.cf_coord_var.cf_name, - units=self.cf_coord_var.units, - bounds=bounds, - ) - - # Asserts must lie within context manager because of deferred loading. 
- with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var) - - # Test that expected coord is built and added to cube. - self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0] - ) - - # Test that engine.cube_parts container is correctly populated. - expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual( - self.engine.cube_parts["coordinates"], expected_list - ) - - -class TestCircular(tests.IrisTest, RulesTestMixin): - # Test the rules logic for marking a coordinate "circular". - def setUp(self): - # Call parent setUp explicitly, because of how unittests work. - RulesTestMixin.setUp(self) - self.cf_bounds_var = None - - def _make_vars(self, points, bounds=None, units="degrees"): - points = np.array(points) - self.cf_coord_var = mock.MagicMock( - dimensions=("foo",), - cf_name="wibble", - standard_name=None, - long_name="wibble", - cf_data=mock.Mock(spec=[]), - units=units, - shape=points.shape, - dtype=points.dtype, - __getitem__=lambda self, key: points[key], - ) - if bounds: - bounds = np.array(bounds).reshape(self.cf_coord_var.shape + (2,)) - self.cf_bounds_var = mock.Mock( - dimensions=("x", "nv"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) - - def _check_circular(self, circular, *args, **kwargs): - if "coord_name" in kwargs: - coord_name = kwargs.pop("coord_name") - else: - coord_name = "longitude" - self._make_vars(*args, **kwargs) - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate( - self.engine, self.cf_coord_var, coord_name=coord_name - ) - self.assertEqual(self.engine.cube.add_dim_coord.call_count, 1) - coord, dims = self.engine.cube.add_dim_coord.call_args[0] - self.assertEqual(coord.circular, circular) - - def check_circular(self, *args, **kwargs): - self._check_circular(True, *args, **kwargs) - - def check_noncircular(self, *args, **kwargs): - 
self._check_circular(False, *args, **kwargs) - - def test_single_zero_noncircular(self): - self.check_noncircular([0.0]) - - def test_single_lt_modulus_noncircular(self): - self.check_noncircular([-1.0]) - - def test_single_eq_modulus_circular(self): - self.check_circular([360.0]) - - def test_single_gt_modulus_circular(self): - self.check_circular([361.0]) - - def test_single_bounded_noncircular(self): - self.check_noncircular([180.0], bounds=[90.0, 240.0]) - - def test_single_bounded_circular(self): - self.check_circular([180.0], bounds=[90.0, 450.0]) - - def test_multiple_unbounded_circular(self): - self.check_circular([0.0, 90.0, 180.0, 270.0]) - - def test_non_angle_noncircular(self): - points = [0.0, 90.0, 180.0, 270.0] - self.check_noncircular(points, units="m") - - def test_non_longitude_noncircular(self): - points = [0.0, 90.0, 180.0, 270.0] - self.check_noncircular(points, coord_name="depth") - - def test_multiple_unbounded_irregular_noncircular(self): - self.check_noncircular([0.0, 90.0, 189.999, 270.0]) - - def test_multiple_unbounded_offset_circular(self): - self.check_circular([45.0, 135.0, 225.0, 315.0]) - - def test_multiple_unbounded_shortrange_circular(self): - self.check_circular([0.0, 90.0, 180.0, 269.9999]) - - def test_multiple_bounded_circular(self): - self.check_circular( - [0.0, 120.3, 240.0], - bounds=[[-45.0, 50.0], [100.0, 175.0], [200.0, 315.0]], - ) - - def test_multiple_bounded_noncircular(self): - self.check_noncircular( - [0.0, 120.3, 240.0], - bounds=[[-45.0, 50.0], [100.0, 175.0], [200.0, 355.0]], - ) - - -class TestCircularScalar(tests.IrisTest, RulesTestMixin): - def setUp(self): - RulesTestMixin.setUp(self) - - def _make_vars(self, bounds): - # Create cf vars for the coordinate and its bounds. - # Note that for a scalar the shape of the array from - # the cf var is (), rather than (1,). 
- points = np.array([0.0]) - self.cf_coord_var = mock.Mock( - dimensions=(), - cf_name="wibble", - standard_name=None, - long_name="wibble", - units="degrees", - cf_data=mock.Mock(spec=[]), - shape=(), - dtype=points.dtype, - __getitem__=lambda self, key: points[key], - ) - - bounds = np.array(bounds) - self.cf_bounds_var = mock.Mock( - dimensions=("bnds"), - cf_name="wibble_bnds", - shape=bounds.shape, - __getitem__=lambda self, key: bounds[key], - ) - - def _assert_circular(self, value): - with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate( - self.engine, self.cf_coord_var, coord_name="longitude" - ) - self.assertEqual(self.engine.cube.add_aux_coord.call_count, 1) - coord, dims = self.engine.cube.add_aux_coord.call_args[0] - self.assertEqual(coord.circular, value) - - def test_two_bounds_noncircular(self): - self._make_vars([0.0, 180.0]) - self._assert_circular(False) - - def test_two_bounds_circular(self): - self._make_vars([0.0, 360.0]) - self._assert_circular(True) - - def test_two_bounds_circular_decreasing(self): - self._make_vars([360.0, 0.0]) - self._assert_circular(True) - - def test_two_bounds_circular_alt(self): - self._make_vars([-180.0, 180.0]) - self._assert_circular(True) - - def test_two_bounds_circular_alt_decreasing(self): - self._make_vars([180.0, -180.0]) - self._assert_circular(True) - - def test_four_bounds(self): - self._make_vars([0.0, 10.0, 20.0, 30.0]) - self._assert_circular(False) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py deleted file mode 100644 index 28b3d8ab9a..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is 
released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_geostationary_coordinate_system`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import Geostationary -from iris.fileformats._nc_load_rules.helpers import ( - build_geostationary_coordinate_system, -) - - -class TestBuildGeostationaryCoordinateSystem(tests.IrisTest): - def _test( - self, inverse_flattening=False, replace_props=None, remove_props=None - ): - """ - Generic test that can check vertical perspective validity with or - without inverse flattening. - """ - # Make a dictionary of the non-ellipsoid properties to be added to both a test - # coord-system, and a test grid-mapping cf_var. - non_ellipsoid_kwargs = { - "latitude_of_projection_origin": 0.0, - "longitude_of_projection_origin": 2.0, - "perspective_point_height": 2000000.0, - "sweep_angle_axis": "x", - "false_easting": 100.0, - "false_northing": 200.0, - } - - # Make specified adjustments to the non-ellipsoid properties. - if remove_props: - for key in remove_props: - non_ellipsoid_kwargs.pop(key, None) - if replace_props: - for key, value in replace_props.items(): - non_ellipsoid_kwargs[key] = value - - # Make a dictionary of ellipsoid properties, to be added to both a test - # ellipsoid and the grid-mapping cf_var. 
- ellipsoid_kwargs = {"semi_major_axis": 6377563.396} - if inverse_flattening: - ellipsoid_kwargs["inverse_flattening"] = 299.3249646 - else: - ellipsoid_kwargs["semi_minor_axis"] = 6356256.909 - - cf_grid_var_kwargs = non_ellipsoid_kwargs.copy() - cf_grid_var_kwargs.update(ellipsoid_kwargs) - cf_grid_var = mock.Mock(spec=[], **cf_grid_var_kwargs) - cs = build_geostationary_coordinate_system(None, cf_grid_var) - ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs) - expected = Geostationary(ellipsoid=ellipsoid, **non_ellipsoid_kwargs) - self.assertEqual(cs, expected) - - def test_valid(self): - self._test(inverse_flattening=False) - - def test_inverse_flattening(self): - self._test(inverse_flattening=True) - - def test_false_offsets_missing(self): - self._test(remove_props=["false_easting", "false_northing"]) - - def test_false_offsets_none(self): - self._test( - replace_props={"false_easting": None, "false_northing": None} - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py deleted file mode 100644 index 05185a4cf5..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_lambert_azimuthal_equal_area_coordinate_system`. 
- -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import LambertAzimuthalEqualArea -from iris.fileformats._nc_load_rules.helpers import ( - build_lambert_azimuthal_equal_area_coordinate_system, -) - - -class TestBuildLambertAzimuthalEqualAreaCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, no_optionals=False): - if no_optionals: - # Most properties are optional for this system. - gridvar_props = {} - # Setup all the expected default values - test_lat = 0 - test_lon = 0 - test_easting = 0 - test_northing = 0 - else: - # Choose test values and setup corresponding named properties. - test_lat = -35 - test_lon = 175 - test_easting = -100 - test_northing = 200 - gridvar_props = dict( - latitude_of_projection_origin=test_lat, - longitude_of_projection_origin=test_lon, - false_easting=test_easting, - false_northing=test_northing, - ) - - # Add ellipsoid args. - gridvar_props["semi_major_axis"] = 6377563.396 - if inverse_flattening: - gridvar_props["inverse_flattening"] = 299.3249646 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, inverse_flattening=299.3249646 - ) - else: - gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) - - cf_grid_var = mock.Mock(spec=[], **gridvar_props) - - cs = build_lambert_azimuthal_equal_area_coordinate_system( - None, cf_grid_var - ) - - expected = LambertAzimuthalEqualArea( - latitude_of_projection_origin=test_lat, - longitude_of_projection_origin=test_lon, - false_easting=test_easting, - false_northing=test_northing, - ellipsoid=expected_ellipsoid, - ) - - self.assertEqual(cs, expected) - - def test_basic(self): - self._test() - - def test_inverse_flattening(self): - # Check when inverse_flattening is provided instead of semi_minor_axis. 
- self._test(inverse_flattening=True) - - def test_no_optionals(self): - # Check defaults, when all optional attributes are absent. - self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py deleted file mode 100644 index 22bb7149b1..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_lambert_conformal_coordinate_system`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import LambertConformal -from iris.fileformats._nc_load_rules.helpers import ( - build_lambert_conformal_coordinate_system, -) - - -class TestBuildLambertConformalCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, no_optionals=False): - if no_optionals: - # Most properties are optional in this case. - gridvar_props = {} - # Setup all the expected default values - test_lat = 39 - test_lon = -96 - test_easting = 0 - test_northing = 0 - test_parallels = (33, 45) - else: - # Choose test values and setup corresponding named properties. 
- test_lat = -35 - test_lon = 175 - test_easting = -100 - test_northing = 200 - test_parallels = (-27, 3) - gridvar_props = dict( - latitude_of_projection_origin=test_lat, - longitude_of_central_meridian=test_lon, - false_easting=test_easting, - false_northing=test_northing, - standard_parallel=test_parallels, - ) - - # Add ellipsoid args. - gridvar_props["semi_major_axis"] = 6377563.396 - if inverse_flattening: - gridvar_props["inverse_flattening"] = 299.3249646 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, inverse_flattening=299.3249646 - ) - else: - gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) - - cf_grid_var = mock.Mock(spec=[], **gridvar_props) - - cs = build_lambert_conformal_coordinate_system(None, cf_grid_var) - - expected = LambertConformal( - central_lat=test_lat, - central_lon=test_lon, - false_easting=test_easting, - false_northing=test_northing, - secant_latitudes=test_parallels, - ellipsoid=expected_ellipsoid, - ) - - self.assertEqual(cs, expected) - - def test_basic(self): - self._test() - - def test_inverse_flattening(self): - # Check when inverse_flattening is provided instead of semi_minor_axis. - self._test(inverse_flattening=True) - - def test_no_optionals(self): - # Check defaults, when all optional attributes are absent. - self._test(no_optionals=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py deleted file mode 100644 index 2be5477cb7..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_mercator_coordinate_system`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import Mercator -from iris.fileformats._nc_load_rules.helpers import ( - build_mercator_coordinate_system, -) - - -class TestBuildMercatorCoordinateSystem(tests.IrisTest): - def test_valid(self): - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=-90, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - - cs = build_mercator_coordinate_system(None, cf_grid_var) - - expected = Mercator( - longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin - ), - ellipsoid=iris.coord_systems.GeogCS( - cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis - ), - ) - self.assertEqual(cs, expected) - - def test_inverse_flattening(self): - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=-90, - semi_major_axis=6377563.396, - inverse_flattening=299.3249646, - ) - - cs = build_mercator_coordinate_system(None, cf_grid_var) - - expected = Mercator( - longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin - ), - ellipsoid=iris.coord_systems.GeogCS( - cf_grid_var.semi_major_axis, - inverse_flattening=cf_grid_var.inverse_flattening, - ), - ) - self.assertEqual(cs, expected) - - def test_longitude_missing(self): - cf_grid_var = mock.Mock( - spec=[], - semi_major_axis=6377563.396, - inverse_flattening=299.3249646, - ) - - cs = build_mercator_coordinate_system(None, cf_grid_var) - - expected = Mercator( - ellipsoid=iris.coord_systems.GeogCS( - cf_grid_var.semi_major_axis, - inverse_flattening=cf_grid_var.inverse_flattening, - ) - ) - self.assertEqual(cs, expected) - - -if __name__ 
== "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py deleted file mode 100644 index 5058e4d7d3..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_sterographic_coordinate_system`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import Stereographic -from iris.fileformats._nc_load_rules.helpers import ( - build_stereographic_coordinate_system, -) - - -class TestBuildStereographicCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, no_offsets=False): - test_easting = -100 - test_northing = 200 - gridvar_props = dict( - latitude_of_projection_origin=0, - longitude_of_projection_origin=0, - false_easting=test_easting, - false_northing=test_northing, - scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396, - ) - - if inverse_flattening: - gridvar_props["inverse_flattening"] = 299.3249646 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, inverse_flattening=299.3249646 - ) - else: - gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) - - if no_offsets: - del gridvar_props["false_easting"] - del gridvar_props["false_northing"] - test_easting = 0 - test_northing = 0 - - cf_grid_var = mock.Mock(spec=[], **gridvar_props) - - cs = 
build_stereographic_coordinate_system(None, cf_grid_var) - - expected = Stereographic( - central_lat=cf_grid_var.latitude_of_projection_origin, - central_lon=cf_grid_var.longitude_of_projection_origin, - false_easting=test_easting, - false_northing=test_northing, - ellipsoid=expected_ellipsoid, - ) - - self.assertEqual(cs, expected) - - def test_basic(self): - self._test() - - def test_inverse_flattening(self): - # Check when inverse_flattening is provided instead of semi_minor_axis. - self._test(inverse_flattening=True) - - def test_no_offsets(self): - # Check when false_easting/northing attributes are absent. - self._test(no_offsets=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py deleted file mode 100644 index 0096c5df4b..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py +++ /dev/null @@ -1,88 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_transverse_mercator_coordinate_system`. 
- -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import TransverseMercator -from iris.fileformats._nc_load_rules.helpers import ( - build_transverse_mercator_coordinate_system, -) - - -class TestBuildTransverseMercatorCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, no_options=False): - test_easting = -100 - test_northing = 200 - test_scale_factor = 1.234 - gridvar_props = dict( - latitude_of_projection_origin=35.3, - longitude_of_central_meridian=-75, - false_easting=test_easting, - false_northing=test_northing, - scale_factor_at_central_meridian=test_scale_factor, - semi_major_axis=6377563.396, - ) - - if inverse_flattening: - gridvar_props["inverse_flattening"] = 299.3249646 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, inverse_flattening=299.3249646 - ) - else: - gridvar_props["semi_minor_axis"] = 6356256.909 - expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909 - ) - - if no_options: - del gridvar_props["false_easting"] - del gridvar_props["false_northing"] - del gridvar_props["scale_factor_at_central_meridian"] - test_easting = 0 - test_northing = 0 - test_scale_factor = 1.0 - - cf_grid_var = mock.Mock(spec=[], **gridvar_props) - - cs = build_transverse_mercator_coordinate_system(None, cf_grid_var) - - expected = TransverseMercator( - latitude_of_projection_origin=( - cf_grid_var.latitude_of_projection_origin - ), - longitude_of_central_meridian=( - cf_grid_var.longitude_of_central_meridian - ), - false_easting=test_easting, - false_northing=test_northing, - scale_factor_at_central_meridian=test_scale_factor, - ellipsoid=expected_ellipsoid, - ) - - self.assertEqual(cs, expected) - - def test_basic(self): - self._test() - - def test_inverse_flattening(self): - self._test(inverse_flattening=True) - - def 
test_missing_optionals(self): - self._test(no_options=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py deleted file mode 100644 index f34992c2be..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -build_vertical_perspective_coordinate_system`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.coord_systems import VerticalPerspective -from iris.fileformats._nc_load_rules.helpers import ( - build_vertical_perspective_coordinate_system, -) - - -class TestBuildVerticalPerspectiveCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, no_offsets=False): - """ - Generic test that can check vertical perspective validity with or - without inverse flattening, and false_east/northing-s. 
- """ - test_easting = 100.0 - test_northing = 200.0 - cf_grid_var_kwargs = { - "spec": [], - "latitude_of_projection_origin": 1.0, - "longitude_of_projection_origin": 2.0, - "perspective_point_height": 2000000.0, - "false_easting": test_easting, - "false_northing": test_northing, - "semi_major_axis": 6377563.396, - } - - ellipsoid_kwargs = {"semi_major_axis": 6377563.396} - if inverse_flattening: - ellipsoid_kwargs["inverse_flattening"] = 299.3249646 - else: - ellipsoid_kwargs["semi_minor_axis"] = 6356256.909 - cf_grid_var_kwargs.update(ellipsoid_kwargs) - - if no_offsets: - del cf_grid_var_kwargs["false_easting"] - del cf_grid_var_kwargs["false_northing"] - test_easting = 0 - test_northing = 0 - - cf_grid_var = mock.Mock(**cf_grid_var_kwargs) - ellipsoid = iris.coord_systems.GeogCS(**ellipsoid_kwargs) - - cs = build_vertical_perspective_coordinate_system(None, cf_grid_var) - expected = VerticalPerspective( - latitude_of_projection_origin=cf_grid_var.latitude_of_projection_origin, - longitude_of_projection_origin=cf_grid_var.longitude_of_projection_origin, - perspective_point_height=cf_grid_var.perspective_point_height, - false_easting=test_easting, - false_northing=test_northing, - ellipsoid=ellipsoid, - ) - - self.assertEqual(cs, expected) - - def test_valid(self): - self._test(inverse_flattening=False) - - def test_inverse_flattening(self): - # Check when inverse_flattening is provided instead of semi_minor_axis. - self._test(inverse_flattening=True) - - def test_no_offsets(self): - # Check when false_easting/northing attributes are absent. 
- self._test(no_offsets=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py deleted file mode 100644 index a159ef81a8..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -get_attr_units`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.fileformats._nc_load_rules.helpers import get_attr_units - - -class TestGetAttrUnits(tests.IrisTest): - @staticmethod - def _make_cf_var(global_attributes=None): - if global_attributes is None: - global_attributes = {} - - cf_group = mock.Mock(global_attributes=global_attributes) - - cf_var = mock.MagicMock( - cf_name="sound_frequency", - cf_data=mock.Mock(spec=[]), - standard_name=None, - long_name=None, - units="\u266b", - dtype=np.float64, - cell_methods=None, - cf_group=cf_group, - ) - return cf_var - - def test_unicode_character(self): - attributes = {} - expected_attributes = {"invalid_units": "\u266b"} - cf_var = self._make_cf_var() - attr_units = get_attr_units(cf_var, attributes) - self.assertEqual(attr_units, "?") - self.assertEqual(attributes, expected_attributes) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py deleted file mode 100644 index ff9c51f40b..0000000000 --- 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -get_cf_bounds_var`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats._nc_load_rules.helpers import ( - CF_ATTR_BOUNDS, - CF_ATTR_CLIMATOLOGY, - get_cf_bounds_var, -) - - -class TestGetCFBoundsVar(tests.IrisTest): - # Tests to check that get_cf_bounds_var will return the bounds_var and - # the correct climatological flag. - def _generic_test(self, test_climatological_bounds=False): - cf_coord_var = mock.MagicMock() - - cf_group_dict = {"TEST": mock.sentinel.bounds_var} - if test_climatological_bounds: - cf_coord_var.cf_group.climatology = cf_group_dict - test_attr = CF_ATTR_CLIMATOLOGY - else: - cf_coord_var.cf_group.bounds = cf_group_dict - test_attr = CF_ATTR_BOUNDS - - for attr in (CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY): - attr_val = "TEST" if attr == test_attr else None - setattr(cf_coord_var, attr, attr_val) - - bounds_var, climatological = get_cf_bounds_var(cf_coord_var) - self.assertIs(bounds_var, mock.sentinel.bounds_var) - self.assertEqual(climatological, test_climatological_bounds) - - def test_bounds_normal(self): - self._generic_test(test_climatological_bounds=False) - - def test_bounds_climatological(self): - self._generic_test(test_climatological_bounds=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py deleted file mode 100644 index 3c7c496b54..0000000000 --- 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py +++ /dev/null @@ -1,292 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -get_names`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.fileformats._nc_load_rules.helpers import get_names - - -class TestGetNames(tests.IrisTest): - """ - The tests included in this class cover all the variations of possible - combinations of the following inputs: - * standard_name = [None, 'projection_y_coordinate', 'latitude_coordinate'] - * long_name = [None, 'lat_long_name'] - * var_name = ['grid_latitude', 'lat_var_name'] - * coord_name = [None, 'latitude'] - - standard_name, var_name and coord_name each contain a different valid CF - standard name so that it is clear which is being used to set the resulting - standard_name. - - """ - - @staticmethod - def _make_cf_var(standard_name, long_name, cf_name): - cf_var = mock.Mock( - cf_name=cf_name, - standard_name=standard_name, - long_name=long_name, - units="degrees", - dtype=np.float64, - cell_methods=None, - cf_group=mock.Mock(global_attributes={}), - ) - return cf_var - - def check_names(self, inputs, expected): - # Inputs - attributes on the fake CF Variable. Note: coord_name is - # optionally set in some pyke rules. - standard_name, long_name, var_name, coord_name = inputs - # Expected - The expected names and attributes. 
- exp_std_name, exp_long_name, exp_var_name, exp_attributes = expected - - cf_var = self._make_cf_var( - standard_name=standard_name, long_name=long_name, cf_name=var_name - ) - attributes = {} - res_standard_name, res_long_name, res_var_name = get_names( - cf_var, coord_name, attributes - ) - - # Check the names and attributes are as expected. - self.assertEqual(res_standard_name, exp_std_name) - self.assertEqual(res_long_name, exp_long_name) - self.assertEqual(res_var_name, exp_var_name) - self.assertEqual(attributes, exp_attributes) - - def test_var_name_valid(self): - # Only var_name is set and it is set to a valid standard name. - inp = (None, None, "grid_latitude", None) - exp = ("grid_latitude", None, "grid_latitude", {}) - self.check_names(inp, exp) - - def test_var_name_valid_coord_name_set(self): - # var_name is a valid standard name, coord_name is also set. - inp = (None, None, "grid_latitude", "latitude") - exp = ("latitude", None, "grid_latitude", {}) - self.check_names(inp, exp) - - def test_var_name_invalid(self): - # Only var_name is set but it is not a valid standard name. - inp = (None, None, "lat_var_name", None) - exp = (None, None, "lat_var_name", {}) - self.check_names(inp, exp) - - def test_var_name_invalid_coord_name_set(self): - # var_name is not a valid standard name, the coord_name is also set. - inp = (None, None, "lat_var_name", "latitude") - exp = ("latitude", None, "lat_var_name", {}) - self.check_names(inp, exp) - - def test_long_name_set_var_name_valid(self): - # long_name is not None, var_name is set to a valid standard name. - inp = (None, "lat_long_name", "grid_latitude", None) - exp = ("grid_latitude", "lat_long_name", "grid_latitude", {}) - self.check_names(inp, exp) - - def test_long_name_set_var_name_valid_coord_name_set(self): - # long_name is not None, var_name is set to a valid standard name, and - # coord_name is set. 
- inp = (None, "lat_long_name", "grid_latitude", "latitude") - exp = ("latitude", "lat_long_name", "grid_latitude", {}) - self.check_names(inp, exp) - - def test_long_name_set_var_name_invalid(self): - # long_name is not None, var_name is not set to a valid standard name. - inp = (None, "lat_long_name", "lat_var_name", None) - exp = (None, "lat_long_name", "lat_var_name", {}) - self.check_names(inp, exp) - - def test_long_name_set_var_name_invalid_coord_name_set(self): - # long_name is not None, var_name is not set to a valid standard name, - # and coord_name is set. - inp = (None, "lat_long_name", "lat_var_name", "latitude") - exp = ("latitude", "lat_long_name", "lat_var_name", {}) - self.check_names(inp, exp) - - def test_std_name_valid_var_name_valid(self): - # standard_name is a valid standard name, var_name is a valid standard - # name. - inp = ("projection_y_coordinate", None, "grid_latitude", None) - exp = ("projection_y_coordinate", None, "grid_latitude", {}) - self.check_names(inp, exp) - - def test_std_name_valid_var_name_valid_coord_name_set(self): - # standard_name is a valid standard name, var_name is a valid standard - # name, coord_name is set. - inp = ("projection_y_coordinate", None, "grid_latitude", "latitude") - exp = ("projection_y_coordinate", None, "grid_latitude", {}) - self.check_names(inp, exp) - - def test_std_name_valid_var_name_invalid(self): - # standard_name is a valid standard name, var_name is not a valid - # standard name. - inp = ("projection_y_coordinate", None, "lat_var_name", None) - exp = ("projection_y_coordinate", None, "lat_var_name", {}) - self.check_names(inp, exp) - - def test_std_name_valid_var_name_invalid_coord_name_set(self): - # standard_name is a valid standard name, var_name is not a valid - # standard name, coord_name is set. 
- inp = ("projection_y_coordinate", None, "lat_var_name", "latitude") - exp = ("projection_y_coordinate", None, "lat_var_name", {}) - self.check_names(inp, exp) - - def test_std_name_valid_long_name_set_var_name_valid(self): - # standard_name is a valid standard name, long_name is not None, - # var_name is a valid standard name. - inp = ( - "projection_y_coordinate", - "lat_long_name", - "grid_latitude", - None, - ) - exp = ("projection_y_coordinate", "lat_long_name", "grid_latitude", {}) - self.check_names(inp, exp) - - def test_std_name_valid_long_name_set_var_name_valid_coord_name_set(self): - # standard_name is a valid standard name, long_name is not None, - # var_name is a valid standard name, coord_name is set. - inp = ( - "projection_y_coordinate", - "lat_long_name", - "grid_latitude", - "latitude", - ) - exp = ("projection_y_coordinate", "lat_long_name", "grid_latitude", {}) - self.check_names(inp, exp) - - def test_std_name_valid_long_name_set_var_name_invalid(self): - # standard_name is a valid standard name, long_name is not None, - # var_name is not a valid standard name. - inp = ( - "projection_y_coordinate", - "lat_long_name", - "lat_var_name", - None, - ) - exp = ("projection_y_coordinate", "lat_long_name", "lat_var_name", {}) - self.check_names(inp, exp) - - def test_std_name_valid_long_name_set_var_name_invalid_coord_name_set( - self, - ): - # standard_name is a valid standard name, long_name is not None, - # var_name is not a valid standard name, coord_name is set. - inp = ( - "projection_y_coordinate", - "lat_long_name", - "lat_var_name", - "latitude", - ) - exp = ("projection_y_coordinate", "lat_long_name", "lat_var_name", {}) - self.check_names(inp, exp) - - def test_std_name_invalid_var_name_valid(self): - # standard_name is not a valid standard name, var_name is a valid - # standard name. 
- inp = ("latitude_coord", None, "grid_latitude", None) - exp = ("grid_latitude", None, "grid_latitude", {}) - self.check_names(inp, exp) - - def test_std_name_invalid_var_name_valid_coord_name_set(self): - # standard_name is not a valid standard name, var_name is a valid - # standard name, coord_name is set. - inp = ("latitude_coord", None, "grid_latitude", "latitude") - exp = ( - "latitude", - None, - "grid_latitude", - {"invalid_standard_name": "latitude_coord"}, - ) - self.check_names(inp, exp) - - def test_std_name_invalid_var_name_invalid(self): - # standard_name is not a valid standard name, var_name is not a valid - # standard name. - inp = ("latitude_coord", None, "lat_var_name", None) - exp = (None, None, "lat_var_name", {}) - self.check_names(inp, exp) - - def test_std_name_invalid_var_name_invalid_coord_name_set(self): - # standard_name is not a valid standard name, var_name is not a valid - # standard name, coord_name is set. - inp = ("latitude_coord", None, "lat_var_name", "latitude") - exp = ( - "latitude", - None, - "lat_var_name", - {"invalid_standard_name": "latitude_coord"}, - ) - self.check_names(inp, exp) - - def test_std_name_invalid_long_name_set_var_name_valid(self): - # standard_name is not a valid standard name, long_name is not None - # var_name is a valid standard name. - inp = ("latitude_coord", "lat_long_name", "grid_latitude", None) - exp = ( - "grid_latitude", - "lat_long_name", - "grid_latitude", - {"invalid_standard_name": "latitude_coord"}, - ) - self.check_names(inp, exp) - - def test_std_name_invalid_long_name_set_var_name_valid_coord_name_set( - self, - ): - # standard_name is not a valid standard name, long_name is not None, - # var_name is a valid standard name, coord_name is set. 
- inp = ("latitude_coord", "lat_long_name", "grid_latitude", "latitude") - exp = ( - "latitude", - "lat_long_name", - "grid_latitude", - {"invalid_standard_name": "latitude_coord"}, - ) - self.check_names(inp, exp) - - def test_std_name_invalid_long_name_set_var_name_invalid(self): - # standard_name is not a valid standard name, long_name is not None - # var_name is not a valid standard name. - inp = ("latitude_coord", "lat_long_name", "lat_var_name", None) - exp = ( - None, - "lat_long_name", - "lat_var_name", - {"invalid_standard_name": "latitude_coord"}, - ) - self.check_names(inp, exp) - - def test_std_name_invalid_long_name_set_var_name_invalid_coord_name_set( - self, - ): - # standard_name is not a valid standard name, long_name is not None, - # var_name is not a valid standard name, coord_name is set. - inp = ("latitude_coord", "lat_long_name", "lat_var_name", "latitude") - exp = ( - "latitude", - "lat_long_name", - "lat_var_name", - {"invalid_standard_name": "latitude_coord"}, - ) - self.check_names(inp, exp) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py deleted file mode 100644 index dfe2895f29..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -has_supported_mercator_parameters`. 
- -""" - -from unittest import mock -import warnings - -from iris.fileformats._nc_load_rules.helpers import ( - has_supported_mercator_parameters, -) - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - - -def _engine(cf_grid_var, cf_name): - cf_group = {cf_name: cf_grid_var} - cf_var = mock.Mock(cf_group=cf_group) - return mock.Mock(cf_var=cf_var) - - -class TestHasSupportedMercatorParameters(tests.IrisTest): - def test_valid(self): - cf_name = "mercator" - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=-90, - false_easting=0, - false_northing=0, - scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - is_valid = has_supported_mercator_parameters(engine, cf_name) - - self.assertTrue(is_valid) - - def test_invalid_scale_factor(self): - # Iris does not yet support scale factors other than one for - # Mercator projections - cf_name = "mercator" - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=0, - false_easting=0, - false_northing=0, - scale_factor_at_projection_origin=0.9, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Scale factor") - - def test_invalid_standard_parallel(self): - # Iris does not yet support standard parallels other than zero for - # Mercator projections - cf_name = "mercator" - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=0, - false_easting=0, - false_northing=0, - standard_parallel=30, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) 
- - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Standard parallel") - - def test_invalid_false_easting(self): - # Iris does not yet support false eastings other than zero for - # Mercator projections - cf_name = "mercator" - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=0, - false_easting=100, - false_northing=0, - scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False easting") - - def test_invalid_false_northing(self): - # Iris does not yet support false northings other than zero for - # Mercator projections - cf_name = "mercator" - cf_grid_var = mock.Mock( - spec=[], - longitude_of_projection_origin=0, - false_easting=0, - false_northing=100, - scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = has_supported_mercator_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "False northing") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py deleted file mode 100644 index 8bec823f4b..0000000000 --- 
a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -has_supported_stereographic_parameters`. - -""" - -from unittest import mock -import warnings - -from iris.fileformats._nc_load_rules.helpers import ( - has_supported_stereographic_parameters, -) - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - - -def _engine(cf_grid_var, cf_name): - cf_group = {cf_name: cf_grid_var} - cf_var = mock.Mock(cf_group=cf_group) - return mock.Mock(cf_var=cf_var) - - -class TestHasSupportedStereographicParameters(tests.IrisTest): - def test_valid(self): - cf_name = "stereographic" - cf_grid_var = mock.Mock( - spec=[], - latitude_of_projection_origin=0, - longitude_of_projection_origin=0, - false_easting=-100, - false_northing=200, - scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - is_valid = has_supported_stereographic_parameters(engine, cf_name) - - self.assertTrue(is_valid) - - def test_invalid_scale_factor(self): - # Iris does not yet support scale factors other than one for - # stereographic projections - cf_name = "stereographic" - cf_grid_var = mock.Mock( - spec=[], - latitude_of_projection_origin=0, - longitude_of_projection_origin=0, - false_easting=-100, - false_northing=200, - scale_factor_at_projection_origin=0.9, - semi_major_axis=6377563.396, - semi_minor_axis=6356256.909, - ) - engine = _engine(cf_grid_var, cf_name) - - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter("always") - is_valid = 
has_supported_stereographic_parameters(engine, cf_name) - - self.assertFalse(is_valid) - self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), "Scale factor") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py deleted file mode 100644 index 1ee0cfbf2e..0000000000 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.fileformats._nc_load_rules.helpers.\ -reorder_bounds_data`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.fileformats._nc_load_rules.helpers import reorder_bounds_data - - -class Test(tests.IrisTest): - def test_fastest_varying(self): - bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock( - dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" - ) - cf_coord_var = mock.Mock(dimensions=("foo", "bar")) - - res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) - # Vertex dimension (nv) is already at the end. - self.assertArrayEqual(res, bounds_data) - - def test_slowest_varying(self): - bounds_data = np.arange(24).reshape(4, 2, 3) - cf_bounds_var = mock.Mock(dimensions=("nv", "foo", "bar")) - cf_coord_var = mock.Mock(dimensions=("foo", "bar")) - - res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) - # Move zeroth dimension (nv) to the end. 
- expected = np.rollaxis(bounds_data, 0, bounds_data.ndim) - self.assertArrayEqual(res, expected) - - def test_different_dim_names(self): - bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock( - dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" - ) - cf_coord_var = mock.Mock(dimensions=("x", "y"), cf_name="wibble") - with self.assertRaisesRegex(ValueError, "dimension names"): - reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/__init__.py deleted file mode 100644 index 732094f67a..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.fileformats.netcdf` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py deleted file mode 100644 index ee814ea168..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver.py +++ /dev/null @@ -1,1054 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.netcdf.Saver` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import collections -from contextlib import contextmanager -from unittest import mock - -import netCDF4 as nc -import numpy as np -from numpy import ma - -import iris -from iris.coord_systems import ( - AlbersEqualArea, - GeogCS, - Geostationary, - LambertAzimuthalEqualArea, - LambertConformal, - Mercator, - RotatedGeogCS, - Stereographic, - TransverseMercator, - VerticalPerspective, -) -from iris.coords import DimCoord -from iris.cube import Cube -from iris.fileformats.netcdf import Saver -import iris.tests.stock as stock - - -class Test_write(tests.IrisTest): - # ------------------------------------------------------------------------- - # It is not considered necessary to have integration tests for saving - # EVERY coordinate system. A subset are tested below. - # ------------------------------------------------------------------------- - - # Attribute is substituted in test_Saver__lazy. - array_lib = np - - def _transverse_mercator_cube(self, ellipsoid=None): - data = self.array_lib.arange(12).reshape(3, 4) - cube = Cube(data, "air_pressure_anomaly") - trans_merc = TransverseMercator( - 49.0, -2.0, -400000.0, 100000.0, 0.9996012717, ellipsoid - ) - coord = DimCoord( - np.arange(3), - "projection_y_coordinate", - units="m", - coord_system=trans_merc, - ) - cube.add_dim_coord(coord, 0) - coord = DimCoord( - np.arange(4), - "projection_x_coordinate", - units="m", - coord_system=trans_merc, - ) - cube.add_dim_coord(coord, 1) - return cube - - def _mercator_cube(self, ellipsoid=None): - data = self.array_lib.arange(12).reshape(3, 4) - cube = Cube(data, "air_pressure_anomaly") - merc = Mercator(49.0, ellipsoid) - coord = DimCoord( - np.arange(3), - "projection_y_coordinate", - units="m", - coord_system=merc, - ) - cube.add_dim_coord(coord, 0) - coord = DimCoord( - np.arange(4), - "projection_x_coordinate", - units="m", - coord_system=merc, - ) - cube.add_dim_coord(coord, 1) - return cube - - def _stereo_cube(self, 
ellipsoid=None): - data = self.array_lib.arange(12).reshape(3, 4) - cube = Cube(data, "air_pressure_anomaly") - stereo = Stereographic( - -10.0, 20.0, 500000.0, -200000.0, None, ellipsoid - ) - coord = DimCoord( - np.arange(3), - "projection_y_coordinate", - units="m", - coord_system=stereo, - ) - cube.add_dim_coord(coord, 0) - coord = DimCoord( - np.arange(4), - "projection_x_coordinate", - units="m", - coord_system=stereo, - ) - cube.add_dim_coord(coord, 1) - return cube - - def test_transverse_mercator(self): - # Create a Cube with a transverse Mercator coordinate system. - ellipsoid = GeogCS(6377563.396, 6356256.909) - cube = self._transverse_mercator_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) - - def test_transverse_mercator_no_ellipsoid(self): - # Create a Cube with a transverse Mercator coordinate system. - cube = self._transverse_mercator_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) - - def test_mercator(self): - # Create a Cube with a Mercator coordinate system. - ellipsoid = GeogCS(6377563.396, 6356256.909) - cube = self._mercator_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) - - def test_stereographic(self): - # Create a Cube with a stereographic coordinate system. - ellipsoid = GeogCS(6377563.396, 6356256.909) - cube = self._stereo_cube(ellipsoid) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) - - def test_mercator_no_ellipsoid(self): - # Create a Cube with a Mercator coordinate system. 
- cube = self._mercator_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) - - def test_stereographic_no_ellipsoid(self): - # Create a Cube with a stereographic coordinate system. - cube = self._stereo_cube() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) - - def _simple_cube(self, dtype): - data = self.array_lib.arange(12, dtype=dtype).reshape(3, 4) - points = np.arange(3, dtype=dtype) - bounds = np.arange(6, dtype=dtype).reshape(3, 2) - cube = Cube(data, "air_pressure_anomaly") - coord = DimCoord(points, bounds=bounds, units="1") - cube.add_dim_coord(coord, 0) - return cube - - def test_little_endian(self): - # Create a Cube with little-endian data. - cube = self._simple_cube("f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - result_path = self.result_path("endian", "cdl") - self.assertCDL(nc_path, result_path, flags="") - - def test_zlib(self): - cube = self._simple_cube(">f4") - api = self.patch("iris.fileformats.netcdf.netCDF4") - # Define mocked default fill values to prevent deprecation warning (#4374). 
- api.default_fillvals = collections.defaultdict(lambda: -99.0) - with Saver("/dummy/path", "NETCDF4") as saver: - saver.write(cube, zlib=True) - dataset = api.Dataset.return_value - create_var_call = mock.call( - "air_pressure_anomaly", - np.dtype("float32"), - ["dim0", "dim1"], - fill_value=None, - shuffle=True, - least_significant_digit=None, - contiguous=False, - zlib=True, - fletcher32=False, - endian="native", - complevel=4, - chunksizes=None, - ) - self.assertIn(create_var_call, dataset.createVariable.call_args_list) - - def test_least_significant_digit(self): - cube = Cube( - self.array_lib.array([1.23, 4.56, 7.89]), - standard_name="surface_temperature", - long_name=None, - var_name="temp", - units="K", - ) - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, least_significant_digit=1) - cube_saved = iris.load_cube(nc_path) - self.assertEqual( - cube_saved.attributes["least_significant_digit"], 1 - ) - self.assertFalse(np.all(cube.data == cube_saved.data)) - self.assertArrayAllClose(cube.data, cube_saved.data, 0.1) - - def test_default_unlimited_dimensions(self): - # Default is no unlimited dimensions. 
- cube = self._simple_cube(">f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = nc.Dataset(nc_path) - self.assertFalse(ds.dimensions["dim0"].isunlimited()) - self.assertFalse(ds.dimensions["dim1"].isunlimited()) - ds.close() - - def test_no_unlimited_dimensions(self): - cube = self._simple_cube(">f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=None) - ds = nc.Dataset(nc_path) - for dim in ds.dimensions.values(): - self.assertFalse(dim.isunlimited()) - ds.close() - - def test_invalid_unlimited_dimensions(self): - cube = self._simple_cube(">f4") - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - # should not raise an exception - saver.write(cube, unlimited_dimensions=["not_found"]) - - def test_custom_unlimited_dimensions(self): - cube = self._transverse_mercator_cube() - unlimited_dimensions = [ - "projection_y_coordinate", - "projection_x_coordinate", - ] - # test coordinates by name - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=unlimited_dimensions) - ds = nc.Dataset(nc_path) - for dim in unlimited_dimensions: - self.assertTrue(ds.dimensions[dim].isunlimited()) - ds.close() - # test coordinate arguments - with self.temp_filename(".nc") as nc_path: - coords = [cube.coord(dim) for dim in unlimited_dimensions] - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=coords) - ds = nc.Dataset(nc_path) - for dim in unlimited_dimensions: - self.assertTrue(ds.dimensions[dim].isunlimited()) - ds.close() - - def test_reserved_attributes(self): - cube = self._simple_cube(">f4") - cube.attributes["dimensions"] = "something something_else" - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - ds = nc.Dataset(nc_path) - res 
= ds.getncattr("dimensions") - ds.close() - self.assertEqual(res, "something something_else") - - def test_with_climatology(self): - cube = stock.climatology_3d() - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube) - self.assertCDL(nc_path) - - -class Test__create_cf_bounds(tests.IrisTest): - # Method is substituted in test_Saver__lazy. - @staticmethod - def climatology_3d(): - return stock.climatology_3d() - - def _check_bounds_setting(self, climatological=False): - # Generic test that can run with or without a climatological coord. - cube = self.climatology_3d() - coord = cube.coord("time").copy() - # Over-write original value from stock.climatology_3d with test value. - coord.climatological = climatological - - # Set up expected strings. - if climatological: - property_name = "climatology" - varname_extra = "climatology" - else: - property_name = "bounds" - varname_extra = "bnds" - boundsvar_name = "time_" + varname_extra - - # Set up arguments for testing _create_cf_bounds. - saver = mock.MagicMock(spec=Saver) - # NOTE: 'saver' must have spec=Saver to fake isinstance(save, Saver), - # so it can pass as 'self' in the call to _create_cf_cbounds. - # Mock a '_dataset' property; not automatic because 'spec=Saver'. - saver._dataset = mock.MagicMock() - # Mock the '_ensure_valid_dtype' method to return an object with a - # suitable 'shape' and 'dtype'. - saver._ensure_valid_dtype.return_value = mock.Mock( - shape=coord.bounds.shape, dtype=coord.bounds.dtype - ) - var = mock.MagicMock(spec=nc.Variable) - - # Make the main call. - Saver._create_cf_bounds(saver, coord, var, "time") - - # Test the call of _setncattr in _create_cf_bounds. - setncattr_call = mock.call( - property_name, boundsvar_name.encode(encoding="ascii") - ) - self.assertEqual(setncattr_call, var.setncattr.call_args) - - # Test the call of createVariable in _create_cf_bounds. 
- dataset = saver._dataset - expected_dimensions = var.dimensions + ("bnds",) - create_var_call = mock.call( - boundsvar_name, coord.bounds.dtype, expected_dimensions - ) - self.assertEqual(create_var_call, dataset.createVariable.call_args) - - def test_set_bounds_default(self): - self._check_bounds_setting(climatological=False) - - def test_set_bounds_climatology(self): - self._check_bounds_setting(climatological=True) - - -class Test_write__valid_x_cube_attributes(tests.IrisTest): - """Testing valid_range, valid_min and valid_max attributes.""" - - # Attribute is substituted in test_Saver__lazy. - array_lib = np - - def test_valid_range_saved(self): - cube = tests.stock.lat_lon_cube() - cube.data = cube.data.astype("int32") - - vrange = self.array_lib.array([1, 2], dtype="int32") - cube.attributes["valid_range"] = vrange - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) - self.assertArrayEqual(ds.valid_range, vrange) - ds.close() - - def test_valid_min_saved(self): - cube = tests.stock.lat_lon_cube() - cube.data = cube.data.astype("int32") - - cube.attributes["valid_min"] = 1 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) - self.assertArrayEqual(ds.valid_min, 1) - ds.close() - - def test_valid_max_saved(self): - cube = tests.stock.lat_lon_cube() - cube.data = cube.data.astype("int32") - - cube.attributes["valid_max"] = 2 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) - self.assertArrayEqual(ds.valid_max, 2) - ds.close() - - -class Test_write__valid_x_coord_attributes(tests.IrisTest): - """Testing valid_range, valid_min and valid_max attributes.""" - - # Attribute is substituted in test_Saver__lazy. 
- array_lib = np - - def test_valid_range_saved(self): - cube = tests.stock.lat_lon_cube() - cube.data = cube.data.astype("int32") - - vrange = self.array_lib.array([1, 2], dtype="int32") - cube.coord(axis="x").attributes["valid_range"] = vrange - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) - self.assertArrayEqual( - ds.variables["longitude"].valid_range, vrange - ) - ds.close() - - def test_valid_min_saved(self): - cube = tests.stock.lat_lon_cube() - cube.data = cube.data.astype("int32") - - cube.coord(axis="x").attributes["valid_min"] = 1 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_min, 1) - ds.close() - - def test_valid_max_saved(self): - cube = tests.stock.lat_lon_cube() - cube.data = cube.data.astype("int32") - - cube.coord(axis="x").attributes["valid_max"] = 2 - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, unlimited_dimensions=[]) - ds = nc.Dataset(nc_path) - self.assertArrayEqual(ds.variables["longitude"].valid_max, 2) - ds.close() - - -class Test_write_fill_value(tests.IrisTest): - # Attribute is substituted in test_Saver__lazy. 
- array_lib = np - - def _make_cube(self, dtype, masked_value=None, masked_index=None): - data = self.array_lib.arange(12, dtype=dtype).reshape(3, 4) - if masked_value is not None: - data = ma.masked_equal(data, masked_value) - if masked_index is not None: - data = self.array_lib.ma.masked_array(data) - data[masked_index] = ma.masked - lat = DimCoord(np.arange(3), "latitude", units="degrees") - lon = DimCoord(np.arange(4), "longitude", units="degrees") - return Cube( - data, - standard_name="air_temperature", - units="K", - dim_coords_and_dims=[(lat, 0), (lon, 1)], - ) - - @contextmanager - def _netCDF_var(self, cube, **kwargs): - # Get the netCDF4 Variable for a cube from a temp file - standard_name = cube.standard_name - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube, **kwargs) - ds = nc.Dataset(nc_path) - (var,) = [ - var - for var in ds.variables.values() - if var.standard_name == standard_name - ] - yield var - - def test_fill_value(self): - # Test that a passed fill value is saved as a _FillValue attribute. - cube = self._make_cube(">f4") - fill_value = 12345.0 - with self._netCDF_var(cube, fill_value=fill_value) as var: - self.assertEqual(fill_value, var._FillValue) - - def test_default_fill_value(self): - # Test that if no fill value is passed then there is no _FillValue. - # attribute. - cube = self._make_cube(">f4") - with self._netCDF_var(cube) as var: - self.assertNotIn("_FillValue", var.ncattrs()) - - def test_mask_fill_value(self): - # Test that masked data saves correctly when given a fill value. - index = (1, 1) - fill_value = 12345.0 - cube = self._make_cube(">f4", masked_index=index) - with self._netCDF_var(cube, fill_value=fill_value) as var: - self.assertEqual(fill_value, var._FillValue) - self.assertTrue(var[index].mask) - - def test_mask_default_fill_value(self): - # Test that masked data saves correctly using the default fill value. 
- index = (1, 1) - cube = self._make_cube(">f4", masked_index=index) - with self._netCDF_var(cube) as var: - self.assertNotIn("_FillValue", var.ncattrs()) - self.assertTrue(var[index].mask) - - def test_contains_fill_value_passed(self): - # Test that a warning is raised if the data contains the fill value. - cube = self._make_cube(">f4") - fill_value = 1 - with self.assertWarnsRegexp( - "contains unmasked data points equal to the fill-value" - ): - with self._netCDF_var(cube, fill_value=fill_value): - pass - - def test_contains_fill_value_byte(self): - # Test that a warning is raised if the data contains the fill value - # when it is of a byte type. - cube = self._make_cube(">i1") - fill_value = 1 - with self.assertWarnsRegexp( - "contains unmasked data points equal to the fill-value" - ): - with self._netCDF_var(cube, fill_value=fill_value): - pass - - def test_contains_default_fill_value(self): - # Test that a warning is raised if the data contains the default fill - # value if no fill_value argument is supplied. - cube = self._make_cube(">f4") - cube.data[0, 0] = nc.default_fillvals["f4"] - with self.assertWarnsRegexp( - "contains unmasked data points equal to the fill-value" - ): - with self._netCDF_var(cube): - pass - - def test_contains_default_fill_value_byte(self): - # Test that no warning is raised if the data contains the default fill - # value if no fill_value argument is supplied when the data is of a - # byte type. - cube = self._make_cube(">i1") - with self.assertNoWarningsRegexp(r"\(fill\|mask\)"): - with self._netCDF_var(cube): - pass - - def test_contains_masked_fill_value(self): - # Test that no warning is raised if the data contains the fill_value at - # a masked point. 
- fill_value = 1 - cube = self._make_cube(">f4", masked_value=fill_value) - with self.assertNoWarningsRegexp(r"\(fill\|mask\)"): - with self._netCDF_var(cube, fill_value=fill_value): - pass - - def test_masked_byte_default_fill_value(self): - # Test that a warning is raised when saving masked byte data with no - # fill value supplied. - cube = self._make_cube(">i1", masked_value=1) - with self.assertNoWarningsRegexp(r"\(fill\|mask\)"): - with self._netCDF_var(cube): - pass - - def test_masked_byte_fill_value_passed(self): - # Test that no warning is raised when saving masked byte data with a - # fill value supplied if the the data does not contain the fill_value. - fill_value = 100 - cube = self._make_cube(">i1", masked_value=2) - with self.assertNoWarningsRegexp(r"\(fill\|mask\)"): - with self._netCDF_var(cube, fill_value=fill_value): - pass - - -class Test_cf_valid_var_name(tests.IrisTest): - def test_no_replacement(self): - self.assertEqual(Saver.cf_valid_var_name("valid_Nam3"), "valid_Nam3") - - def test_special_chars(self): - self.assertEqual(Saver.cf_valid_var_name("inv?alid"), "inv_alid") - - def test_leading_underscore(self): - self.assertEqual(Saver.cf_valid_var_name("_invalid"), "var__invalid") - - def test_leading_number(self): - self.assertEqual(Saver.cf_valid_var_name("2invalid"), "var_2invalid") - - def test_leading_invalid(self): - self.assertEqual(Saver.cf_valid_var_name("?invalid"), "var__invalid") - - def test_no_hyphen(self): - # CF explicitly prohibits hyphen, even though it is fine in NetCDF. - self.assertEqual( - Saver.cf_valid_var_name("valid-netcdf"), "valid_netcdf" - ) - - -class _Common__check_attribute_compliance: - # Attribute is substituted in test_Saver__lazy. 
- array_lib = np - - def setUp(self): - self.container = mock.Mock(name="container", attributes={}) - self.data_dtype = np.dtype("int32") - - patch = mock.patch("netCDF4.Dataset") - _ = patch.start() - self.addCleanup(patch.stop) - - def set_attribute(self, value): - self.container.attributes[self.attribute] = value - - def assertAttribute(self, value): - self.assertEqual( - np.asarray(self.container.attributes[self.attribute]).dtype, value - ) - - def check_attribute_compliance_call(self, value): - self.set_attribute(value) - with Saver(mock.Mock(), "NETCDF4") as saver: - saver.check_attribute_compliance(self.container, self.data_dtype) - - -class Test_check_attribute_compliance__valid_range( - _Common__check_attribute_compliance, tests.IrisTest -): - @property - def attribute(self): - return "valid_range" - - def test_valid_range_type_coerce(self): - value = self.array_lib.array([1, 2], dtype="float") - self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) - - def test_valid_range_unsigned_int8_data_signed_range(self): - self.data_dtype = np.dtype("uint8") - value = self.array_lib.array([1, 2], dtype="int8") - self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) - - def test_valid_range_cannot_coerce(self): - value = self.array_lib.array([1.5, 2.5], dtype="float64") - msg = '"valid_range" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): - self.check_attribute_compliance_call(value) - - def test_valid_range_not_numpy_array(self): - # Ensure we handle the case when not a numpy array is provided. 
- self.data_dtype = np.dtype("int8") - value = [1, 2] - self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) - - -class Test_check_attribute_compliance__valid_min( - _Common__check_attribute_compliance, tests.IrisTest -): - @property - def attribute(self): - return "valid_min" - - def test_valid_range_type_coerce(self): - value = self.array_lib.array(1, dtype="float") - self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) - - def test_valid_range_unsigned_int8_data_signed_range(self): - self.data_dtype = np.dtype("uint8") - value = self.array_lib.array(1, dtype="int8") - self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) - - def test_valid_range_cannot_coerce(self): - value = self.array_lib.array(1.5, dtype="float64") - msg = '"valid_min" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): - self.check_attribute_compliance_call(value) - - def test_valid_range_not_numpy_array(self): - # Ensure we handle the case when not a numpy array is provided. 
- self.data_dtype = np.dtype("int8") - value = 1 - self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) - - -class Test_check_attribute_compliance__valid_max( - _Common__check_attribute_compliance, tests.IrisTest -): - @property - def attribute(self): - return "valid_max" - - def test_valid_range_type_coerce(self): - value = self.array_lib.array(2, dtype="float") - self.check_attribute_compliance_call(value) - self.assertAttribute(self.data_dtype) - - def test_valid_range_unsigned_int8_data_signed_range(self): - self.data_dtype = np.dtype("uint8") - value = self.array_lib.array(2, dtype="int8") - self.check_attribute_compliance_call(value) - self.assertAttribute(value.dtype) - - def test_valid_range_cannot_coerce(self): - value = self.array_lib.array(2.5, dtype="float64") - msg = '"valid_max" is not of a suitable value' - with self.assertRaisesRegex(ValueError, msg): - self.check_attribute_compliance_call(value) - - def test_valid_range_not_numpy_array(self): - # Ensure we handle the case when not a numpy array is provided. - self.data_dtype = np.dtype("int8") - value = 2 - self.check_attribute_compliance_call(value) - self.assertAttribute(np.int64) - - -class Test_check_attribute_compliance__exception_handling( - _Common__check_attribute_compliance, tests.IrisTest -): - def test_valid_range_and_valid_min_valid_max_provided(self): - # Conflicting attributes should raise a suitable exception. 
- self.data_dtype = np.dtype("int8") - self.container.attributes["valid_range"] = [1, 2] - self.container.attributes["valid_min"] = [1] - msg = 'Both "valid_range" and "valid_min"' - with Saver(mock.Mock(), "NETCDF4") as saver: - with self.assertRaisesRegex(ValueError, msg): - saver.check_attribute_compliance( - self.container, self.data_dtype - ) - - -class Test__cf_coord_identity(tests.IrisTest): - def check_call(self, coord_name, coord_system, units, expected_units): - coord = iris.coords.DimCoord( - [30, 45], coord_name, units=units, coord_system=coord_system - ) - result = Saver._cf_coord_standardised_units(coord) - self.assertEqual(result, expected_units) - - def test_geogcs_latitude(self): - crs = iris.coord_systems.GeogCS(60, 0) - self.check_call( - "latitude", - coord_system=crs, - units="degrees", - expected_units="degrees_north", - ) - - def test_geogcs_longitude(self): - crs = iris.coord_systems.GeogCS(60, 0) - self.check_call( - "longitude", - coord_system=crs, - units="degrees", - expected_units="degrees_east", - ) - - def test_no_coord_system_latitude(self): - self.check_call( - "latitude", - coord_system=None, - units="degrees", - expected_units="degrees_north", - ) - - def test_no_coord_system_longitude(self): - self.check_call( - "longitude", - coord_system=None, - units="degrees", - expected_units="degrees_east", - ) - - def test_passthrough_units(self): - crs = iris.coord_systems.LambertConformal(0, 20) - self.check_call( - "projection_x_coordinate", - coord_system=crs, - units="km", - expected_units="km", - ) - - -class Test__create_cf_grid_mapping(tests.IrisTest): - def _cube_with_cs(self, coord_system): - """Return a simple 2D cube that uses the given coordinate system.""" - cube = stock.lat_lon_cube() - x, y = cube.coord("longitude"), cube.coord("latitude") - x.coord_system = y.coord_system = coord_system - return cube - - def _grid_mapping_variable(self, coord_system): - """ - Return a mock netCDF variable that represents the conversion - 
of the given coordinate system. - - """ - cube = self._cube_with_cs(coord_system) - - class NCMock(mock.Mock): - def setncattr(self, name, attr): - setattr(self, name, attr) - - # Calls the actual NetCDF saver with appropriate mocking, returning - # the grid variable that gets created. - grid_variable = NCMock(name="NetCDFVariable") - create_var_fn = mock.Mock(side_effect=[grid_variable]) - dataset = mock.Mock(variables=[], createVariable=create_var_fn) - saver = mock.Mock(spec=Saver, _coord_systems=[], _dataset=dataset) - variable = NCMock() - - # This is the method we're actually testing! - Saver._create_cf_grid_mapping(saver, cube, variable) - - self.assertEqual(create_var_fn.call_count, 1) - self.assertEqual( - variable.grid_mapping, grid_variable.grid_mapping_name - ) - return grid_variable - - def _variable_attributes(self, coord_system): - """ - Return the attributes dictionary for the grid mapping variable - that is created from the given coordinate system. - - """ - mock_grid_variable = self._grid_mapping_variable(coord_system) - - # Get the attributes defined on the mock object. - attributes = sorted(mock_grid_variable.__dict__.keys()) - attributes = [name for name in attributes if not name.startswith("_")] - attributes.remove("method_calls") - return {key: getattr(mock_grid_variable, key) for key in attributes} - - def _test(self, coord_system, expected): - actual = self._variable_attributes(coord_system) - - # To see obvious differences, check that they keys are the same. - self.assertEqual(sorted(actual.keys()), sorted(expected.keys())) - # Now check that the values are equivalent. 
- self.assertEqual(actual, expected) - - def test_rotated_geog_cs(self): - coord_system = RotatedGeogCS(37.5, 177.5, ellipsoid=GeogCS(6371229.0)) - expected = { - "grid_mapping_name": b"rotated_latitude_longitude", - "north_pole_grid_longitude": 0.0, - "grid_north_pole_longitude": 177.5, - "grid_north_pole_latitude": 37.5, - "longitude_of_prime_meridian": 0.0, - "earth_radius": 6371229.0, - } - self._test(coord_system, expected) - - def test_spherical_geog_cs(self): - coord_system = GeogCS(6371229.0) - expected = { - "grid_mapping_name": b"latitude_longitude", - "longitude_of_prime_meridian": 0.0, - "earth_radius": 6371229.0, - } - self._test(coord_system, expected) - - def test_elliptic_geog_cs(self): - coord_system = GeogCS(637, 600) - expected = { - "grid_mapping_name": b"latitude_longitude", - "longitude_of_prime_meridian": 0.0, - "semi_minor_axis": 600.0, - "semi_major_axis": 637.0, - } - self._test(coord_system, expected) - - def test_lambert_conformal(self): - coord_system = LambertConformal( - central_lat=44, - central_lon=2, - false_easting=-2, - false_northing=-5, - secant_latitudes=(38, 50), - ellipsoid=GeogCS(6371000), - ) - expected = { - "grid_mapping_name": b"lambert_conformal_conic", - "latitude_of_projection_origin": 44, - "longitude_of_central_meridian": 2, - "false_easting": -2, - "false_northing": -5, - "standard_parallel": (38, 50), - "earth_radius": 6371000, - "longitude_of_prime_meridian": 0, - } - self._test(coord_system, expected) - - def test_laea_cs(self): - coord_system = LambertAzimuthalEqualArea( - latitude_of_projection_origin=52, - longitude_of_projection_origin=10, - false_easting=100, - false_northing=200, - ellipsoid=GeogCS(6377563.396, 6356256.909), - ) - expected = { - "grid_mapping_name": b"lambert_azimuthal_equal_area", - "latitude_of_projection_origin": 52, - "longitude_of_projection_origin": 10, - "false_easting": 100, - "false_northing": 200, - "semi_major_axis": 6377563.396, - "semi_minor_axis": 6356256.909, - 
"longitude_of_prime_meridian": 0, - } - self._test(coord_system, expected) - - def test_aea_cs(self): - coord_system = AlbersEqualArea( - latitude_of_projection_origin=52, - longitude_of_central_meridian=10, - false_easting=100, - false_northing=200, - standard_parallels=(38, 50), - ellipsoid=GeogCS(6377563.396, 6356256.909), - ) - expected = { - "grid_mapping_name": b"albers_conical_equal_area", - "latitude_of_projection_origin": 52, - "longitude_of_central_meridian": 10, - "false_easting": 100, - "false_northing": 200, - "standard_parallel": (38, 50), - "semi_major_axis": 6377563.396, - "semi_minor_axis": 6356256.909, - "longitude_of_prime_meridian": 0, - } - self._test(coord_system, expected) - - def test_vp_cs(self): - latitude_of_projection_origin = 1.0 - longitude_of_projection_origin = 2.0 - perspective_point_height = 2000000.0 - false_easting = 100.0 - false_northing = 200.0 - - semi_major_axis = 6377563.396 - semi_minor_axis = 6356256.909 - ellipsoid = GeogCS(semi_major_axis, semi_minor_axis) - - coord_system = VerticalPerspective( - latitude_of_projection_origin=latitude_of_projection_origin, - longitude_of_projection_origin=longitude_of_projection_origin, - perspective_point_height=perspective_point_height, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=ellipsoid, - ) - expected = { - "grid_mapping_name": b"vertical_perspective", - "latitude_of_projection_origin": latitude_of_projection_origin, - "longitude_of_projection_origin": longitude_of_projection_origin, - "perspective_point_height": perspective_point_height, - "false_easting": false_easting, - "false_northing": false_northing, - "semi_major_axis": semi_major_axis, - "semi_minor_axis": semi_minor_axis, - "longitude_of_prime_meridian": 0, - } - self._test(coord_system, expected) - - def test_geo_cs(self): - latitude_of_projection_origin = 0.0 - longitude_of_projection_origin = 2.0 - perspective_point_height = 2000000.0 - sweep_angle_axis = "x" - false_easting = 100.0 - 
false_northing = 200.0 - - semi_major_axis = 6377563.396 - semi_minor_axis = 6356256.909 - ellipsoid = GeogCS(semi_major_axis, semi_minor_axis) - - coord_system = Geostationary( - latitude_of_projection_origin=latitude_of_projection_origin, - longitude_of_projection_origin=longitude_of_projection_origin, - perspective_point_height=perspective_point_height, - sweep_angle_axis=sweep_angle_axis, - false_easting=false_easting, - false_northing=false_northing, - ellipsoid=ellipsoid, - ) - expected = { - "grid_mapping_name": b"geostationary", - "latitude_of_projection_origin": latitude_of_projection_origin, - "longitude_of_projection_origin": longitude_of_projection_origin, - "perspective_point_height": perspective_point_height, - "sweep_angle_axis": sweep_angle_axis, - "false_easting": false_easting, - "false_northing": false_northing, - "semi_major_axis": semi_major_axis, - "semi_minor_axis": semi_minor_axis, - "longitude_of_prime_meridian": 0, - } - self._test(coord_system, expected) - - -class Test__create_cf_cell_measure_variable(tests.IrisTest): - # Saving of masked data is disallowed. - - # Attribute is substituted in test_Saver__lazy. - array_lib = np - - def setUp(self): - self.cube = stock.lat_lon_cube() - self.names_map = ["latitude", "longitude"] - masked_array = self.array_lib.ma.masked_array( - [0, 1, 2], mask=[True, False, True] - ) - self.cm = iris.coords.CellMeasure(masked_array, var_name="cell_area") - self.cube.add_cell_measure(self.cm, data_dims=0) - self.exp_emsg = "Cell measures with missing data are not supported." - - def test_masked_data__insitu(self): - # Test that the error is raised in the right place. - with self.temp_filename(".nc") as nc_path: - saver = Saver(nc_path, "NETCDF4") - with self.assertRaisesRegex(ValueError, self.exp_emsg): - saver._create_generic_cf_array_var( - self.cube, self.names_map, self.cm - ) - - def test_masked_data__save_pipeline(self): - # Test that the right error is raised by the saver pipeline. 
- with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - with self.assertRaisesRegex(ValueError, self.exp_emsg): - saver.write(self.cube) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py deleted file mode 100644 index eab09b9e4f..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__lazy.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Mirror of :mod:`iris.tests.unit.fileformats.netcdf.test_Saver`, but with lazy arrays.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from dask import array as da - -from iris.coords import AuxCoord -from iris.fileformats.netcdf import Saver -from iris.tests import stock -from iris.tests.unit.fileformats.netcdf import test_Saver - - -class LazyMixin(tests.IrisTest): - array_lib = da - - def result_path(self, basename=None, ext=""): - # Precisely mirroring the tests in test_Saver, so use those CDL's. 
- original = super().result_path(basename, ext) - return original.replace("Saver__lazy", "Saver") - - -class Test_write(LazyMixin, test_Saver.Test_write): - pass - - -class Test__create_cf_bounds(test_Saver.Test__create_cf_bounds): - @staticmethod - def climatology_3d(): - cube = stock.climatology_3d() - aux_coord = AuxCoord.from_coord(cube.coord("time")) - lazy_coord = aux_coord.copy( - aux_coord.lazy_points(), aux_coord.lazy_bounds() - ) - cube.replace_coord(lazy_coord) - return cube - - -class Test_write__valid_x_cube_attributes( - LazyMixin, test_Saver.Test_write__valid_x_cube_attributes -): - pass - - -class Test_write__valid_x_coord_attributes( - LazyMixin, test_Saver.Test_write__valid_x_coord_attributes -): - pass - - -class Test_write_fill_value(LazyMixin, test_Saver.Test_write_fill_value): - pass - - -class Test_check_attribute_compliance__valid_range( - LazyMixin, test_Saver.Test_check_attribute_compliance__valid_range -): - pass - - -class Test_check_attribute_compliance__valid_min( - LazyMixin, test_Saver.Test_check_attribute_compliance__valid_min -): - pass - - -class Test_check_attribute_compliance__valid_max( - LazyMixin, test_Saver.Test_check_attribute_compliance__valid_max -): - pass - - -class Test_check_attribute_compliance__exception_handling( - LazyMixin, test_Saver.Test_check_attribute_compliance__exception_handling -): - pass - - -class Test__create_cf_cell_measure_variable( - LazyMixin, test_Saver.Test__create_cf_cell_measure_variable -): - pass - - -class TestStreamed(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_2d() - self.store_watch = self.patch("dask.array.store") - - def save_common(self, cube_to_save): - with self.temp_filename(".nc") as nc_path: - with Saver(nc_path, "NETCDF4") as saver: - saver.write(cube_to_save) - - def test_realised_not_streamed(self): - self.save_common(self.cube) - self.assertFalse(self.store_watch.called) - - def test_lazy_streamed_data(self): - self.cube.data = self.cube.lazy_data() - 
self.save_common(self.cube) - self.assertTrue(self.store_watch.called) - - def test_lazy_streamed_coord(self): - aux_coord = AuxCoord.from_coord(self.cube.coords()[0]) - lazy_coord = aux_coord.copy( - aux_coord.lazy_points(), aux_coord.lazy_bounds() - ) - self.cube.replace_coord(lazy_coord) - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) - - def test_lazy_streamed_bounds(self): - aux_coord = AuxCoord.from_coord(self.cube.coords()[0]) - lazy_coord = aux_coord.copy(aux_coord.points, aux_coord.lazy_bounds()) - self.cube.replace_coord(lazy_coord) - self.save_common(self.cube) - self.assertTrue(self.store_watch.called) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py b/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py deleted file mode 100644 index a914dd3314..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test_Saver__ugrid.py +++ /dev/null @@ -1,1280 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :class:`iris.fileformats.netcdf.Saver` class. - -WHEN MODIFYING THIS MODULE, CHECK IF ANY CORRESPONDING CHANGES ARE NEEDED IN -:mod:`iris.tests.unit.fileformats.netcdf.test_Saver__lazy.` - -""" -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from pathlib import Path -import shutil -import tempfile - -import netCDF4 as nc -import numpy as np - -from iris import save -from iris.coords import AuxCoord -from iris.cube import Cube, CubeList -from iris.experimental.ugrid.mesh import Connectivity, Mesh -from iris.experimental.ugrid.save import save_mesh -from iris.tests.stock import realistic_4d - -XY_LOCS = ("x", "y") -XY_NAMES = ("longitude", "latitude") - - -def build_mesh( - n_nodes=2, - n_faces=0, - n_edges=0, - nodecoord_xyargs=None, - edgecoord_xyargs=None, - facecoord_xyargs=None, - conn_role_kwargs=None, # mapping {connectivity-role: connectivity-kwargs} - mesh_kwargs=None, -): - """ - Make a test mesh. - - Mesh has faces edges, face-coords and edge-coords, numbers of which can be - controlled. - - Args: - - * n_nodes, n_faces, n_edges (int): - Basic dimensions of mesh components. Zero means no such location. - * nodecoord_xyargs, edgecoord_xyargs, facecoord_xyargs (pair of dict): - Pairs (x,y) of settings kwargs, applied after initial creation the - relevant location coordinates. - * conn_role_kwargs (dict of string:dict): - Mapping from cf_role name to settings kwargs for connectivities, - applied after initially creating them. - * mesh_kwargs (dict): - Dictionary of key settings to apply to the Mesh, after creating it. 
- - """ - - def applyargs(coord, kwargs): - if kwargs: - for key, val in kwargs.items(): - # kwargs is a dict - setattr(coord, key, val) - - def apply_xyargs(coords, xyargs): - if xyargs: - for coord, kwargs in zip(coords, xyargs): - # coords and xyargs both iterables : implicitly=(x,y) - applyargs(coord, kwargs) - - node_coords = [ - AuxCoord(np.arange(n_nodes), standard_name=name) for name in XY_NAMES - ] - apply_xyargs(node_coords, nodecoord_xyargs) - - connectivities = {} - edge_coords = [] - face_coords = [] - topology_dimension = 0 - if n_edges: - topology_dimension = 1 - connectivities["edge_node_connectivity"] = Connectivity( - np.zeros((n_edges, 2), np.int32), cf_role="edge_node_connectivity" - ) - edge_coords = [ - AuxCoord(np.arange(n_edges), standard_name=name) - for name in XY_NAMES - ] - apply_xyargs(edge_coords, edgecoord_xyargs) - - if n_faces: - topology_dimension = 2 - connectivities["face_node_connectivity"] = Connectivity( - np.zeros((n_faces, 4), np.int32), cf_role="face_node_connectivity" - ) - face_coords = [ - AuxCoord(np.arange(n_faces), standard_name=name) - for name in XY_NAMES - ] - apply_xyargs(face_coords, facecoord_xyargs) - - mesh_dims = {"node": n_nodes, "edge": n_edges, "face": n_faces} - - if conn_role_kwargs: - for role, kwargs in conn_role_kwargs.items(): - if role in connectivities: - conn = connectivities[role] - else: - loc_from, loc_to, _ = role.split("_") - dims = [mesh_dims[loc] for loc in (loc_from, loc_to)] - conn = Connectivity( - np.zeros(dims, dtype=np.int32), cf_role=role - ) - connectivities[role] = conn - applyargs(conn, kwargs) - - mesh = Mesh( - topology_dimension=topology_dimension, - node_coords_and_axes=zip(node_coords, XY_LOCS), - edge_coords_and_axes=zip(edge_coords, XY_LOCS), - face_coords_and_axes=zip(face_coords, XY_LOCS), - connectivities=connectivities.values(), - ) - applyargs(mesh, mesh_kwargs) - - return mesh - - -def make_mesh(basic=True, **kwargs): - """ - Create a test mesh, with some built-in 
'standard' settings. - - Kwargs: - - * basic (bool): - If true (default), create with 'standard' set of test properties. - * kwargs (dict): - Additional kwargs, passed through to 'build_mesh'. - Items here override the 'standard' settings. - - """ - if basic: - # Use some helpful non-minimal settings as our 'basic' mesh. - use_kwargs = dict( - n_nodes=5, - n_faces=2, - nodecoord_xyargs=tuple( - dict(var_name=f"node_{loc}") for loc in XY_LOCS - ), - facecoord_xyargs=tuple( - dict(var_name=f"face_{loc}") for loc in XY_LOCS - ), - mesh_kwargs=dict( - var_name="Mesh2d", - node_dimension="Mesh2d_nodes", - face_dimension="Mesh2d_faces", - ), - ) - use_kwargs.update(kwargs) - else: - use_kwargs = kwargs - - mesh = build_mesh(**use_kwargs) - return mesh - - -def mesh_location_size(mesh, location): - """Get the length of a location-dimension from a mesh.""" - if location == "node": - # Use a node coordinate (which always exists). - node_coord = mesh.node_coords[0] - result = node_coord.shape[0] - else: - # Use a _node_connectivity, if any. - conn_name = f"{location}_node_connectivity" - conn = getattr(mesh, conn_name, None) - if conn is None: - result = 0 - else: - result = conn.shape[conn.location_axis] - return result - - -# A simple "standard" test mesh for multiple uses, which we can use for cubes -# that *share* a mesh (since we don't support mesh equality). -# However, we defer creating this until needed, as it can cause an import loop. -_DEFAULT_MESH = None - - -def default_mesh(): - """Return the unique default mesh, creating it if needed.""" - global _DEFAULT_MESH - if _DEFAULT_MESH is None: - _DEFAULT_MESH = make_mesh() - return _DEFAULT_MESH - - -def make_cube(mesh=None, location="face", **kwargs): - """ - Create a test cube, based on a given mesh + location. - - Kwargs: - - * mesh (:class:`iris.experimental.ugrid.mesh.Mesh` or None): - If None, use 'default_mesh()' - * location (string): - Which mesh element to map the cube to. 
- * kwargs (dict): - Additional property settings to apply to the cube (after creation). - - """ - if mesh is None: - mesh = default_mesh() - dim_length = mesh_location_size(mesh, location) - cube = Cube(np.zeros(dim_length, np.float32)) - for meshco in mesh.to_MeshCoords(location): - cube.add_aux_coord(meshco, (0,)) - for key, val in kwargs.items(): - setattr(cube, key, val) - return cube - - -def add_height_dim(cube): - """Add an extra initial 'height' dimension onto a cube.""" - cube = cube.copy() # Avoid trashing the input cube. - cube.add_aux_coord(AuxCoord([0.0], standard_name="height", units="m")) - # Make three copies with different heights - cubes = [cube.copy() for _ in range(3)] - for i_cube, cube in enumerate(cubes): - cube.coord("height").points = [i_cube] - # Merge to create an additional 'height' dimension. - cube = CubeList(cubes).merge_cube() - return cube - - -# Special key-string for storing the dimensions of a variable -_VAR_DIMS = "" - - -def scan_dataset(filepath): - """ - Snapshot a netcdf dataset (the key metadata). - - Returns: - dimsdict, varsdict - * dimsdict (dict): - A map of dimension-name: length. - * varsdict (dict): - A map of each variable's properties, {var_name: propsdict} - Each propsdict is {attribute-name: value} over the var's ncattrs(). - Each propsdict ALSO contains a [_VAR_DIMS] entry listing the - variable's dims. - - """ - ds = nc.Dataset(filepath) - # dims dict is {name: len} - dimsdict = {name: dim.size for name, dim in ds.dimensions.items()} - # vars dict is {name: {attr:val}} - varsdict = {} - for name, var in ds.variables.items(): - varsdict[name] = {prop: getattr(var, prop) for prop in var.ncattrs()} - varsdict[name][_VAR_DIMS] = list(var.dimensions) - ds.close() - return dimsdict, varsdict - - -def vars_w_props(varsdict, **kwargs): - """ - Subset a vars dict, {name:props}, returning only those where each - =, defined by the given keywords. - Except that '="*"' means that '' merely _exists_, with any value. 
- - """ - - def check_attrs_match(attrs): - result = True - for key, val in kwargs.items(): - result = key in attrs - if result: - # val='*'' for a simple existence check - result = (val == "*") or attrs[key] == val - if not result: - break - return result - - varsdict = { - name: attrs - for name, attrs in varsdict.items() - if check_attrs_match(attrs) - } - return varsdict - - -def vars_w_dims(varsdict, dim_names): - """Subset a vars dict, returning those which map all the specified dims.""" - varsdict = { - name: propsdict - for name, propsdict in varsdict.items() - if all(dim in propsdict[_VAR_DIMS] for dim in dim_names) - } - return varsdict - - -def vars_meshnames(vars): - """Return the names of all the mesh variables (found by cf_role).""" - return list(vars_w_props(vars, cf_role="mesh_topology").keys()) - - -def vars_meshdim(vars, location, mesh_name=None): - """ - Extract a dim-name for a given element location. - - Args: - * vars (varsdict): - file varsdict, as returned from 'snapshot_dataset'. - * location (string): - a mesh location : 'node' / 'edge' / 'face' - * mesh_name (string or None): - If given, identifies the mesh var. - Otherwise, find a unique mesh var (i.e. there must be exactly 1). - - Returns: - dim_name (string) - The dim-name of the mesh dim for the given location. - - TODO: relies on the element having coordinates, which in future will not - always be the case. This can be fixed - - """ - if mesh_name is None: - # Find "the" meshvar -- assuming there is just one. 
- (mesh_name,) = vars_meshnames(vars) - mesh_props = vars[mesh_name] - loc_coords = mesh_props[f"{location}_coordinates"].split(" ") - (single_location_dim,) = vars[loc_coords[0]][_VAR_DIMS] - return single_location_dim - - -class TestSaveUgrid__cube(tests.IrisTest): - """Test for saving cubes which have meshes.""" - - @classmethod - def setUpClass(cls): - cls.temp_dir = Path(tempfile.mkdtemp()) - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temp_dir) - - def check_save_cubes(self, cube_or_cubes): - """ - Write cubes to a new file in the common temporary directory. - - Use a name unique to this testcase, to avoid any clashes. - - """ - # use 'result_path' to name the file after the test function - tempfile_path = self.result_path(ext=".nc") - # Create a file of that name, but discard the result path and put it - # in the common temporary directory. - tempfile_path = self.temp_dir / Path(tempfile_path).name - - # Save data to the file. - save(cube_or_cubes, tempfile_path) - - return tempfile_path - - def test_basic_mesh(self): - # Save a small mesh example and check aspects of the resulting file. - cube = make_cube() # A simple face-mapped data example. - - # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) - dims, vars = scan_dataset(tempfile_path) - - # There is exactly 1 mesh var. - (mesh_name,) = vars_meshnames(vars) - - # There is exactly 1 mesh-linked (data)var - data_vars = vars_w_props(vars, mesh="*") - ((a_name, a_props),) = data_vars.items() - mesh_props = vars[mesh_name] - - # The mesh var links to the mesh, with location 'faces' - self.assertEqual(a_name, "unknown") - self.assertEqual(a_props["mesh"], mesh_name) - self.assertEqual(a_props["location"], "face") - - # There are 2 face coords == those listed in the mesh - face_coords = mesh_props["face_coordinates"].split(" ") - self.assertEqual(len(face_coords), 2) - - # The face coords should both map that single dim. 
- face_dim = vars_meshdim(vars, "face") - self.assertTrue( - all(vars[co][_VAR_DIMS] == [face_dim] for co in face_coords) - ) - - # The dims of the datavar also == [] - self.assertEqual(a_props[_VAR_DIMS], [face_dim]) - - # There are 2 node coordinates == those listed in the mesh. - node_coords = mesh_props["node_coordinates"].split(" ") - self.assertEqual(len(node_coords), 2) - # These are the *only* ones using the 'nodes' dimension. - node_dim = vars_meshdim(vars, "node") - self.assertEqual( - sorted(node_coords), sorted(vars_w_dims(vars, [node_dim]).keys()) - ) - - # There are no edges. - self.assertNotIn("edge_node_connectivity", mesh_props) - self.assertEqual( - len(vars_w_props(vars, cf_role="edge_node_connectivity")), 0 - ) - - # The dims are precisely (nodes, faces, nodes-per-face), in that order. - self.assertEqual( - list(dims.keys()), - ["Mesh2d_nodes", "Mesh2d_faces", "Mesh2d_face_N_nodes"], - ) - - # The variables are exactly (mesh, 2*node-coords, 2*face-coords, - # face-nodes, data) -- in that order - self.assertEqual( - list(vars.keys()), - [ - "Mesh2d", - "node_x", - "node_y", - "face_x", - "face_y", - "mesh2d_faces", - "unknown", - ], - ) - - # For completeness, also check against a full CDL snapshot - self.assertCDL(tempfile_path) - - def test_multi_cubes_common_mesh(self): - cube1 = make_cube(var_name="a") - cube2 = make_cube(var_name="b") - - # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) - dims, vars = scan_dataset(tempfile_path) - - # there is exactly 1 mesh in the file - (mesh_name,) = vars_meshnames(vars) - - # both the main variables reference the same mesh, and 'face' location - v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "face") - - def test_multi_cubes_different_locations(self): - cube1 = make_cube(var_name="a", location="face") - cube2 = 
make_cube(var_name="b", location="node") - - # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) - dims, vars = scan_dataset(tempfile_path) - - # there is exactly 1 mesh in the file - (mesh_name,) = vars_meshnames(vars) - - # the main variables reference the same mesh at different locations - v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "node") - - # the main variables map the face and node dimensions - face_dim = vars_meshdim(vars, "face") - node_dim = vars_meshdim(vars, "node") - self.assertEqual(v_a[_VAR_DIMS], [face_dim]) - self.assertEqual(v_b[_VAR_DIMS], [node_dim]) - - def test_multi_cubes_equal_meshes(self): - # Make 2 identical meshes - # NOTE: *can't* name these explicitly, as it stops them being identical. - mesh1 = make_mesh() - mesh2 = make_mesh() - cube1 = make_cube(var_name="a", mesh=mesh1) - cube2 = make_cube(var_name="b", mesh=mesh2) - - # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) - dims, vars = scan_dataset(tempfile_path) - - # there is exactly 1 mesh in the file - mesh_names = vars_meshnames(vars) - self.assertEqual(sorted(mesh_names), ["Mesh2d"]) - - # same dimensions - self.assertEqual( - vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes" - ) - self.assertEqual( - vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces" - ) - - # there are exactly two data-variables with a 'mesh' property - mesh_datavars = vars_w_props(vars, mesh="*") - self.assertEqual(["a", "b"], list(mesh_datavars)) - - # the data variables reference the same mesh - a_props, b_props = vars["a"], vars["b"] - for props in a_props, b_props: - self.assertEqual(props["mesh"], "Mesh2d") - self.assertEqual(props["location"], "face") - - # the data variables map the appropriate node dimensions - self.assertEqual(a_props[_VAR_DIMS], 
["Mesh2d_faces"]) - self.assertEqual(b_props[_VAR_DIMS], ["Mesh2d_faces_0"]) - - def test_multi_cubes_different_mesh(self): - # Check that we can correctly distinguish 2 different meshes. - cube1 = make_cube(var_name="a") - cube2 = make_cube(var_name="b", mesh=make_mesh(n_faces=4)) - - # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube1, cube2]) - dims, vars = scan_dataset(tempfile_path) - - # there are 2 meshes in the file - mesh_names = vars_meshnames(vars) - self.assertEqual(len(mesh_names), 2) - - # there are two (data)variables with a 'mesh' property - mesh_datavars = vars_w_props(vars, mesh="*") - self.assertEqual(2, len(mesh_datavars)) - self.assertEqual(["a", "b"], sorted(mesh_datavars.keys())) - - # the main variables reference the correct meshes, and 'face' location - a_props, b_props = vars["a"], vars["b"] - mesh_a, loc_a = a_props["mesh"], a_props["location"] - mesh_b, loc_b = b_props["mesh"], b_props["location"] - self.assertNotEqual(mesh_a, mesh_b) - self.assertEqual(loc_a, "face") - self.assertEqual(loc_b, "face") - - def test_nonmesh_dim(self): - # Check where the data variable has a 'normal' dim and a mesh dim. - cube = make_cube() - cube = add_height_dim(cube) - - # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) - dims, vars = scan_dataset(tempfile_path) - - # have just 1 mesh, including a face and node coordinates. 
- (mesh_name,) = vars_meshnames(vars) - # Check we have faces, and identify the faces dim - face_dim = vars_meshdim(vars, "face", mesh_name) - # Also just check we *have* a recognisable node-coordinate - vars_meshdim(vars, "node", mesh_name) - - # have just 1 data-variable - ((data_name, data_props),) = vars_w_props(vars, mesh="*").items() - - # data maps to the height + mesh dims - self.assertEqual(data_props[_VAR_DIMS], ["height", face_dim]) - self.assertEqual(data_props["mesh"], mesh_name) - self.assertEqual(data_props["location"], "face") - - def test_nonmesh_hybrid_dim(self): - # Check a case with a hybrid non-mesh dimension - cube = realistic_4d() - # Strip off the time and longitude dims, to make it simpler. - cube = cube[0, ..., 0] - # Remove all the unwanted coords (also loses the coord-system) - lose_coords = ( - "time", - "forecast_period", - "grid_longitude", - "grid_latitude", - ) - for coord in lose_coords: - cube.remove_coord(coord) - - # Add a mesh on the remaining (now anonymous) horizontal dimension. - i_horizontal_dim = len(cube.shape) - 1 - n_places = cube.shape[i_horizontal_dim] - mesh = make_mesh( - n_faces=n_places, - n_nodes=30, # arbitrary + unrealistic, but doesn't actually matter - ) - # Attach the mesh by adding MeshCoords - for coord in mesh.to_MeshCoords("face"): - cube.add_aux_coord(coord, (i_horizontal_dim,)) - - # Save and snapshot the result - tempfile_path = self.check_save_cubes(cube) - dims, vars = scan_dataset(tempfile_path) - - # have just 1 mesh, including face and node coordinates. - (mesh_name,) = vars_meshnames(vars) - face_dim = vars_meshdim(vars, "face", mesh_name) - _ = vars_meshdim(vars, "node", mesh_name) - - # have hybrid vertical dimension, with all the usual term variables. 
- self.assertIn("model_level_number", dims) - vert_vars = list(vars_w_dims(vars, ["model_level_number"]).keys()) - # The list of file variables mapping the vertical dimension: - # = the data-var, plus all the height terms - self.assertEqual( - vert_vars, - [ - "air_potential_temperature", - "model_level_number", - "level_height", - "level_height_bnds", - "sigma", - "sigma_bnds", - ], - ) - - # have just 1 data-variable, which maps to hybrid-height and mesh dims - ((data_name, data_props),) = vars_w_props(vars, mesh="*").items() - self.assertEqual( - data_props[_VAR_DIMS], ["model_level_number", face_dim] - ) - self.assertEqual(data_props["mesh"], mesh_name) - self.assertEqual(data_props["location"], "face") - - def test_alternate_cube_dim_order(self): - # A cube transposed from the 'usual' order - # Should work much the same as the "basic" case. - cube_1 = make_cube(var_name="a") - cube_1 = add_height_dim(cube_1) - - cube_2 = cube_1.copy() - cube_2.var_name = "b" - cube_2.transpose() - - # Save and snapshot the result - tempfile_path = self.check_save_cubes([cube_1, cube_2]) - dims, vars = scan_dataset(tempfile_path) - - # There is only 1 mesh - (mesh_name,) = vars_meshnames(vars) - - # both variables reference the same mesh - v_a, v_b = vars["a"], vars["b"] - self.assertEqual(v_a["mesh"], mesh_name) - self.assertEqual(v_a["location"], "face") - self.assertEqual(v_b["mesh"], mesh_name) - self.assertEqual(v_b["location"], "face") - - # Check the var dimensions - self.assertEqual(v_a[_VAR_DIMS], ["height", "Mesh2d_faces"]) - self.assertEqual(v_b[_VAR_DIMS], ["Mesh2d_faces", "height"]) - - -class TestSaveUgrid__mesh(tests.IrisTest): - """Tests for saving meshes to a file.""" - - @classmethod - def setUpClass(cls): - cls.temp_dir = Path(tempfile.mkdtemp()) - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.temp_dir) - - def check_save_mesh(self, mesh): - """ - Write a mesh to a new file in the common temporary directory. 
- - Use a name unique to this testcase, to avoid any clashes. - - """ - # use 'result_path' to name the file after the test function - tempfile_path = self.result_path(ext=".nc") - # Create a file of that name, but discard the result path and put it - # in the common temporary directory. - tempfile_path = self.temp_dir / Path(tempfile_path).name - - # Save data to the file. - save_mesh(mesh, tempfile_path) - - return tempfile_path - - def test_connectivity_dim_order(self): - """ - Test a mesh with some connectivities in the 'other' order. - - This should also create a property with the dimension name. - - """ - # Make a mesh with both faces *and* some edges - mesh = make_mesh(n_edges=7) - # Get the face-node and edge-node connectivities - face_nodes_conn = mesh.face_node_connectivity - edge_nodes_conn = mesh.edge_node_connectivity - # Transpose them : N.B. this sets location_axis=1, as it should be. - nodesfirst_faces_conn = face_nodes_conn.transpose() - nodesfirst_edges_conn = edge_nodes_conn.transpose() - # Make a new mesh with both face and edge connectivities 'transposed'. - mesh2 = Mesh( - topology_dimension=mesh.topology_dimension, - node_coords_and_axes=zip(mesh.node_coords, XY_LOCS), - face_coords_and_axes=zip(mesh.face_coords, XY_LOCS), - connectivities=[nodesfirst_faces_conn, nodesfirst_edges_conn], - ) - - # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh2) - dims, vars = scan_dataset(tempfile_path) - - # Check shape and dimensions of the associated connectivity variables. 
- (mesh_name,) = vars_meshnames(vars) - mesh_props = vars[mesh_name] - faceconn_name = mesh_props["face_node_connectivity"] - edgeconn_name = mesh_props["edge_node_connectivity"] - faceconn_props = vars[faceconn_name] - edgeconn_props = vars[edgeconn_name] - self.assertEqual( - faceconn_props[_VAR_DIMS], ["Mesh_2d_face_N_nodes", "Mesh2d_face"] - ) - self.assertEqual( - edgeconn_props[_VAR_DIMS], ["Mesh_2d_edge_N_nodes", "Mesh2d_edge"] - ) - - # Check the dimension lengths are also as expected - self.assertEqual(dims["Mesh2d_face"], 2) - self.assertEqual(dims["Mesh_2d_face_N_nodes"], 4) - self.assertEqual(dims["Mesh2d_edge"], 7) - self.assertEqual(dims["Mesh_2d_edge_N_nodes"], 2) - - # the mesh has extra location-dimension properties - self.assertEqual(mesh_props["face_dimension"], "Mesh2d_face") - self.assertEqual(mesh_props["edge_dimension"], "Mesh2d_edge") - - def test_connectivity_start_index(self): - """Test a mesh where some connectivities have start_index = 1.""" - # Make a mesh with both faces *and* some edges - mesh = make_mesh(n_edges=7) - # Get the face-node and edge-node connectivities - face_nodes_conn = mesh.face_node_connectivity - edge_nodes_conn = mesh.edge_node_connectivity - edge_nodes_conn2 = Connectivity( - indices=edge_nodes_conn.indices + 1, - cf_role=edge_nodes_conn.cf_role, - var_name="edges_x_2", - start_index=1, - ) - # Make a new mesh with altered connectivities. - mesh2 = Mesh( - topology_dimension=mesh.topology_dimension, - node_coords_and_axes=zip(mesh.node_coords, XY_LOCS), - face_coords_and_axes=zip(mesh.face_coords, XY_LOCS), - connectivities=[face_nodes_conn, edge_nodes_conn2], - ) - - # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh2) - dims, vars = scan_dataset(tempfile_path) - - # Check shape and dimensions of the associated connectivity variables. 
- (mesh_name,) = vars_meshnames(vars) - mesh_props = vars[mesh_name] - faceconn_name = mesh_props["face_node_connectivity"] - edgeconn_name = mesh_props["edge_node_connectivity"] - faceconn_props = vars[faceconn_name] - edgeconn_props = vars[edgeconn_name] - self.assertEqual(faceconn_props["start_index"], 0) - self.assertEqual(edgeconn_props["start_index"], 1) - - def test_nonuniform_connectivity(self): - # Check handling of connectivities with missing points. - n_faces = 7 - mesh = make_mesh(n_faces=n_faces) - - # In this case, add on a partial face-face connectivity. - # construct a vaguely plausible face-face index array - indices = np.ma.arange(n_faces * 4).reshape((7, 4)) - indices = indices % 7 - # make some missing points -- i.e. not all faces have 4 neighbours - indices[(2, (2, 3))] = np.ma.masked - indices[(3, (0, 2))] = np.ma.masked - indices[6, :] = np.ma.masked - - conn = Connectivity( - indices, - cf_role="face_face_connectivity", - ) - mesh.add_connectivities(conn) - - # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) - dims, vars = scan_dataset(tempfile_path) - - # Check that the mesh saved with the additional connectivity - (mesh_name,) = vars_meshnames(vars) - mesh_props = vars[mesh_name] - self.assertIn("face_face_connectivity", mesh_props) - ff_conn_name = mesh_props["face_face_connectivity"] - - # check that the connectivity has the corrects dims and fill-property - ff_props = vars[ff_conn_name] - self.assertEqual( - ff_props[_VAR_DIMS], ["Mesh2d_faces", "Mesh2d_face_N_faces"] - ) - self.assertIn("_FillValue", ff_props) - self.assertEqual(ff_props["_FillValue"], -1) - - # Check that a 'normal' connectivity does *not* have a _FillValue - fn_conn_name = mesh_props["face_node_connectivity"] - fn_props = vars[fn_conn_name] - self.assertNotIn("_FillValue", fn_props) - - # For what it's worth, *also* check the actual data array in the file - ds = nc.Dataset(tempfile_path) - conn_var = ds.variables[ff_conn_name] - data = 
conn_var[:] - ds.close() - self.assertIsInstance(data, np.ma.MaskedArray) - self.assertEqual(data.fill_value, -1) - # Compare raw values stored to indices, but with -1 at missing points - raw_data = data.data - filled_indices = indices.filled(-1) - self.assertArrayEqual(raw_data, filled_indices) - - def test_one_dimensional(self): - # Test a mesh with edges only. - mesh = make_mesh( - n_edges=5, n_faces=0, mesh_kwargs={"var_name": "Mesh1d"} - ) - - # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) - dims, vars = scan_dataset(tempfile_path) - - # there is a single mesh-var - (mesh_name,) = vars_meshnames(vars) - - # the dims include edges but not faces - self.assertEqual( - list(dims.keys()), - ["Mesh1d_node", "Mesh1d_edge", "Mesh1d_edge_N_nodes"], - ) - self.assertEqual(vars_meshdim(vars, "node"), "Mesh1d_node") - self.assertEqual(vars_meshdim(vars, "edge"), "Mesh1d_edge") - - # check suitable mesh properties - self.assertEqual(mesh_name, "Mesh1d") - mesh_props = vars[mesh_name] - self.assertEqual(mesh_props["topology_dimension"], 1) - self.assertIn("edge_node_connectivity", mesh_props) - self.assertNotIn("face_node_connectivity", mesh_props) - - def test_location_coord_units(self): - # Check that units on mesh locations are handled correctly. - # NOTE: at present, the Mesh class cannot handle coordinates that are - # not recognised by 'guess_coord_axis' == suitable standard names - mesh = make_mesh( - nodecoord_xyargs=( - { - "standard_name": "projection_x_coordinate", - "var_name": "node_x", - "units": "degrees", # should NOT convert to 'degrees_east' - "axis": "x", # N.B. this is quietly dropped !! - }, - { - "standard_name": "projection_y_coordinate", - "var_name": "node_y", - "units": "ms-1", - "axis": "y", # N.B. this is quietly dropped !! 
- }, - ), - facecoord_xyargs=( - { - "standard_name": "longitude", - "var_name": "face_x", - "units": "", # SHOULD result in no units property - }, - { - "standard_name": "latitude", - "var_name": "face_y", # SHOULD convert to 'degrees_north' - "units": "degrees", - }, - ), - ) - - # Save and snapshot the result - tempfile_path = self.check_save_mesh(mesh) - dims, vars = scan_dataset(tempfile_path) - - # there is a single mesh-var - (mesh_name,) = vars_meshnames(vars) - - # find the node- and face-coordinate variables - node_x = vars["node_x"] - node_y = vars["node_y"] - face_x = vars["face_x"] - face_y = vars["face_y"] - - # Check that units are as expected. - # 1. 'long/lat' degree units are converted to east/north - # 2. non- (plain) lonlat are NOT converted - # 3. other names remain as whatever was given - # 4. no units on input --> none on output - self.assertEqual(node_x["units"], "degrees") - self.assertEqual(node_y["units"], "ms-1") - self.assertNotIn("units", face_x) - self.assertEqual(face_y["units"], "degrees_north") - - # Check also that we did not add 'axis' properties. - # We should *only* do that for dim-coords. - self.assertNotIn("axis", node_x) - self.assertNotIn("axis", node_y) - self.assertNotIn("axis", face_x) - self.assertNotIn("axis", face_y) - - @staticmethod - def _namestext(names): - name_texts = [ - f'{title}="{name}"' - for title, name in zip(("standard", "long", "var"), names) - ] - return f'({" ".join(name_texts)})' - - def test_mesh_names(self): - # Check the selection of mesh-variables names. - # N.B. this is basically centralised in Saver._get_mesh_variable_name, - # but we test in an implementation-neutral way (as it's fairly easy). 
- mesh_names_tests = [ - # no names : based on dimensionality - ( - (None, None, None), - (None, None, "Mesh_2d"), - ), - # var_name only - ( - (None, None, "meshvar_x"), - (None, None, "meshvar_x"), - ), - # standard_name only : does not apply to Mesh - ( - ("air_temperature", None, None), - ("air_temperature", None, "Mesh_2d"), - ), - # long_name only - ( - (None, "my_long_name", None), - (None, "my_long_name", "my_long_name"), - ), - # long_name that needs "fixing" - ( - (None, "my long name&%!", None), - (None, "my long name&%!", "my_long_name___"), - ), - # standard + long names - ( - ("air_temperature", "this_long_name", None), - ("air_temperature", "this_long_name", "this_long_name"), - ), - # long + var names - ( - (None, "my_longname", "varname"), - (None, "my_longname", "varname"), - ), - # all 3 names - ( - ("air_temperature", "airtemp long name", "meshvar_varname_1"), - ("air_temperature", "airtemp long name", "meshvar_varname_1"), - ), - ] - for given_names, expected_names in mesh_names_tests: - mesh_stdname, mesh_longname, mesh_varname = given_names - mesh_name_kwargs = { - "standard_name": mesh_stdname, - "long_name": mesh_longname, - "var_name": mesh_varname, - } - # Make a mesh, with the mesh names set for the testcase - mesh = make_mesh(mesh_kwargs=mesh_name_kwargs) - - filepath = self.check_save_mesh(mesh) - dims, vars = scan_dataset(filepath) - - (mesh_name,) = vars_meshnames(vars) - mesh_props = vars[mesh_name] - result_names = ( - mesh_props.get("standard_name", None), - mesh_props.get("long_name", None), - mesh_name, - ) - fail_msg = ( - f"Unexpected resulting names {self._namestext(result_names)} " - f"when saving mesh with {self._namestext(given_names)}" - ) - self.assertEqual(expected_names, result_names, fail_msg) - - def test_location_coord_names(self): - # Check the selection of mesh-element coordinate names. - # Check the selection of mesh-variables names. - # N.B. 
this is basically centralised in Saver._get_mesh_variable_name, - # but we test in an implementation-neutral way (as it's fairly easy). - - # Options here are limited because the Mesh relies on guess_axis so, - # for now anyway, coords *must* have a known X/Y-type standard-name - coord_names_tests = [ - # standard_name only - ( - ("longitude", None, None), - ("longitude", None, "longitude"), - ), - # standard + long names --> standard - ( - ("grid_longitude", "long name", None), - ("grid_longitude", "long name", "grid_longitude"), - ), - # standard + var names - ( - ("grid_longitude", None, "var_name"), - ("grid_longitude", None, "var_name"), - ), - # all 3 names - ( - ("projection_x_coordinate", "long name", "x_var_name"), - ("projection_x_coordinate", "long name", "x_var_name"), - ), - # # no standard name ? - # # not possible at present, as Mesh requires a recognisable - # # standard_name to identify the axis of a location-coord. - # # TODO: test this if+when Mesh usage is relaxed - # ( - # (None, None, 'node_x'), - # (None, None, "node_x"), - # ), - ] - for given_names, expected_names in coord_names_tests: - mesh_stdname, mesh_longname, mesh_varname = given_names - - mesh = make_mesh() - # Apply the names to the node_x coord of the mesh - coord = mesh.node_coords[0] - for key, name in zip( - ("standard_name", "long_name", "var_name"), given_names - ): - setattr(coord, key, name) - - filepath = self.check_save_mesh(mesh) - dims, vars = scan_dataset(filepath) - - (mesh_name,) = vars_meshnames(vars) - coord_varname = vars[mesh_name]["node_coordinates"].split(" ")[0] - coord_props = vars[coord_varname] - result_names = ( - coord_props.get("standard_name", None), - coord_props.get("long_name", None), - coord_varname, - ) - fail_msg = ( - f"Unexpected resulting names {self._namestext(result_names)} " - "when saving mesh coordinate " - f"with {self._namestext(given_names)}" - ) - self.assertEqual(expected_names, result_names, fail_msg) - - def 
test_mesh_dim_names(self): - # Check the selection of dimension names from the mesh. - - dim_names_tests = [ - (None, "Mesh2d_face"), - ("my_face_dimension", "my_face_dimension"), - ("dim invalid-name &%!", "dim_invalid_name____"), - ] - for given_name, expected_name in dim_names_tests: - - mesh = make_mesh(mesh_kwargs={"face_dimension": given_name}) - - filepath = self.check_save_mesh(mesh) - dims, vars = scan_dataset(filepath) - - (mesh_name,) = vars_meshnames(vars) - conn_varname = vars[mesh_name]["face_node_connectivity"] - face_dim = vars[conn_varname][_VAR_DIMS][0] - fail_msg = ( - f'Unexpected resulting dimension name "{face_dim}" ' - f'when saving mesh with dimension name of "{given_name}".' - ) - self.assertEqual(expected_name, face_dim, fail_msg) - - def test_connectivity_names(self): - # Check the selection of connectivity names. - conn_names_tests = [ - # var_name only - ( - (None, None, "meshvar_x"), - (None, None, "meshvar_x"), - ), - # standard_name only - ( - ("air_temperature", None, None), - ("air_temperature", None, "air_temperature"), - ), - # long_name only - ( - (None, "my_long_name", None), - (None, "my_long_name", "my_long_name"), - ), - # standard + long names - ( - ("air_temperature", "airtemp long name", None), - ("air_temperature", "airtemp long name", "air_temperature"), - ), - # standard + var names - ( - ("air_temperature", None, "my_var"), - ("air_temperature", None, "my_var"), - ), - # all 3 names - ( - ("air_temperature", "airtemp long name", "meshvar_varname_1"), - ("air_temperature", "airtemp long name", "meshvar_varname_1"), - ), - # long name only, with invalid content - # N.B. 
behaves *differently* from same in mesh/coord context - ( - (None, "name with spaces", None), # character validation - (None, "name with spaces", "mesh2d_faces"), - ), - ] - for given_names, expected_names in conn_names_tests: - mesh_stdname, mesh_longname, mesh_varname = given_names - - # Make a mesh and afterwards set the names of one connectivity - mesh = make_mesh() - # Apply test names to the face-node connectivity - conn = mesh.face_node_connectivity - for key, name in zip( - ("standard_name", "long_name", "var_name"), given_names - ): - setattr(conn, key, name) - - filepath = self.check_save_mesh(mesh) - dims, vars = scan_dataset(filepath) - - (mesh_name,) = vars_meshnames(vars) - mesh_props = vars[mesh_name] - conn_name = mesh_props["face_node_connectivity"] - conn_props = vars[conn_name] - result_names = ( - conn_props.get("standard_name", None), - conn_props.get("long_name", None), - conn_name, - ) - fail_msg = ( - f"Unexpected resulting names {self._namestext(result_names)} " - "when saving connectivity " - f"with {self._namestext(given_names)}" - ) - self.assertEqual(expected_names, result_names, fail_msg) - - def _check_two_different_meshes(self, vars): - # there are exactly 2 meshes in the file - mesh_names = vars_meshnames(vars) - self.assertEqual(sorted(mesh_names), ["Mesh2d", "Mesh2d_0"]) - - # they use different dimensions - # mesh1 - self.assertEqual( - vars_meshdim(vars, "node", mesh_name="Mesh2d"), "Mesh2d_nodes" - ) - self.assertEqual( - vars_meshdim(vars, "face", mesh_name="Mesh2d"), "Mesh2d_faces" - ) - if "edge_coordinates" in vars["Mesh2d"]: - self.assertEqual( - vars_meshdim(vars, "edge", mesh_name="Mesh2d"), "Mesh2d_edge" - ) - - # mesh2 - self.assertEqual( - vars_meshdim(vars, "node", mesh_name="Mesh2d_0"), "Mesh2d_nodes_0" - ) - self.assertEqual( - vars_meshdim(vars, "face", mesh_name="Mesh2d_0"), "Mesh2d_faces_0" - ) - if "edge_coordinates" in vars["Mesh2d_0"]: - self.assertEqual( - vars_meshdim(vars, "edge", mesh_name="Mesh2d_0"), - 
"Mesh2d_edge_0", - ) - - # the relevant coords + connectivities are also distinct - # mesh1 - self.assertEqual(vars["node_x"][_VAR_DIMS], ["Mesh2d_nodes"]) - self.assertEqual(vars["face_x"][_VAR_DIMS], ["Mesh2d_faces"]) - self.assertEqual( - vars["mesh2d_faces"][_VAR_DIMS], - ["Mesh2d_faces", "Mesh2d_face_N_nodes"], - ) - if "edge_coordinates" in vars["Mesh2d"]: - self.assertEqual(vars["longitude"][_VAR_DIMS], ["Mesh2d_edge"]) - self.assertEqual( - vars["mesh2d_edge"][_VAR_DIMS], - ["Mesh2d_edge", "Mesh2d_edge_N_nodes"], - ) - - # mesh2 - self.assertEqual(vars["node_x_0"][_VAR_DIMS], ["Mesh2d_nodes_0"]) - self.assertEqual(vars["face_x_0"][_VAR_DIMS], ["Mesh2d_faces_0"]) - self.assertEqual( - vars["mesh2d_faces_0"][_VAR_DIMS], - ["Mesh2d_faces_0", "Mesh2d_0_face_N_nodes"], - ) - if "edge_coordinates" in vars["Mesh2d_0"]: - self.assertEqual(vars["longitude_0"][_VAR_DIMS], ["Mesh2d_edge_0"]) - self.assertEqual( - vars["mesh2d_edge_0"][_VAR_DIMS], - ["Mesh2d_edge_0", "Mesh2d_0_edge_N_nodes"], - ) - - def test_multiple_equal_mesh(self): - mesh1 = make_mesh() - mesh2 = make_mesh() - - # Save and snapshot the result - tempfile_path = self.check_save_mesh([mesh1, mesh2]) - dims, vars = scan_dataset(tempfile_path) - - # In this case there should be only *one* mesh. - mesh_names = vars_meshnames(vars) - self.assertEqual(1, len(mesh_names)) - - # Check it has the correct number of coords + conns (no duplicates) - # Should have 2 each X and Y coords (face+node): _no_ edge coords. - coord_vars_x = vars_w_props(vars, standard_name="longitude") - coord_vars_y = vars_w_props(vars, standard_name="latitude") - self.assertEqual(2, len(coord_vars_x)) - self.assertEqual(2, len(coord_vars_y)) - - # Check the connectivities are all present: _only_ 1 var of each type. 
- for conn in mesh1.all_connectivities: - if conn is not None: - conn_vars = vars_w_props(vars, cf_role=conn.cf_role) - self.assertEqual(1, len(conn_vars)) - - def test_multiple_different_meshes(self): - # Create 2 meshes with different faces, but same edges. - # N.B. they should *not* then share an edge dimension ! - mesh1 = make_mesh(n_faces=3, n_edges=2) - mesh2 = make_mesh(n_faces=4, n_edges=2) - - # Save and snapshot the result - tempfile_path = self.check_save_mesh([mesh1, mesh2]) - dims, vars = scan_dataset(tempfile_path) - - # Check there are two independent meshes - self._check_two_different_meshes(vars) - - # Check the dims are as expected - self.assertEqual(dims["Mesh2d_faces"], 3) - self.assertEqual(dims["Mesh2d_faces_0"], 4) - self.assertEqual(dims["Mesh2d_edge"], 2) - self.assertEqual(dims["Mesh2d_edge_0"], 2) - - -# WHEN MODIFYING THIS MODULE, CHECK IF ANY CORRESPONDING CHANGES ARE NEEDED IN -# :mod:`iris.tests.unit.fileformats.netcdf.test_Saver__lazy.` - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__FillValueMaskCheckAndStoreTarget.py b/lib/iris/tests/unit/fileformats/netcdf/test__FillValueMaskCheckAndStoreTarget.py deleted file mode 100644 index 01ba7ff38d..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test__FillValueMaskCheckAndStoreTarget.py +++ /dev/null @@ -1,92 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the `iris.fileformats.netcdf._FillValueMaskCheckAndStoreTarget` -class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.fileformats.netcdf import _FillValueMaskCheckAndStoreTarget - - -class Test__FillValueMaskCheckAndStoreTarget(tests.IrisTest): - def _call_target(self, fill_value, keys, vals): - inner_target = mock.MagicMock() - target = _FillValueMaskCheckAndStoreTarget( - inner_target, fill_value=fill_value - ) - - for key, val in zip(keys, vals): - target[key] = val - - calls = [mock.call(key, val) for key, val in zip(keys, vals)] - inner_target.__setitem__.assert_has_calls(calls) - - return target - - def test___setitem__(self): - self._call_target(None, [1], [2]) - - def test_no_fill_value_not_masked(self): - # Test when the fill value is not present and the data is not masked - keys = [slice(0, 10), slice(10, 15)] - vals = [np.arange(10), np.arange(5)] - fill_value = 16 - target = self._call_target(fill_value, keys, vals) - self.assertFalse(target.contains_value) - self.assertFalse(target.is_masked) - - def test_contains_fill_value_not_masked(self): - # Test when the fill value is present and the data is not masked - keys = [slice(0, 10), slice(10, 15)] - vals = [np.arange(10), np.arange(5)] - fill_value = 5 - target = self._call_target(fill_value, keys, vals) - self.assertTrue(target.contains_value) - self.assertFalse(target.is_masked) - - def test_no_fill_value_masked(self): - # Test when the fill value is not present and the data is masked - keys = [slice(0, 10), slice(10, 15)] - vals = [np.arange(10), np.ma.masked_equal(np.arange(5), 3)] - fill_value = 16 - target = self._call_target(fill_value, keys, vals) - self.assertFalse(target.contains_value) - self.assertTrue(target.is_masked) - - def test_contains_fill_value_masked(self): - # Test when the fill value is present and the data is masked - keys = [slice(0, 10), slice(10, 15)] - vals = [np.arange(10), np.ma.masked_equal(np.arange(5), 3)] - fill_value = 5 - target = self._call_target(fill_value, keys, vals) - 
self.assertTrue(target.contains_value) - self.assertTrue(target.is_masked) - - def test_fill_value_None(self): - # Test when the fill value is None - keys = [slice(0, 10), slice(10, 15)] - vals = [np.arange(10), np.arange(5)] - fill_value = None - target = self._call_target(fill_value, keys, vals) - self.assertFalse(target.contains_value) - - def test_contains_masked_fill_value(self): - # Test when the fill value is present but masked the data is masked - keys = [slice(0, 10), slice(10, 15)] - vals = [np.arange(10), np.ma.masked_equal(np.arange(10, 15), 13)] - fill_value = 13 - target = self._call_target(fill_value, keys, vals) - self.assertFalse(target.contains_value) - self.assertTrue(target.is_masked) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py b/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py deleted file mode 100644 index 1bf39591d2..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test__get_cf_var_data.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.netcdf._get_cf_var_data` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from dask.array import Array as dask_array -import numpy as np - -from iris._lazy_data import _optimum_chunksize -import iris.fileformats.cf -from iris.fileformats.netcdf import _get_cf_var_data - - -class Test__get_cf_var_data(tests.IrisTest): - def setUp(self): - self.filename = "DUMMY" - self.shape = (300000, 240, 200) - self.expected_chunks = _optimum_chunksize(self.shape, self.shape) - - def _make(self, chunksizes): - cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=chunksizes) - cf_var = mock.MagicMock( - spec=iris.fileformats.cf.CFVariable, - dtype=np.dtype("i4"), - cf_data=cf_data, - cf_name="DUMMY_VAR", - shape=self.shape, - ) - return cf_var - - def test_cf_data_type(self): - chunks = [1, 12, 100] - cf_var = self._make(chunks) - lazy_data = _get_cf_var_data(cf_var, self.filename) - self.assertIsInstance(lazy_data, dask_array) - - def test_cf_data_chunks(self): - chunks = [2500, 240, 200] - cf_var = self._make(chunks) - lazy_data = _get_cf_var_data(cf_var, self.filename) - lazy_data_chunks = [c[0] for c in lazy_data.chunks] - expected_chunks = _optimum_chunksize(chunks, self.shape) - self.assertArrayEqual(lazy_data_chunks, expected_chunks) - - def test_cf_data_no_chunks(self): - # No chunks means chunks are calculated from the array's shape by - # `iris._lazy_data._optimum_chunksize()`. - chunks = None - cf_var = self._make(chunks) - lazy_data = _get_cf_var_data(cf_var, self.filename) - lazy_data_chunks = [c[0] for c in lazy_data.chunks] - self.assertArrayEqual(lazy_data_chunks, self.expected_chunks) - - def test_cf_data_contiguous(self): - # Chunks 'contiguous' is equivalent to no chunks. 
- chunks = "contiguous" - cf_var = self._make(chunks) - lazy_data = _get_cf_var_data(cf_var, self.filename) - lazy_data_chunks = [c[0] for c in lazy_data.chunks] - self.assertArrayEqual(lazy_data_chunks, self.expected_chunks) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py deleted file mode 100644 index eb9da6b5d6..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py +++ /dev/null @@ -1,193 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.netcdf._load_aux_factory` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock -import warnings - -import numpy as np - -from iris.coords import DimCoord -from iris.cube import Cube -from iris.fileformats.netcdf import _load_aux_factory - - -class TestAtmosphereHybridSigmaPressureCoordinate(tests.IrisTest): - def setUp(self): - standard_name = "atmosphere_hybrid_sigma_pressure_coordinate" - self.requires = dict(formula_type=standard_name) - self.ap = mock.MagicMock(units="units") - self.ps = mock.MagicMock(units="units") - coordinates = [(mock.sentinel.b, "b"), (self.ps, "ps")] - self.cube_parts = dict(coordinates=coordinates) - self.engine = mock.Mock( - requires=self.requires, cube_parts=self.cube_parts - ) - self.cube = mock.create_autospec(Cube, spec_set=True, instance=True) - # Patch out the check_dependencies functionality. 
- func = "iris.aux_factory.HybridPressureFactory._check_dependencies" - patcher = mock.patch(func) - patcher.start() - self.addCleanup(patcher.stop) - - def test_formula_terms_ap(self): - self.cube_parts["coordinates"].append((self.ap, "ap")) - self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") - _load_aux_factory(self.engine, self.cube) - # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 0) - # Check cube.add_aux_factory method. - self.assertEqual(self.cube.add_aux_factory.call_count, 1) - args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) - factory = args[0] - self.assertEqual(factory.delta, self.ap) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) - - def test_formula_terms_a_p0(self): - coord_a = DimCoord(np.arange(5), units="1") - coord_p0 = DimCoord(10, units="Pa") - coord_expected = DimCoord( - np.arange(5) * 10, - units="Pa", - long_name="vertical pressure", - var_name="ap", - ) - self.cube_parts["coordinates"].extend( - [(coord_a, "a"), (coord_p0, "p0")] - ) - self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") - _load_aux_factory(self.engine, self.cube) - # Check cube.coord_dims method. - self.assertEqual(self.cube.coord_dims.call_count, 1) - args, _ = self.cube.coord_dims.call_args - self.assertEqual(len(args), 1) - self.assertIs(args[0], coord_a) - # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 1) - args, _ = self.cube.add_aux_coord.call_args - self.assertEqual(len(args), 2) - self.assertEqual(args[0], coord_expected) - self.assertIsInstance(args[1], mock.Mock) - # Check cube.add_aux_factory method. 
- self.assertEqual(self.cube.add_aux_factory.call_count, 1) - args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) - factory = args[0] - self.assertEqual(factory.delta, coord_expected) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) - - def test_formula_terms_a_p0__promote_a_units_unknown_to_dimensionless( - self, - ): - coord_a = DimCoord(np.arange(5), units="unknown") - coord_p0 = DimCoord(10, units="Pa") - coord_expected = DimCoord( - np.arange(5) * 10, - units="Pa", - long_name="vertical pressure", - var_name="ap", - ) - self.cube_parts["coordinates"].extend( - [(coord_a, "a"), (coord_p0, "p0")] - ) - self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") - _load_aux_factory(self.engine, self.cube) - # Check cube.coord_dims method. - self.assertEqual(self.cube.coord_dims.call_count, 1) - args, _ = self.cube.coord_dims.call_args - self.assertEqual(len(args), 1) - self.assertIs(args[0], coord_a) - self.assertEqual("1", args[0].units) - # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 1) - args, _ = self.cube.add_aux_coord.call_args - self.assertEqual(len(args), 2) - self.assertEqual(args[0], coord_expected) - self.assertIsInstance(args[1], mock.Mock) - # Check cube.add_aux_factory method. 
- self.assertEqual(self.cube.add_aux_factory.call_count, 1) - args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) - factory = args[0] - self.assertEqual(factory.delta, coord_expected) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) - - def test_formula_terms_p0_non_scalar(self): - coord_p0 = DimCoord(np.arange(5)) - self.cube_parts["coordinates"].append((coord_p0, "p0")) - self.requires["formula_terms"] = dict(p0="p0") - with self.assertRaises(ValueError): - _load_aux_factory(self.engine, self.cube) - - def test_formula_terms_p0_bounded(self): - coord_a = DimCoord(np.arange(5)) - coord_p0 = DimCoord(1, bounds=[0, 2], var_name="p0") - self.cube_parts["coordinates"].extend( - [(coord_a, "a"), (coord_p0, "p0")] - ) - self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") - with warnings.catch_warnings(record=True) as warn: - warnings.simplefilter("always") - _load_aux_factory(self.engine, self.cube) - self.assertEqual(len(warn), 1) - msg = ( - "Ignoring atmosphere hybrid sigma pressure scalar " - "coordinate {!r} bounds.".format(coord_p0.name()) - ) - self.assertEqual(msg, str(warn[0].message)) - - def _check_no_delta(self): - # Check cube.add_aux_coord method. - self.assertEqual(self.cube.add_aux_coord.call_count, 0) - # Check cube.add_aux_factory method. 
- self.assertEqual(self.cube.add_aux_factory.call_count, 1) - args, _ = self.cube.add_aux_factory.call_args - self.assertEqual(len(args), 1) - factory = args[0] - # Check that the factory has no delta term - self.assertEqual(factory.delta, None) - self.assertEqual(factory.sigma, mock.sentinel.b) - self.assertEqual(factory.surface_air_pressure, self.ps) - - def test_formula_terms_ap_missing_coords(self): - self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") - with mock.patch("warnings.warn") as warn: - _load_aux_factory(self.engine, self.cube) - warn.assert_called_once_with( - "Unable to find coordinate for variable " "'ap'" - ) - self._check_no_delta() - - def test_formula_terms_no_delta_terms(self): - self.requires["formula_terms"] = dict(b="b", ps="ps") - _load_aux_factory(self.engine, self.cube) - self._check_no_delta() - - def test_formula_terms_no_p0_term(self): - coord_a = DimCoord(np.arange(5), units="Pa") - self.cube_parts["coordinates"].append((coord_a, "a")) - self.requires["formula_terms"] = dict(a="a", b="b", ps="ps") - _load_aux_factory(self.engine, self.cube) - self._check_no_delta() - - def test_formula_terms_no_a_term(self): - coord_p0 = DimCoord(10, units="1") - self.cube_parts["coordinates"].append((coord_p0, "p0")) - self.requires["formula_terms"] = dict(a="p0", b="b", ps="ps") - _load_aux_factory(self.engine, self.cube) - self._check_no_delta() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py deleted file mode 100644 index 0e98eec916..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py +++ /dev/null @@ -1,181 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.coords import DimCoord -import iris.fileformats.cf -from iris.fileformats.netcdf import _load_cube - - -class TestCoordAttributes(tests.IrisTest): - @staticmethod - def _patcher(engine, cf, cf_group): - coordinates = [] - for coord in cf_group: - engine.cube.add_aux_coord(coord) - coordinates.append((coord, coord.name())) - engine.cube_parts["coordinates"] = coordinates - - def setUp(self): - this = "iris.fileformats.netcdf._assert_case_specific_facts" - patch = mock.patch(this, side_effect=self._patcher) - patch.start() - self.addCleanup(patch.stop) - self.engine = mock.Mock() - self.filename = "DUMMY" - self.flag_masks = mock.sentinel.flag_masks - self.flag_meanings = mock.sentinel.flag_meanings - self.flag_values = mock.sentinel.flag_values - self.valid_range = mock.sentinel.valid_range - self.valid_min = mock.sentinel.valid_min - self.valid_max = mock.sentinel.valid_max - - def _make(self, names, attrs): - coords = [DimCoord(i, long_name=name) for i, name in enumerate(names)] - shape = (1,) - - cf_group = {} - for name, cf_attrs in zip(names, attrs): - cf_attrs_unused = mock.Mock(return_value=cf_attrs) - cf_group[name] = mock.Mock(cf_attrs_unused=cf_attrs_unused) - cf = mock.Mock(cf_group=cf_group) - - cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=shape) - cf_var = mock.MagicMock( - spec=iris.fileformats.cf.CFVariable, - dtype=np.dtype("i4"), - cf_data=cf_data, - cf_name="DUMMY_VAR", - cf_group=coords, - shape=shape, - ) - return cf, cf_var - - def test_flag_pass_thru(self): - items = [ - ("masks", "flag_masks", self.flag_masks), - ("meanings", "flag_meanings", self.flag_meanings), - ("values", "flag_values", self.flag_values), - ] - for name, 
attr, value in items: - names = [name] - attrs = [[(attr, value)]] - cf, cf_var = self._make(names, attrs) - cube = _load_cube(self.engine, cf, cf_var, self.filename) - self.assertEqual(len(cube.coords(name)), 1) - coord = cube.coord(name) - self.assertEqual(len(coord.attributes), 1) - self.assertEqual(list(coord.attributes.keys()), [attr]) - self.assertEqual(list(coord.attributes.values()), [value]) - - def test_flag_pass_thru_multi(self): - names = ["masks", "meanings", "values"] - attrs = [ - [("flag_masks", self.flag_masks), ("wibble", "wibble")], - [ - ("flag_meanings", self.flag_meanings), - ("add_offset", "add_offset"), - ], - [("flag_values", self.flag_values)], - [("valid_range", self.valid_range)], - [("valid_min", self.valid_min)], - [("valid_max", self.valid_max)], - ] - cf, cf_var = self._make(names, attrs) - cube = _load_cube(self.engine, cf, cf_var, self.filename) - self.assertEqual(len(cube.coords()), 3) - self.assertEqual(set([c.name() for c in cube.coords()]), set(names)) - expected = [ - attrs[0], - [attrs[1][0]], - attrs[2], - attrs[3], - attrs[4], - attrs[5], - ] - for name, expect in zip(names, expected): - attributes = cube.coord(name).attributes - self.assertEqual(set(attributes.items()), set(expect)) - - -class TestCubeAttributes(tests.IrisTest): - def setUp(self): - this = "iris.fileformats.netcdf._assert_case_specific_facts" - patch = mock.patch(this) - patch.start() - self.addCleanup(patch.stop) - self.engine = mock.Mock() - self.cf = None - self.filename = "DUMMY" - self.flag_masks = mock.sentinel.flag_masks - self.flag_meanings = mock.sentinel.flag_meanings - self.flag_values = mock.sentinel.flag_values - self.valid_range = mock.sentinel.valid_range - self.valid_min = mock.sentinel.valid_min - self.valid_max = mock.sentinel.valid_max - - def _make(self, attrs): - shape = (1,) - cf_attrs_unused = mock.Mock(return_value=attrs) - cf_data = mock.Mock(_FillValue=None) - cf_data.chunking = mock.MagicMock(return_value=shape) - cf_var = 
mock.MagicMock( - spec=iris.fileformats.cf.CFVariable, - dtype=np.dtype("i4"), - cf_data=cf_data, - cf_name="DUMMY_VAR", - cf_group=mock.Mock(), - cf_attrs_unused=cf_attrs_unused, - shape=shape, - ) - return cf_var - - def test_flag_pass_thru(self): - attrs = [ - ("flag_masks", self.flag_masks), - ("flag_meanings", self.flag_meanings), - ("flag_values", self.flag_values), - ] - for key, value in attrs: - cf_var = self._make([(key, value)]) - cube = _load_cube(self.engine, self.cf, cf_var, self.filename) - self.assertEqual(len(cube.attributes), 1) - self.assertEqual(list(cube.attributes.keys()), [key]) - self.assertEqual(list(cube.attributes.values()), [value]) - - def test_flag_pass_thru_multi(self): - attrs = [ - ("flag_masks", self.flag_masks), - ("wibble", "wobble"), - ("flag_meanings", self.flag_meanings), - ("add_offset", "add_offset"), - ("flag_values", self.flag_values), - ("standard_name", "air_temperature"), - ("valid_range", self.valid_range), - ("valid_min", self.valid_min), - ("valid_max", self.valid_max), - ] - - # Expect everything from above to be returned except those - # corresponding to exclude_ind. - expected = set([attrs[ind] for ind in [0, 1, 2, 4, 6, 7, 8]]) - cf_var = self._make(attrs) - cube = _load_cube(self.engine, self.cf, cf_var, self.filename) - self.assertEqual(len(cube.attributes), len(expected)) - self.assertEqual(set(cube.attributes.items()), expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__translate_constraints_to_var_callback.py b/lib/iris/tests/unit/fileformats/netcdf/test__translate_constraints_to_var_callback.py deleted file mode 100644 index fb08ffda2b..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test__translate_constraints_to_var_callback.py +++ /dev/null @@ -1,100 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.netcdf._translate_constraints_to_var_callback`. - -""" - -from unittest.mock import MagicMock - -import iris -from iris.fileformats.cf import CFDataVariable -from iris.fileformats.netcdf import _translate_constraints_to_var_callback - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests - - -class Test(tests.IrisTest): - data_variables = [ - CFDataVariable("var1", MagicMock(standard_name="x_wind")), - CFDataVariable("var2", MagicMock(standard_name="y_wind")), - CFDataVariable("var1", MagicMock(long_name="x component of wind")), - CFDataVariable( - "var1", - MagicMock(standard_name="x_wind", long_name="x component of wind"), - ), - CFDataVariable("var1", MagicMock()), - ] - - def test_multiple_constraints(self): - constrs = [ - iris.NameConstraint(standard_name="x_wind"), - iris.NameConstraint(var_name="var1"), - ] - result = _translate_constraints_to_var_callback(constrs) - self.assertIsNone(result) - - def test_non_NameConstraint(self): - constr = iris.AttributeConstraint(STASH="m01s00i002") - result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) - - def test_str_constraint(self): - result = _translate_constraints_to_var_callback("x_wind") - self.assertIsNone(result) - - def test_Constaint_with_name(self): - constr = iris.Constraint(name="x_wind") - result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) - - def test_NameConstraint_standard_name(self): - constr = iris.NameConstraint(standard_name="x_wind") - callback = _translate_constraints_to_var_callback(constr) - result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, False, True, False]) - - def test_NameConstraint_long_name(self): - constr = iris.NameConstraint(long_name="x component of wind") 
- callback = _translate_constraints_to_var_callback(constr) - result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [False, False, True, True, False]) - - def test_NameConstraint_var_name(self): - constr = iris.NameConstraint(var_name="var1") - callback = _translate_constraints_to_var_callback(constr) - result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, True, True, True]) - - def test_NameConstraint_standard_name_var_name(self): - constr = iris.NameConstraint(standard_name="x_wind", var_name="var1") - callback = _translate_constraints_to_var_callback(constr) - result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [True, False, False, True, False]) - - def test_NameConstraint_standard_name_long_name_var_name(self): - constr = iris.NameConstraint( - standard_name="x_wind", - long_name="x component of wind", - var_name="var1", - ) - callback = _translate_constraints_to_var_callback(constr) - result = [callback(var) for var in self.data_variables] - self.assertArrayEqual(result, [False, False, False, True, False]) - - def test_NameConstraint_with_STASH(self): - constr = iris.NameConstraint( - standard_name="x_wind", STASH="m01s00i024" - ) - result = _translate_constraints_to_var_callback(constr) - self.assertIsNone(result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py b/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py deleted file mode 100644 index bbcf2cc72b..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test_load_cubes.py +++ /dev/null @@ -1,321 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.fileformats.netcdf.load_cubes` function. 
- -todo: migrate the remaining unit-esque tests from iris.tests.test_netcdf, - switching to use netcdf.load_cubes() instead of iris.load()/load_cube(). - -""" - -from pathlib import Path -from shutil import rmtree -from subprocess import check_call -import tempfile - -from cf_units import as_unit -import numpy as np - -from iris.coords import AncillaryVariable, CellMeasure -from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD -from iris.experimental.ugrid.mesh import MeshCoord -from iris.fileformats.netcdf import load_cubes, logger - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests - - -def setUpModule(): - global TMP_DIR - TMP_DIR = Path(tempfile.mkdtemp()) - - -def tearDownModule(): - if TMP_DIR is not None: - rmtree(TMP_DIR) - - -def cdl_to_nc(cdl): - cdl_path = TMP_DIR / "tst.cdl" - nc_path = TMP_DIR / "tst.nc" - # Write CDL string into a temporary CDL file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl) - # Use ncgen to convert this into an actual (temporary) netCDF file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - check_call(command, shell=True) - return str(nc_path) - - -class Tests(tests.IrisTest): - def test_ancillary_variables(self): - # Note: using a CDL string as a test data reference, rather than a - # binary file. - ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - variables: - int64 qqv(axv) ; - qqv:long_name = "qq" ; - qqv:units = "1" ; - qqv:ancillary_variables = "my_av" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - double my_av(axv) ; - my_av:units = "1" ; - my_av:long_name = "refs" ; - my_av:custom = "extra-attribute"; - data: - axv = 1, 2, 3; - my_av = 11., 12., 13.; - } - """ - nc_path = cdl_to_nc(ref_cdl) - - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
- cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) - expected = AncillaryVariable( - np.ma.array([11.0, 12.0, 13.0]), - long_name="refs", - var_name="my_av", - units="1", - attributes={"custom": "extra-attribute"}, - ) - self.assertEqual(avs[0], expected) - - def test_status_flags(self): - # Note: using a CDL string as a test data reference, rather than a binary file. - ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - variables: - int64 qqv(axv) ; - qqv:long_name = "qq" ; - qqv:units = "1" ; - qqv:ancillary_variables = "my_av" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - byte my_av(axv) ; - my_av:long_name = "qq status_flag" ; - my_av:flag_values = 1b, 2b ; - my_av:flag_meanings = "a b" ; - data: - axv = 11, 21, 31; - my_av = 1b, 1b, 2b; - } - """ - nc_path = cdl_to_nc(ref_cdl) - - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. - cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - avs = cubes[0].ancillary_variables() - self.assertEqual(len(avs), 1) - expected = AncillaryVariable( - np.ma.array([1, 1, 2], dtype=np.int8), - long_name="qq status_flag", - var_name="my_av", - units="no_unit", - attributes={ - "flag_values": np.array([1, 2], dtype=np.int8), - "flag_meanings": "a b", - }, - ) - self.assertEqual(avs[0], expected) - - def test_cell_measures(self): - # Note: using a CDL string as a test data reference, rather than a binary file. 
- ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - ayv = 2 ; - variables: - int64 qqv(ayv, axv) ; - qqv:long_name = "qq" ; - qqv:units = "1" ; - qqv:cell_measures = "area: my_areas" ; - int64 ayv(ayv) ; - ayv:units = "1" ; - ayv:long_name = "y" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - double my_areas(ayv, axv) ; - my_areas:units = "m2" ; - my_areas:long_name = "standardised cell areas" ; - my_areas:custom = "extra-attribute"; - data: - axv = 11, 12, 13; - ayv = 21, 22; - my_areas = 110., 120., 130., 221., 231., 241.; - } - """ - nc_path = cdl_to_nc(ref_cdl) - - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. - cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - cms = cubes[0].cell_measures() - self.assertEqual(len(cms), 1) - expected = CellMeasure( - np.ma.array([[110.0, 120.0, 130.0], [221.0, 231.0, 241.0]]), - measure="area", - var_name="my_areas", - long_name="standardised cell areas", - units="m2", - attributes={"custom": "extra-attribute"}, - ) - self.assertEqual(cms[0], expected) - - def test_default_units(self): - # Note: using a CDL string as a test data reference, rather than a binary file. - ref_cdl = """ - netcdf cm_attr { - dimensions: - axv = 3 ; - ayv = 2 ; - variables: - int64 qqv(ayv, axv) ; - qqv:long_name = "qq" ; - qqv:ancillary_variables = "my_av" ; - qqv:cell_measures = "area: my_areas" ; - int64 ayv(ayv) ; - ayv:long_name = "y" ; - int64 axv(axv) ; - axv:units = "1" ; - axv:long_name = "x" ; - double my_av(axv) ; - my_av:long_name = "refs" ; - double my_areas(ayv, axv) ; - my_areas:long_name = "areas" ; - data: - axv = 11, 12, 13; - ayv = 21, 22; - my_areas = 110., 120., 130., 221., 231., 241.; - } - """ - nc_path = cdl_to_nc(ref_cdl) - - # Load with iris.fileformats.netcdf.load_cubes, and check expected content. 
- cubes = list(load_cubes(nc_path)) - self.assertEqual(len(cubes), 1) - self.assertEqual(cubes[0].units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("y").units, as_unit("unknown")) - self.assertEqual(cubes[0].coord("x").units, as_unit(1)) - self.assertEqual( - cubes[0].ancillary_variable("refs").units, as_unit("unknown") - ) - self.assertEqual( - cubes[0].cell_measure("areas").units, as_unit("unknown") - ) - - -class TestsMesh(tests.IrisTest): - @classmethod - def setUpClass(cls): - cls.ref_cdl = """ - netcdf mesh_test { - dimensions: - node = 3 ; - face = 1 ; - vertex = 3 ; - levels = 2 ; - variables: - int mesh ; - mesh:cf_role = "mesh_topology" ; - mesh:topology_dimension = 2 ; - mesh:node_coordinates = "node_x node_y" ; - mesh:face_coordinates = "face_x face_y" ; - mesh:face_node_connectivity = "face_nodes" ; - float node_x(node) ; - node_x:standard_name = "longitude" ; - float node_y(node) ; - node_y:standard_name = "latitude" ; - float face_x(face) ; - face_x:standard_name = "longitude" ; - float face_y(face) ; - face_y:standard_name = "latitude" ; - int face_nodes(face, vertex) ; - face_nodes:cf_role = "face_node_connectivity" ; - face_nodes:start_index = 0 ; - int levels(levels) ; - float node_data(levels, node) ; - node_data:coordinates = "node_x node_y" ; - node_data:location = "node" ; - node_data:mesh = "mesh" ; - float face_data(levels, face) ; - face_data:coordinates = "face_x face_y" ; - face_data:location = "face" ; - face_data:mesh = "mesh" ; - data: - mesh = 0; - node_x = 0., 2., 1.; - node_y = 0., 0., 1.; - face_x = 0.5; - face_y = 0.5; - face_nodes = 0, 1, 2; - levels = 1, 2; - node_data = 0., 0., 0.; - face_data = 0.; - } - """ - cls.nc_path = cdl_to_nc(cls.ref_cdl) - with PARSE_UGRID_ON_LOAD.context(): - cls.mesh_cubes = list(load_cubes(cls.nc_path)) - - def test_mesh_handled(self): - cubes_no_ugrid = list(load_cubes(self.nc_path)) - self.assertEqual(4, len(cubes_no_ugrid)) - self.assertEqual(2, len(self.mesh_cubes)) - - def 
test_standard_dims(self): - for cube in self.mesh_cubes: - self.assertIsNotNone(cube.coords("levels")) - - def test_mesh_coord(self): - cube = [ - cube for cube in self.mesh_cubes if cube.var_name == "face_data" - ][0] - face_x = cube.coord("longitude") - face_y = cube.coord("latitude") - - for coord in (face_x, face_y): - self.assertIsInstance(coord, MeshCoord) - self.assertEqual("face", coord.location) - self.assertArrayEqual(np.ma.array([0.5]), coord.points) - - self.assertEqual("x", face_x.axis) - self.assertEqual("y", face_y.axis) - self.assertEqual(face_x.mesh, face_y.mesh) - self.assertArrayEqual(np.ma.array([[0.0, 2.0, 1.0]]), face_x.bounds) - self.assertArrayEqual(np.ma.array([[0.0, 0.0, 1.0]]), face_y.bounds) - - def test_shared_mesh(self): - cube_meshes = [cube.coord("latitude").mesh for cube in self.mesh_cubes] - self.assertEqual(cube_meshes[0], cube_meshes[1]) - - def test_missing_mesh(self): - ref_cdl = self.ref_cdl.replace( - 'face_data:mesh = "mesh"', 'face_data:mesh = "mesh2"' - ) - nc_path = cdl_to_nc(ref_cdl) - - # No error when mesh handling not activated. - _ = list(load_cubes(nc_path)) - - with PARSE_UGRID_ON_LOAD.context(): - log_regex = r"File does not contain mesh.*" - with self.assertLogs(logger, level="DEBUG", msg_regex=log_regex): - _ = list(load_cubes(nc_path)) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py b/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py deleted file mode 100644 index 9c4fbf622b..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test_parse_cell_methods.py +++ /dev/null @@ -1,137 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.fileformats.netcdf.parse_cell_methods`. 
- -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.coords import CellMethod -from iris.fileformats.netcdf import parse_cell_methods - - -class Test(tests.IrisTest): - def test_simple(self): - cell_method_strings = [ - "time: mean", - "time : mean", - ] - expected = (CellMethod(method="mean", coords="time"),) - for cell_method_str in cell_method_strings: - res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) - - def test_with_interval(self): - cell_method_strings = [ - "time: variance (interval: 1 hr)", - "time : variance (interval: 1 hr)", - ] - expected = ( - CellMethod(method="variance", coords="time", intervals="1 hr"), - ) - for cell_method_str in cell_method_strings: - res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) - - def test_multiple(self): - cell_method_strings = [ - "time: maximum (interval: 1 hr) time: mean (interval: 1 day)", - "time : maximum (interval: 1 hr) time: mean (interval: 1 day)", - "time: maximum (interval: 1 hr) time : mean (interval: 1 day)", - "time : maximum (interval: 1 hr) time : mean (interval: 1 day)", - ] - expected = ( - CellMethod(method="maximum", coords="time", intervals="1 hr"), - CellMethod(method="mean", coords="time", intervals="1 day"), - ) - for cell_method_str in cell_method_strings: - res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) - - def test_comment(self): - cell_method_strings = [ - "time: maximum (interval: 1 hr comment: first bit) " - "time: mean (interval: 1 day comment: second bit)", - "time : maximum (interval: 1 hr comment: first bit) " - "time: mean (interval: 1 day comment: second bit)", - "time: maximum (interval: 1 hr comment: first bit) " - "time : mean (interval: 1 day comment: second bit)", - "time : maximum (interval: 1 hr comment: first bit) " - "time : mean (interval: 1 day comment: 
second bit)", - ] - expected = ( - CellMethod( - method="maximum", - coords="time", - intervals="1 hr", - comments="first bit", - ), - CellMethod( - method="mean", - coords="time", - intervals="1 day", - comments="second bit", - ), - ) - for cell_method_str in cell_method_strings: - res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) - - def test_portions_of_cells(self): - cell_method_strings = [ - "area: mean where sea_ice over sea", - "area : mean where sea_ice over sea", - ] - expected = ( - CellMethod(method="mean where sea_ice over sea", coords="area"), - ) - for cell_method_str in cell_method_strings: - res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) - - def test_climatology(self): - cell_method_strings = [ - "time: minimum within days time: mean over days", - "time : minimum within days time: mean over days", - "time: minimum within days time : mean over days", - "time : minimum within days time : mean over days", - ] - expected = ( - CellMethod(method="minimum within days", coords="time"), - CellMethod(method="mean over days", coords="time"), - ) - for cell_method_str in cell_method_strings: - res = parse_cell_methods(cell_method_str) - self.assertEqual(res, expected) - - def test_climatology_with_unknown_method(self): - cell_method_strings = [ - "time: min within days time: mean over days", - "time : min within days time: mean over days", - "time: min within days time : mean over days", - "time : min within days time : mean over days", - ] - expected = ( - CellMethod(method="min within days", coords="time"), - CellMethod(method="mean over days", coords="time"), - ) - for cell_method_str in cell_method_strings: - with mock.patch("warnings.warn") as warn: - res = parse_cell_methods(cell_method_str) - self.assertIn( - "NetCDF variable contains unknown cell method 'min'", - warn.call_args[0][0], - ) - self.assertEqual(res, expected) - - -if __name__ == "__main__": - tests.main() diff --git 
a/lib/iris/tests/unit/fileformats/netcdf/test_save.py b/lib/iris/tests/unit/fileformats/netcdf/test_save.py deleted file mode 100644 index 830d8c5e52..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/test_save.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.netcdf.save` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import netCDF4 as nc -import numpy as np - -import iris -from iris.coords import DimCoord -from iris.cube import Cube, CubeList -from iris.fileformats.netcdf import CF_CONVENTIONS_VERSION, save -from iris.tests.stock import lat_lon_cube - - -class Test_conventions(tests.IrisTest): - def setUp(self): - self.cube = Cube([0]) - self.custom_conventions = "convention1 convention2" - self.cube.attributes["Conventions"] = self.custom_conventions - self.options = iris.config.netcdf - - def test_custom_conventions__ignored(self): - # Ensure that we drop existing conventions attributes and replace with - # CF convention. - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = nc.Dataset(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, CF_CONVENTIONS_VERSION) - - def test_custom_conventions__allowed(self): - # Ensure that existing conventions attributes are passed through if the - # relevant Iris option is set. 
- with mock.patch.object(self.options, "conventions_override", True): - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = nc.Dataset(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, self.custom_conventions) - - def test_custom_conventions__allowed__missing(self): - # Ensure the default conventions attribute is set if the relevant Iris - # option is set but there is no custom conventions attribute. - del self.cube.attributes["Conventions"] - with mock.patch.object(self.options, "conventions_override", True): - with self.temp_filename(".nc") as nc_path: - save(self.cube, nc_path, "NETCDF4") - ds = nc.Dataset(nc_path) - res = ds.getncattr("Conventions") - ds.close() - self.assertEqual(res, CF_CONVENTIONS_VERSION) - - -class Test_attributes(tests.IrisTest): - def test_attributes_arrays(self): - # Ensure that attributes containing NumPy arrays can be equality - # checked and their cubes saved as appropriate. - c1 = Cube([1], attributes={"bar": np.arange(2)}) - c2 = Cube([2], attributes={"bar": np.arange(2)}) - - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = nc.Dataset(nc_out) - res = ds.getncattr("bar") - ds.close() - self.assertArrayEqual(res, np.arange(2)) - - def test_no_special_attribute_clash(self): - # Ensure that saving multiple cubes with netCDF4 protected attributes - # works as expected. - # Note that here we are testing variable attribute clashes only - by - # saving multiple cubes the attributes are saved as variable - # attributes rather than global attributes. 
- c1 = Cube([0], var_name="test", attributes={"name": "bar"}) - c2 = Cube([0], var_name="test_1", attributes={"name": "bar_1"}) - - with self.temp_filename("foo.nc") as nc_out: - save([c1, c2], nc_out) - ds = nc.Dataset(nc_out) - res = ds.variables["test"].getncattr("name") - res_1 = ds.variables["test_1"].getncattr("name") - ds.close() - self.assertEqual(res, "bar") - self.assertEqual(res_1, "bar_1") - - -class Test_unlimited_dims(tests.IrisTest): - def test_no_unlimited_dims(self): - cube = lat_lon_cube() - with self.temp_filename("foo.nc") as nc_out: - save(cube, nc_out) - ds = nc.Dataset(nc_out) - self.assertFalse(ds.dimensions["latitude"].isunlimited()) - - def test_unlimited_dim_latitude(self): - cube = lat_lon_cube() - unlim_dim_name = "latitude" - with self.temp_filename("foo.nc") as nc_out: - save(cube, nc_out, unlimited_dimensions=[unlim_dim_name]) - ds = nc.Dataset(nc_out) - self.assertTrue(ds.dimensions[unlim_dim_name].isunlimited()) - - -class Test_fill_value(tests.IrisTest): - def setUp(self): - self.standard_names = [ - "air_temperature", - "air_potential_temperature", - "air_temperature_anomaly", - ] - - def _make_cubes(self): - lat = DimCoord(np.arange(3), "latitude", units="degrees") - lon = DimCoord(np.arange(4), "longitude", units="degrees") - data = np.arange(12, dtype="f4").reshape(3, 4) - return CubeList( - Cube( - data, - standard_name=name, - units="K", - dim_coords_and_dims=[(lat, 0), (lon, 1)], - ) - for name in self.standard_names - ) - - def test_None(self): - # Test that when no fill_value argument is passed, the fill_value - # argument to Saver.write is None or not present. 
- cubes = self._make_cubes() - with mock.patch("iris.fileformats.netcdf.Saver") as Saver: - save(cubes, "dummy.nc") - - # Get the Saver.write mock - with Saver() as saver: - write = saver.write - - self.assertEqual(3, write.call_count) - for call in write.mock_calls: - _, _, kwargs = call - if "fill_value" in kwargs: - self.assertIs(None, kwargs["fill_value"]) - - def test_single(self): - # Test that when a single value is passed as the fill_value argument, - # that value is passed to each call to Saver.write - cubes = self._make_cubes() - fill_value = 12345.0 - with mock.patch("iris.fileformats.netcdf.Saver") as Saver: - save(cubes, "dummy.nc", fill_value=fill_value) - - # Get the Saver.write mock - with Saver() as saver: - write = saver.write - - self.assertEqual(3, write.call_count) - for call in write.mock_calls: - _, _, kwargs = call - self.assertEqual(fill_value, kwargs["fill_value"]) - - def test_multiple(self): - # Test that when a list is passed as the fill_value argument, - # each element is passed to separate calls to Saver.write - cubes = self._make_cubes() - fill_values = [123.0, 456.0, 789.0] - with mock.patch("iris.fileformats.netcdf.Saver") as Saver: - save(cubes, "dummy.nc", fill_value=fill_values) - - # Get the Saver.write mock - with Saver() as saver: - write = saver.write - - self.assertEqual(3, write.call_count) - for call, fill_value in zip(write.mock_calls, fill_values): - _, _, kwargs = call - self.assertEqual(fill_value, kwargs["fill_value"]) - - def test_single_string(self): - # Test that when a string is passed as the fill_value argument, - # that value is passed to calls to Saver.write - cube = Cube(["abc", "def", "hij"]) - fill_value = "xyz" - with mock.patch("iris.fileformats.netcdf.Saver") as Saver: - save(cube, "dummy.nc", fill_value=fill_value) - - # Get the Saver.write mock - with Saver() as saver: - write = saver.write - - self.assertEqual(1, write.call_count) - _, _, kwargs = write.mock_calls[0] - self.assertEqual(fill_value, 
kwargs["fill_value"]) - - def test_multi_wrong_length(self): - # Test that when a list of a different length to the number of cubes - # is passed as the fill_value argument, an error is raised - cubes = self._make_cubes() - fill_values = [1.0, 2.0, 3.0, 4.0] - with mock.patch("iris.fileformats.netcdf.Saver"): - with self.assertRaises(ValueError): - save(cubes, "dummy.nc", fill_value=fill_values) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py deleted file mode 100644 index 429ee9ce1f..0000000000 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.fileformats.nimrod_load_rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py deleted file mode 100644 index a15337f849..0000000000 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_units.py +++ /dev/null @@ -1,147 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the `iris.fileformats.nimrod_load_rules.units` function. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.cube import Cube -from iris.fileformats.nimrod import NimrodField -from iris.fileformats.nimrod_load_rules import NIMROD_DEFAULT, units - - -class Test(tests.IrisTest): - NIMROD_LOCATION = "iris.fileformats.nimrod_load_rules" - - def setUp(self): - self.field = mock.Mock( - units="", - int_mdi=-32767, - float32_mdi=NIMROD_DEFAULT, - spec=NimrodField, - ) - self.cube = Cube(np.ones((3, 3), dtype=np.float32)) - - def _call_units(self, data=None, units_str=None): - if data is not None: - self.cube.data = data - if units_str: - self.field.units = units_str - units(self.cube, self.field) - - def test_null(self): - with mock.patch("warnings.warn") as warn: - self._call_units(units_str="m") - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "m") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - - def test_times32(self): - with mock.patch("warnings.warn") as warn: - self._call_units( - data=np.ones_like(self.cube.data) * 32, units_str="mm/hr*32" - ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "mm/hr") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - - def test_visibility_units(self): - with mock.patch("warnings.warn") as warn: - self._call_units( - data=((np.ones_like(self.cube.data) / 2) - 25000), - units_str="m/2-25k", - ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "m") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - - def test_power_in_units(self): - with mock.patch("warnings.warn") as warn: - self._call_units( - data=np.ones_like(self.cube.data) * 1000, units_str="mm*10^3" - ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "mm") - 
self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - - def test_ug_per_m3_units(self): - with mock.patch("warnings.warn") as warn: - self._call_units( - data=((np.ones_like(self.cube.data) * 10)), - units_str="ug/m3E1", - ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "ug/m3") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - - def test_g_per_kg(self): - with mock.patch("warnings.warn") as warn: - self._call_units( - data=((np.ones_like(self.cube.data) * 1000)), units_str="g/Kg" - ) - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "kg/kg") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - - def test_unit_expection_dictionary(self): - with mock.patch("warnings.warn") as warn: - self._call_units(units_str="mb") - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "hPa") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - - def test_per_second(self): - with mock.patch("warnings.warn") as warn: - self._call_units(units_str="/s") - self.assertEqual(warn.call_count, 0) - self.assertEqual(self.cube.units, "s^-1") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - - def test_unhandled_unit(self): - with mock.patch("warnings.warn") as warn: - self._call_units(units_str="kittens") - self.assertEqual(warn.call_count, 1) - self.assertEqual(self.cube.units, "") - self.assertArrayAlmostEqual( - self.cube.data, np.ones_like(self.cube.data) - ) - self.assertEqual(self.cube.data.dtype, np.float32) - self.assertEqual(self.cube.attributes["invalid_units"], "kittens") - - -if __name__ == 
"__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py b/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py deleted file mode 100644 index 44dcf8ac48..0000000000 --- a/lib/iris/tests/unit/fileformats/nimrod_load_rules/test_vertical_coord.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the `iris.fileformats.nimrod_load_rules.vertical_coord` -function. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats.nimrod import NimrodField -from iris.fileformats.nimrod_load_rules import ( - NIMROD_DEFAULT, - TranslationWarning, - vertical_coord, -) - - -class Test(tests.IrisTest): - NIMROD_LOCATION = "iris.fileformats.nimrod_load_rules" - - def setUp(self): - self.field = mock.Mock( - vertical_coord=NIMROD_DEFAULT, - vertical_coord_type=NIMROD_DEFAULT, - reference_vertical_coord=NIMROD_DEFAULT, - reference_vertical_coord_type=NIMROD_DEFAULT, - int_mdi=-32767, - float32_mdi=NIMROD_DEFAULT, - spec=NimrodField, - ) - self.cube = mock.Mock() - - def _call_vertical_coord( - self, - vertical_coord_val=None, - vertical_coord_type=None, - reference_vertical_coord=None, - reference_vertical_coord_type=None, - ): - if vertical_coord_val: - self.field.vertical_coord = vertical_coord_val - if vertical_coord_type: - self.field.vertical_coord_type = vertical_coord_type - if reference_vertical_coord: - self.field.reference_vertical_coord = reference_vertical_coord - if reference_vertical_coord_type: - self.field.reference_vertical_coord_type = ( - reference_vertical_coord_type - ) - vertical_coord(self.cube, self.field) - - def test_unhandled(self): - with 
mock.patch("warnings.warn") as warn: - self._call_vertical_coord( - vertical_coord_val=1.0, vertical_coord_type=-1 - ) - warn.assert_called_once_with( - "Vertical coord -1 not yet handled", TranslationWarning - ) - - def test_null(self): - with mock.patch("warnings.warn") as warn: - self._call_vertical_coord(vertical_coord_type=NIMROD_DEFAULT) - self._call_vertical_coord(vertical_coord_type=self.field.int_mdi) - self.assertEqual(warn.call_count, 0) - - def test_ground_level(self): - with mock.patch("warnings.warn") as warn: - self._call_vertical_coord( - vertical_coord_val=9999.0, vertical_coord_type=0 - ) - self.assertEqual(warn.call_count, 0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/__init__.py b/lib/iris/tests/unit/fileformats/pp/__init__.py deleted file mode 100644 index f309b6848a..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.fileformats.pp` module.""" diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py b/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py deleted file mode 100644 index d70e573296..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test_PPDataProxy.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.PPDataProxy` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats.pp import PPDataProxy, SplittableInt - - -class Test_lbpack(tests.IrisTest): - def test_lbpack_SplittableInt(self): - lbpack = mock.Mock(spec_set=SplittableInt) - proxy = PPDataProxy(None, None, None, None, None, lbpack, None, None) - self.assertEqual(proxy.lbpack, lbpack) - self.assertIs(proxy.lbpack, lbpack) - - def test_lbpack_raw(self): - lbpack = 4321 - proxy = PPDataProxy(None, None, None, None, None, lbpack, None, None) - self.assertEqual(proxy.lbpack, lbpack) - self.assertIsNot(proxy.lbpack, lbpack) - self.assertIsInstance(proxy.lbpack, SplittableInt) - self.assertEqual(proxy.lbpack.n1, lbpack % 10) - self.assertEqual(proxy.lbpack.n2, lbpack // 10 % 10) - self.assertEqual(proxy.lbpack.n3, lbpack // 100 % 10) - self.assertEqual(proxy.lbpack.n4, lbpack // 1000 % 10) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_PPField.py b/lib/iris/tests/unit/fileformats/pp/test_PPField.py deleted file mode 100644 index 5ce41402ad..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test_PPField.py +++ /dev/null @@ -1,355 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.PPField` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -import iris.fileformats.pp as pp -from iris.fileformats.pp import PPField, SplittableInt - -# The PPField class is abstract, so to test we define a minimal, -# concrete subclass with the `t1` and `t2` properties. -# -# NB. We define dummy header items to allow us to zero the unused header -# items when written to disk and get consistent results. 
- - -DUMMY_HEADER = [ - ("dummy1", (0, 11)), - ("lbtim", (12,)), - ("dummy2", (13,)), - ("lblrec", (14,)), - ("dummy3", (15, 16)), - ("lbrow", (17,)), - ("dummy4", (18,)), - ("lbext", (19,)), - ("lbpack", (20,)), - ("dummy5", (21, 37)), - ("lbuser", (38, 39, 40, 41, 42, 43, 44)), - ("brsvd", (45, 46, 47, 48)), - ("bdatum", (49,)), - ("dummy6", (50, 61)), - ("bmdi", (62,)), - ("dummy7", (63,)), -] - - -class TestPPField(PPField): - - HEADER_DEFN = DUMMY_HEADER - HEADER_DICT = dict(DUMMY_HEADER) - - def _ready_for_save(self): - self.dummy1 = 0 - self.dummy2 = 0 - self.dummy3 = 0 - self.dummy4 = 0 - self.dummy5 = 0 - self.dummy6 = 0 - self.dummy7 = 0 - self.lbtim = 0 - self.lblrec = 0 - self.lbrow = 0 - self.lbext = 0 - self.lbpack = 0 - self.lbuser = 0 - self.brsvd = 0 - self.bdatum = 0 - self.bmdi = -1e30 - return self - - @property - def t1(self): - return None - - @property - def t2(self): - return None - - -class Test_save(tests.IrisTest): - def test_float64(self): - # Tests down-casting of >f8 data to >f4. - - def field_checksum(data): - field = TestPPField()._ready_for_save() - field.data = data - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) - checksum = self.file_checksum(temp_filename) - return checksum - - data_64 = np.linspace(0, 1, num=10, endpoint=False).reshape(2, 5) - checksum_32 = field_checksum(data_64.astype(">f4")) - msg = "Downcasting array precision from float64 to float32 for save." - with self.assertWarnsRegexp(msg): - checksum_64 = field_checksum(data_64.astype(">f8")) - self.assertEqual(checksum_32, checksum_64) - - def test_masked_mdi_value_warning(self): - # Check that an unmasked MDI value raises a warning. - field = TestPPField()._ready_for_save() - field.bmdi = -123.4 - # Make float32 data, as float64 default produces an extra warning. 
- field.data = np.ma.masked_array( - [1.0, field.bmdi, 3.0], dtype=np.float32 - ) - msg = "PPField data contains unmasked points" - with self.assertWarnsRegexp(msg): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) - - def test_unmasked_mdi_value_warning(self): - # Check that MDI in *unmasked* data raises a warning. - field = TestPPField()._ready_for_save() - field.bmdi = -123.4 - # Make float32 data, as float64 default produces an extra warning. - field.data = np.array([1.0, field.bmdi, 3.0], dtype=np.float32) - msg = "PPField data contains unmasked points" - with self.assertWarnsRegexp(msg): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) - - def test_mdi_masked_value_nowarning(self): - # Check that a *masked* MDI value does not raise a warning. - field = TestPPField()._ready_for_save() - field.bmdi = -123.4 - # Make float32 data, as float64 default produces an extra warning. - field.data = np.ma.masked_array( - [1.0, 2.0, 3.0], mask=[0, 1, 0], dtype=np.float32 - ) - # Set underlying data value at masked point to BMDI value. 
- field.data.data[1] = field.bmdi - self.assertArrayAllClose(field.data.data[1], field.bmdi) - with self.assertNoWarningsRegexp(r"\(mask\|fill\)"): - with self.temp_filename(".pp") as temp_filename: - with open(temp_filename, "wb") as pp_file: - field.save(pp_file) - - -class Test_calendar(tests.IrisTest): - def test_greg(self): - field = TestPPField() - field.lbtim = SplittableInt(1, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "gregorian") - - def test_360(self): - field = TestPPField() - field.lbtim = SplittableInt(2, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "360_day") - - def test_365(self): - field = TestPPField() - field.lbtim = SplittableInt(4, {"ia": 2, "ib": 1, "ic": 0}) - self.assertEqual(field.calendar, "365_day") - - -class Test_coord_system(tests.IrisTest): - def _check_cs(self, bplat, bplon, rotated): - field = TestPPField() - field.bplat = bplat - field.bplon = bplon - with mock.patch( - "iris.fileformats.pp.iris.coord_systems" - ) as mock_cs_mod: - result = field.coord_system() - if not rotated: - # It should return a standard unrotated CS. - self.assertTrue(mock_cs_mod.GeogCS.call_count == 1) - self.assertEqual(result, mock_cs_mod.GeogCS()) - else: - # It should return a rotated CS with the correct makeup. - self.assertTrue(mock_cs_mod.GeogCS.call_count == 1) - self.assertTrue(mock_cs_mod.RotatedGeogCS.call_count == 1) - self.assertEqual(result, mock_cs_mod.RotatedGeogCS()) - self.assertEqual( - mock_cs_mod.RotatedGeogCS.call_args_list[0], - mock.call(bplat, bplon, ellipsoid=mock_cs_mod.GeogCS()), - ) - - def test_normal_unrotated(self): - # Check that 'normal' BPLAT,BPLON=90,0 produces an unrotated system. - self._check_cs(bplat=90, bplon=0, rotated=False) - - def test_bplon_180_unrotated(self): - # Check that BPLAT,BPLON=90,180 behaves the same as 90,0. - self._check_cs(bplat=90, bplon=180, rotated=False) - - def test_odd_bplat_rotated(self): - # Show that BPLAT != 90 produces a rotated field. 
- self._check_cs(bplat=75, bplon=180, rotated=True) - - def test_odd_bplon_rotated(self): - # Show that BPLON != 0 or 180 produces a rotated field. - self._check_cs(bplat=90, bplon=123.45, rotated=True) - - -class Test__init__(tests.IrisTest): - def setUp(self): - header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) - header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) - self.header = list(header_longs) + list(header_floats) - - def test_no_headers(self): - field = TestPPField() - self.assertIsNone(field._raw_header) - self.assertIsNone(field.raw_lbtim) - self.assertIsNone(field.raw_lbpack) - - def test_lbtim_lookup(self): - self.assertEqual(TestPPField.HEADER_DICT["lbtim"], (12,)) - - def test_lbpack_lookup(self): - self.assertEqual(TestPPField.HEADER_DICT["lbpack"], (20,)) - - def test_raw_lbtim(self): - raw_lbtim = 4321 - (loc,) = TestPPField.HEADER_DICT["lbtim"] - self.header[loc] = raw_lbtim - field = TestPPField(header=self.header) - self.assertEqual(field.raw_lbtim, raw_lbtim) - - def test_raw_lbpack(self): - raw_lbpack = 4321 - (loc,) = TestPPField.HEADER_DICT["lbpack"] - self.header[loc] = raw_lbpack - field = TestPPField(header=self.header) - self.assertEqual(field.raw_lbpack, raw_lbpack) - - -class Test__getattr__(tests.IrisTest): - def setUp(self): - header_longs = np.zeros(pp.NUM_LONG_HEADERS, dtype=np.int_) - header_floats = np.zeros(pp.NUM_FLOAT_HEADERS, dtype=np.float64) - self.header = list(header_longs) + list(header_floats) - - def test_attr_singular_long(self): - lbrow = 1234 - (loc,) = TestPPField.HEADER_DICT["lbrow"] - self.header[loc] = lbrow - field = TestPPField(header=self.header) - self.assertEqual(field.lbrow, lbrow) - - def test_attr_multi_long(self): - lbuser = (100, 101, 102, 103, 104, 105, 106) - loc = TestPPField.HEADER_DICT["lbuser"] - self.header[loc[0] : loc[-1] + 1] = lbuser - field = TestPPField(header=self.header) - self.assertEqual(field.lbuser, lbuser) - - def test_attr_singular_float(self): - bdatum = 
1234 - (loc,) = TestPPField.HEADER_DICT["bdatum"] - self.header[loc] = bdatum - field = TestPPField(header=self.header) - self.assertEqual(field.bdatum, bdatum) - - def test_attr_multi_float(self): - brsvd = (100, 101, 102, 103) - loc = TestPPField.HEADER_DICT["brsvd"] - start = loc[0] - stop = loc[-1] + 1 - self.header[start:stop] = brsvd - field = TestPPField(header=self.header) - self.assertEqual(field.brsvd, brsvd) - - def test_attr_lbtim(self): - raw_lbtim = 4321 - (loc,) = TestPPField.HEADER_DICT["lbtim"] - self.header[loc] = raw_lbtim - field = TestPPField(header=self.header) - result = field.lbtim - self.assertEqual(result, raw_lbtim) - self.assertIsInstance(result, SplittableInt) - result = field._lbtim - self.assertEqual(result, raw_lbtim) - self.assertIsInstance(result, SplittableInt) - - def test_attr_lbpack(self): - raw_lbpack = 4321 - (loc,) = TestPPField.HEADER_DICT["lbpack"] - self.header[loc] = raw_lbpack - field = TestPPField(header=self.header) - result = field.lbpack - self.assertEqual(result, raw_lbpack) - self.assertIsInstance(result, SplittableInt) - result = field._lbpack - self.assertEqual(result, raw_lbpack) - self.assertIsInstance(result, SplittableInt) - - def test_attr_raw_lbtim_assign(self): - field = TestPPField(header=self.header) - self.assertEqual(field.raw_lbpack, 0) - self.assertEqual(field.lbtim, 0) - raw_lbtim = 4321 - field.lbtim = raw_lbtim - self.assertEqual(field.raw_lbtim, raw_lbtim) - self.assertNotIsInstance(field.raw_lbtim, SplittableInt) - - def test_attr_raw_lbpack_assign(self): - field = TestPPField(header=self.header) - self.assertEqual(field.raw_lbpack, 0) - self.assertEqual(field.lbpack, 0) - raw_lbpack = 4321 - field.lbpack = raw_lbpack - self.assertEqual(field.raw_lbpack, raw_lbpack) - self.assertNotIsInstance(field.raw_lbpack, SplittableInt) - - def test_attr_unknown(self): - with self.assertRaises(AttributeError): - TestPPField().x - - -class Test_lbtim(tests.IrisTest): - def test_get_splittable(self): - 
headers = [0] * 64 - headers[12] = 12345 - field = TestPPField(headers) - self.assertIsInstance(field.lbtim, SplittableInt) - self.assertEqual(field.lbtim.ia, 123) - self.assertEqual(field.lbtim.ib, 4) - self.assertEqual(field.lbtim.ic, 5) - - def test_set_int(self): - headers = [0] * 64 - headers[12] = 12345 - field = TestPPField(headers) - field.lbtim = 34567 - self.assertIsInstance(field.lbtim, SplittableInt) - self.assertEqual(field.lbtim.ia, 345) - self.assertEqual(field.lbtim.ib, 6) - self.assertEqual(field.lbtim.ic, 7) - self.assertEqual(field.raw_lbtim, 34567) - - def test_set_splittable(self): - # Check that assigning a SplittableInt to lbtim uses the integer - # value. In other words, check that you can't assign an - # arbitrary SplittableInt with crazy named attributes. - headers = [0] * 64 - headers[12] = 12345 - field = TestPPField(headers) - si = SplittableInt(34567, {"foo": 0}) - field.lbtim = si - self.assertIsInstance(field.lbtim, SplittableInt) - with self.assertRaises(AttributeError): - field.lbtim.foo - self.assertEqual(field.lbtim.ia, 345) - self.assertEqual(field.lbtim.ib, 6) - self.assertEqual(field.lbtim.ic, 7) - self.assertEqual(field.raw_lbtim, 34567) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py b/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py deleted file mode 100644 index 514e326393..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test__convert_constraints.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.load` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import iris -from iris.fileformats.pp import STASH, _convert_constraints - - -class Test_convert_constraints(tests.IrisTest): - def _single_stash(self): - constraint = iris.AttributeConstraint(STASH="m01s03i236") - return _convert_constraints(constraint) - - def test_single_stash(self): - pp_filter = self._single_stash() - stcube = mock.Mock(stash=STASH.from_msi("m01s03i236")) - self.assertTrue(pp_filter(stcube)) - - def test_stash_object(self): - constraint = iris.AttributeConstraint( - STASH=STASH.from_msi("m01s03i236") - ) - pp_filter = _convert_constraints(constraint) - stcube = mock.Mock(stash=STASH.from_msi("m01s03i236")) - self.assertTrue(pp_filter(stcube)) - - def test_surface_altitude(self): - # Ensure that surface altitude fields are not filtered. - pp_filter = self._single_stash() - orography_cube = mock.Mock(stash=STASH.from_msi("m01s00i033")) - self.assertTrue(pp_filter(orography_cube)) - - def test_surface_pressure(self): - # Ensure that surface pressure fields are not filtered. 
- pp_filter = self._single_stash() - pressure_cube = mock.Mock(stash=STASH.from_msi("m01s00i001")) - self.assertTrue(pp_filter(pressure_cube)) - - def test_double_stash(self): - stcube236 = mock.Mock(stash=STASH.from_msi("m01s03i236")) - stcube4 = mock.Mock(stash=STASH.from_msi("m01s00i004")) - stcube7 = mock.Mock(stash=STASH.from_msi("m01s00i007")) - constraints = [ - iris.AttributeConstraint(STASH="m01s03i236"), - iris.AttributeConstraint(STASH="m01s00i004"), - ] - pp_filter = _convert_constraints(constraints) - self.assertTrue(pp_filter(stcube236)) - self.assertTrue(pp_filter(stcube4)) - self.assertFalse(pp_filter(stcube7)) - - def test_callable_stash(self): - stcube236 = mock.Mock(stash=STASH.from_msi("m01s03i236")) - stcube4 = mock.Mock(stash=STASH.from_msi("m01s00i004")) - stcube7 = mock.Mock(stash=STASH.from_msi("m01s00i007")) - con1 = iris.AttributeConstraint(STASH=lambda s: s.endswith("004")) - con2 = iris.AttributeConstraint(STASH=lambda s: s == "m01s00i007") - constraints = [con1, con2] - pp_filter = _convert_constraints(constraints) - self.assertFalse(pp_filter(stcube236)) - self.assertTrue(pp_filter(stcube4)) - self.assertTrue(pp_filter(stcube7)) - - def test_multiple_with_stash(self): - constraints = [ - iris.Constraint("air_potential_temperature"), - iris.AttributeConstraint(STASH="m01s00i004"), - ] - pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) - - def test_no_stash(self): - constraints = [ - iris.Constraint("air_potential_temperature"), - iris.AttributeConstraint(source="asource"), - ] - pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) - - def test_no_constraint(self): - constraints = [] - pp_filter = _convert_constraints(constraints) - self.assertIsNone(pp_filter) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py b/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py deleted file mode 100644 index 
16d2b500a5..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test__create_field_data.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp._create_field_data` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -import iris.fileformats.pp as pp - - -class Test__create_field_data(tests.IrisTest): - def test_loaded_bytes(self): - # Check that a field with LoadedArrayBytes in core_data gets the - # result of a suitable call to _data_bytes_to_shaped_array(). - mock_loaded_bytes = mock.Mock(spec=pp.LoadedArrayBytes) - core_data = mock.MagicMock(return_value=mock_loaded_bytes) - field = mock.Mock(core_data=core_data) - data_shape = mock.Mock() - land_mask = mock.Mock() - with mock.patch( - "iris.fileformats.pp._data_bytes_to_shaped_array" - ) as convert_bytes: - convert_bytes.return_value = mock.sentinel.array - pp._create_field_data(field, data_shape, land_mask) - - self.assertIs(field.data, mock.sentinel.array) - convert_bytes.assert_called_once_with( - mock_loaded_bytes.bytes, - field.lbpack, - field.boundary_packing, - data_shape, - mock_loaded_bytes.dtype, - field.bmdi, - land_mask, - ) - - def test_deferred_bytes(self): - # Check that a field with deferred array bytes in core_data gets a - # dask array. 
- fname = mock.sentinel.fname - position = mock.sentinel.position - n_bytes = mock.sentinel.n_bytes - newbyteorder = mock.Mock(return_value=mock.sentinel.dtype) - dtype = mock.Mock(newbyteorder=newbyteorder) - deferred_bytes = (fname, position, n_bytes, dtype) - core_data = mock.MagicMock(return_value=deferred_bytes) - field = mock.Mock(core_data=core_data) - data_shape = (100, 120) - proxy = mock.Mock( - dtype=np.dtype("f4"), - shape=data_shape, - spec=pp.PPDataProxy, - ndim=len(data_shape), - ) - # We can't directly inspect the concrete data source underlying - # the dask array, so instead we patch the proxy creation and check it's - # being created and invoked correctly. - with mock.patch("iris.fileformats.pp.PPDataProxy") as PPDataProxy: - PPDataProxy.return_value = proxy - pp._create_field_data(field, data_shape, land_mask_field=None) - # The data should be assigned via field.data. As this is a mock object - # we can check the attribute directly. - self.assertEqual(field.data.shape, data_shape) - self.assertEqual(field.data.dtype, np.dtype("f4")) - # Is it making use of a correctly configured proxy? - # NB. We know it's *using* the result of this call because - # that's where the dtype came from above. - PPDataProxy.assert_called_once_with( - (data_shape), - dtype, - fname, - position, - n_bytes, - field.raw_lbpack, - field.boundary_packing, - field.bmdi, - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py b/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py deleted file mode 100644 index 83475c6782..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test__data_bytes_to_shaped_array.py +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Unit tests for the `iris.fileformats.pp._data_bytes_to_shaped_array` function. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import io -from unittest import mock - -import numpy as np -import numpy.ma as ma - -import iris.fileformats.pp as pp - - -class Test__data_bytes_to_shaped_array__lateral_boundary_compression( - tests.IrisTest -): - def setUp(self): - self.data_shape = 30, 40 - y_halo, x_halo, rim = 2, 3, 4 - - data_len = np.prod(self.data_shape) - decompressed = np.arange(data_len).reshape(*self.data_shape) - decompressed *= np.arange(self.data_shape[1]) % 3 + 1 - - decompressed_mask = np.zeros(self.data_shape, np.bool_) - decompressed_mask[ - y_halo + rim : -(y_halo + rim), x_halo + rim : -(x_halo + rim) - ] = True - - self.decompressed = ma.masked_array( - decompressed, mask=decompressed_mask - ) - - self.north = decompressed[-(y_halo + rim) :, :] - self.east = decompressed[ - y_halo + rim : -(y_halo + rim), -(x_halo + rim) : - ] - self.south = decompressed[: y_halo + rim, :] - self.west = decompressed[ - y_halo + rim : -(y_halo + rim), : x_halo + rim - ] - - # Get the bytes of the north, east, south, west arrays combined. 
- buf = io.BytesIO() - buf.write(self.north.copy()) - buf.write(self.east.copy()) - buf.write(self.south.copy()) - buf.write(self.west.copy()) - buf.seek(0) - self.data_payload_bytes = buf.read() - - def test_boundary_decompression(self): - boundary_packing = mock.Mock(rim_width=4, x_halo=3, y_halo=2) - lbpack = mock.Mock(n1=0) - r = pp._data_bytes_to_shaped_array( - self.data_payload_bytes, - lbpack, - boundary_packing, - self.data_shape, - self.decompressed.dtype, - -9223372036854775808, - ) - r = ma.masked_array(r, np.isnan(r), fill_value=-9223372036854775808) - self.assertMaskedArrayEqual(r, self.decompressed) - - -class Test__data_bytes_to_shaped_array__land_packed(tests.IrisTest): - def setUp(self): - # Sets up some useful arrays for use with the land/sea mask - # decompression. - self.land = np.array( - [[0, 1, 0, 0], [1, 0, 0, 0], [0, 0, 0, 1]], dtype=np.float64 - ) - sea = ~self.land.astype(np.bool_) - self.land_masked_data = np.array([1, 3, 4.5]) - self.sea_masked_data = np.array([1, 3, 4.5, -4, 5, 0, 1, 2, 3]) - - # Compute the decompressed land mask data. - self.decomp_land_data = ma.masked_array( - [[0, 1, 0, 0], [3, 0, 0, 0], [0, 0, 0, 4.5]], - mask=sea, - dtype=np.float64, - ) - # Compute the decompressed sea mask data. - self.decomp_sea_data = ma.masked_array( - [[1, -10, 3, 4.5], [-10, -4, 5, 0], [1, 2, 3, -10]], - mask=self.land, - dtype=np.float64, - ) - - self.land_mask = mock.Mock( - data=self.land, lbrow=self.land.shape[0], lbnpt=self.land.shape[1] - ) - - def create_lbpack(self, value): - name_mapping = dict(n5=slice(4, None), n4=3, n3=2, n2=1, n1=0) - return pp.SplittableInt(value, name_mapping) - - def test_no_land_mask(self): - # Check that without a mask, it returns the raw (compressed) data. 
- with mock.patch("numpy.frombuffer", return_value=np.arange(3)): - result = pp._data_bytes_to_shaped_array( - mock.Mock(), - self.create_lbpack(120), - None, - (3, 4), - np.dtype(">f4"), - -999, - mask=None, - ) - self.assertArrayAllClose(result, np.arange(3)) - - def test_land_mask(self): - # Check basic land unpacking. - field_data = self.land_masked_data - result = self.check_read_data(field_data, 120, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_land_data) - - def test_land_masked_data_too_long(self): - # Check land unpacking with field data that is larger than the mask. - field_data = np.tile(self.land_masked_data, 2) - result = self.check_read_data(field_data, 120, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_land_data) - - def test_sea_mask(self): - # Check basic land unpacking. - field_data = self.sea_masked_data - result = self.check_read_data(field_data, 220, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_sea_data) - - def test_sea_masked_data_too_long(self): - # Check sea unpacking with field data that is larger than the mask. - field_data = np.tile(self.sea_masked_data, 2) - result = self.check_read_data(field_data, 220, self.land_mask) - self.assertMaskedArrayEqual(result, self.decomp_sea_data) - - def test_bad_lbpack(self): - # Check basic land unpacking. - field_data = self.sea_masked_data - with self.assertRaises(ValueError): - self.check_read_data(field_data, 320, self.land_mask) - - def check_read_data(self, field_data, lbpack, mask): - # Calls pp._data_bytes_to_shaped_array with the necessary mocked - # items, an lbpack instance, the correct data shape and mask instance. 
- with mock.patch("numpy.frombuffer", return_value=field_data): - data = pp._data_bytes_to_shaped_array( - mock.Mock(), - self.create_lbpack(lbpack), - None, - mask.shape, - np.dtype(">f4"), - -999, - mask=mask, - ) - return ma.masked_array(data, np.isnan(data), fill_value=-999) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py b/lib/iris/tests/unit/fileformats/pp/test__field_gen.py deleted file mode 100644 index 31ac4f6b19..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test__field_gen.py +++ /dev/null @@ -1,112 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp._field_gen` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import contextlib -import io -from unittest import mock -import warnings - -import numpy as np - -import iris.fileformats.pp as pp - - -class Test(tests.IrisTest): - @contextlib.contextmanager - def mock_for_field_gen(self, fields): - side_effect_fields = list(fields)[:] - - def make_pp_field_override(*args): - # Iterates over the fields passed to this context manager, - # until there are no more, upon which the np.fromfile - # returns an empty list and the while loop in load() is - # broken. 
- result = side_effect_fields.pop(0) - if not side_effect_fields: - np.fromfile.return_value = [] - return result - - open_func = "builtins.open" - with mock.patch("numpy.fromfile", return_value=[0]), mock.patch( - open_func - ), mock.patch("struct.unpack_from", return_value=[4]), mock.patch( - "iris.fileformats.pp.make_pp_field", - side_effect=make_pp_field_override, - ): - yield - - def gen_fields(self, fields): - with self.mock_for_field_gen(fields): - return list(pp._field_gen("mocked", "mocked")) - - def test_lblrec_invalid(self): - pp_field = mock.Mock(lblrec=2, lbext=0) - with warnings.catch_warnings(record=True) as warn: - warnings.simplefilter("always") - self.gen_fields([pp_field]) - self.assertEqual(len(warn), 1) - wmsg = ( - "LBLREC has a different value to the .* the header in the " - r"file \(8 and 4\)\. Skipping .*" - ) - self.assertRegex(str(warn[0].message), wmsg) - - def test_read_headers_call(self): - # Checks that the two calls to np.fromfile are called in the - # expected way. - pp_field = mock.Mock(lblrec=1, lbext=0, lbuser=[0]) - with self.mock_for_field_gen([pp_field]): - open_fh = mock.MagicMock(spec=io.RawIOBase) - open.return_value = open_fh - next(pp._field_gen("mocked", read_data_bytes=False)) - with open_fh as open_fh_ctx: - calls = [ - mock.call(open_fh_ctx, count=45, dtype=">i4"), - mock.call(open_fh_ctx, count=19, dtype=">f4"), - ] - np.fromfile.assert_has_calls(calls) - with open_fh as open_fh_ctx: - expected_deferred_bytes = ( - "mocked", - open_fh_ctx.tell(), - 4, - np.dtype(">f4"), - ) - self.assertEqual(pp_field.data, expected_deferred_bytes) - - def test_read_data_call(self): - # Checks that data is read if read_data is True. 
- pp_field = mock.Mock(lblrec=1, lbext=0, lbuser=[0]) - with self.mock_for_field_gen([pp_field]): - open_fh = mock.MagicMock(spec=io.RawIOBase) - open.return_value = open_fh - next(pp._field_gen("mocked", read_data_bytes=True)) - with open_fh as open_fh_ctx: - expected_loaded_bytes = pp.LoadedArrayBytes( - open_fh_ctx.read(), np.dtype(">f4") - ) - self.assertEqual(pp_field.data, expected_loaded_bytes) - - def test_invalid_header_release(self): - # Check that an unknown LBREL value just results in a warning - # and the end of the file iteration instead of raising an error. - with self.temp_filename() as temp_path: - np.zeros(65, dtype="i4").tofile(temp_path) - generator = pp._field_gen(temp_path, False) - with mock.patch("warnings.warn") as warn: - with self.assertRaises(StopIteration): - next(generator) - self.assertEqual(warn.call_count, 1) - self.assertIn("header release number", warn.call_args[0][0]) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py b/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py deleted file mode 100644 index 0b83cade76..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test__interpret_field.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp._interpret_field` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from copy import deepcopy -from unittest import mock - -import numpy as np - -import iris -import iris.fileformats.pp as pp - - -class Test__interpret_fields__land_packed_fields(tests.IrisTest): - def setUp(self): - return_value = ("dummy", 0, 0, np.dtype("f4")) - core_data = mock.MagicMock(return_value=return_value) - # A field packed using a land/sea mask. - self.pp_field = mock.Mock( - lblrec=1, - lbext=0, - lbuser=[0] * 7, - lbrow=0, - lbnpt=0, - raw_lbpack=21, - lbpack=mock.Mock(n1=0, n2=2, n3=1), - core_data=core_data, - ) - # The field specifying the land/seamask. - lbuser = [None, None, None, 30, None, None, 1] # m01s00i030 - self.land_mask_field = mock.Mock( - lblrec=1, - lbext=0, - lbuser=lbuser, - lbrow=3, - lbnpt=4, - raw_lbpack=0, - core_data=core_data, - ) - - def test_non_deferred_fix_lbrow_lbnpt(self): - # Checks the fix_lbrow_lbnpt is applied to fields which are not - # deferred. - f1, mask = self.pp_field, self.land_mask_field - self.assertEqual(f1.lbrow, 0) - self.assertEqual(f1.lbnpt, 0) - list(pp._interpret_fields([mask, f1])) - self.assertEqual(f1.lbrow, 3) - self.assertEqual(f1.lbnpt, 4) - # Check the data's shape has been updated too. - self.assertEqual(f1.data.shape, (3, 4)) - - def test_fix_lbrow_lbnpt_no_mask_available(self): - # Check a warning is issued when loading a land masked field - # without a land mask. - with mock.patch("warnings.warn") as warn: - list(pp._interpret_fields([self.pp_field])) - self.assertEqual(warn.call_count, 1) - warn_msg = warn.call_args[0][0] - self.assertTrue( - warn_msg.startswith( - "Landmask compressed fields " "existed without a landmask" - ), - "Unexpected warning message: {!r}".format(warn_msg), - ) - - def test_deferred_mask_field(self): - # Check that the order of the load is yielded last if the mask - # hasn't yet been seen. 
- result = list( - pp._interpret_fields([self.pp_field, self.land_mask_field]) - ) - self.assertEqual(result, [self.land_mask_field, self.pp_field]) - - def test_not_deferred_mask_field(self): - # Check that the order of the load is unchanged if a land mask - # has already been seen. - f1, mask = self.pp_field, self.land_mask_field - mask2 = deepcopy(mask) - result = list(pp._interpret_fields([mask, f1, mask2])) - self.assertEqual(result, [mask, f1, mask2]) - - def test_deferred_fix_lbrow_lbnpt(self): - # Check the fix is also applied to fields which are deferred. - f1, mask = self.pp_field, self.land_mask_field - list(pp._interpret_fields([f1, mask])) - self.assertEqual(f1.lbrow, 3) - self.assertEqual(f1.lbnpt, 4) - - @tests.skip_data - def test_landsea_unpacking_uses_dask(self): - # Ensure that the graph of the (lazy) landsea-masked data contains an - # explicit reference to a (lazy) landsea-mask field. - # Otherwise its compute() will need to invoke another compute(). - # See https://github.com/SciTools/iris/issues/3237 - - # This is too complex to explore in a mock-ist way, so let's load a - # tiny bit of real data ... - testfile_path = tests.get_data_path( - ["FF", "landsea_masked", "testdata_mini_lsm.ff"] - ) - landsea_mask, soil_temp = iris.load_cubes( - testfile_path, ("land_binary_mask", "soil_temperature") - ) - - # Now check that the soil-temp dask graph correctly references the - # landsea mask, in its dask graph. - lazy_mask_array = landsea_mask.core_data() - lazy_soildata_array = soil_temp.core_data() - - # Work out the main dask key for the mask data, as used by 'compute()'. - mask_toplev_key = (lazy_mask_array.name,) + (0,) * lazy_mask_array.ndim - # Get the 'main' calculation entry. - mask_toplev_item = lazy_mask_array.dask[mask_toplev_key] - # This should be a task (a simple fetch). - self.assertTrue(callable(mask_toplev_item[0])) - # Get the key (name) of the array that it fetches. 
- mask_data_name = mask_toplev_item[1] - - # Check that the item this refers to is a PPDataProxy. - self.assertIsInstance( - lazy_mask_array.dask[mask_data_name], pp.PPDataProxy - ) - - # Check that the soil-temp graph references the *same* lazy element, - # showing that the mask+data calculation is handled by dask. - self.assertIn(mask_data_name, lazy_soildata_array.dask.keys()) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py b/lib/iris/tests/unit/fileformats/pp/test_as_fields.py deleted file mode 100644 index 3ff228e106..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test_as_fields.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.as_fields` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import iris.fileformats.pp as pp -import iris.tests.stock as stock - - -class TestAsFields(tests.IrisTest): - def setUp(self): - self.cube = stock.realistic_3d() - - def test_cube_only(self): - fields = pp.as_fields(self.cube) - for field in fields: - self.assertEqual(field.lbcode, 101) - - def test_field_coords(self): - fields = pp.as_fields( - self.cube, field_coords=["grid_longitude", "grid_latitude"] - ) - for field in fields: - self.assertEqual(field.lbcode, 101) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_load.py b/lib/iris/tests/unit/fileformats/pp/test_load.py deleted file mode 100644 index 77da1288c2..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test_load.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.load` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris.fileformats.pp as pp - - -class Test_load(tests.IrisTest): - def test_call_structure(self): - # Check that the load function calls the two necessary utility - # functions. - extract_result = mock.Mock() - interpret_patch = mock.patch( - "iris.fileformats.pp._interpret_fields", - autospec=True, - return_value=iter([]), - ) - field_gen_patch = mock.patch( - "iris.fileformats.pp._field_gen", - autospec=True, - return_value=extract_result, - ) - with interpret_patch as interpret, field_gen_patch as field_gen: - pp.load("mock", read_data=True) - - interpret.assert_called_once_with(extract_result) - field_gen.assert_called_once_with( - "mock", read_data_bytes=True, little_ended=False - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py deleted file mode 100644 index 45012dc8bd..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test_save.py +++ /dev/null @@ -1,339 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.save` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import cf_units -import cftime - -from iris.coords import CellMethod, DimCoord -from iris.fileformats._ff_cross_references import STASH_TRANS -import iris.fileformats.pp as pp -from iris.fileformats.pp_save_rules import _lbproc_rules, verify -import iris.tests.stock as stock - - -def _pp_save_ppfield_values(cube): - """ - Emulate saving a cube as PP, and capture the resulting PP field values. - - """ - # Create a test object to stand in for a real PPField. - pp_field = mock.MagicMock(spec=pp.PPField3) - # Add minimal content required by the pp.save operation. - pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN - # Save cube to a dummy file, mocking the internally created PPField - with mock.patch("iris.fileformats.pp.PPField3", return_value=pp_field): - target_filelike = mock.Mock(name="target") - target_filelike.mode = "b" - pp.save(cube, target_filelike) - # Return pp-field mock with all the written properties - return pp_field - - -class TestVertical(tests.IrisTest): - def setUp(self): - self.cube = stock.lat_lon_cube() - - def test_pseudo_level(self): - pseudo_level = 123 - coord = DimCoord(pseudo_level, long_name="pseudo_level", units="1") - self.cube.add_aux_coord(coord) - lbuser5_produced = _pp_save_ppfield_values(self.cube).lbuser[4] - self.assertEqual(pseudo_level, lbuser5_produced) - - def test_soil_level(self): - soil_level = 314 - coord = DimCoord(soil_level, long_name="soil_model_level_number") - self.cube.add_aux_coord(coord) - self.cube.standard_name = "moisture_content_of_soil_layer" - field = _pp_save_ppfield_values(self.cube) - self.assertEqual(field.lbvc, 6) - self.assertEqual(field.lblev, soil_level) - self.assertEqual(field.blev, soil_level) - self.assertEqual(field.brsvd[0], 0) - self.assertEqual(field.brlev, 0) - - def test_soil_depth(self): - lower, point, upper = 1, 2, 3 - coord = DimCoord(point, standard_name="depth", bounds=[[lower, upper]]) - 
self.cube.add_aux_coord(coord) - self.cube.standard_name = "moisture_content_of_soil_layer" - field = _pp_save_ppfield_values(self.cube) - self.assertEqual(field.lbvc, 6) - self.assertEqual(field.lblev, 0) - self.assertEqual(field.blev, point) - self.assertEqual(field.brsvd[0], lower) - self.assertEqual(field.brlev, upper) - - -class TestLbfcProduction(tests.IrisTest): - def setUp(self): - self.cube = stock.lat_lon_cube() - - def check_cube_stash_yields_lbfc(self, stash, lbfc_expected): - if stash: - self.cube.attributes["STASH"] = stash - lbfc_produced = _pp_save_ppfield_values(self.cube).lbfc - self.assertEqual(lbfc_produced, lbfc_expected) - - def test_known_stash(self): - stashcode_str = "m04s07i002" - self.assertIn(stashcode_str, STASH_TRANS) - self.check_cube_stash_yields_lbfc(stashcode_str, 359) - - def test_unknown_stash(self): - stashcode_str = "m99s99i999" - self.assertNotIn(stashcode_str, STASH_TRANS) - self.check_cube_stash_yields_lbfc(stashcode_str, 0) - - def test_no_stash(self): - self.assertNotIn("STASH", self.cube.attributes) - self.check_cube_stash_yields_lbfc(None, 0) - - def check_cube_name_units_yields_lbfc(self, name, units, lbfc_expected): - self.cube.rename(name) - self.cube.units = units - lbfc_produced = _pp_save_ppfield_values(self.cube).lbfc - self.assertEqual( - lbfc_produced, - lbfc_expected, - "Lbfc for ({!r} / {!r}) should be {:d}, " - "got {:d}".format(name, units, lbfc_expected, lbfc_produced), - ) - - def test_name_units_to_lbfc(self): - # Check LBFC value produced from name and units. - self.check_cube_name_units_yields_lbfc("sea_ice_temperature", "K", 209) - - def test_bad_name_units_to_lbfc_0(self): - # Check that badly-formed / unrecognised cases yield LBFC == 0. 
- self.check_cube_name_units_yields_lbfc( - "sea_ice_temperature", "degC", 0 - ) - self.check_cube_name_units_yields_lbfc("Junk_Name", "K", 0) - - -class TestLbsrceProduction(tests.IrisTest): - def setUp(self): - self.cube = stock.lat_lon_cube() - - def check_cube_um_source_yields_lbsrce( - self, source_str=None, um_version_str=None, lbsrce_expected=None - ): - if source_str is not None: - self.cube.attributes["source"] = source_str - if um_version_str is not None: - self.cube.attributes["um_version"] = um_version_str - lbsrce_produced = _pp_save_ppfield_values(self.cube).lbsrce - self.assertEqual(lbsrce_produced, lbsrce_expected) - - def test_none(self): - self.check_cube_um_source_yields_lbsrce(None, None, 0) - - def test_source_only_no_version(self): - self.check_cube_um_source_yields_lbsrce( - "Data from Met Office Unified Model", None, 1111 - ) - - def test_source_only_with_version(self): - self.check_cube_um_source_yields_lbsrce( - "Data from Met Office Unified Model 12.17", None, 12171111 - ) - - def test_um_version(self): - self.check_cube_um_source_yields_lbsrce( - "Data from Met Office Unified Model 12.17", "25.36", 25361111 - ) - - -class Test_Save__LbprocProduction(tests.IrisTest): - # This test class is a little different to the others. - # If it called `pp.save` via `_pp_save_ppfield_values` it would run - # `pp_save_rules.verify` and run all the save rules. As this class uses - # a 3D cube with a time coord it would run the time rules, which would fail - # because the mock object does not set up the `pp.lbtim` attribute - # correctly (i.e. as a `SplittableInt` object). - # To work around this we call the lbproc rules directly here. 
- - def setUp(self): - self.cube = stock.realistic_3d() - self.pp_field = mock.MagicMock(spec=pp.PPField3) - self.pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN - self.patch("iris.fileformats.pp.PPField3", return_value=self.pp_field) - - def test_no_cell_methods(self): - lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc - self.assertEqual(lbproc, 0) - - def test_mean(self): - self.cube.cell_methods = (CellMethod("mean", "time", "1 hour"),) - lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc - self.assertEqual(lbproc, 128) - - def test_minimum(self): - self.cube.cell_methods = (CellMethod("minimum", "time", "1 hour"),) - lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc - self.assertEqual(lbproc, 4096) - - def test_maximum(self): - self.cube.cell_methods = (CellMethod("maximum", "time", "1 hour"),) - lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc - self.assertEqual(lbproc, 8192) - - -class TestTimeMean(tests.IrisTest): - """ - Tests that time mean cell method is converted to pp appropriately. - - Pattern is pairs of tests - one with time mean method, and one without, to - show divergent behaviour. 
- - """ - - def test_t1_time_mean(self): - cube = _get_single_time_cube(set_time_mean=True) - tc = cube.coord(axis="t") - expected = tc.units.num2date(0) - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual = pp_field.t1 - - self.assertEqual(expected, actual) - - def test_t1_no_time_mean(self): - cube = _get_single_time_cube() - tc = cube.coord(axis="t") - expected = tc.units.num2date(15) - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual = pp_field.t1 - - self.assertEqual(expected, actual) - - def test_t2_time_mean(self): - cube = _get_single_time_cube(set_time_mean=True) - tc = cube.coord(axis="t") - expected = tc.units.num2date(30) - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual = pp_field.t2 - - self.assertEqual(expected, actual) - - def test_t2_no_time_mean(self): - cube = _get_single_time_cube(set_time_mean=False) - expected = cftime.datetime(0, 0, 0, calendar=None, has_year_zero=True) - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual = pp_field.t2 - self.assertEqual(expected, actual) - - def test_lbft_no_forecast_time(self): - # Different pattern here: checking that lbft hasn't been changed from - # the default value. 
- cube = _get_single_time_cube() - mock_lbft = mock.sentinel.lbft - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - pp_field.lbft = mock_lbft - verify(cube, pp_field) - actual = pp_field.lbft - - assert mock_lbft is actual - - def test_lbtim_no_time_mean(self): - cube = _get_single_time_cube() - expected_ib = 0 - expected_ic = 2 # 360 day calendar - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual_ib = pp_field.lbtim.ib - actual_ic = pp_field.lbtim.ic - - self.assertEqual(expected_ib, actual_ib) - self.assertEqual(expected_ic, actual_ic) - - def test_lbtim_time_mean(self): - cube = _get_single_time_cube(set_time_mean=True) - expected_ib = 2 # Time mean - expected_ic = 2 # 360 day calendar - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual_ib = pp_field.lbtim.ib - actual_ic = pp_field.lbtim.ic - - self.assertEqual(expected_ib, actual_ib) - self.assertEqual(expected_ic, actual_ic) - - def test_lbproc_no_time_mean(self): - cube = _get_single_time_cube() - expected = 0 - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual = pp_field.lbproc - - self.assertEqual(expected, actual) - - def test_lbproc_time_mean(self): - cube = _get_single_time_cube(set_time_mean=True) - expected = 128 - - with mock.patch( - "iris.fileformats.pp.PPField3", autospec=True - ) as pp_field: - verify(cube, pp_field) - actual = pp_field.lbproc - - self.assertEqual(expected, actual) - - -def _get_single_time_cube(set_time_mean=False): - cube = stock.realistic_3d()[0:1, :, :] - cube.remove_coord("time") - cube.remove_coord("forecast_period") - tc = DimCoord( - points=[15], - standard_name="time", - units=cf_units.Unit("days since epoch", calendar="360_day"), - bounds=[[0, 30]], - ) - cube.add_dim_coord(tc, 0) - if set_time_mean: - cube.cell_methods = 
(CellMethod("mean", coords="time"),) - return cube - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py b/lib/iris/tests/unit/fileformats/pp/test_save_fields.py deleted file mode 100644 index fdd470cb47..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test_save_fields.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.save_fields` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -import iris.fileformats.pp as pp - - -def asave(afilehandle): - afilehandle.write("saved") - - -class TestSaveFields(tests.IrisTest): - def setUp(self): - # Create a test object to stand in for a real PPField. - self.pp_field = mock.MagicMock(spec=pp.PPField3) - # Add minimal content required by the pp.save operation. 
- self.pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN - self.pp_field.data = np.zeros((1, 1)) - self.pp_field.save = asave - - def test_save(self): - open_func = "builtins.open" - m = mock.mock_open() - with mock.patch(open_func, m, create=True): - pp.save_fields([self.pp_field], "foo.pp") - self.assertTrue(mock.call("foo.pp", "wb") in m.mock_calls) - self.assertTrue(mock.call().write("saved") in m.mock_calls) - - def test_save_append(self): - open_func = "builtins.open" - m = mock.mock_open() - with mock.patch(open_func, m, create=True): - pp.save_fields([self.pp_field], "foo.pp", append=True) - self.assertTrue(mock.call("foo.pp", "ab") in m.mock_calls) - self.assertTrue(mock.call().write("saved") in m.mock_calls) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py b/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py deleted file mode 100644 index cdd3c9cd49..0000000000 --- a/lib/iris/tests/unit/fileformats/pp/test_save_pairs_from_cube.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.pp.save_pairs_from_cube` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from iris.fileformats.pp import save_pairs_from_cube -import iris.tests.stock as stock - - -class TestSaveFields(tests.IrisTest): - def setUp(self): - self.cube = stock.realistic_3d() - - def test_cube_only(self): - slices_and_fields = save_pairs_from_cube(self.cube) - for aslice, field in slices_and_fields: - self.assertEqual(aslice.shape, (9, 11)) - self.assertEqual(field.lbcode, 101) - - def test_field_coords(self): - slices_and_fields = save_pairs_from_cube( - self.cube, field_coords=["grid_longitude", "grid_latitude"] - ) - for aslice, field in slices_and_fields: - self.assertEqual(aslice.shape, (11, 9)) - self.assertEqual(field.lbcode, 101) - - def test_lazy_data(self): - cube = self.cube.copy() - # "Rebase" the cube onto a lazy version of its data. - cube.data = cube.lazy_data() - # Check that lazy data is preserved in save-pairs generation. - slices_and_fields = save_pairs_from_cube(cube) - for aslice, _ in slices_and_fields: - self.assertTrue(aslice.has_lazy_data()) - - def test_default_bmdi(self): - slices_and_fields = save_pairs_from_cube(self.cube) - _, field = next(slices_and_fields) - self.assertEqual(field.bmdi, -1e30) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py b/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py deleted file mode 100644 index 70d28f7c09..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.fileformats.pp_load_rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py deleted file mode 100644 index 62eb7ff019..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__all_other_rules.py +++ /dev/null @@ -1,307 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the `iris.fileformats.pp_load_rules._all_other_rules` function. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from cf_units import CALENDAR_360_DAY, Unit -from cftime import datetime as nc_datetime -import numpy as np - -from iris.coords import AuxCoord, CellMethod, DimCoord -from iris.fileformats.pp import SplittableInt -from iris.fileformats.pp_load_rules import _all_other_rules -from iris.tests.unit.fileformats import TestField - -# iris.fileformats.pp_load_rules._all_other_rules() returns a tuple of -# of various metadata. This constant is the index into this -# tuple to obtain the cell methods. -CELL_METHODS_INDEX = 5 -DIM_COORDS_INDEX = 6 -AUX_COORDS_INDEX = 7 - - -class TestCellMethods(tests.IrisTest): - def test_time_mean(self): - # lbproc = 128 -> mean - # lbtim.ib = 2 -> simple t1 to t2 interval. 
- field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=0, ib=2, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("mean", "time")] - self.assertEqual(res, expected) - - def test_hourly_mean(self): - # lbtim.ia = 1 -> hourly - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=1, ib=2, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("mean", "time", "1 hour")] - self.assertEqual(res, expected) - - def test_daily_mean(self): - # lbtim.ia = 24 -> daily - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=24, ib=2, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("mean", "time", "24 hour")] - self.assertEqual(res, expected) - - def test_custom_max(self): - field = mock.MagicMock(lbproc=8192, lbtim=mock.Mock(ia=47, ib=2, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("maximum", "time", "47 hour")] - self.assertEqual(res, expected) - - def test_daily_min(self): - # lbproc = 4096 -> min - field = mock.MagicMock(lbproc=4096, lbtim=mock.Mock(ia=24, ib=2, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("minimum", "time", "24 hour")] - self.assertEqual(res, expected) - - def test_time_mean_over_multiple_years(self): - # lbtim.ib = 3 -> interval within a year, over multiple years. 
- field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=0, ib=3, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [ - CellMethod("mean within years", "time"), - CellMethod("mean over years", "time"), - ] - self.assertEqual(res, expected) - - def test_hourly_mean_over_multiple_years(self): - field = mock.MagicMock(lbproc=128, lbtim=mock.Mock(ia=1, ib=3, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [ - CellMethod("mean within years", "time", "1 hour"), - CellMethod("mean over years", "time"), - ] - self.assertEqual(res, expected) - - def test_climatology_max(self): - field = mock.MagicMock(lbproc=8192, lbtim=mock.Mock(ia=24, ib=3, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("maximum", "time")] - self.assertEqual(res, expected) - - def test_climatology_min(self): - field = mock.MagicMock(lbproc=4096, lbtim=mock.Mock(ia=24, ib=3, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("minimum", "time")] - self.assertEqual(res, expected) - - def test_other_lbtim_ib(self): - # lbtim.ib = 5 -> non-specific aggregation - field = mock.MagicMock(lbproc=4096, lbtim=mock.Mock(ia=24, ib=5, ic=3)) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [CellMethod("minimum", "time")] - self.assertEqual(res, expected) - - def test_multiple_unordered_lbprocs(self): - field = mock.MagicMock( - lbproc=192, - bzx=0, - bdx=1, - lbnpt=3, - lbrow=3, - lbtim=mock.Mock(ia=24, ib=5, ic=3), - lbcode=SplittableInt(1), - x_bounds=None, - _x_coord_name=lambda: "longitude", - _y_coord_name=lambda: "latitude", - ) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [ - CellMethod("mean", "time"), - CellMethod("mean", "longitude"), - ] - self.assertEqual(res, expected) - - def test_multiple_unordered_rotated_lbprocs(self): - field = mock.MagicMock( - lbproc=192, - bzx=0, - bdx=1, - lbnpt=3, - lbrow=3, - lbtim=mock.Mock(ia=24, ib=5, ic=3), - 
lbcode=SplittableInt(101), - x_bounds=None, - _x_coord_name=lambda: "grid_longitude", - _y_coord_name=lambda: "grid_latitude", - ) - res = _all_other_rules(field)[CELL_METHODS_INDEX] - expected = [ - CellMethod("mean", "time"), - CellMethod("mean", "grid_longitude"), - ] - self.assertEqual(res, expected) - - -class TestCrossSectionalTime(TestField): - def test_lbcode3x23(self): - time_bounds = np.array( - [[0.875, 1.125], [1.125, 1.375], [1.375, 1.625], [1.625, 1.875]] - ) - field = mock.MagicMock( - lbproc=0, - bzx=0, - bdx=0, - lbnpt=3, - lbrow=4, - t1=nc_datetime(2000, 1, 2, hour=0, minute=0, second=0), - t2=nc_datetime(2000, 1, 3, hour=0, minute=0, second=0), - lbtim=mock.Mock(ia=1, ib=2, ic=2), - lbcode=SplittableInt( - 31323, {"iy": slice(0, 2), "ix": slice(2, 4)} - ), - x_bounds=None, - y_bounds=time_bounds, - _x_coord_name=lambda: "longitude", - _y_coord_name=lambda: "latitude", - ) - - spec = [ - "lbtim", - "lbcode", - "lbrow", - "lbnpt", - "lbproc", - "lbsrce", - "lbuser", - "bzx", - "bdx", - "bdy", - "bmdi", - "t1", - "t2", - "stash", - "x_bounds", - "y_bounds", - "_x_coord_name", - "_y_coord_name", - ] - field.mock_add_spec(spec) - res = _all_other_rules(field)[DIM_COORDS_INDEX] - - expected_time_points = np.array([1, 1.25, 1.5, 1.75]) + (2000 * 360) - expected_unit = Unit( - "days since 0000-01-01 00:00:00", calendar=CALENDAR_360_DAY - ) - expected = [ - ( - DimCoord( - expected_time_points, - standard_name="time", - units=expected_unit, - bounds=time_bounds, - ), - 0, - ) - ] - self.assertCoordsAndDimsListsMatch(res, expected) - - -class TestLBTIMx2x_ZeroYears(TestField): - - _spec = [ - "lbtim", - "lbcode", - "lbrow", - "lbnpt", - "lbproc", - "lbsrce", - "lbhem", - "lbuser", - "bzx", - "bdx", - "bdy", - "bmdi", - "t1", - "t2", - "stash", - "x_bounds", - "y_bounds", - "_x_coord_name", - "_y_coord_name", - ] - - def _make_field( - self, - lbyr=0, - lbyrd=0, - lbmon=3, - lbmond=3, - lbft=0, - bdx=1, - bdy=1, - bmdi=0, - ia=0, - ib=2, - ic=1, - 
lbcode=SplittableInt(3), - ): - return mock.MagicMock( - lbyr=lbyr, - lbyrd=lbyrd, - lbmon=lbmon, - lbmond=lbmond, - lbft=lbft, - bdx=bdx, - bdy=bdy, - bmdi=bmdi, - lbtim=mock.Mock(ia=ia, ib=ib, ic=ic), - lbcode=lbcode, - ) - - def test_month_coord(self): - field = self._make_field() - field.mock_add_spec(self._spec) - res = _all_other_rules(field)[AUX_COORDS_INDEX] - - expected = [ - (AuxCoord(3, long_name="month_number", units="1"), None), - (AuxCoord("Mar", long_name="month", units=Unit("no unit")), None), - ( - DimCoord( - points=0, - standard_name="forecast_period", - units=Unit("hours"), - ), - None, - ), - ] - self.assertCoordsAndDimsListsMatch(res, expected) - - def test_diff_month(self): - field = self._make_field(lbmon=3, lbmond=4) - field.mock_add_spec(self._spec) - res = _all_other_rules(field)[AUX_COORDS_INDEX] - - self.assertCoordsAndDimsListsMatch(res, []) - - def test_nonzero_year(self): - field = self._make_field(lbyr=1) - field.mock_add_spec(self._spec) - res = _all_other_rules(field)[AUX_COORDS_INDEX] - - self.assertCoordsAndDimsListsMatch(res, []) - - def test_nonzero_yeard(self): - field = self._make_field(lbyrd=1) - field.mock_add_spec(self._spec) - res = _all_other_rules(field)[AUX_COORDS_INDEX] - - self.assertCoordsAndDimsListsMatch(res, []) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py deleted file mode 100644 index 0f2a8a2d4b..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__collapse_degenerate_points_and_bounds.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._collapse_degenerate_points_and_bounds`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats.pp_load_rules import ( - _collapse_degenerate_points_and_bounds, -) - - -class Test(tests.IrisTest): - def test_scalar(self): - array = np.array(1) - points, bounds = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(points, array) - self.assertIsNone(bounds) - - def test_1d_nochange(self): - array = np.array([1, 1, 3]) - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, array) - - def test_1d_collapse(self): - array = np.array([1, 1, 1]) - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([1])) - - def test_2d_nochange(self): - array = np.array([[1, 2, 3], [4, 5, 6]]) - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, array) - - def test_2d_collapse_dim0(self): - array = np.array([[1, 2, 3], [1, 2, 3]]) - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[1, 2, 3]])) - - def test_2d_collapse_dim1(self): - array = np.array([[1, 1, 1], [2, 2, 2]]) - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[1], [2]])) - - def test_2d_collapse_both(self): - array = np.array([[3, 3, 3], [3, 3, 3]]) - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[3]])) - - def test_3d(self): - array = np.array([[[3, 3, 3], [4, 4, 4]], [[3, 3, 3], [4, 4, 4]]]) - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertArrayEqual(result, np.array([[[3], [4]]])) - - def test_multiple_odd_dims(self): - # Test to ensure multiple collapsed dimensions don't interfere. 
- # make a 5-D array where dimensions 0, 2 and 3 are degenerate. - array = np.arange(3 ** 5).reshape([3] * 5) - array[1:] = array[0:1] - array[:, :, 1:] = array[:, :, 0:1] - array[:, :, :, 1:] = array[:, :, :, 0:1] - result, _ = _collapse_degenerate_points_and_bounds(array) - self.assertEqual(array.shape, (3, 3, 3, 3, 3)) - self.assertEqual(result.shape, (1, 3, 1, 1, 3)) - self.assertTrue(np.all(result == array[0:1, :, 0:1, 0:1, :])) - - def test_bounds_collapse(self): - points = np.array([1, 1, 1]) - bounds = np.array([[0, 1], [0, 1], [0, 1]]) - result_pts, result_bds = _collapse_degenerate_points_and_bounds( - points, bounds - ) - self.assertArrayEqual(result_pts, np.array([1])) - self.assertArrayEqual(result_bds, np.array([[0, 1]])) - - def test_bounds_no_collapse(self): - points = np.array([1, 1, 1]) - bounds = np.array([[0, 1], [0, 1], [0, 2]]) - result_pts, result_bds = _collapse_degenerate_points_and_bounds( - points, bounds - ) - self.assertArrayEqual(result_pts, points) - self.assertArrayEqual(result_bds, bounds) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py deleted file mode 100644 index d3046ee63e..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_pseudo_level_coords.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._convert_pseudo_level_coords`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from iris.coords import DimCoord -from iris.fileformats.pp_load_rules import _convert_scalar_pseudo_level_coords -from iris.tests.unit.fileformats import TestField - - -class Test(TestField): - def test_valid(self): - coords_and_dims = _convert_scalar_pseudo_level_coords(lbuser5=21) - self.assertEqual( - coords_and_dims, - [(DimCoord([21], long_name="pseudo_level", units="1"), None)], - ) - - def test_missing_indicator(self): - coords_and_dims = _convert_scalar_pseudo_level_coords(lbuser5=0) - self.assertEqual(coords_and_dims, []) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py deleted file mode 100644 index 759a399dad..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_scalar_realization_coords.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._convert_scalar_realization_coords`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from iris.coords import DimCoord -from iris.fileformats.pp_load_rules import _convert_scalar_realization_coords -from iris.tests.unit.fileformats import TestField - - -class Test(TestField): - def test_valid(self): - coords_and_dims = _convert_scalar_realization_coords(lbrsvd4=21) - self.assertEqual( - coords_and_dims, - [(DimCoord([21], standard_name="realization", units="1"), None)], - ) - - def test_missing_indicator(self): - coords_and_dims = _convert_scalar_realization_coords(lbrsvd4=0) - self.assertEqual(coords_and_dims, []) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py deleted file mode 100644 index d975884cb0..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_time_coords.py +++ /dev/null @@ -1,795 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._convert_time_coords`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import unittest - -from cf_units import CALENDAR_360_DAY, CALENDAR_GREGORIAN, Unit -from cftime import datetime as nc_datetime -import numpy as np - -from iris.coords import AuxCoord, DimCoord -from iris.fileformats.pp import SplittableInt -from iris.fileformats.pp_load_rules import _convert_time_coords -from iris.tests.unit.fileformats import TestField - - -def _lbtim(ia=0, ib=0, ic=0): - return SplittableInt(ic + 10 * (ib + 10 * ia), {"ia": 2, "ib": 1, "ic": 0}) - - -def _lbcode(value=None, ix=None, iy=None): - if value is not None: - result = SplittableInt(value, {"iy": slice(0, 2), "ix": slice(2, 4)}) - else: - # N.B. if 'value' is None, both ix and iy must be set. - result = SplittableInt( - 10000 + 100 * ix + iy, {"iy": slice(0, 2), "ix": slice(2, 4)} - ) - return result - - -_EPOCH_HOURS_UNIT = Unit("hours since epoch", calendar=CALENDAR_GREGORIAN) -_HOURS_UNIT = Unit("hours") - - -class TestLBTIMx0x_SingleTimepoint(TestField): - def _check_timepoint(self, lbcode, expect_match=True): - lbtim = _lbtim(ib=0, ic=1) - t1 = nc_datetime(1970, 1, 1, hour=6, minute=0, second=0) - t2 = nc_datetime( - 0, 0, 0, calendar=None, has_year_zero=True - ) # not used in result - lbft = None # unused - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - ) - if expect_match: - expect_result = [ - ( - DimCoord( - 24 * 0.25, - standard_name="time", - units=_EPOCH_HOURS_UNIT, - ), - None, - ) - ] - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - - def test_normal_xy_dims(self): - self._check_timepoint(_lbcode(1)) - - def test_non_time_cross_section(self): - self._check_timepoint(_lbcode(ix=1, iy=2)) - - def test_time_cross_section(self): - self._check_timepoint(_lbcode(ix=1, iy=20), expect_match=False) - - -class TestLBTIMx1x_Forecast(TestField): - def _check_forecast(self, lbcode, 
expect_match=True): - lbtim = _lbtim(ib=1, ic=1) - # Validity time - t1 = nc_datetime(1970, 1, 10, hour=6, minute=0, second=0) - # Forecast time - t2 = nc_datetime(1970, 1, 9, hour=3, minute=0, second=0) - lbft = None # unused - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - ) - if expect_match: - expect_result = [ - ( - DimCoord( - 24 * 1.125, - standard_name="forecast_period", - units="hours", - ), - None, - ), - ( - DimCoord( - 24 * 9.25, - standard_name="time", - units=_EPOCH_HOURS_UNIT, - ), - None, - ), - ( - DimCoord( - 24 * 8.125, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ), - None, - ), - ] - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - - def test_normal_xy(self): - self._check_forecast(_lbcode(1)) - - def test_non_time_cross_section(self): - self._check_forecast(_lbcode(ix=1, iy=2)) - - def test_time_cross_section(self): - self._check_forecast(_lbcode(ix=1, iy=20), expect_match=False) - - def test_exact_hours(self): - lbtim = _lbtim(ib=1, ic=1) - t1 = nc_datetime(2015, 1, 20, hour=7, minute=0, second=0) - t2 = nc_datetime(2015, 1, 20, hour=0, minute=0, second=0) - coords_and_dims = _convert_time_coords( - lbcode=_lbcode(1), - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=None, - ) - (fp, _), (t, _), (frt, _) = coords_and_dims - # These should both be exact whole numbers. 
- self.assertEqual(fp.points[0], 7) - self.assertEqual(t.points[0], 394927) - - def test_not_exact_hours(self): - lbtim = _lbtim(ib=1, ic=1) - t1 = nc_datetime(2015, 1, 20, hour=7, minute=10, second=0) - t2 = nc_datetime(2015, 1, 20, hour=0, minute=0, second=0) - coords_and_dims = _convert_time_coords( - lbcode=_lbcode(1), - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=None, - ) - (fp, _), (t, _), (frt, _) = coords_and_dims - self.assertArrayAllClose(fp.points[0], 7.1666666, atol=0.0001, rtol=0) - self.assertArrayAllClose(t.points[0], 394927.166666, atol=0.01, rtol=0) - - -class TestLBTIMx2x_TimePeriod(TestField): - def _check_period(self, lbcode, expect_match=True): - lbtim = _lbtim(ib=2, ic=1) - # Start time - t1 = nc_datetime(1970, 1, 9, hour=3, minute=0, second=0) - # End time - t2 = nc_datetime(1970, 1, 10, hour=3, minute=0, second=0) - lbft = 2.0 # sample period - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - ) - if expect_match: - expect_result = [ - ( - DimCoord( - 24 * 9.125 - 2.0, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ), - None, - ), - ( - DimCoord( - standard_name="forecast_period", - units="hours", - points=[-10.0], - bounds=[-22.0, 2.0], - ), - None, - ), - ( - DimCoord( - standard_name="time", - units=_EPOCH_HOURS_UNIT, - points=[24 * 8.625], - bounds=[24 * 8.125, 24 * 9.125], - ), - None, - ), - ] - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - - def test_normal_xy(self): - self._check_period(_lbcode(1)) - - def test_non_time_cross_section(self): - self._check_period(_lbcode(ix=1, iy=2)) - - def test_time_cross_section(self): - self._check_period(_lbcode(ix=1, iy=20), expect_match=False) - - -class TestLBTIMx3x_YearlyAggregation(TestField): - def _check_yearly(self, lbcode, expect_match=True): - lbtim = _lbtim(ib=3, ic=1) - # Start time - 
t1 = nc_datetime(1970, 1, 9, hour=9, minute=0, second=0) - # End time - t2 = nc_datetime(1972, 1, 11, hour=9, minute=0, second=0) - lbft = 3.0 # sample period - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - ) - if expect_match: - t1_hours = 24 * 8.375 - t2_hours = 24 * (10.375 + 2 * 365) - period_hours = 24.0 * (2 * 365 + 2) - expect_result = [ - ( - DimCoord( - [t2_hours - lbft], - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ), - None, - ), - ( - DimCoord( - standard_name="forecast_period", - units="hours", - points=[lbft], - bounds=[lbft - period_hours, lbft], - ), - None, - ), - ( - DimCoord( - standard_name="time", - units=_EPOCH_HOURS_UNIT, - points=[t2_hours], - bounds=[t1_hours, t2_hours], - ), - None, - ), - ] - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - - def test_normal_xy(self): - self._check_yearly(_lbcode(1)) - - def test_non_time_cross_section(self): - self._check_yearly(_lbcode(ix=1, iy=2)) - - def test_time_cross_section(self): - self._check_yearly(_lbcode(ix=1, iy=20), expect_match=False) - - -class TestLBTIMx2x_ZeroYear(TestField): - def test_(self): - lbtim = _lbtim(ib=2, ic=1) - t1 = nc_datetime(0, 1, 1, has_year_zero=True) - t2 = nc_datetime(0, 1, 31, 23, 59, 00, has_year_zero=True) - lbft = 0 - lbcode = _lbcode(1) - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - ) - self.assertEqual(coords_and_dims, []) - - -class TestLBTIMxxx_Unhandled(TestField): - def test_unrecognised(self): - lbtim = _lbtim(ib=4, ic=1) - t1 = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) - t2 = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) - lbft = None - lbcode = _lbcode(0) - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - 
epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - ) - self.assertEqual(coords_and_dims, []) - - -class TestLBCODE3xx(TestField): - def test(self): - lbcode = _lbcode(value=31323) - lbtim = _lbtim(ib=2, ic=2) - calendar = CALENDAR_360_DAY - t1 = nc_datetime( - 1970, 1, 3, hour=0, minute=0, second=0, calendar=calendar - ) - t2 = nc_datetime( - 1970, 1, 4, hour=0, minute=0, second=0, calendar=calendar - ) - lbft = 24 * 4 - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - ) - t2_hours = 24 * 3 - expected_result = [ - ( - DimCoord( - [t2_hours - lbft], - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ), - None, - ) - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected_result) - - -class TestArrayInputWithLBTIM_0_0_1(TestField): - def test_t1_list(self): - # lbtim ia = 0, ib = 0, ic = 1 - # with a series of times (t1). - lbcode = _lbcode(1) - lbtim = _lbtim(ia=0, ib=0, ic=1) - hours = np.array([0, 3, 6, 9, 12]) - # Validity time - vector of different values - t1 = [nc_datetime(1970, 1, 9, hour=3 + hour) for hour in hours] - t1_dims = (0,) - # Forecast reference time - scalar (not used) - t2 = nc_datetime(1970, 1, 9, hour=3) - lbft = None - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t1_dims=t1_dims, - ) - - # Expected coords. - time_coord = DimCoord( - (24 * 8) + 3 + hours, standard_name="time", units=_EPOCH_HOURS_UNIT - ) - expected = [(time_coord, (0,))] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - -class TestArrayInputWithLBTIM_0_1_1(TestField): - def test_t1_list_t2_scalar(self): - # lbtim ia = 0, ib = 1, ic = 1 - # with a single forecast reference time (t2) and a series - # of validity times (t1). 
- lbcode = _lbcode(1) - lbtim = _lbtim(ia=0, ib=1, ic=1) - forecast_period_in_hours = np.array([0, 3, 6, 9, 12]) - # Validity time - vector of different values - t1 = [ - nc_datetime(1970, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] - t1_dims = (0,) - # Forecast reference time - scalar - t2 = nc_datetime(1970, 1, 9, hour=3) - lbft = None # Not used. - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t1_dims=t1_dims, - ) - - # Expected coords. - fp_coord = DimCoord( - forecast_period_in_hours, - standard_name="forecast_period", - units="hours", - ) - time_coord = DimCoord( - (24 * 8) + 3 + forecast_period_in_hours, - standard_name="time", - units=_EPOCH_HOURS_UNIT, - ) - fref_time_coord = DimCoord( - (24 * 8) + 3, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ) - expected = [ - (fp_coord, (0,)), - (time_coord, (0,)), - (fref_time_coord, None), - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - def test_t1_and_t2_list(self): - # lbtim ia = 0, ib = 1, ic = 1 - # with a single repeated forecast reference time (t2) and a series - # of validity times (t1). - lbcode = _lbcode(1) - lbtim = _lbtim(ia=0, ib=1, ic=1) - forecast_period_in_hours = np.array([0, 3, 6, 9, 12]) - # Validity time - vector of different values - t1 = [ - nc_datetime(1970, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] - t1_dims = (0,) - # Forecast reference time - vector of same values - t2 = [ - nc_datetime(1970, 1, 9, hour=3) for _ in forecast_period_in_hours - ] - t2_dims = (0,) - lbft = None # Not used. - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t1_dims=t1_dims, - t2_dims=t2_dims, - ) - - # Expected coords. 
- fp_coord = DimCoord( - forecast_period_in_hours, - standard_name="forecast_period", - units="hours", - ) - time_coord = DimCoord( - (24 * 8) + 3 + forecast_period_in_hours, - standard_name="time", - units=_EPOCH_HOURS_UNIT, - ) - fref_time_coord = DimCoord( - (24 * 8) + 3, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ) - expected = [ - (fp_coord, (0,)), - (time_coord, (0,)), - (fref_time_coord, None), - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - def test_t1_and_t2_orthogonal_lists(self): - # lbtim ia = 0, ib = 1, ic = 1 - # with a single repeated forecast reference time (t2) and a series - # of validity times (t1). - lbcode = _lbcode(1) - lbtim = _lbtim(ia=0, ib=1, ic=1) - years = np.array([1970, 1971, 1972]) - hours = np.array([3, 6, 9, 12]) - # Validity time - vector of different values - t1 = [nc_datetime(year, 1, 9, hour=12) for year in years] - t1_dims = (0,) - # Forecast reference time - vector of different values - t2 = [nc_datetime(1970, 1, 9, hour=hour) for hour in hours] - t2_dims = (1,) - lbft = None # Not used. - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t1_dims=t1_dims, - t2_dims=t2_dims, - ) - - # Expected coords. - points = [ - [(year - 1970) * 365 * 24 + 12 - hour for hour in hours] - for year in years - ] - fp_coord = AuxCoord( - points, standard_name="forecast_period", units="hours" - ) - points = (years - 1970) * 24 * 365 + (24 * 8) + 12 - time_coord = DimCoord( - points, standard_name="time", units=_EPOCH_HOURS_UNIT - ) - points = (24 * 8) + hours - fref_time_coord = DimCoord( - points, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ) - expected = [ - (fp_coord, (0, 1)), # Spans dims 0 and 1. 
- (time_coord, (0,)), - (fref_time_coord, (1,)), - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - def test_t1_multi_dim_list_t2_scalar(self): - # Another case of lbtim ia = 0, ib = 1, ic = 1 but - # with a changing forecast reference time (t2) and - # validity time (t1). - lbcode = _lbcode(1) - lbtim = _lbtim(ia=0, ib=1, ic=1) - forecast_period_in_hours = np.array([0, 3, 6, 9, 12]) - years = np.array([1970, 1971, 1972]) - # Validity time - 2d array of different values - t1 = [ - [ - nc_datetime(year, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] - for year in years - ] - t1_dims = (0, 1) - # Forecast reference time - vector of different values - t2 = nc_datetime(1970, 1, 9, hour=3) - lbft = None # Not used. - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t1_dims=t1_dims, - ) - - # Expected coords. - fp_coord = AuxCoord( - [ - forecast_period_in_hours + (year - 1970) * 365 * 24 - for year in years - ], - standard_name="forecast_period", - units="hours", - ) - time_coord = AuxCoord( - [ - (24 * 8) - + 3 - + forecast_period_in_hours - + (year - 1970) * 365 * 24 - for year in years - ], - standard_name="time", - units=_EPOCH_HOURS_UNIT, - ) - fref_time_coord = DimCoord( - (24 * 8) + 3, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ) - expected = [ - (fp_coord, (0, 1)), - (time_coord, (0, 1)), - (fref_time_coord, None), - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - def test_t1_and_t2_nparrays(self): - # lbtim ia = 0, ib = 1, ic = 1 - # with a single repeated forecast reference time (t2) and a series - # of validity times (t1). 
- lbcode = _lbcode(1) - lbtim = _lbtim(ia=0, ib=1, ic=1) - forecast_period_in_hours = np.array([0, 3, 6, 9, 12]) - # Validity time - vector of different values - t1 = np.array( - [ - nc_datetime(1970, 1, 9, hour=(3 + fp)) - for fp in forecast_period_in_hours - ] - ) - t1_dims = (0,) - # Forecast reference time - vector of same values - t2 = np.array( - [nc_datetime(1970, 1, 9, hour=3) for _ in forecast_period_in_hours] - ) - t2_dims = (0,) - lbft = None # Not used. - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t1_dims=t1_dims, - t2_dims=t2_dims, - ) - - # Expected coords. - fp_coord = DimCoord( - forecast_period_in_hours, - standard_name="forecast_period", - units="hours", - ) - time_coord = DimCoord( - (24 * 8) + 3 + forecast_period_in_hours, - standard_name="time", - units=_EPOCH_HOURS_UNIT, - ) - fref_time_coord = DimCoord( - (24 * 8) + 3, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ) - expected = [ - (fp_coord, (0,)), - (time_coord, (0,)), - (fref_time_coord, None), - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - -class TestArrayInputWithLBTIM_0_2_1(TestField): - def test_t1_list_t2_scalar(self): - lbtim = _lbtim(ib=2, ic=1) - lbcode = _lbcode(1) - hours = np.array([0, 3, 6, 9]) - # Start times - vector - t1 = [nc_datetime(1970, 1, 9, hour=9 + hour) for hour in hours] - t1_dims = (0,) - # End time - scalar - t2 = nc_datetime(1970, 1, 11, hour=9) - lbft = 3.0 # Sample period - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t1_dims=t1_dims, - ) - - # Expected coords. 
- points = lbft - (48 - hours) / 2.0 - bounds = np.array( - [lbft - (48 - hours), np.ones_like(hours) * lbft] - ).transpose() - fp_coord = AuxCoord( - points, - standard_name="forecast_period", - units="hours", - bounds=bounds, - ) - points = 9 * 24 + 9 + (hours / 2.0) - bounds = np.array( - [8 * 24 + 9 + hours, np.ones_like(hours) * 10 * 24 + 9] - ).transpose() - time_coord = AuxCoord( - points, - standard_name="time", - units=_EPOCH_HOURS_UNIT, - bounds=bounds, - ) - points = 10 * 24 + 9 - lbft - fref_time_coord = DimCoord( - points, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ) - expected = [ - (fp_coord, (0,)), - (time_coord, (0,)), - (fref_time_coord, None), - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - -class TestArrayInputWithLBTIM_0_3_1(TestField): - @unittest.skip("#3508 investigate unit test failure") - def test_t1_scalar_t2_list(self): - lbtim = _lbtim(ib=3, ic=1) - lbcode = _lbcode(1) - years = np.array([1972, 1973, 1974]) - # Start times - scalar - t1 = nc_datetime(1970, 1, 9, hour=9) - # End time - vector - t2 = [nc_datetime(year, 1, 11, hour=9) for year in years] - t2_dims = (0,) - lbft = 3.0 # Sample period - - coords_and_dims = _convert_time_coords( - lbcode=lbcode, - lbtim=lbtim, - epoch_hours_unit=_EPOCH_HOURS_UNIT, - t1=t1, - t2=t2, - lbft=lbft, - t2_dims=t2_dims, - ) - - # Expected coords. - points = np.ones_like(years) * lbft - bounds = np.array( - [lbft - ((years - 1970) * 365 * 24 + 2 * 24), points] - ).transpose() - fp_coord = AuxCoord( - points, - standard_name="forecast_period", - units="hours", - bounds=bounds, - ) - points = (years - 1970) * 365 * 24 + 10 * 24 + 9 - bounds = np.array( - [np.ones_like(points) * (8 * 24 + 9), points] - ).transpose() - # The time coordinate is an AuxCoord as the lower bound for each - # cell is the same so it does not meet the monotonicity requirement. 
- time_coord = AuxCoord( - points, - standard_name="time", - units=_EPOCH_HOURS_UNIT, - bounds=bounds, - ) - fref_time_coord = DimCoord( - points - lbft, - standard_name="forecast_reference_time", - units=_EPOCH_HOURS_UNIT, - ) - expected = [ - (fp_coord, (0,)), - (time_coord, (0,)), - (fref_time_coord, (0,)), - ] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py deleted file mode 100644 index 47552a646a..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__convert_vertical_coords.py +++ /dev/null @@ -1,815 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._convert_vertical_coords`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.coords import AuxCoord, DimCoord -from iris.fileformats.pp import STASH, SplittableInt -from iris.fileformats.pp_load_rules import Reference, _convert_vertical_coords -from iris.tests.unit.fileformats import TestField - - -def _lbcode(value=None, ix=None, iy=None): - if value is not None: - result = SplittableInt(value, {"iy": slice(0, 2), "ix": slice(2, 4)}) - else: - # N.B. if 'value' is None, both ix and iy must be set. 
- result = SplittableInt( - 10000 + 100 * ix + iy, {"iy": slice(0, 2), "ix": slice(2, 4)} - ) - return result - - -class TestLBVC001_Height(TestField): - def _check_height( - self, - blev, - stash, - expect_normal=True, - expect_fixed_height=None, - dim=None, - ): - lbvc = 1 - lbcode = _lbcode(0) # effectively unused in this case - lblev, bhlev, bhrlev, brsvd1, brsvd2, brlev = ( - None, - None, - None, - None, - None, - None, - ) - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - if expect_normal: - expect_result = [ - ( - DimCoord( - blev, - standard_name="height", - units="m", - attributes={"positive": "up"}, - ), - dim, - ) - ] - elif expect_fixed_height: - expect_result = [ - ( - DimCoord( - expect_fixed_height, - standard_name="height", - units="m", - attributes={"positive": "up"}, - ), - None, - ) - ] - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) - - def test_normal_height__present(self): - self._check_height(blev=12.3, stash=STASH(1, 1, 1)) - - def test_normal_height__present_vector(self): - data = [12.3, 123.4, 1234.5] - dim = 0 - for blev in [data, np.asarray(data)]: - for dim_i in [dim, (dim,)]: - self._check_height(blev=blev, stash=STASH(1, 1, 1), dim=dim_i) - - def test_normal_height__absent(self): - self._check_height(blev=-1, stash=STASH(1, 1, 1), expect_normal=False) - - def test_normal_height__absent_vector(self): - data = [-1, -1, -1] - dim = 1 - for blev in [data, np.asarray(data)]: - for dim_i in [dim, (dim,)]: - self._check_height( - blev=blev, - stash=STASH(1, 1, 1), - expect_normal=False, - dim=dim_i, - ) - - def test_normal_height__absent_mixed_vector(self): - data = [-1, 12.3, -1, 123.4] - dim = 2 - for blev in [data, np.asarray(data)]: - for dim_i in [dim, (dim,)]: - 
self._check_height( - blev=blev, - stash=STASH(1, 1, 1), - expect_normal=False, - dim=dim_i, - ) - - def test_implied_height_1m5(self): - self._check_height( - blev=75.2, - stash=STASH(1, 3, 236), - expect_normal=False, - expect_fixed_height=1.5, - ) - - def test_implied_height_1m5__vector(self): - data = [1, 2, 3, 4] - dim = 3 - for blev in [data, np.asarray(data)]: - for dim_i in [dim, (dim,)]: - self._check_height( - blev=blev, - stash=STASH(1, 3, 236), - expect_normal=False, - expect_fixed_height=1.5, - dim=dim_i, - ) - - def test_implied_height_10m(self): - self._check_height( - blev=75.2, - stash=STASH(1, 3, 225), - expect_normal=False, - expect_fixed_height=10.0, - ) - - def test_implied_height_10m__vector(self): - data = list(range(10)) - dim = 4 - for blev in [data, np.asarray(data)]: - for dim_i in [dim, (dim,)]: - self._check_height( - blev=blev, - stash=STASH(1, 3, 225), - expect_normal=False, - expect_fixed_height=10.0, - dim=dim_i, - ) - - -class TestLBVC002_Depth(TestField): - def _check_depth( - self, - lbcode, - lblev=23.0, - blev=123.4, - brlev=0.0, - brsvd1=0.0, - expect_bounds=True, - expect_match=True, - expect_mixed=False, - dim=None, - ): - lbvc = 2 - stash = STASH(1, 1, 1) - bhlev, bhrlev, brsvd2 = None, None, None - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - if expect_match: - expect_result = [ - ( - DimCoord( - lblev, - standard_name="model_level_number", - attributes={"positive": "down"}, - units="1", - ), - dim, - ) - ] - if expect_bounds: - brsvd1 = np.atleast_1d(brsvd1) - brlev = np.atleast_1d(brlev) - if expect_mixed: - lower = np.where(brsvd1 == brlev, blev, brsvd1) - upper = np.where(brsvd1 == brlev, blev, brlev) - else: - lower, upper = brsvd1, brlev - bounds = np.vstack((lower, upper)).T - expect_result.append( - ( - DimCoord( - blev, - 
standard_name="depth", - units="m", - bounds=bounds, - attributes={"positive": "down"}, - ), - dim, - ) - ) - else: - expect_result.append( - ( - DimCoord( - blev, - standard_name="depth", - units="m", - attributes={"positive": "down"}, - ), - dim, - ) - ) - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) - - def test_unbounded(self): - self._check_depth(_lbcode(1), lblev=23.0, expect_bounds=False) - - def test_unbounded__vector(self): - lblev = [1, 2, 3] - blev = [10, 20, 30] - brsvd1 = [5, 15, 25] - brlev = [5, 15, 25] - self._check_depth( - _lbcode(1), - lblev=lblev, - blev=blev, - brsvd1=brsvd1, - brlev=brlev, - expect_bounds=False, - dim=1, - ) - - def test_unbounded__vector_no_depth(self): - lblev = [1, 2, 3] - blev = [10, 20, 30] - brsvd1 = [5, 15, 25] - brlev = [5, 15, 666] # not all equal or all unequal! - self._check_depth( - _lbcode(1), - lblev=lblev, - blev=blev, - brsvd1=brsvd1, - brlev=brlev, - expect_mixed=True, - dim=0, - ) - - def test_bounded(self): - self._check_depth( - _lbcode(1), lblev=23.0, brlev=22.5, brsvd1=23.5, expect_bounds=True - ) - - def test_bounded__vector(self): - lblev = [1, 2, 3] - blev = [10, 20, 30] - brsvd1 = [5, 15, 25] - brlev = [15, 25, 35] - self._check_depth( - _lbcode(1), - lblev=lblev, - blev=blev, - brsvd1=brsvd1, - brlev=brlev, - expect_bounds=True, - dim=1, - ) - - def test_cross_section(self): - self._check_depth(_lbcode(ix=1, iy=2), lblev=23.0, expect_match=False) - - def test_cross_section__vector(self): - lblev = [1, 2, 3] - blev = [10, 20, 30] - brsvd1 = [5, 15, 25] - brlev = [15, 25, 35] - self._check_depth( - _lbcode(ix=1, iy=2), - lblev=lblev, - blev=blev, - brsvd1=brsvd1, - brlev=brlev, - expect_match=False, - dim=1, - ) - - -class TestLBVC006_SoilLevel(TestField): - def _check_soil_level( - self, lbcode, lblev=12.3, expect_match=True, dim=None - ): - lbvc = 6 - stash = STASH(1, 1, 1) - brsvd1, brlev = 0, 0 - if hasattr(lblev, 
"__iter__"): - brsvd1 = [0] * len(lblev) - brlev = [0] * len(lblev) - blev, bhlev, bhrlev, brsvd2 = None, None, None, None - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - expect_result = [] - if expect_match: - coord = DimCoord( - lblev, - long_name="soil_model_level_number", - attributes={"positive": "down"}, - units="1", - ) - expect_result = [(coord, dim)] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) - - def test_normal(self): - self._check_soil_level(_lbcode(0)) - - def test_normal__vector(self): - lblev = np.arange(10) - self._check_soil_level(_lbcode(0), lblev=lblev, dim=0) - - def test_cross_section(self): - self._check_soil_level(_lbcode(ix=1, iy=2), expect_match=False) - - def test_cross_section__vector(self): - lblev = np.arange(10) - self._check_soil_level( - _lbcode(ix=1, iy=2), lblev=lblev, expect_match=False, dim=0 - ) - - -class TestLBVC006_SoilDepth(TestField): - def _check_soil_depth( - self, - lbcode, - blev=0.05, - brsvd1=0, - brlev=0.1, - expect_match=True, - dim=None, - ): - lbvc = 6 - stash = STASH(1, 1, 1) - lblev, bhlev, bhrlev, brsvd2 = None, None, None, None - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - expect_result = [] - if expect_match: - coord = DimCoord( - blev, - standard_name="depth", - bounds=np.vstack((brsvd1, brlev)).T, - units="m", - attributes={"positive": "down"}, - ) - expect_result = [(coord, dim)] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) - - def test_normal(self): - self._check_soil_depth(_lbcode(0)) - - def test_normal__vector(self): - points = 
np.arange(10) - self._check_soil_depth( - _lbcode(0), blev=points, brsvd1=points - 1, brlev=points + 1, dim=0 - ) - - def test_bad_bounds(self): - points = [-0.5, 0.5] - lower = [-1, 1] - upper = [-1, 1] - self._check_soil_depth( - _lbcode(0), - blev=points, - brsvd1=lower, - brlev=upper, - dim=0, - expect_match=False, - ) - - def test_cross_section(self): - self._check_soil_depth(_lbcode(ix=1, iy=2), expect_match=False) - - def test_cross_section__vector(self): - points = np.arange(10) - self._check_soil_depth( - _lbcode(ix=1, iy=2), - blev=points, - brsvd1=points - 1, - brlev=points + 1, - expect_match=False, - dim=0, - ) - - -class TestLBVC008_Pressure(TestField): - def _check_pressure(self, lbcode, blev=250.3, expect_match=True, dim=None): - lbvc = 8 - stash = STASH(1, 1, 1) - lblev, bhlev, bhrlev, brsvd1, brsvd2, brlev = ( - None, - None, - None, - None, - None, - None, - ) - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - if expect_match: - expect_result = [ - (DimCoord(blev, long_name="pressure", units="hPa"), dim) - ] - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) - - def test_normal(self): - self._check_pressure(_lbcode(0)) - - def test_normal__vector(self): - blev = [10, 100, 1000, 10000] - self._check_pressure(_lbcode(0), blev=blev, dim=2) - - def test_non_pressure_cross_section(self): - self._check_pressure(_lbcode(ix=10, iy=11)) - - def test_non_pressure_cross_section__vector(self): - blev = np.arange(10) - self._check_pressure(_lbcode(ix=10, iy=11), blev=blev, dim=0) - - def test_pressure_cross_section(self): - self._check_pressure(_lbcode(ix=10, iy=1), expect_match=False) - - def test_pressure_cross_section__vector(self): - blev = np.arange(10) - self._check_pressure( - _lbcode(ix=10, iy=1), 
blev=blev, dim=1, expect_match=False - ) - - -class TestLBVC019_PotentialTemperature(TestField): - def _check_potm(self, lbcode, blev=130.6, expect_match=True, dim=None): - lbvc = 19 - stash = STASH(1, 1, 1) - lblev, bhlev, bhrlev, brsvd1, brsvd2, brlev = ( - None, - None, - None, - None, - None, - None, - ) - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - if expect_match: - expect_result = [ - ( - DimCoord( - blev, - standard_name="air_potential_temperature", - units="K", - attributes={"positive": "up"}, - ), - dim, - ) - ] - else: - expect_result = [] - self.assertCoordsAndDimsListsMatch(coords_and_dims, expect_result) - self.assertEqual(factories, []) - - def test_normal(self): - self._check_potm(_lbcode(0)) - - def test_normal__vector(self): - blev = list(range(10)) - self._check_potm(_lbcode(0), blev=blev, dim=0) - - def test_cross_section(self): - self._check_potm(_lbcode(ix=10, iy=11), expect_match=False) - - def test_cross_section__vector(self): - blev = np.arange(5) + 100 - self._check_potm( - _lbcode(ix=10, iy=11), blev=blev, dim=1, expect_match=False - ) - - -class TestLBVC009_HybridPressure(TestField): - def _check( - self, - lblev=37.0, - bhlev=850.1, - bhrlev=810.0, - brsvd2=875.0, - blev=0.15, - brlev=0.11, - brsvd1=0.19, - expect_match=True, - dim=None, - ): - lbvc = 9 - lbcode = _lbcode(0) # unused - stash = STASH(1, 1, 1) # unused - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - expect_coords_and_dims = [ - ( - DimCoord( - lblev, - standard_name="model_level_number", - attributes={"positive": "up"}, - units="1", - ), - dim, - ) - ] - - bhrlev = np.atleast_1d(bhrlev) - brsvd2 = 
np.atleast_1d(brsvd2) - expect_coords_and_dims.append( - ( - DimCoord( - bhlev, - long_name="level_pressure", - units="Pa", - bounds=np.vstack((bhrlev, brsvd2)).T, - ), - dim, - ) - ) - brlev = np.atleast_1d(brlev) - brsvd1 = np.atleast_1d(brsvd1) - expect_coords_and_dims.append( - ( - AuxCoord( - blev, - long_name="sigma", - bounds=np.vstack((brlev, brsvd1)).T, - units="1", - ), - dim, - ) - ) - expect_factories = [ - ( - HybridPressureFactory, - [ - {"long_name": "level_pressure"}, - {"long_name": "sigma"}, - Reference("surface_air_pressure"), - ], - ) - ] - self.assertCoordsAndDimsListsMatch( - coords_and_dims, expect_coords_and_dims - ) - self.assertEqual(factories, expect_factories) - - def test_normal(self): - self._check() - - def test_normal__vector(self): - lblev = list(range(3)) - bhlev = [10, 20, 30] - bhrlev = [5, 15, 25] - brsvd2 = [15, 25, 35] - blev = [100, 200, 300] - brlev = [50, 150, 250] - brsvd1 = [150, 250, 350] - self._check( - lblev=lblev, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd2=brsvd2, - blev=blev, - brlev=brlev, - brsvd1=brsvd1, - dim=0, - ) - - -class TestLBVC065_HybridHeight(TestField): - def _check( - self, - lblev=37.0, - blev=9596.3, - brlev=9500.0, - brsvd1=9800.0, - bhlev=0.35, - bhrlev=0.31, - brsvd2=0.39, - dim=None, - ): - lbvc = 65 - lbcode = _lbcode(0) # unused - stash = STASH(1, 1, 1) # unused - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - dim=dim, - ) - expect_coords_and_dims = [ - ( - DimCoord( - lblev, - standard_name="model_level_number", - attributes={"positive": "up"}, - units="1", - ), - dim, - ) - ] - brlev = np.atleast_1d(brlev) - brsvd1 = np.atleast_1d(brsvd1) - expect_coords_and_dims.append( - ( - DimCoord( - blev, - long_name="level_height", - units="m", - bounds=np.vstack((brlev, brsvd1)).T, - attributes={"positive": "up"}, - ), - dim, - ) - ) - 
bhrlev = np.atleast_1d(bhrlev) - brsvd2 = np.atleast_1d(brsvd2) - expect_coords_and_dims.append( - ( - AuxCoord( - bhlev, - long_name="sigma", - bounds=np.vstack((bhrlev, brsvd2)).T, - units="1", - ), - dim, - ) - ) - expect_factories = [ - ( - HybridHeightFactory, - [ - {"long_name": "level_height"}, - {"long_name": "sigma"}, - Reference("orography"), - ], - ) - ] - self.assertCoordsAndDimsListsMatch( - coords_and_dims, expect_coords_and_dims - ) - self.assertEqual(factories, expect_factories) - - def test_normal(self): - self._check() - - def test_normal__vector(self): - npts = 5 - lblev = np.arange(npts) - blev = np.arange(npts) + 10 - brlev = np.arange(npts) + 5 - brsvd1 = np.arange(npts) + 15 - bhlev = np.arange(npts) + 12 - bhrlev = np.arange(npts) + 6 - brsvd2 = np.arange(npts) + 18 - self._check( - lblev=lblev, - blev=blev, - brlev=brlev, - brsvd1=brsvd1, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd2=brsvd2, - dim=1, - ) - - -class TestLBVCxxx_Unhandled(TestField): - def test_unknown_lbvc(self): - lbvc = 999 - blev, lblev, bhlev, bhrlev, brsvd1, brsvd2, brlev = ( - None, - None, - None, - None, - None, - None, - None, - ) - lbcode = _lbcode(0) # unused - stash = STASH(1, 1, 1) # unused - coords_and_dims, factories = _convert_vertical_coords( - lbcode=lbcode, - lbvc=lbvc, - blev=blev, - lblev=lblev, - stash=stash, - bhlev=bhlev, - bhrlev=bhrlev, - brsvd1=brsvd1, - brsvd2=brsvd2, - brlev=brlev, - ) - self.assertEqual(coords_and_dims, []) - self.assertEqual(factories, []) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py deleted file mode 100644 index 7769ca1de1..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__dim_or_aux.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :func:`iris.fileformats.pp_load_rules._dim_or_aux`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.coords import AuxCoord, DimCoord -from iris.fileformats.pp_load_rules import _dim_or_aux - - -class Test(tests.IrisTest): - def setUp(self): - self.mono = list(range(5)) - self.non_mono = [0, 1, 3, 2, 4] - self.std_name = "depth" - self.units = "m" - self.attr = {"positive": "up", "wibble": "wobble"} - - def test_dim_monotonic(self): - result = _dim_or_aux( - self.mono, - standard_name=self.std_name, - units=self.units, - attributes=self.attr.copy(), - ) - expected = DimCoord( - self.mono, - standard_name=self.std_name, - units=self.units, - attributes=self.attr, - ) - self.assertEqual(result, expected) - - def test_dim_non_monotonic(self): - result = _dim_or_aux( - self.non_mono, - standard_name=self.std_name, - units=self.units, - attributes=self.attr.copy(), - ) - attr = self.attr.copy() - del attr["positive"] - expected = AuxCoord( - self.non_mono, - standard_name=self.std_name, - units=self.units, - attributes=attr, - ) - self.assertEqual(result, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py deleted file mode 100644 index 2877d6ea89..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__epoch_date_hours.py +++ /dev/null @@ -1,154 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._epoch_date_hours`. 
- -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import cf_units -from cf_units import Unit -from cftime import datetime as nc_datetime - -from iris.fileformats.pp_load_rules import ( - _epoch_date_hours as epoch_hours_call, -) - -# -# Run tests for each of the possible calendars from PPfield.calendar(). -# Test year=0 and all=0 cases, plus "normal" dates, for each calendar. -# Result values are the same as from 'date2num' in cftime version <= 1.0.1. -# - - -class TestEpochHours__gregorian(tests.IrisTest): - def setUp(self): - self.calendar = cf_units.CALENDAR_GREGORIAN - self.hrs_unit = Unit("hours since epoch", calendar=self.calendar) - - def test_1970_1_1(self): - test_date = nc_datetime(1970, 1, 1, calendar=self.calendar) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, 0.0) - - def test_ymd_1_1_1(self): - test_date = nc_datetime(1, 1, 1, calendar=self.calendar) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17259936.0) - - def test_year_0(self): - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17268720.0) - - def test_ymd_0_0_0(self): - test_date = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17269488.0) - - def test_ymd_0_preserves_timeofday(self): - hrs, mins, secs, usecs = (7, 13, 24, 335772) - hours_in_day = ( - hrs + 1.0 / 60 * mins + 1.0 / 3600 * secs + (1.0e-6) / 3600 * usecs - ) - test_date = nc_datetime( - 0, - 0, - 0, - hour=hrs, - minute=mins, - second=secs, - microsecond=usecs, - calendar=None, - has_year_zero=True, - ) - result = epoch_hours_call(self.hrs_unit, test_date) - # NOTE: the calculation is only accurate to approx +/- 0.5 seconds - # in such a large number of 
hours -- even 0.1 seconds is too fine. - absolute_tolerance = 0.5 / 3600 - self.assertArrayAllClose( - result, -17269488.0 + hours_in_day, rtol=0, atol=absolute_tolerance - ) - - -class TestEpochHours__360day(tests.IrisTest): - def setUp(self): - self.calendar = cf_units.CALENDAR_360_DAY - self.hrs_unit = Unit("hours since epoch", calendar=self.calendar) - - def test_1970_1_1(self): - test_date = nc_datetime(1970, 1, 1, calendar=self.calendar) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, 0.0) - - def test_ymd_1_1_1(self): - test_date = nc_datetime(1, 1, 1, calendar=self.calendar) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17012160.0) - - def test_year_0(self): - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17020800.0) - - def test_ymd_0_0_0(self): - test_date = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17021544.0) - - -class TestEpochHours__365day(tests.IrisTest): - def setUp(self): - self.calendar = cf_units.CALENDAR_365_DAY - self.hrs_unit = Unit("hours since epoch", calendar=self.calendar) - - def test_1970_1_1(self): - test_date = nc_datetime(1970, 1, 1, calendar=self.calendar) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, 0.0) - - def test_ymd_1_1_1(self): - test_date = nc_datetime(1, 1, 1, calendar=self.calendar) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17248440.0) - - def test_year_0(self): - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) - result = epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17257200.0) - - def test_ymd_0_0_0(self): - test_date = nc_datetime(0, 0, 0, calendar=None, has_year_zero=True) - result = 
epoch_hours_call(self.hrs_unit, test_date) - self.assertEqual(result, -17257968.0) - - -class TestEpochHours__invalid_calendar(tests.IrisTest): - def test_bad_calendar(self): - self.calendar = cf_units.CALENDAR_ALL_LEAP - # Setup a unit with an unrecognised calendar - hrs_unit = Unit("hours since epoch", calendar=self.calendar) - # Test against a date with year=0, which requires calendar correction. - test_date = nc_datetime( - 0, 1, 1, calendar=self.calendar, has_year_zero=True - ) - # Check that this causes an error. - with self.assertRaisesRegex(ValueError, "unrecognised calendar"): - epoch_hours_call(hrs_unit, test_date) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py deleted file mode 100644 index fa381b91c1..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__model_level_number.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for :func:`iris.fileformats.pp_load_rules._model_level_number`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from iris.fileformats.pp_load_rules import _model_level_number - - -class Test_9999(tests.IrisTest): - def test(self): - self.assertEqual(_model_level_number(9999), 0) - - -class Test_lblev(tests.IrisTest): - def test(self): - for val in range(9999): - self.assertEqual(_model_level_number(val), val) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py deleted file mode 100644 index fc30f66f7f..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reduced_points_and_bounds.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._reduce_points_and_bounds`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats.pp_load_rules import _reduce_points_and_bounds - - -class Test(tests.IrisTest): - def test_scalar(self): - array = np.array(1) - dims, result, bounds = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, array) - self.assertEqual(dims, None) - self.assertIsNone(bounds) - - def test_1d_nochange(self): - array = np.array([1, 2, 3]) - dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, array) - self.assertEqual(dims, (0,)) - - def test_1d_collapse(self): - array = np.array([1, 1, 1]) - dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array(1)) - self.assertEqual(dims, None) - - def test_2d_nochange(self): - array = np.array([[1, 2, 3], [4, 5, 6]]) - dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, array) - self.assertEqual(dims, (0, 1)) - - def test_2d_collapse_dim0(self): - array = np.array([[1, 2, 3], [1, 2, 3]]) - dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array([1, 2, 3])) - self.assertEqual(dims, (1,)) - - def test_2d_collapse_dim1(self): - array = np.array([[1, 1, 1], [2, 2, 2]]) - dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array([1, 2])) - self.assertEqual(dims, (0,)) - - def test_2d_collapse_both(self): - array = np.array([[3, 3, 3], [3, 3, 3]]) - dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array(3)) - self.assertEqual(dims, None) - - def test_3d(self): - array = np.array([[[3, 3, 3], [4, 4, 4]], [[3, 3, 3], [4, 4, 4]]]) - dims, result, _ = _reduce_points_and_bounds(array) - self.assertArrayEqual(result, np.array([3, 4])) - self.assertEqual(dims, (1,)) - - def test_bounds_collapse(self): - points = np.array([1, 1, 1]) - bounds = np.array([[0, 2], [0, 2], [0, 2]]) - result_dims, result_pts, result_bds = _reduce_points_and_bounds( - 
points, (bounds[..., 0], bounds[..., 1]) - ) - self.assertArrayEqual(result_pts, np.array(1)) - self.assertArrayEqual(result_bds, np.array([0, 2])) - self.assertEqual(result_dims, None) - - def test_bounds_no_collapse(self): - points = np.array([1, 2, 3]) - bounds = np.array([[0, 2], [1, 3], [2, 4]]) - result_dims, result_pts, result_bds = _reduce_points_and_bounds( - points, (bounds[..., 0], bounds[..., 1]) - ) - self.assertArrayEqual(result_pts, points) - self.assertArrayEqual(result_bds, bounds) - self.assertEqual(result_dims, (0,)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py deleted file mode 100644 index 4e6d50fea7..0000000000 --- a/lib/iris/tests/unit/fileformats/pp_load_rules/test__reshape_vector_args.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for -:func:`iris.fileformats.pp_load_rules._reshape_vector_args`. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats.pp_load_rules import _reshape_vector_args - - -class TestEmpty(tests.IrisTest): - def test(self): - result = _reshape_vector_args([]) - self.assertEqual(result, []) - - -class TestSingleArg(tests.IrisTest): - def _check(self, result, expected): - self.assertEqual(len(result), len(expected)) - for result_arr, expected_arr in zip(result, expected): - self.assertArrayEqual(result_arr, expected_arr) - - def test_nochange(self): - points = np.array([[1, 2, 3], [4, 5, 6]]) - result = _reshape_vector_args([(points, (0, 1))]) - expected = [points] - self._check(result, expected) - - def test_bad_dimensions(self): - points = np.array([[1, 2, 3], [4, 5, 6]]) - with self.assertRaisesRegex(ValueError, "Length"): - _reshape_vector_args([(points, (0, 1, 2))]) - - def test_scalar(self): - points = 5 - result = _reshape_vector_args([(points, ())]) - expected = [points] - self._check(result, expected) - - def test_nonarray(self): - points = [[1, 2, 3], [4, 5, 6]] - result = _reshape_vector_args([(points, (0, 1))]) - expected = [np.array(points)] - self._check(result, expected) - - def test_transpose(self): - points = np.array([[1, 2, 3], [4, 5, 6]]) - result = _reshape_vector_args([(points, (1, 0))]) - expected = [points.T] - self._check(result, expected) - - def test_extend(self): - points = np.array([[1, 2, 3, 4], [21, 22, 23, 24], [31, 32, 33, 34]]) - result = _reshape_vector_args([(points, (1, 3))]) - expected = [points.reshape(1, 3, 1, 4)] - self._check(result, expected) - - -class TestMultipleArgs(tests.IrisTest): - def _check(self, result, expected): - self.assertEqual(len(result), len(expected)) - for result_arr, expected_arr in zip(result, expected): - self.assertArrayEqual(result_arr, expected_arr) - - def test_nochange(self): - a1 = np.array([[1, 2, 3], [4, 5, 6]]) - a2 = np.array([[0, 2, 4], [7, 8, 9]]) - result = _reshape_vector_args([(a1, (0, 1)), (a2, (0, 1))]) - expected = [a1, 
a2] - self._check(result, expected) - - def test_array_and_scalar(self): - a1 = [[1, 2, 3], [3, 4, 5]] - a2 = 5 - result = _reshape_vector_args([(a1, (0, 1)), (a2, ())]) - expected = [a1, np.array([[5]])] - self._check(result, expected) - - def test_transpose(self): - a1 = np.array([[1, 2, 3], [4, 5, 6]]) - a2 = np.array([[0, 2, 4], [7, 8, 9]]) - result = _reshape_vector_args([(a1, (0, 1)), (a2, (1, 0))]) - expected = [a1, a2.T] - self._check(result, expected) - - def test_incompatible(self): - # Does not enforce compatibility of results. - a1 = np.array([1, 2]) - a2 = np.array([1, 2, 3]) - result = _reshape_vector_args([(a1, (0,)), (a2, (0,))]) - expected = [a1, a2] - self._check(result, expected) - - def test_extend(self): - a1 = np.array([[1, 2, 3], [4, 5, 6]]) - a2 = np.array([11, 12, 13]) - result = _reshape_vector_args([(a1, (0, 1)), (a2, (1,))]) - expected = [a1, a2.reshape(1, 3)] - self._check(result, expected) - - def test_extend_transpose(self): - a1 = np.array([[1, 2, 3], [4, 5, 6]]) - a2 = np.array([11, 12, 13]) - result = _reshape_vector_args([(a1, (1, 0)), (a2, (1,))]) - expected = [a1.T, a2.reshape(1, 3)] - self._check(result, expected) - - def test_double_extend(self): - a1 = np.array([[1, 2, 3], [4, 5, 6]]) - a2 = np.array(1) - result = _reshape_vector_args([(a1, (0, 2)), (a2, ())]) - expected = [a1.reshape(2, 1, 3), a2.reshape(1, 1, 1)] - self._check(result, expected) - - def test_triple(self): - a1 = np.array([[1, 2, 3, 4]]) - a2 = np.array([3, 4]) - a3 = np.array(7) - result = _reshape_vector_args([(a1, (0, 2)), (a2, (1,)), (a3, ())]) - expected = [ - a1.reshape(1, 1, 4), - a2.reshape(1, 2, 1), - a3.reshape(1, 1, 1), - ] - self._check(result, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py b/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py deleted file mode 100644 index 569d676183..0000000000 --- 
a/lib/iris/tests/unit/fileformats/pp_load_rules/test_convert.py +++ /dev/null @@ -1,459 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :func:`iris.fileformats.pp_load_rules.convert`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from types import MethodType -from unittest import mock - -import cf_units -import cftime -import numpy as np - -from iris.fileformats.pp import STASH, PPField3, SplittableInt -from iris.fileformats.pp_load_rules import convert -import iris.tests.unit.fileformats -from iris.util import guess_coord_axis - - -def _mock_field(**kwargs): - # Generate a mock field, but ensure T1 and T2 viable for rules. - field = mock.MagicMock( - t1=mock.MagicMock(year=1990, month=3, day=7), - t2=mock.MagicMock(year=1990, month=3, day=7), - ) - field.configure_mock(**kwargs) - return field - - -class TestLBCODE(iris.tests.unit.fileformats.TestField): - @staticmethod - def _is_cross_section_height_coord(coord): - return ( - coord.standard_name == "height" - and coord.units == "km" - and coord.attributes["positive"] == "up" - ) - - def test_cross_section_height_bdy_zero(self): - lbcode = SplittableInt(19902, {"iy": slice(0, 2), "ix": slice(2, 4)}) - points = np.array([10, 20, 30, 40]) - bounds = np.array([[0, 15], [15, 25], [25, 35], [35, 45]]) - field = _mock_field(lbcode=lbcode, bdy=0, y=points, y_bounds=bounds) - self._test_for_coord( - field, - convert, - TestLBCODE._is_cross_section_height_coord, - expected_points=points, - expected_bounds=bounds, - ) - - def test_cross_section_height_bdy_bmdi(self): - lbcode = SplittableInt(19902, {"iy": slice(0, 2), "ix": slice(2, 4)}) - points = np.array([10, 20, 30, 40]) - bounds = np.array([[0, 15], [15, 25], [25, 35], [35, 45]]) - bmdi = 
-1.07374e09 - field = _mock_field( - lbcode=lbcode, bdy=bmdi, bmdi=bmdi, y=points, y_bounds=bounds - ) - self._test_for_coord( - field, - convert, - TestLBCODE._is_cross_section_height_coord, - expected_points=points, - expected_bounds=bounds, - ) - - -class TestLBVC(iris.tests.unit.fileformats.TestField): - @staticmethod - def _is_potm_level_coord(coord): - return ( - coord.standard_name == "air_potential_temperature" - and coord.attributes["positive"] == "up" - ) - - @staticmethod - def _is_model_level_number_coord(coord): - return ( - coord.standard_name == "model_level_number" - and coord.units.is_dimensionless() - and coord.attributes["positive"] == "up" - ) - - @staticmethod - def _is_level_pressure_coord(coord): - return coord.name() == "level_pressure" and coord.units == "Pa" - - @staticmethod - def _is_sigma_coord(coord): - return coord.name() == "sigma" and coord.units.is_dimensionless() - - @staticmethod - def _is_soil_model_level_number_coord(coord): - return ( - coord.long_name == "soil_model_level_number" - and coord.units.is_dimensionless() - and coord.attributes["positive"] == "down" - ) - - @staticmethod - def _is_soil_depth_coord(coord): - return ( - coord.standard_name == "depth" - and coord.units == "m" - and coord.attributes["positive"] == "down" - ) - - def test_soil_levels(self): - level = 1234 - field = _mock_field(lbvc=6, lblev=level, brsvd=[0, 0], brlev=0) - self._test_for_coord( - field, - convert, - self._is_soil_model_level_number_coord, - expected_points=[level], - expected_bounds=None, - ) - - def test_soil_depth(self): - lower, point, upper = 1.2, 3.4, 5.6 - field = _mock_field(lbvc=6, blev=point, brsvd=[lower, 0], brlev=upper) - self._test_for_coord( - field, - convert, - self._is_soil_depth_coord, - expected_points=[point], - expected_bounds=[[lower, upper]], - ) - - def test_hybrid_pressure_model_level_number(self): - level = 5678 - field = _mock_field( - lbvc=9, - lblev=level, - blev=20, - brlev=23, - bhlev=42, - bhrlev=45, - 
brsvd=[17, 40], - ) - self._test_for_coord( - field, - convert, - TestLBVC._is_model_level_number_coord, - expected_points=[level], - expected_bounds=None, - ) - - def test_hybrid_pressure_delta(self): - delta_point = 12.0 - delta_lower_bound = 11.0 - delta_upper_bound = 13.0 - field = _mock_field( - lbvc=9, - lblev=5678, - blev=20, - brlev=23, - bhlev=delta_point, - bhrlev=delta_lower_bound, - brsvd=[17, delta_upper_bound], - ) - self._test_for_coord( - field, - convert, - TestLBVC._is_level_pressure_coord, - expected_points=[delta_point], - expected_bounds=[[delta_lower_bound, delta_upper_bound]], - ) - - def test_hybrid_pressure_sigma(self): - sigma_point = 0.5 - sigma_lower_bound = 0.6 - sigma_upper_bound = 0.4 - field = _mock_field( - lbvc=9, - lblev=5678, - blev=sigma_point, - brlev=sigma_lower_bound, - bhlev=12, - bhrlev=11, - brsvd=[sigma_upper_bound, 13], - ) - self._test_for_coord( - field, - convert, - TestLBVC._is_sigma_coord, - expected_points=[sigma_point], - expected_bounds=[[sigma_lower_bound, sigma_upper_bound]], - ) - - def test_potential_temperature_levels(self): - potm_value = 27.32 - field = _mock_field(lbvc=19, blev=potm_value) - self._test_for_coord( - field, - convert, - TestLBVC._is_potm_level_coord, - expected_points=np.array([potm_value]), - expected_bounds=None, - ) - - -class TestLBTIM(iris.tests.unit.fileformats.TestField): - def test_365_calendar(self): - f = mock.MagicMock( - lbtim=SplittableInt(4, {"ia": 2, "ib": 1, "ic": 0}), - lbyr=2013, - lbmon=1, - lbdat=1, - lbhr=12, - lbmin=0, - lbsec=0, - t1=cftime.datetime(2013, 1, 1, 12, 0, 0), - t2=cftime.datetime(2013, 1, 2, 12, 0, 0), - spec=PPField3, - ) - f.time_unit = MethodType(PPField3.time_unit, f) - f.calendar = cf_units.CALENDAR_365_DAY - ( - factories, - references, - standard_name, - long_name, - units, - attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) = convert(f) - - def is_t_coord(coord_and_dims): - coord, dims = coord_and_dims - return 
coord.standard_name == "time" - - coords_and_dims = list(filter(is_t_coord, aux_coords_and_dims)) - self.assertEqual(len(coords_and_dims), 1) - coord, dims = coords_and_dims[0] - self.assertEqual(guess_coord_axis(coord), "T") - self.assertEqual(coord.units.calendar, "365_day") - - def base_field(self): - field = PPField3(header=mock.MagicMock()) - field.lbfc = 0 - field.bdx = 1 - field.bdy = 1 - field.bmdi = 999 - field.lbproc = 0 - field.lbvc = 0 - field.lbuser = [0] * 7 - field.lbrsvd = [0] * 4 - field.brsvd = [0] * 4 - field.lbsrce = 0 - field.lbcode = 0 - return field - - @staticmethod - def is_forecast_period(coord): - return ( - coord.standard_name == "forecast_period" and coord.units == "hours" - ) - - @staticmethod - def is_time(coord): - return ( - coord.standard_name == "time" - and coord.units == "hours since epoch" - ) - - def test_time_mean_ib2(self): - field = self.base_field() - field.lbtim = 21 - # Implicit reference time: 1970-01-02 06:00 - field.lbft = 9 - # t1 - field.lbyr, field.lbmon, field.lbdat = 1970, 1, 2 - field.lbhr, field.lbmin, field.lbsec = 12, 0, 0 - # t2 - field.lbyrd, field.lbmond, field.lbdatd = 1970, 1, 2 - field.lbhrd, field.lbmind, field.lbsecd = 15, 0, 0 - - self._test_for_coord( - field, - convert, - self.is_forecast_period, - expected_points=[7.5], - expected_bounds=[[6, 9]], - ) - - self._test_for_coord( - field, - convert, - self.is_time, - expected_points=[24 + 13.5], - expected_bounds=[[36, 39]], - ) - - def test_time_mean_ib3(self): - field = self.base_field() - field.lbtim = 31 - # Implicit reference time: 1970-01-02 06:00 - field.lbft = lbft = ((365 + 1) * 24 + 15) - (24 + 6) - # t1 - field.lbyr, field.lbmon, field.lbdat = 1970, 1, 2 - field.lbhr, field.lbmin, field.lbsec = 12, 0, 0 - # t2 - field.lbyrd, field.lbmond, field.lbdatd = 1971, 1, 2 - field.lbhrd, field.lbmind, field.lbsecd = 15, 0, 0 - - self._test_for_coord( - field, - convert, - self.is_forecast_period, - expected_points=[lbft], - expected_bounds=[[36 - 
30, lbft]], - ) - - self._test_for_coord( - field, - convert, - self.is_time, - expected_points=[lbft + 30], - expected_bounds=[[36, lbft + 30]], - ) - - -class TestLBRSVD(iris.tests.unit.fileformats.TestField): - @staticmethod - def _is_realization(coord): - return coord.standard_name == "realization" and coord.units == "1" - - def test_realization(self): - lbrsvd = [0] * 4 - lbrsvd[3] = 71 - points = np.array([71]) - bounds = None - field = _mock_field(lbrsvd=lbrsvd) - self._test_for_coord( - field, - convert, - TestLBRSVD._is_realization, - expected_points=points, - expected_bounds=bounds, - ) - - -class TestLBSRCE(iris.tests.IrisTest): - def check_um_source_attrs( - self, lbsrce, source_str=None, um_version_str=None - ): - field = _mock_field(lbsrce=lbsrce) - ( - factories, - references, - standard_name, - long_name, - units, - attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) = convert(field) - if source_str is not None: - self.assertEqual(attributes["source"], source_str) - else: - self.assertNotIn("source", attributes) - if um_version_str is not None: - self.assertEqual(attributes["um_version"], um_version_str) - else: - self.assertNotIn("um_version", attributes) - - def test_none(self): - self.check_um_source_attrs( - lbsrce=8123, source_str=None, um_version_str=None - ) - - def test_no_um_version(self): - self.check_um_source_attrs( - lbsrce=1111, - source_str="Data from Met Office Unified Model", - um_version_str=None, - ) - - def test_um_version(self): - self.check_um_source_attrs( - lbsrce=12071111, - source_str="Data from Met Office Unified Model", - um_version_str="12.7", - ) - - -class Test_STASH_CF(iris.tests.unit.fileformats.TestField): - def test_stash_cf_air_temp(self): - lbuser = [1, 0, 0, 16203, 0, 0, 1] - lbfc = 16 - stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000) - field = _mock_field(lbuser=lbuser, lbfc=lbfc, stash=stash) - ( - factories, - references, - standard_name, - long_name, - units, - 
attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) = convert(field) - self.assertEqual(standard_name, "air_temperature") - self.assertEqual(units, "K") - - def test_no_std_name(self): - lbuser = [1, 0, 0, 0, 0, 0, 0] - lbfc = 0 - stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000) - field = _mock_field(lbuser=lbuser, lbfc=lbfc, stash=stash) - ( - factories, - references, - standard_name, - long_name, - units, - attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) = convert(field) - self.assertIsNone(standard_name) - self.assertIsNone(units) - - -class Test_LBFC_CF(iris.tests.unit.fileformats.TestField): - def test_fc_cf_air_temp(self): - lbuser = [1, 0, 0, 0, 0, 0, 0] - lbfc = 16 - stash = STASH(lbuser[6], lbuser[3] // 1000, lbuser[3] % 1000) - field = _mock_field(lbuser=lbuser, lbfc=lbfc, stash=stash) - ( - factories, - references, - standard_name, - long_name, - units, - attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) = convert(field) - self.assertEqual(standard_name, "air_temperature") - self.assertEqual(units, "K") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/rules/__init__.py b/lib/iris/tests/unit/fileformats/rules/__init__.py deleted file mode 100644 index 55c9c7779e..0000000000 --- a/lib/iris/tests/unit/fileformats/rules/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.fileformats.rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/rules/test_Loader.py b/lib/iris/tests/unit/fileformats/rules/test_Loader.py deleted file mode 100644 index be96f526d2..0000000000 --- a/lib/iris/tests/unit/fileformats/rules/test_Loader.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris.fileformats.rules.Loader`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats.rules import Loader - - -class Test___init__(tests.IrisTest): - def test_normal(self): - with mock.patch("warnings.warn") as warn: - loader = Loader( - mock.sentinel.GEN_FUNC, - mock.sentinel.GEN_FUNC_KWARGS, - mock.sentinel.CONVERTER, - ) - self.assertEqual(warn.call_count, 0) - self.assertIs(loader.field_generator, mock.sentinel.GEN_FUNC) - self.assertIs( - loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS - ) - self.assertIs(loader.converter, mock.sentinel.CONVERTER) - - def test_normal_with_explicit_none(self): - with mock.patch("warnings.warn") as warn: - loader = Loader( - mock.sentinel.GEN_FUNC, - mock.sentinel.GEN_FUNC_KWARGS, - mock.sentinel.CONVERTER, - ) - self.assertEqual(warn.call_count, 0) - self.assertIs(loader.field_generator, mock.sentinel.GEN_FUNC) - self.assertIs( - loader.field_generator_kwargs, mock.sentinel.GEN_FUNC_KWARGS - ) - self.assertIs(loader.converter, mock.sentinel.CONVERTER) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py b/lib/iris/tests/unit/fileformats/rules/test__make_cube.py deleted file mode 100644 index b6c4528399..0000000000 --- 
a/lib/iris/tests/unit/fileformats/rules/test__make_cube.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :func:`iris.fileformats.rules._make_cube`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock -import warnings - -import numpy as np - -from iris.fileformats.rules import ConversionMetadata, _make_cube - - -class Test(tests.IrisTest): - def test_invalid_units(self): - # Mock converter() function that returns an invalid - # units string amongst the collection of other elements. - factories = None - references = None - standard_name = None - long_name = None - units = "wibble" # Invalid unit. - attributes = dict(source="test") - cell_methods = None - dim_coords_and_dims = None - aux_coords_and_dims = None - metadata = ConversionMetadata( - factories, - references, - standard_name, - long_name, - units, - attributes, - cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) - converter = mock.Mock(return_value=metadata) - - data = np.arange(3.0) - field = mock.Mock( - core_data=lambda: data, bmdi=9999.0, realised_dtype=data.dtype - ) - with warnings.catch_warnings(record=True) as warn: - warnings.simplefilter("always") - cube, factories, references = _make_cube(field, converter) - - # Check attributes dictionary is correctly populated. - expected_attributes = attributes.copy() - expected_attributes["invalid_units"] = units - self.assertEqual(cube.attributes, expected_attributes) - - # Check warning was raised. 
- self.assertEqual(len(warn), 1) - exp_emsg = "invalid units {!r}".format(units) - self.assertRegex(str(warn[0]), exp_emsg) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py b/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py deleted file mode 100644 index c703284fc0..0000000000 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -:mod:`iris.fileformats._structured_array_identification` module. - -""" diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py deleted file mode 100644 index 871aab4f1e..0000000000 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_ArrayStructure.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -:mod:`iris.fileformats._structured_array_identification.ArrayStructure` class. - -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats._structured_array_identification import ( - ArrayStructure, - _UnstructuredArrayException, -) - - -def construct_nd(sub_array, sub_dim, shape): - # Given a 1D array, a shape, and the axis/dimension that the 1D array - # represents on the bigger array, construct a numpy array which is - # filled appropriately. 
- assert sub_array.ndim == 1 - sub_shape = [1 if dim != sub_dim else -1 for dim in range(len(shape))] - return sub_array.reshape(sub_shape) * np.ones(shape) - - -class TestArrayStructure_from_array(tests.IrisTest): - def struct_from_arr(self, nd_array): - return ArrayStructure.from_array(nd_array.flatten()) - - def test_1d_len_0(self): - a = np.arange(0) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, a)) - - def test_1d_len_1(self): - a = np.arange(1) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, a)) - - def test_1d(self): - a = np.array([-1, 3, 1, 2]) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, a)) - - def test_1d_ones(self): - a = np.ones(10) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, [1])) - - def test_1d_range(self): - a = np.arange(6) - self.assertEqual( - self.struct_from_arr(a), ArrayStructure(1, list(range(6))) - ) - - def test_3d_ones(self): - a = np.ones([10, 2, 1]) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, [1])) - - def test_1d_over_2d_first_dim_manual(self): - sub = np.array([10, 10, 20, 20]) - self.assertEqual( - self.struct_from_arr(sub), ArrayStructure(2, [10, 20]) - ) - - def test_3d_first_dimension(self): - flattened = np.array([1, 1, 1, 2, 2, 2]) - self.assertEqual( - ArrayStructure.from_array(flattened), ArrayStructure(3, [1, 2]) - ) - - def test_1d_over_2d_first_dim(self): - sub = np.array([-1, 3, 1, 2]) - a = construct_nd(sub, 0, (4, 2)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(2, sub)) - - def test_1d_over_2d_second_dim(self): - sub = np.array([-1, 3, 1, 2]) - a = construct_nd(sub, 1, (2, 4)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, sub)) - - def test_1d_over_3d_first_dim(self): - sub = np.array([-1, 3, 1, 2]) - a = construct_nd(sub, 0, (4, 2, 3)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(6, sub)) - - def test_1d_over_3d_second_dim(self): - sub = np.array([-1, 3, 1, 2]) - a = construct_nd(sub, 1, (2, 
4, 3)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(3, sub)) - - def test_1d_over_3d_third_dim(self): - sub = np.array([-1, 3, 1, 2]) - a = construct_nd(sub, 2, (3, 2, 4)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, sub)) - - def test_irregular_3d(self): - sub = np.array([-1, 3, 1, 2]) - a = construct_nd(sub, 2, (3, 2, 4)) - a[0, 0, 0] = 5 - self.assertEqual(self.struct_from_arr(a), None) - - def test_repeated_3d(self): - sub = np.array([-1, 3, 1, 2]) - a = construct_nd(sub, 2, (3, 2, 4)) - a[:, 0, 0] = 1 - self.assertEqual(self.struct_from_arr(a), None) - - def test_rolled_3d(self): - # Shift the 3D array on by one, making the array 1d. - sub = np.arange(4) - a = construct_nd(sub, 0, (4, 2, 3)) - a = np.roll(a.flatten(), 1) - self.assertEqual(self.struct_from_arr(a), None) - - def test_len_1_3d(self): - # Setup a case which triggers an IndexError when identifying - # the stride, but the result should still be correct. - sub = np.arange(2) - a = construct_nd(sub, 1, (1, 1, 1)) - self.assertEqual(self.struct_from_arr(a), ArrayStructure(1, sub)) - - def test_not_an_array(self): - # Support lists as an argument. - self.assertEqual( - ArrayStructure.from_array([1, 2, 3]), ArrayStructure(1, [1, 2, 3]) - ) - - def test_multi_dim_array(self): - with self.assertRaises(ValueError): - ArrayStructure.from_array(np.arange(12).reshape(3, 4)) - - -class nd_array_and_dims_cases: - """ - Defines the test functionality for nd_array_and_dims. This class - isn't actually the test case - see the C order and F order subclasses - for those. 
- - """ - - def test_scalar_len1_first_dim(self): - struct = ArrayStructure(1, [1]) - orig = np.array([1, 1, 1]) - - array, dims = struct.nd_array_and_dims(orig, (1, 3), order=self.order) - self.assertArrayEqual(array, [1]) - self.assertEqual(dims, ()) - - def test_scalar_non_len1_first_dim(self): - struct = ArrayStructure(1, [1]) - orig = np.array([1, 1, 1]) - - array, dims = struct.nd_array_and_dims(orig, (3, 1), order=self.order) - self.assertArrayEqual(array, [1]) - self.assertEqual(dims, ()) - - def test_single_vector(self): - orig = construct_nd(np.array([1, 2]), 0, (2, 1, 3)) - flattened = orig.flatten(order=self.order) - struct = ArrayStructure.from_array(flattened) - array, dims = struct.nd_array_and_dims( - flattened, (2, 1, 3), order=self.order - ) - self.assertArrayEqual(array, [1, 2]) - self.assertEqual(dims, (0,)) - - def test_single_vector_3rd_dim(self): - orig = construct_nd(np.array([1, 2, 3]), 2, (4, 1, 3)) - flattened = orig.flatten(order=self.order) - - struct = ArrayStructure.from_array(flattened) - array, dims = struct.nd_array_and_dims( - flattened, (4, 1, 3), order=self.order - ) - self.assertArrayEqual(array, [1, 2, 3]) - self.assertEqual(dims, (2,)) - - def test_orig_array_and_target_shape_inconsistent(self): - # An array structure which has a length which is a product - # of potential dimensions should not result in an array - struct = ArrayStructure(2, [1, 2, 3]) - orig = np.array([1, 1, 2, 2, 3, 3]) - - msg = "Original array and target shape do not match up." 
- with self.assertRaisesRegex(ValueError, msg): - struct.nd_array_and_dims(orig, (2, 3, 2), order=self.order) - - def test_array_bigger_than_expected(self): - # An array structure which has a length which is a product - # of potential dimensions should not result in an array - struct = ArrayStructure(2, [1, 2, 3, 4, 5, 6]) - orig = np.array([1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6]) - - with self.assertRaises(_UnstructuredArrayException): - struct.nd_array_and_dims(orig, (2, 3, 2), order=self.order) - - def test_single_vector_extra_dimension(self): - orig = construct_nd(np.array([1, 2]), 1, (3, 2)) - flattened = orig.flatten(order=self.order) - - struct = ArrayStructure.from_array(flattened) - - # Add another dimension on flattened, making it a (6, 2). - input_array = np.vstack([flattened, flattened + 100]).T - - array, dims = struct.nd_array_and_dims( - input_array, (3, 1, 2, 1), order=self.order - ) - self.assertArrayEqual(array, [[1, 101], [2, 102]]) - self.assertEqual(dims, (2,)) - - -class TestArrayStructure_nd_array_and_dims_f_order( - tests.IrisTest, nd_array_and_dims_cases -): - order = "f" - - -class TestArrayStructure_nd_array_and_dims_c_order( - tests.IrisTest, nd_array_and_dims_cases -): - order = "c" - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py b/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py deleted file mode 100644 index a7818ad802..0000000000 --- a/lib/iris/tests/unit/fileformats/structured_array_identification/test_GroupStructure.py +++ /dev/null @@ -1,198 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the -:mod:`iris.fileformats._structured_array_identification.GroupStructure` class. 
- -""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats._structured_array_identification import ( - ArrayStructure, - GroupStructure, -) - - -def regular_array_structures(shape, names="abcdefg"): - # Construct column major appropriate ArrayStructures for the given - # shape. - running_product = 1 - array_structures = {} - for name, dim_len in zip(names, shape): - array_structures[name] = ArrayStructure( - running_product, np.arange(dim_len) - ) - running_product *= dim_len - return array_structures - - -class TestGroupStructure_from_component_arrays(tests.IrisTest): - def test_different_sizes(self): - arrays = {"a": np.arange(6), "b": np.arange(5)} - msg = "All array elements must have the same size." - with self.assertRaisesRegex(ValueError, msg): - GroupStructure.from_component_arrays(arrays) - - def test_structure_creation(self): - # Test that the appropriate dictionary containing ArrayStructures is - # computed when constructing a GroupStructure from_component_arrays. - array = np.arange(6) - expected_structure = {"a": ArrayStructure.from_array(array)} - - grp = GroupStructure.from_component_arrays({"a": array}) - - self.assertEqual(grp.length, 6) - self.assertEqual(grp._cmpt_structure, expected_structure) - - -class TestGroupStructure_possible_structures(tests.IrisTest): - def test_simple_3d_structure(self): - # Construct a structure representing a (3, 2, 4) group and assert - # that the result is of the expected form. 
- array_structures = { - "a": ArrayStructure(1, [1, -1, 2]), - "b": ArrayStructure(3, [1, -1]), - "c": ArrayStructure(6, [1, -1, 2, 3]), - } - structure = GroupStructure(24, array_structures, array_order="f") - expected = ( - [ - ("a", array_structures["a"]), - ("b", array_structures["b"]), - ("c", array_structures["c"]), - ], - ) - self.assertEqual(structure.possible_structures(), expected) - - def assert_potentials(self, length, array_structures, expected): - structure = GroupStructure(length, array_structures, array_order="f") - allowed = structure.possible_structures() - names = [ - [name for (name, _) in allowed_structure] - for allowed_structure in allowed - ] - self.assertEqual(names, expected) - - def test_multiple_potentials(self): - # More than one potential dimension for dim 1. - array_structures = regular_array_structures((4, 2, 3)) - array_structures["shared b"] = ArrayStructure(4, [-10, 4]) - self.assert_potentials( - 24, array_structures, [["a", "b", "c"], ["a", "shared b", "c"]] - ) - - def test_alternate_potentials(self): - # More than one potential dimension for dim 1. - array_structures = regular_array_structures((4, 2, 3)) - array_structures.update(regular_array_structures((6, 4), names="xy")) - self.assert_potentials( - 24, array_structures, [["x", "y"], ["a", "b", "c"]] - ) - - def test_shared_first_dimension(self): - # One 2d potential as well as one 3d, using the same first dimension. - array_structures = regular_array_structures((4, 2, 3)) - array_structures["bc combined"] = ArrayStructure(4, np.arange(6)) - self.assert_potentials( - 24, array_structures, [["a", "bc combined"], ["a", "b", "c"]] - ) - - def test_non_viable_element(self): - # One 2d potential as well as one 3d, using the same first dimension. 
- array_structures = regular_array_structures((4, 2, 3)) - array_structures.pop("c") - array_structures["strange_length"] = ArrayStructure(4, np.arange(5)) - self.assert_potentials(24, array_structures, []) - - def test_completely_unstructured_element(self): - # One of the arrays is entirely unstructured. - array_structures = regular_array_structures((4, 2, 3)) - array_structures["unstructured"] = None - self.assert_potentials(24, array_structures, [["a", "b", "c"]]) - - -class TestGroupStructure_build_arrays(tests.IrisTest): - def assert_built_array(self, name, result, expected): - ex_arr, ex_dims = expected - re_arr, re_dims = result[name] - self.assertEqual(ex_dims, re_dims) - self.assertArrayEqual(ex_arr, re_arr) - - def test_build_arrays_regular_f_order(self): - # Construct simple orthogonal 1d array structures, adding a trailing - # dimension to the second, and assert the result of build_arrays - # produces the required result. - elements = regular_array_structures((2, 3)) - - a = elements["a"].construct_array(6) - b = elements["b"].construct_array(6) - # Make b 2 dimensional. - b = np.vstack([b, b + 100]).T - - grp = GroupStructure(6, elements, array_order="f") - - result = grp.build_arrays((2, 3), {"a": a, "b": b}) - self.assert_built_array("a", result, ([0, 1], (0,))) - self.assert_built_array( - "b", result, ([[0, 100], [1, 101], [2, 102]], (1,)) - ) - - def test_build_arrays_unstructured(self): - # Check that an unstructured array gets reshaped appropriately. - grp = GroupStructure(6, {"a": None}, array_order="c") - orig = np.array([1, 2, 3, 4, 5, 6]).reshape(2, 3) - r = grp.build_arrays((2, 3), {"a": orig.flatten(order="c")}) - self.assert_built_array("a", r, (orig, (0, 1))) - - def test_build_arrays_unstructured_ndim_f_order(self): - # Passing an unstructured array to build_arrays, should result in the - # appropriately shaped array, plus any trailing dimensions. 
- grp = GroupStructure(6, {"a": None}, array_order="f") - orig = np.array([1, 2, 3, 4, 5, 6]).reshape(2, 3) - orig = np.dstack([orig, orig + 10]) - r = grp.build_arrays((2, 3), {"a": orig.reshape((-1, 2), order="f")}) - self.assert_built_array("a", r, (orig, (0, 1))) - - def test_build_arrays_unstructured_ndim_c_order(self): - # Passing an unstructured array to build_arrays, should result in the - # appropriately shaped array, plus any trailing dimensions. - grp = GroupStructure(6, {"a": None}, array_order="c") - orig = np.array([1, 2, 3, 4, 5, 6]).reshape(2, 3) - orig = np.dstack([orig, orig + 10]) - r = grp.build_arrays((2, 3), {"a": orig.reshape((-1, 2), order="c")}) - self.assert_built_array("a", r, (orig, (0, 1))) - - def test_structured_array_not_applicable(self): - # Just because an array has a possible structure, does not mean it - # gets used. Check that 'd' which would make a good 1D array, doesn't - # get used in a specific shape. - elements = regular_array_structures((2, 2, 3)) - elements["d"] = ArrayStructure(3, np.arange(4)) - grp = GroupStructure(12, elements, array_order="f") - - d = np.array([0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3]).reshape( - (3, 4), order="f" - ) - expected = np.array([[[0, 1, 2], [0, 2, 3]], [[0, 1, 3], [1, 2, 3]]]) - r = grp.build_arrays( - (2, 2, 3), - { - "a": np.arange(12), - "b": np.arange(12), - "c": np.arange(12), - "d": d.flatten(order="f"), - }, - ) - self.assert_built_array("d", r, (expected, (0, 1, 2))) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/test_rules.py b/lib/iris/tests/unit/fileformats/test_rules.py deleted file mode 100644 index c243a374cb..0000000000 --- a/lib/iris/tests/unit/fileformats/test_rules.py +++ /dev/null @@ -1,269 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Test iris.fileformats.rules.py - metadata translation rules. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import types -from unittest import mock - -import numpy as np - -from iris.aux_factory import HybridHeightFactory -from iris.coords import CellMethod -from iris.cube import Cube -from iris.fileformats.rules import ( - ConcreteReferenceTarget, - ConversionMetadata, - Factory, - Loader, - Reference, - ReferenceTarget, - load_cubes, - scalar_cell_method, -) -import iris.tests.stock as stock - - -class TestConcreteReferenceTarget(tests.IrisTest): - def test_attributes(self): - with self.assertRaises(TypeError): - target = ConcreteReferenceTarget() - - target = ConcreteReferenceTarget("foo") - self.assertEqual(target.name, "foo") - self.assertIsNone(target.transform) - - def transform(_): - return _ - - target = ConcreteReferenceTarget("foo", transform) - self.assertEqual(target.name, "foo") - self.assertIs(target.transform, transform) - - def test_single_cube_no_transform(self): - target = ConcreteReferenceTarget("foo") - src = stock.simple_2d() - target.add_cube(src) - self.assertIs(target.as_cube(), src) - - def test_single_cube_with_transform(self): - def transform(cube): - return {"long_name": "wibble"} - - target = ConcreteReferenceTarget("foo", transform) - src = stock.simple_2d() - target.add_cube(src) - dest = target.as_cube() - self.assertEqual(dest.long_name, "wibble") - self.assertNotEqual(dest, src) - dest.long_name = src.long_name - self.assertEqual(dest, src) - - @tests.skip_data - def test_multiple_cubes_no_transform(self): - target = ConcreteReferenceTarget("foo") - src = stock.realistic_4d() - for i in range(src.shape[0]): - target.add_cube(src[i]) - dest = target.as_cube() - self.assertIsNot(dest, src) - self.assertEqual(dest, src) - - @tests.skip_data - def test_multiple_cubes_with_transform(self): - def transform(cube): - return 
{"long_name": "wibble"} - - target = ConcreteReferenceTarget("foo", transform) - src = stock.realistic_4d() - for i in range(src.shape[0]): - target.add_cube(src[i]) - dest = target.as_cube() - self.assertEqual(dest.long_name, "wibble") - self.assertNotEqual(dest, src) - dest.long_name = src.long_name - self.assertEqual(dest, src) - - -class TestLoadCubes(tests.IrisTest): - def test_simple_factory(self): - # Test the creation process for a factory definition which only - # uses simple dict arguments. - - # Make a minimal fake data object that passes as lazy data. - core_data_array = mock.Mock(compute=None, dtype=np.dtype("f4")) - # Make a fake PPField which will be supplied to our converter. - field = mock.Mock( - core_data=mock.Mock(return_value=core_data_array), - realised_dtype=np.dtype("f4"), - bmdi=None, - ) - - def field_generator(filename): - return [field] - - # A fake conversion function returning: - # 1) A parameter cube needing a simple factory construction. - aux_factory = mock.Mock() - factory = mock.Mock() - factory.args = [{"name": "foo"}] - factory.factory_class = ( - lambda *args: setattr(aux_factory, "fake_args", args) - or aux_factory - ) - - def converter(field): - return ConversionMetadata( - [factory], [], "", "", "", {}, [], [], [] - ) - - # Finish by making a fake Loader - fake_loader = Loader(field_generator, {}, converter) - cubes = load_cubes(["fake_filename"], None, fake_loader) - - # Check the result is a generator with a single entry. - self.assertIsInstance(cubes, types.GeneratorType) - try: - # Suppress the normal Cube.coord() and Cube.add_aux_factory() - # methods. 
- coord_method = Cube.coord - add_aux_factory_method = Cube.add_aux_factory - Cube.coord = lambda self, **args: args - Cube.add_aux_factory = lambda self, aux_factory: setattr( - self, "fake_aux_factory", aux_factory - ) - - cubes = list(cubes) - finally: - Cube.coord = coord_method - Cube.add_aux_factory = add_aux_factory_method - self.assertEqual(len(cubes), 1) - # Check the "cube" has an "aux_factory" added, which itself - # must have been created with the correct arguments. - self.assertTrue(hasattr(cubes[0], "fake_aux_factory")) - self.assertIs(cubes[0].fake_aux_factory, aux_factory) - self.assertTrue(hasattr(aux_factory, "fake_args")) - self.assertEqual(aux_factory.fake_args, ({"name": "foo"},)) - - @tests.skip_data - def test_cross_reference(self): - # Test the creation process for a factory definition which uses - # a cross-reference. - - param_cube = stock.realistic_4d_no_derived() - orog_coord = param_cube.coord("surface_altitude") - param_cube.remove_coord(orog_coord) - - orog_cube = param_cube[0, 0, :, :] - orog_cube.data = orog_coord.points - orog_cube.rename("surface_altitude") - orog_cube.units = orog_coord.units - orog_cube.attributes = orog_coord.attributes - - # We're going to test for the presence of the hybrid height - # stuff later, so let's make sure it's not already there! - assert len(param_cube.aux_factories) == 0 - assert not param_cube.coords("surface_altitude") - - # The fake PPFields which will be supplied to our converter. 
- press_field = mock.Mock( - core_data=mock.Mock(return_value=param_cube.data), - bmdi=-1e20, - realised_dtype=param_cube.dtype, - ) - - orog_field = mock.Mock( - core_data=mock.Mock(return_value=orog_cube.data), - bmdi=-1e20, - realised_dtype=orog_cube.dtype, - ) - - def field_generator(filename): - return [press_field, orog_field] - - # A fake rule set returning: - # 1) A parameter cube needing an "orography" reference - # 2) An "orography" cube - - def converter(field): - if field is press_field: - src = param_cube - factories = [ - Factory(HybridHeightFactory, [Reference("orography")]) - ] - references = [] - else: - src = orog_cube - factories = [] - references = [ReferenceTarget("orography", None)] - dim_coords_and_dims = [ - (coord, src.coord_dims(coord)[0]) for coord in src.dim_coords - ] - aux_coords_and_dims = [ - (coord, src.coord_dims(coord)) for coord in src.aux_coords - ] - return ConversionMetadata( - factories, - references, - src.standard_name, - src.long_name, - src.units, - src.attributes, - src.cell_methods, - dim_coords_and_dims, - aux_coords_and_dims, - ) - - # Finish by making a fake Loader - fake_loader = Loader(field_generator, {}, converter) - cubes = load_cubes(["fake_filename"], None, fake_loader) - - # Check the result is a generator containing two Cubes. - self.assertIsInstance(cubes, types.GeneratorType) - cubes = list(cubes) - self.assertEqual(len(cubes), 2) - # Check the "cube" has an "aux_factory" added, which itself - # must have been created with the correct arguments. 
- self.assertEqual(len(cubes[1].aux_factories), 1) - self.assertEqual(len(cubes[1].coords("surface_altitude")), 1) - - -class Test_scalar_cell_method(tests.IrisTest): - """Tests for iris.fileformats.rules.scalar_cell_method() function""" - - def setUp(self): - self.cube = stock.simple_2d() - self.cm = CellMethod("mean", "foo", "1 hour") - self.cube.cell_methods = (self.cm,) - - def test_cell_method_found(self): - actual = scalar_cell_method(self.cube, "mean", "foo") - self.assertEqual(actual, self.cm) - - def test_method_different(self): - actual = scalar_cell_method(self.cube, "average", "foo") - self.assertIsNone(actual) - - def test_coord_name_different(self): - actual = scalar_cell_method(self.cube, "average", "bar") - self.assertIsNone(actual) - - def test_double_coord_fails(self): - self.cube.cell_methods = ( - CellMethod("mean", ("foo", "bar"), ("1 hour", "1 hour")), - ) - actual = scalar_cell_method(self.cube, "mean", "foo") - self.assertIsNone(actual) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/__init__.py b/lib/iris/tests/unit/fileformats/um/__init__.py deleted file mode 100644 index 6b4abc61bb..0000000000 --- a/lib/iris/tests/unit/fileformats/um/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.um` package.""" diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py deleted file mode 100644 index b5eb259e5b..0000000000 --- a/lib/iris/tests/unit/fileformats/um/fast_load/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-""" -Unit tests for the module :mod:`iris.fileformats.um._fast_load`. - -""" diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py deleted file mode 100644 index 0c15e5e839..0000000000 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test_FieldCollation.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the class -:class:`iris.fileformats.um._fast_load.FieldCollation`. - -This only tests the additional functionality for recording file locations of -PPFields that make loaded cubes. -The original class is the baseclass of this, now renamed 'BasicFieldCollation'. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -import iris -from iris.tests.integration.fast_load.test_fast_load import Mixin_FieldTest - - -class TestFastCallbackLocationInfo(Mixin_FieldTest, tests.IrisTest): - do_fast_loads = True - - def setUp(self): - # Call parent setup. - super().setUp() - - # Create a basic load test case. 
- self.callback_collations = [] - self.callback_filepaths = [] - - def fast_load_callback(cube, collation, filename): - self.callback_collations.append(collation) - self.callback_filepaths.append(filename) - - flds = self.fields(c_t="11112222", c_h="11221122", phn="01010101") - self.test_filepath = self.save_fieldcubes(flds) - iris.load(self.test_filepath, callback=fast_load_callback) - - def test_callback_collations_filepaths(self): - self.assertEqual(len(self.callback_collations), 2) - self.assertEqual( - self.callback_collations[0].data_filepath, self.test_filepath - ) - self.assertEqual( - self.callback_collations[1].data_filepath, self.test_filepath - ) - - def test_callback_collations_field_indices(self): - self.assertEqual( - self.callback_collations[0].data_field_indices.dtype, np.int64 - ) - self.assertArrayEqual( - self.callback_collations[0].data_field_indices, [[1, 3], [5, 7]] - ) - - self.assertEqual( - self.callback_collations[1].data_field_indices.dtype, np.int64 - ) - self.assertArrayEqual( - self.callback_collations[1].data_field_indices, [[0, 2], [4, 6]] - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py b/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py deleted file mode 100644 index 90c411b41d..0000000000 --- a/lib/iris/tests/unit/fileformats/um/fast_load/test__convert_collation.py +++ /dev/null @@ -1,450 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :func:`iris.fileformats.um._fast_load._convert_collation`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import cf_units -import cftime -import numpy as np - -import iris.aux_factory -import iris.coord_systems -import iris.coords -import iris.fileformats.pp -import iris.fileformats.rules -from iris.fileformats.um._fast_load import ( - _convert_collation as convert_collation, -) - -COORD_SYSTEM = iris.coord_systems.GeogCS(6371229.0) -LATITUDE = iris.coords.DimCoord( - [15, 0, -15], "latitude", units="degrees", coord_system=COORD_SYSTEM -) -LONGITUDE = iris.coords.DimCoord( - [0, 20, 40, 60], - "longitude", - units="degrees", - coord_system=COORD_SYSTEM, - circular=True, -) - - -class Test(tests.IrisTest): - def _field(self): - # Create PP field for X wind on a regular lat-lon grid. - header = [0] * 64 - # Define the regular lat-lon grid. - header[15] = 1 # LBCODE - header[17] = 3 # LBROW - header[18] = 4 # LBNPT - header[55] = 90 # BPLAT - header[58] = 30 # BZY - header[59] = -15 # BDY - header[60] = -20 # BZX - header[61] = 20 # BDX - # Define the STASH code m01s00i002. 
- header[41] = 2 # LBUSER(4) - header[44] = 1 # LBUSER(7) - field = iris.fileformats.pp.PPField3(header) - return field - - def _check_phenomenon(self, metadata, factory=None): - if factory is None: - self.assertEqual(metadata.factories, []) - else: - self.assertEqual(metadata.factories, [factory]) - self.assertEqual(metadata.references, []) - self.assertEqual(metadata.standard_name, "x_wind") - self.assertIsNone(metadata.long_name) - self.assertEqual(metadata.units, cf_units.Unit("m s-1")) - self.assertEqual(metadata.attributes, {"STASH": (1, 0, 2)}) - self.assertEqual(metadata.cell_methods, []) - - def test_all_scalar(self): - field = self._field() - field.lbtim = 11 - field.t1 = cftime.datetime(1970, 1, 1, 18) - field.t2 = cftime.datetime(1970, 1, 1, 12) - collation = mock.Mock( - fields=[field], vector_dims_shape=(), element_arrays_and_dims={} - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - coords_and_dims = [(LONGITUDE, 1), (LATITUDE, 0)] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [ - ( - iris.coords.DimCoord(18, "time", units="hours since epoch"), - None, - ), - ( - iris.coords.DimCoord( - 12, "forecast_reference_time", units="hours since epoch" - ), - None, - ), - (iris.coords.DimCoord(6, "forecast_period", units="hours"), None), - ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_vector_t1(self): - field = self._field() - field.lbtim = 11 - field.t2 = cftime.datetime(1970, 1, 1, 12) - t1 = ( - [ - cftime.datetime(1970, 1, 1, 18), - cftime.datetime(1970, 1, 2, 0), - cftime.datetime(1970, 1, 2, 6), - ], - [0], - ) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(3,), - element_arrays_and_dims={"t1": t1}, - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - coords_and_dims = [ - (LONGITUDE, 2), - (LATITUDE, 1), - ( - iris.coords.DimCoord( - [18, 24, 30], "time", units="hours since epoch" - ), - (0,), - ), - ] 
- self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [ - ( - iris.coords.DimCoord( - 12, "forecast_reference_time", units="hours since epoch" - ), - None, - ), - ( - iris.coords.DimCoord( - [6, 12, 18], "forecast_period", units="hours" - ), - (0,), - ), - ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_vector_t2(self): - field = self._field() - field.lbtim = 11 - field.t1 = cftime.datetime(1970, 1, 1, 18) - t2 = ( - [ - cftime.datetime(1970, 1, 1, 12), - cftime.datetime(1970, 1, 1, 15), - cftime.datetime(1970, 1, 1, 18), - ], - [0], - ) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(3,), - element_arrays_and_dims={"t2": t2}, - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - coords_and_dims = [ - (LONGITUDE, 2), - (LATITUDE, 1), - ( - iris.coords.DimCoord( - [12, 15, 18], - "forecast_reference_time", - units="hours since epoch", - ), - (0,), - ), - ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [ - ( - iris.coords.DimCoord(18, "time", units="hours since epoch"), - None, - ), - ( - iris.coords.DimCoord( - [6, 3, 0.0], "forecast_period", units="hours" - ), - (0,), - ), - ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_vector_lbft(self): - field = self._field() - field.lbtim = 21 - field.t1 = cftime.datetime(1970, 1, 1, 12) - field.t2 = cftime.datetime(1970, 1, 1, 18) - lbft = ([18, 15, 12], [0]) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(3,), - element_arrays_and_dims={"lbft": lbft}, - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - coords_and_dims = [ - (LONGITUDE, 2), - (LATITUDE, 1), - ( - iris.coords.DimCoord( - [0, 3, 6], - "forecast_reference_time", - units="hours since epoch", - ), - (0,), - ), - ] - coords_and_dims = [ - ( - iris.coords.DimCoord( - 15, "time", units="hours since epoch", bounds=[[12, 18]] - ), - None, - 
), - ( - iris.coords.DimCoord( - [15, 12, 9], - "forecast_period", - units="hours", - bounds=[[12, 18], [9, 15], [6, 12]], - ), - (0,), - ), - ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_vector_t1_and_t2(self): - field = self._field() - field.lbtim = 11 - t1 = ( - [ - cftime.datetime(1970, 1, 2, 6), - cftime.datetime(1970, 1, 2, 9), - cftime.datetime(1970, 1, 2, 12), - ], - [1], - ) - t2 = ( - [cftime.datetime(1970, 1, 1, 12), cftime.datetime(1970, 1, 2, 0)], - [0], - ) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(2, 3), - element_arrays_and_dims={"t1": t1, "t2": t2}, - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - coords_and_dims = [ - (LONGITUDE, 3), - (LATITUDE, 2), - ( - iris.coords.DimCoord( - [30, 33, 36], "time", units="hours since epoch" - ), - (1,), - ), - ( - iris.coords.DimCoord( - [12, 24], - "forecast_reference_time", - units="hours since epoch", - ), - (0,), - ), - ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [ - ( - iris.coords.AuxCoord( - [[18, 21, 24], [6, 9, 12]], - "forecast_period", - units="hours", - ), - (0, 1), - ) - ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_vertical_pressure(self): - field = self._field() - field.lbvc = 8 - blev = ([1000, 850, 700], (0,)) - lblev = ([1000, 850, 700], (0,)) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(3,), - element_arrays_and_dims={"blev": blev, "lblev": lblev}, - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - coords_and_dims = [ - (LONGITUDE, 2), - (LATITUDE, 1), - ( - iris.coords.DimCoord( - [1000, 850, 700], long_name="pressure", units="hPa" - ), - (0,), - ), - ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_soil_level(self): - field = self._field() - field.lbvc = 
6 - points = [10, 20, 30] - lower = [0] * 3 - upper = [0] * 3 - lblev = (points, (0,)) - brsvd1 = (lower, (0,)) - brlev = (upper, (0,)) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(3,), - element_arrays_and_dims={ - "lblev": lblev, - "brsvd1": brsvd1, - "brlev": brlev, - }, - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - level = iris.coords.DimCoord( - points, - long_name="soil_model_level_number", - attributes={"positive": "down"}, - units="1", - ) - coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1), (level, (0,))] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_soil_depth(self): - field = self._field() - field.lbvc = 6 - points = [10, 20, 30] - lower = [0, 15, 25] - upper = [15, 25, 35] - blev = (points, (0,)) - brsvd1 = (lower, (0,)) - brlev = (upper, (0,)) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(3,), - element_arrays_and_dims={ - "blev": blev, - "brsvd1": brsvd1, - "brlev": brlev, - }, - ) - metadata = convert_collation(collation) - self._check_phenomenon(metadata) - depth = iris.coords.DimCoord( - points, - standard_name="depth", - bounds=np.vstack((lower, upper)).T, - units="m", - attributes={"positive": "down"}, - ) - coords_and_dims = [(LONGITUDE, 2), (LATITUDE, 1), (depth, (0,))] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - def test_vertical_hybrid_height(self): - field = self._field() - field.lbvc = 65 - blev = ([5, 18, 38], (0,)) - lblev = ([1000, 850, 700], (0,)) - brsvd1 = ([10, 26, 50], (0,)) - brsvd2 = ([0.9989, 0.9970, 0.9944], (0,)) - brlev = ([0, 10, 26], (0,)) - bhrlev = ([1, 0.9989, 0.9970], (0,)) - lblev = ([1, 2, 3], (0,)) - bhlev = ([0.9994, 0.9979, 0.9957], (0,)) - collation = mock.Mock( - fields=[field], - vector_dims_shape=(3,), - 
element_arrays_and_dims={ - "blev": blev, - "lblev": lblev, - "brsvd1": brsvd1, - "brsvd2": brsvd2, - "brlev": brlev, - "bhrlev": bhrlev, - "lblev": lblev, - "bhlev": bhlev, - }, - ) - metadata = convert_collation(collation) - factory = iris.fileformats.rules.Factory( - iris.aux_factory.HybridHeightFactory, - [ - {"long_name": "level_height"}, - {"long_name": "sigma"}, - iris.fileformats.rules.Reference("orography"), - ], - ) - self._check_phenomenon(metadata, factory) - coords_and_dims = [ - (LONGITUDE, 2), - (LATITUDE, 1), - ( - iris.coords.DimCoord( - [1, 2, 3], - "model_level_number", - attributes={"positive": "up"}, - units="1", - ), - (0,), - ), - ] - self.assertEqual(metadata.dim_coords_and_dims, coords_and_dims) - coords_and_dims = [ - ( - iris.coords.DimCoord( - [5, 18, 38], - long_name="level_height", - units="m", - bounds=[[0, 10], [10, 26], [26, 50]], - attributes={"positive": "up"}, - ), - (0,), - ), - ( - iris.coords.AuxCoord( - [0.9994, 0.9979, 0.9957], - long_name="sigma", - bounds=[[1, 0.9989], [0.9989, 0.9970], [0.9970, 0.9944]], - units="1", - ), - (0,), - ), - ] - self.assertEqual(metadata.aux_coords_and_dims, coords_and_dims) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py deleted file mode 100644 index f0932c3ac8..0000000000 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the module -:mod:`iris.fileformats.um._fast_load_structured_fields`. 
- -""" diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py deleted file mode 100644 index 57100c79af..0000000000 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_BasicFieldCollation.py +++ /dev/null @@ -1,231 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the class -:class:`iris.fileformats.um._fast_load_structured_fields.BasicFieldCollation`. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from cftime import datetime -import numpy as np - -from iris._lazy_data import as_lazy_data -import iris.fileformats.pp -from iris.fileformats.um._fast_load_structured_fields import ( - BasicFieldCollation, -) - - -class Test___init__(tests.IrisTest): - def test_no_fields(self): - with self.assertRaises(AssertionError): - BasicFieldCollation([]) - - -class Test_fields(tests.IrisTest): - def test_preserve_members(self): - fields = ("foo", "bar", "wibble") - collation = BasicFieldCollation(fields) - self.assertEqual(collation.fields, fields) - - -def _make_field( - lbyr=None, lbyrd=None, lbft=None, blev=None, bhlev=None, data=None -): - header = [0] * 64 - if lbyr is not None: - header[0] = lbyr - header[1] = 1 - header[2] = 1 - if lbyrd is not None: - header[6] = lbyrd - header[7] = 1 - header[8] = 1 - if lbft is not None: - header[13] = lbft - if blev is not None: - header[51] = blev - if bhlev is not None: - header[53] = bhlev - field = iris.fileformats.pp.PPField3(header) - if data is not None: - _data = _make_data(data) - field.data = _data - return field - - -def _make_data(fill_value): - shape = (10, 10) - return 
as_lazy_data(np.ones(shape) * fill_value) - - -class Test_data(tests.IrisTest): - # Test order of the data attribute when fastest-varying element is changed. - def test_t1_varies_faster(self): - collation = BasicFieldCollation( - [ - _make_field(lbyr=2013, lbyrd=2000, data=0), - _make_field(lbyr=2014, lbyrd=2000, data=1), - _make_field(lbyr=2015, lbyrd=2000, data=2), - _make_field(lbyr=2013, lbyrd=2001, data=3), - _make_field(lbyr=2014, lbyrd=2001, data=4), - _make_field(lbyr=2015, lbyrd=2001, data=5), - ] - ) - result = collation.data[:, :, 0, 0] - expected = [[0, 1, 2], [3, 4, 5]] - self.assertArrayEqual(result, expected) - - def test_t2_varies_faster(self): - collation = BasicFieldCollation( - [ - _make_field(lbyr=2013, lbyrd=2000, data=0), - _make_field(lbyr=2013, lbyrd=2001, data=1), - _make_field(lbyr=2013, lbyrd=2002, data=2), - _make_field(lbyr=2014, lbyrd=2000, data=3), - _make_field(lbyr=2014, lbyrd=2001, data=4), - _make_field(lbyr=2014, lbyrd=2002, data=5), - ] - ) - result = collation.data[:, :, 0, 0] - expected = [[0, 1, 2], [3, 4, 5]] - self.assertArrayEqual(result, expected) - - -class Test_element_arrays_and_dims(tests.IrisTest): - def test_single_field(self): - field = _make_field(2013) - collation = BasicFieldCollation([field]) - self.assertEqual(collation.element_arrays_and_dims, {}) - - def test_t1(self): - collation = BasicFieldCollation( - [_make_field(lbyr=2013), _make_field(lbyr=2014)] - ) - result = collation.element_arrays_and_dims - self.assertEqual(list(result.keys()), ["t1"]) - values, dims = result["t1"] - self.assertArrayEqual( - values, [datetime(2013, 1, 1), datetime(2014, 1, 1)] - ) - self.assertEqual(dims, (0,)) - - def test_t1_and_t2(self): - collation = BasicFieldCollation( - [ - _make_field(lbyr=2013, lbyrd=2000), - _make_field(lbyr=2014, lbyrd=2001), - _make_field(lbyr=2015, lbyrd=2002), - ] - ) - result = collation.element_arrays_and_dims - self.assertEqual(set(result.keys()), set(["t1", "t2"])) - values, dims = result["t1"] 
- self.assertArrayEqual( - values, - [datetime(2013, 1, 1), datetime(2014, 1, 1), datetime(2015, 1, 1)], - ) - self.assertEqual(dims, (0,)) - values, dims = result["t2"] - self.assertArrayEqual( - values, - [datetime(2000, 1, 1), datetime(2001, 1, 1), datetime(2002, 1, 1)], - ) - self.assertEqual(dims, (0,)) - - def test_t1_and_t2_and_lbft(self): - collation = BasicFieldCollation( - [ - _make_field(lbyr=1, lbyrd=15, lbft=6), - _make_field(lbyr=1, lbyrd=16, lbft=9), - _make_field(lbyr=11, lbyrd=25, lbft=6), - _make_field(lbyr=11, lbyrd=26, lbft=9), - ] - ) - result = collation.element_arrays_and_dims - self.assertEqual(set(result.keys()), set(["t1", "t2", "lbft"])) - values, dims = result["t1"] - self.assertArrayEqual(values, [datetime(1, 1, 1), datetime(11, 1, 1)]) - self.assertEqual(dims, (0,)) - values, dims = result["t2"] - self.assertArrayEqual( - values, - [ - [datetime(15, 1, 1), datetime(16, 1, 1)], - [datetime(25, 1, 1), datetime(26, 1, 1)], - ], - ) - self.assertEqual(dims, (0, 1)) - values, dims = result["lbft"] - self.assertArrayEqual(values, [6, 9]) - self.assertEqual(dims, (1,)) - - def test_blev(self): - collation = BasicFieldCollation( - [_make_field(blev=1), _make_field(blev=2)] - ) - result = collation.element_arrays_and_dims - keys = set( - ["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"] - ) - self.assertEqual(set(result.keys()), keys) - values, dims = result["blev"] - self.assertArrayEqual(values, [1, 2]) - self.assertEqual(dims, (0,)) - - def test_bhlev(self): - collation = BasicFieldCollation( - [_make_field(blev=0, bhlev=1), _make_field(blev=1, bhlev=2)] - ) - result = collation.element_arrays_and_dims - keys = set( - ["blev", "brsvd1", "brsvd2", "brlev", "bhrlev", "lblev", "bhlev"] - ) - self.assertEqual(set(result.keys()), keys) - values, dims = result["bhlev"] - self.assertArrayEqual(values, [1, 2]) - self.assertEqual(dims, (0,)) - - -class Test__time_comparable_int(tests.IrisTest): - def test(self): - # Define a list of 
date-time tuples, which should remain both all - # distinct and in ascending order when converted... - test_date_tuples = [ - # Increment each component in turn to check that all are handled. - (2004, 1, 1, 0, 0, 0), - (2004, 1, 1, 0, 0, 1), - (2004, 1, 1, 0, 1, 0), - (2004, 1, 1, 1, 0, 0), - (2004, 1, 2, 0, 0, 0), - (2004, 2, 1, 0, 0, 0), - # Go across 2004-02-29 leap-day, and on to "Feb 31 .. Mar 1". - (2004, 2, 27, 0, 0, 0), - (2004, 2, 28, 0, 0, 0), - (2004, 2, 29, 0, 0, 0), - (2004, 2, 30, 0, 0, 0), - (2004, 2, 31, 0, 0, 0), - (2004, 3, 1, 0, 0, 0), - (2005, 1, 1, 0, 0, 0), - ] - - collation = BasicFieldCollation(["foo", "bar"]) - test_date_ints = [ - collation._time_comparable_int(*test_tuple) - for test_tuple in test_date_tuples - ] - # Check all values are distinct. - self.assertEqual(len(test_date_ints), len(set(test_date_ints))) - # Check all values are in order. - self.assertEqual(test_date_ints, sorted(test_date_ints)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py b/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py deleted file mode 100644 index b7ef9a62a3..0000000000 --- a/lib/iris/tests/unit/fileformats/um/fast_load_structured_fields/test_group_structured_fields.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function :func:\ -`iris.fileformats.um._fast_load_structured_fields.group_structured_fields`. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats.um._fast_load_structured_fields import ( - group_structured_fields, -) - - -def _convert_to_vector(value, length, default): - """ - Return argument (or default) in a list of length 'length'. - - The 'value' arg must either be scalar, or a list of length 'length'. - A value of None is replaced by the default. - If scalar, the value is replicated to the required length. - - """ - if value is None: - value = default - if hasattr(value, "__len__"): - assert len(value) == length - else: - value = [value] * length - return value - - -class Test__grouping(tests.IrisTest): - def _dummy_fields_iter(self, stashes=None, models=None, lbprocs=None): - # Make a group of test fields, and return an iterator over it. - a_vec = [vec for vec in (stashes, models, lbprocs) if vec is not None] - number = len(a_vec[0]) - stashes = _convert_to_vector(stashes, number, default=31) - models = _convert_to_vector(models, number, default=71) - lbprocs = _convert_to_vector(lbprocs, number, default=91) - self.test_fields = [ - mock.MagicMock( - lbuser=[0, 0, 0, x_stash, 0, 0, x_model], - lbproc=x_lbproc, - i_field=ind + 1001, - ) - for ind, x_stash, x_model, x_lbproc in zip( - range(number), stashes, models, lbprocs - ) - ] - return (fld for fld in self.test_fields) - - def _group_result(self, fields): - # Run the testee, but returning just the groups (not FieldCollations). - result = list(group_structured_fields(fields, collation_class=tuple)) - return result - - def _test_fields(self, item): - # Convert nested tuples/lists of field-numbers into fields. 
- if isinstance(item, int): - result = self.test_fields[item - 1001] - else: - result = type(item)(self._test_fields(el) for el in item) - return result - - def test_none(self): - null_iter = (x for x in []) - result = self._group_result(null_iter) - self.assertEqual(result, []) - - def test_one(self): - fields_iter = self._dummy_fields_iter(stashes=[1]) - result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001,)])) - - def test_allsame(self): - fields_iter = self._dummy_fields_iter(stashes=[1, 1, 1]) - result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1002, 1003)])) - - def test_stashes_different(self): - fields_iter = self._dummy_fields_iter(stashes=[1, 1, 22, 1, 22, 333]) - result = self._group_result(fields_iter) - self.assertEqual( - result, - self._test_fields([(1001, 1002, 1004), (1003, 1005), (1006,)]), - ) - - def test_models_different(self): - fields_iter = self._dummy_fields_iter(models=[10, 21, 10]) - result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1003), (1002,)])) - - def test_lbprocs_different(self): - fields_iter = self._dummy_fields_iter(lbprocs=[991, 995, 991]) - result = self._group_result(fields_iter) - self.assertEqual(result, self._test_fields([(1001, 1003), (1002,)])) - - def test_2d_combines(self): - fields_iter = self._dummy_fields_iter( - stashes=[11, 11, 15, 11], lbprocs=[31, 42, 31, 42] - ) - result = self._group_result(fields_iter) - self.assertEqual( - result, self._test_fields([(1001,), (1002, 1004), (1003,)]) - ) - - def test_sortorder(self): - fields_iter = self._dummy_fields_iter(stashes=[11, 7, 12]) - result = self._group_result(fields_iter) - self.assertEqual( - result, self._test_fields([(1002,), (1001,), (1003,)]) - ) - - def test_sortorder_2d(self): - fields_iter = self._dummy_fields_iter( - stashes=[11, 11, 12], lbprocs=[31, 9, 1] - ) - result = self._group_result(fields_iter) - self.assertEqual( - 
result, self._test_fields([(1002,), (1001,), (1003,)]) - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py deleted file mode 100644 index 8070719de8..0000000000 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the module -:mod:`iris.fileformats.um._optimal_array_structuring`. - -""" diff --git a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py b/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py deleted file mode 100644 index 96566f3c80..0000000000 --- a/lib/iris/tests/unit/fileformats/um/optimal_array_structuring/test_optimal_array_structure.py +++ /dev/null @@ -1,260 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function -:func:`iris.fileformats.um._optimal_array_structuring.optimal_array_structure`. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.fileformats.um._optimal_array_structuring import ( - optimal_array_structure, -) - - -class Test__optimal_dimensioning_structure: - pass - - -class Test_optimal_array_structure(tests.IrisTest): - def _check_arrays_and_dims(self, result, spec): - self.assertEqual(set(result.keys()), set(spec.keys())) - for keyname in spec.keys(): - result_array, result_dims = result[keyname] - spec_array, spec_dims = spec[keyname] - self.assertEqual( - result_dims, - spec_dims, - 'element dims differ for "{}": ' - "result={!r}, expected {!r}".format( - keyname, result_dims, spec_dims - ), - ) - self.assertArrayEqual( - result_array, - spec_array, - 'element arrays differ for "{}": ' - "result={!r}, expected {!r}".format( - keyname, result_array, spec_array - ), - ) - - def test_none(self): - with self.assertRaises(IndexError): - _ = optimal_array_structure([], []) - - def test_one(self): - # A single value does not make a dimension (no length-1 dims). - elements = [("a", np.array([1]))] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, ()) - self.assertEqual(primaries, set()) - self.assertEqual(elems_and_dims, {}) - - def test_1d(self): - elements = [("a", np.array([1, 2, 4]))] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) - self._check_arrays_and_dims( - elems_and_dims, {"a": (np.array([1, 2, 4]), (0,))} - ) - - def test_1d_actuals(self): - # Test use of alternate element values for array construction. 
- elements = [("a", np.array([1, 2, 4]))] - actual_values = [("a", np.array([7, 3, 9]))] - shape, primaries, elems_and_dims = optimal_array_structure( - elements, actual_values - ) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) - self._check_arrays_and_dims( - elems_and_dims, {"a": (np.array([7, 3, 9]), (0,))} - ) - - def test_actuals_mismatch_fail(self): - elements = [("a", np.array([1, 2, 4]))] - actual_values = [("b", np.array([7, 3, 9]))] - with self.assertRaisesRegex(ValueError, "Names.* do not match.*"): - shape, primaries, elems_and_dims = optimal_array_structure( - elements, actual_values - ) - - def test_2d(self): - elements = [ - ("a", np.array([2, 2, 2, 3, 3, 3])), - ("b", np.array([7, 8, 9, 7, 8, 9])), - ] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) - self._check_arrays_and_dims( - elems_and_dims, - {"a": (np.array([2, 3]), (0,)), "b": (np.array([7, 8, 9]), (1,))}, - ) - - def test_2d_with_element_values(self): - # Confirm that elements values are used in the output when supplied. 
- elements = [ - ("a", np.array([2, 2, 2, 3, 3, 3])), - ("b", np.array([7, 8, 9, 7, 8, 9])), - ] - elements_values = [ - ("a", np.array([6, 6, 6, 8, 8, 8])), - ("b", np.array([3, 4, 5, 3, 4, 5])), - ] - shape, primaries, elems_and_dims = optimal_array_structure( - elements, elements_values - ) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) - self._check_arrays_and_dims( - elems_and_dims, - {"a": (np.array([6, 8]), (0,)), "b": (np.array([3, 4, 5]), (1,))}, - ) - - def test_non_2d(self): - # An incomplete 2d expansion just becomes 1d - elements = [ - ("a", np.array([2, 2, 2, 3, 3])), - ("b", np.array([7, 8, 9, 7, 8])), - ] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (5,)) - self.assertEqual(primaries, set()) - self._check_arrays_and_dims( - elems_and_dims, - { - "a": (np.array([2, 2, 2, 3, 3]), (0,)), - "b": (np.array([7, 8, 9, 7, 8]), (0,)), - }, - ) - - def test_degenerate(self): - # A all-same vector does not appear in the output. - elements = [("a", np.array([1, 2, 3])), ("b", np.array([4, 4, 4]))] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set(["a"])) - self._check_arrays_and_dims( - elems_and_dims, {"a": (np.array([1, 2, 3]), (0,))} - ) - - def test_1d_duplicates(self): - # When two have the same structure, the first is 'the dimension'. - elements = [("a", np.array([1, 3, 4])), ("b", np.array([6, 7, 9]))] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set("a")) - self._check_arrays_and_dims( - elems_and_dims, - { - "a": (np.array([1, 3, 4]), (0,)), - "b": (np.array([6, 7, 9]), (0,)), - }, - ) - - def test_1d_duplicates_order(self): - # Same as previous but reverse passed order of elements 'a' and 'b'. 
- elements = [("b", np.array([6, 7, 9])), ("a", np.array([1, 3, 4]))] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - # The only difference is the one chosen as 'principal' - self.assertEqual(primaries, set("b")) - self._check_arrays_and_dims( - elems_and_dims, - { - "a": (np.array([1, 3, 4]), (0,)), - "b": (np.array([6, 7, 9]), (0,)), - }, - ) - - def test_3_way(self): - elements = [ - ("t1", np.array([2, 3, 4])), - ("t2", np.array([4, 5, 6])), - ("period", np.array([9, 8, 7])), - ] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (3,)) - self.assertEqual(primaries, set(["t1"])) - self._check_arrays_and_dims( - elems_and_dims, - { - "t1": (np.array([2, 3, 4]), (0,)), - "t2": (np.array([4, 5, 6]), (0,)), - "period": (np.array([9, 8, 7]), (0,)), - }, - ) - - def test_mixed_dims(self): - elements = [ - ("t1", np.array([1, 1, 11, 11])), - ("t2", np.array([15, 16, 25, 26])), - ("ft", np.array([15, 16, 15, 16])), - ] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 2)) - self.assertEqual(primaries, set(["t1", "ft"])) - self._check_arrays_and_dims( - elems_and_dims, - { - "t1": (np.array([1, 11]), (0,)), - "t2": (np.array([[15, 16], [25, 26]]), (0, 1)), - "ft": (np.array([15, 16]), (1,)), - }, - ) - - def test_missing_dim(self): - # Case with no dimension element for dimension 1. - elements = [ - ("t1", np.array([1, 1, 11, 11])), - ("t2", np.array([15, 16, 25, 26])), - ] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (4,)) - # The potential 2d nature can not be recognised. - # 't1' is auxiliary, as it has duplicate values over the dimension. 
- self.assertEqual(primaries, set(["t2"])) - self._check_arrays_and_dims( - elems_and_dims, - { - "t1": (np.array([1, 1, 11, 11]), (0,)), - "t2": (np.array([15, 16, 25, 26]), (0,)), - }, - ) - - def test_optimal_structure_decision(self): - # Checks the optimal structure decision logic is working correctly: - # given the arrays we have here we would expect 'a' to be the primary - # dimension, as it has higher priority for being supplied first. - elements = [ - ("a", np.array([1, 1, 1, 2, 2, 2])), - ("b", np.array([0, 1, 2, 0, 1, 2])), - ("c", np.array([11, 11, 11, 14, 14, 14])), - ("d", np.array([10, 10, 10, 10, 10, 10])), - ] - shape, primaries, elems_and_dims = optimal_array_structure(elements) - self.assertEqual(shape, (2, 3)) - self.assertEqual(primaries, set(["a", "b"])) - self._check_arrays_and_dims( - elems_and_dims, - { - "a": (np.array([1, 2]), (0,)), - "c": (np.array([11, 14]), (0,)), - "b": (np.array([0, 1, 2]), (1,)), - }, - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py b/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py deleted file mode 100644 index ef6369f638..0000000000 --- a/lib/iris/tests/unit/fileformats/um/test_um_to_pp.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the function -:func:`iris.fileformats.um.um_to_pp`. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -from iris.fileformats.um import um_to_pp - - -class Test_call(tests.IrisTest): - def test__call(self): - # Check that the function creates an FF2PP and returns the result - # of iterating over it. - - # Make a real (test) iterator object, as otherwise iter() complains... 
- mock_iterator = (1 for x in ()) - # Make a mock for the iter() call of an FF2PP object. - mock_iter_call = mock.MagicMock(return_value=mock_iterator) - # Make a mock FF2PP object instance. - mock_ff2pp_instance = mock.MagicMock(__iter__=mock_iter_call) - # Make the mock FF2PP class. - mock_ff2pp_class = mock.MagicMock(return_value=mock_ff2pp_instance) - - # Call um_to_pp while patching the um._ff_replacement.FF2PP class. - test_path = "/any/old/file.name" - with mock.patch( - "iris.fileformats.um._ff_replacement.FF2PP", mock_ff2pp_class - ): - result = um_to_pp(test_path) - - # Check that it called FF2PP in the expected way. - self.assertEqual( - mock_ff2pp_class.call_args_list, - [mock.call("/any/old/file.name", read_data=False)], - ) - self.assertEqual( - mock_ff2pp_instance.__iter__.call_args_list, [mock.call()] - ) - - # Check that it returned the expected result. - self.assertIs(result, mock_iterator) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/io/__init__.py b/lib/iris/tests/unit/io/__init__.py deleted file mode 100644 index 5e347c9ebc..0000000000 --- a/lib/iris/tests/unit/io/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.io` package.""" diff --git a/lib/iris/tests/unit/io/test__generate_cubes.py b/lib/iris/tests/unit/io/test__generate_cubes.py deleted file mode 100755 index 3a896a111c..0000000000 --- a/lib/iris/tests/unit/io/test__generate_cubes.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the `iris.io._generate_cubes` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from pathlib import Path - -import iris - - -class TestGenerateCubes(tests.IrisTest): - def test_pathlib_paths(self): - test_variants = [ - ("string", "string"), - (["string"], "string"), - (Path("string"), Path("string")), - ] - - decode_uri_mock = self.patch( - "iris.iris.io.decode_uri", return_value=("file", None) - ) - self.patch("iris.iris.io.load_files") - - for gc_arg, du_arg in test_variants: - decode_uri_mock.reset_mock() - list(iris._generate_cubes(gc_arg, None, None)) - decode_uri_mock.assert_called_with(du_arg) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/io/test_expand_filespecs.py b/lib/iris/tests/unit/io/test_expand_filespecs.py deleted file mode 100644 index c28e4f9b2e..0000000000 --- a/lib/iris/tests/unit/io/test_expand_filespecs.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.io.expand_filespecs` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import os -import shutil -import tempfile -import textwrap - -import iris.io as iio - - -class TestExpandFilespecs(tests.IrisTest): - def setUp(self): - tests.IrisTest.setUp(self) - self.tmpdir = os.path.realpath(tempfile.mkdtemp()) - self.fnames = ["a.foo", "b.txt"] - for fname in self.fnames: - with open(os.path.join(self.tmpdir, fname), "w") as fh: - fh.write("anything") - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - def test_absolute_path(self): - result = iio.expand_filespecs([os.path.join(self.tmpdir, "*")]) - expected = [os.path.join(self.tmpdir, fname) for fname in self.fnames] - self.assertEqual(result, expected) - - def test_double_slash(self): - product = iio.expand_filespecs(["//" + os.path.join(self.tmpdir, "*")]) - predicted = [os.path.join(self.tmpdir, fname) for fname in self.fnames] - self.assertEqual(product, predicted) - - def test_relative_path(self): - cwd = os.getcwd() - try: - os.chdir(self.tmpdir) - item_out = iio.expand_filespecs(["*"]) - item_in = [ - os.path.join(self.tmpdir, fname) for fname in self.fnames - ] - self.assertEqual(item_out, item_in) - finally: - os.chdir(cwd) - - def test_return_order(self): - # It is really quite important what order we return the - # files. They should be in the order that was provided, - # so that we can control the order of load (for instance, - # this can be used with PP files to ensure that there is - # a surface reference). 
- patterns = [ - os.path.join(self.tmpdir, "a.*"), - os.path.join(self.tmpdir, "b.*"), - ] - expected = [ - os.path.join(self.tmpdir, fname) for fname in ["a.foo", "b.txt"] - ] - result = iio.expand_filespecs(patterns) - self.assertEqual(result, expected) - result = iio.expand_filespecs(patterns[::-1]) - self.assertEqual(result, expected[::-1]) - - def test_no_files_found(self): - msg = r"\/no_exist.txt\" didn\'t match any files" - with self.assertRaisesRegex(IOError, msg): - iio.expand_filespecs([os.path.join(self.tmpdir, "no_exist.txt")]) - - def test_files_and_none(self): - with self.assertRaises(IOError) as err: - iio.expand_filespecs( - [ - os.path.join(self.tmpdir, "does_not_exist.txt"), - os.path.join(self.tmpdir, "*"), - ] - ) - expected = ( - textwrap.dedent( - """ - One or more of the files specified did not exist: - * "{0}/does_not_exist.txt" didn\'t match any files - - "{0}/*" matched 2 file(s) - """ - ) - .strip() - .format(self.tmpdir) - ) - - self.assertStringEqual(str(err.exception), expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/io/test_run_callback.py b/lib/iris/tests/unit/io/test_run_callback.py deleted file mode 100644 index 94ae7ac09d..0000000000 --- a/lib/iris/tests/unit/io/test_run_callback.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.io.run_callback` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import iris.exceptions -import iris.io - - -class Test_run_callback(tests.IrisTest): - def setUp(self): - tests.IrisTest.setUp(self) - self.cube = mock.sentinel.cube - - def test_no_callback(self): - # No callback results in the cube being returned. 
- self.assertEqual( - iris.io.run_callback(None, self.cube, None, None), self.cube - ) - - def test_ignore_cube(self): - # Ignore cube should result in None being returned. - def callback(cube, field, fname): - raise iris.exceptions.IgnoreCubeException() - - cube = self.cube - self.assertEqual( - iris.io.run_callback(callback, cube, None, None), None - ) - - def test_callback_no_return(self): - # Check that a callback not returning anything still results in the - # cube being passed back from "run_callback". - def callback(cube, field, fname): - pass - - cube = self.cube - self.assertEqual( - iris.io.run_callback(callback, cube, None, None), cube - ) - - def test_bad_callback_return_type(self): - # Check that a TypeError is raised with a bad callback return value. - def callback(cube, field, fname): - return iris.cube.CubeList() - - with self.assertRaisesRegex( - TypeError, "Callback function returned an " "unhandled data type." - ): - iris.io.run_callback(callback, None, None, None) - - def test_bad_signature(self): - # Check that a TypeError is raised with a bad callback function - # signature. - def callback(cube): - pass - - with self.assertRaisesRegex(TypeError, "takes 1 positional argument "): - iris.io.run_callback(callback, None, None, None) - - def test_callback_args(self): - # Check that the appropriate args are passed through to the callback. 
- self.field = mock.sentinel.field - self.fname = mock.sentinel.fname - - def callback(cube, field, fname): - self.assertEqual(cube, self.cube) - self.assertEqual(field, self.field) - self.assertEqual(fname, self.fname) - - iris.io.run_callback(callback, self.cube, self.field, self.fname) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/io/test_save.py b/lib/iris/tests/unit/io/test_save.py deleted file mode 100755 index b92e26f2d1..0000000000 --- a/lib/iris/tests/unit/io/test_save.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.io.save` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from pathlib import Path -from unittest import mock - -import iris -from iris.cube import Cube - - -class TestSave(tests.IrisTest): - def test_pathlib_save(self): - file_mock = mock.Mock() - # Have to configure after creation because "name" is special - file_mock.configure_mock(name="string") - - find_saver_mock = self.patch( - "iris.io.find_saver", return_value=(lambda *args, **kwargs: None) - ) - - test_variants = [ - ("string", "string"), - (Path("string/string"), "string/string"), - (file_mock, "string"), - ] - - for target, fs_val in test_variants: - try: - iris.save(Cube([]), target) - except ValueError: - print("ValueError") - pass - find_saver_mock.assert_called_with(fs_val) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/__init__.py b/lib/iris/tests/unit/lazy_data/__init__.py deleted file mode 100644 index b463897c50..0000000000 --- a/lib/iris/tests/unit/lazy_data/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released 
under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris._lazy_data` module.""" diff --git a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py b/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py deleted file mode 100644 index 1a98c81fac..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_as_concrete_data.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._lazy data.as_concrete_data`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data - - -class MyProxy: - def __init__(self, a): - self.shape = a.shape - self.dtype = a.dtype - self.ndim = a.ndim - self.a = a - - def __getitem__(self, keys): - return self.a[keys] - - -class Test_as_concrete_data(tests.IrisTest): - def test_concrete_input_data(self): - data = np.arange(24).reshape((4, 6)) - result = as_concrete_data(data) - self.assertIs(data, result) - self.assertFalse(is_lazy_data(result)) - - def test_concrete_masked_input_data(self): - data = ma.masked_array([10, 12, 8, 2], mask=[True, True, False, True]) - result = as_concrete_data(data) - self.assertIs(data, result) - self.assertFalse(is_lazy_data(result)) - - def test_lazy_data(self): - data = np.arange(24).reshape((2, 12)) - lazy_array = as_lazy_data(data) - self.assertTrue(is_lazy_data(lazy_array)) - result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, data) - - def test_lazy_mask_data(self): - data = np.arange(24).reshape((2, 12)) - fill_value = 1234 - mask_data = 
ma.masked_array(data, fill_value=fill_value) - lazy_array = as_lazy_data(mask_data) - self.assertTrue(is_lazy_data(lazy_array)) - result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertMaskedArrayEqual(result, mask_data) - self.assertEqual(result.fill_value, fill_value) - - def test_lazy_scalar_proxy(self): - a = np.array(5) - proxy = MyProxy(a) - lazy_array = as_lazy_data(proxy) - self.assertTrue(is_lazy_data(lazy_array)) - result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertEqual(result, a) - - def test_lazy_scalar_proxy_masked(self): - a = np.ma.masked_array(5, True) - proxy = MyProxy(a) - lazy_array = as_lazy_data(proxy) - self.assertTrue(is_lazy_data(lazy_array)) - result = as_concrete_data(lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertMaskedArrayEqual(result, a) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py deleted file mode 100644 index 5aeebd6045..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_as_lazy_data.py +++ /dev/null @@ -1,170 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test the function :func:`iris._lazy data.as_lazy_data`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import dask.array as da -import dask.config -import numpy as np -import numpy.ma as ma - -from iris._lazy_data import _optimum_chunksize, as_lazy_data - - -class Test_as_lazy_data(tests.IrisTest): - def test_lazy(self): - data = da.from_array(np.arange(24).reshape((2, 3, 4)), chunks="auto") - result = as_lazy_data(data) - self.assertIsInstance(result, da.core.Array) - - def test_real(self): - data = np.arange(24).reshape((2, 3, 4)) - result = as_lazy_data(data) - self.assertIsInstance(result, da.core.Array) - - def test_masked(self): - data = np.ma.masked_greater(np.arange(24), 10) - result = as_lazy_data(data) - self.assertIsInstance(result, da.core.Array) - - def test_non_default_chunks(self): - data = np.arange(24) - chunks = (12,) - lazy_data = as_lazy_data(data, chunks=chunks) - (result,) = np.unique(lazy_data.chunks) - self.assertEqual(result, 24) - - def test_with_masked_constant(self): - masked_data = ma.masked_array([8], mask=True) - masked_constant = masked_data[0] - result = as_lazy_data(masked_constant) - self.assertIsInstance(result, da.core.Array) - - -class Test__optimised_chunks(tests.IrisTest): - # Stable, known chunksize for testing. - FIXED_CHUNKSIZE_LIMIT = 1024 * 1024 * 64 - - @staticmethod - def _dummydata(shape): - return mock.Mock(spec=da.core.Array, dtype=np.dtype("f4"), shape=shape) - - def test_chunk_size_limiting(self): - # Check default chunksizes for large data (with a known size limit). 
- given_shapes_and_resulting_chunks = [ - ((16, 1024, 1024), (16, 1024, 1024)), # largest unmodified - ((17, 1011, 1022), (8, 1011, 1022)), - ((16, 1024, 1025), (8, 1024, 1025)), - ((1, 17, 1011, 1022), (1, 8, 1011, 1022)), - ((17, 1, 1011, 1022), (8, 1, 1011, 1022)), - ((11, 2, 1011, 1022), (5, 2, 1011, 1022)), - ] - err_fmt = "Result of optimising chunks {} was {}, expected {}" - for (shape, expected) in given_shapes_and_resulting_chunks: - chunks = _optimum_chunksize( - shape, shape, limit=self.FIXED_CHUNKSIZE_LIMIT - ) - msg = err_fmt.format(shape, chunks, expected) - self.assertEqual(chunks, expected, msg) - - def test_chunk_size_expanding(self): - # Check the expansion of small chunks, (with a known size limit). - given_shapes_and_resulting_chunks = [ - ((1, 100, 100), (16, 100, 100), (16, 100, 100)), - ((1, 100, 100), (5000, 100, 100), (1667, 100, 100)), - ((3, 300, 200), (10000, 3000, 2000), (3, 1500, 2000)), - ((3, 300, 200), (10000, 300, 2000), (27, 300, 2000)), - ((3, 300, 200), (8, 300, 2000), (8, 300, 2000)), - ((3, 300, 200), (117, 300, 1000), (39, 300, 1000)), - ] - err_fmt = "Result of optimising shape={};chunks={} was {}, expected {}" - for (shape, fullshape, expected) in given_shapes_and_resulting_chunks: - chunks = _optimum_chunksize( - chunks=shape, shape=fullshape, limit=self.FIXED_CHUNKSIZE_LIMIT - ) - msg = err_fmt.format(fullshape, shape, chunks, expected) - self.assertEqual(chunks, expected, msg) - - def test_chunk_expanding_equal_division(self): - # Check that expansion chooses equal chunk sizes as far as possible. - - # Table of test cases: - # (input-chunkshape, full-shape, size-limit, result-chunkshape) - testcases_chunksin_fullshape_limit_result = [ - ((4,), (12,), 15, (12,)), # gives a single chunk, of size 12 - ((4,), (13,), 15, (8,)), # chooses chunks of 8+5, better than 12+1 - ((4,), (16,), 15, (8,)), # 8+8 is better than 12+4; 16 is too big. 
- ((4,), (96,), 15, (12,)), # 12 is largest 'allowed' - ((4,), (96,), 31, (24,)), # 28 doesn't divide 96 so neatly, - # A multi-dimensional case, where trailing dims are 'filled'. - ((4, 5, 100), (25, 10, 200), 16 * 2000, (16, 10, 200)), - # Equivalent case with additional initial dimensions. - ( - (1, 1, 4, 5, 100), - (3, 5, 25, 10, 200), - 16 * 2000, - (1, 1, 16, 10, 200), - ), # effectively the same as the previous. - ] - err_fmt_main = ( - "Main chunks result of optimising " - "chunks={},shape={},limit={} " - "was {}, expected {}" - ) - for ( - chunks, - shape, - limit, - expected_result, - ) in testcases_chunksin_fullshape_limit_result: - result = _optimum_chunksize( - chunks=chunks, shape=shape, limit=limit, dtype=np.dtype("b1") - ) - msg = err_fmt_main.format( - chunks, shape, limit, result, expected_result - ) - self.assertEqual(result, expected_result, msg) - - def test_default_chunksize(self): - # Check that the "ideal" chunksize is taken from the dask config. - with dask.config.set({"array.chunk-size": "20b"}): - chunks = _optimum_chunksize( - (1, 8), shape=(400, 20), dtype=np.dtype("f4") - ) - self.assertEqual(chunks, (1, 4)) - - def test_default_chunks_limiting(self): - # Check that chunking is still controlled when no specific 'chunks' - # is passed. - limitcall_patch = self.patch("iris._lazy_data._optimum_chunksize") - test_shape = (3, 2, 4) - data = self._dummydata(test_shape) - as_lazy_data(data) - self.assertEqual( - limitcall_patch.call_args_list, - [ - mock.call( - list(test_shape), shape=test_shape, dtype=np.dtype("f4") - ) - ], - ) - - def test_shapeless_data(self): - # Check that chunk optimisation is skipped if shape contains a zero. 
- limitcall_patch = self.patch("iris._lazy_data._optimum_chunksize") - test_shape = (2, 1, 0, 2) - data = self._dummydata(test_shape) - as_lazy_data(data, chunks=test_shape) - self.assertFalse(limitcall_patch.called) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py b/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py deleted file mode 100644 index 0c10d69c16..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_co_realise_cubes.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._lazy data.co_realise_cubes`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris._lazy_data import as_lazy_data, co_realise_cubes -from iris.cube import Cube - - -class ArrayAccessCounter: - def __init__(self, array): - self.dtype = array.dtype - self.shape = array.shape - self.ndim = array.ndim - self._array = array - self.access_count = 0 - - def __getitem__(self, keys): - self.access_count += 1 - return self._array[keys] - - -class Test_co_realise_cubes(tests.IrisTest): - def test_empty(self): - # Ensure that 'no args' case does not raise an error. - co_realise_cubes() - - def test_basic(self): - real_data = np.arange(3.0) - cube = Cube(as_lazy_data(real_data)) - co_realise_cubes(cube) - self.assertFalse(cube.has_lazy_data()) - self.assertArrayAllClose(cube.core_data(), real_data) - - def test_multi(self): - real_data = np.arange(3.0) - cube_base = Cube(as_lazy_data(real_data)) - cube_inner = cube_base + 1 - result_a = cube_base + 1 - result_b = cube_inner + 1 - co_realise_cubes(result_a, result_b) - # Check that target cubes were realised. 
- self.assertFalse(result_a.has_lazy_data()) - self.assertFalse(result_b.has_lazy_data()) - # Check that other cubes referenced remain lazy. - self.assertTrue(cube_base.has_lazy_data()) - self.assertTrue(cube_inner.has_lazy_data()) - - def test_combined_access(self): - wrapped_array = ArrayAccessCounter(np.arange(3.0)) - lazy_array = as_lazy_data(wrapped_array) - derived_a = lazy_array + 1 - derived_b = lazy_array + 2 - derived_c = lazy_array + 3 - derived_d = lazy_array + 4 - derived_e = lazy_array + 5 - cube_a = Cube(derived_a) - cube_b = Cube(derived_b) - cube_c = Cube(derived_c) - cube_d = Cube(derived_d) - cube_e = Cube(derived_e) - co_realise_cubes(cube_a, cube_b, cube_c, cube_d, cube_e) - # Though used more than once, the source data should only get fetched - # once by dask, when the whole data is accessed. - # This also ensures that dask does *not* perform an initial data - # access with no data payload to ascertain the metadata associated with - # the dask.array (this access is specific to dask 2+, - # see dask.array.utils.meta_from_array). - self.assertEqual(wrapped_array.access_count, 1) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py b/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py deleted file mode 100644 index 45b3194f32..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_is_lazy_data.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._lazy data.is_lazy_data`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import dask.array as da -import numpy as np - -from iris._lazy_data import is_lazy_data - - -class Test_is_lazy_data(tests.IrisTest): - def test_lazy(self): - values = np.arange(30).reshape((2, 5, 3)) - lazy_array = da.from_array(values, chunks="auto") - self.assertTrue(is_lazy_data(lazy_array)) - - def test_real(self): - real_array = np.arange(24).reshape((2, 3, 4)) - self.assertFalse(is_lazy_data(real_array)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py b/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py deleted file mode 100644 index 49fd6ad70b..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_lazy_elementwise.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._lazy data.lazy_elementwise`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris._lazy_data import as_lazy_data, is_lazy_data, lazy_elementwise - - -def _test_elementwise_op(array): - # Promotes the type of a bool argument, but not a float. 
- return array + 1 - - -class Test_lazy_elementwise(tests.IrisTest): - def test_basic(self): - concrete_array = np.arange(30).reshape((2, 5, 3)) - lazy_array = as_lazy_data(concrete_array) - wrapped = lazy_elementwise(lazy_array, _test_elementwise_op) - self.assertTrue(is_lazy_data(wrapped)) - self.assertArrayAllClose( - wrapped.compute(), _test_elementwise_op(concrete_array) - ) - - def test_dtype_same(self): - concrete_array = np.array([3.0], dtype=np.float16) - lazy_array = as_lazy_data(concrete_array) - wrapped = lazy_elementwise(lazy_array, _test_elementwise_op) - self.assertTrue(is_lazy_data(wrapped)) - self.assertEqual(wrapped.dtype, np.float16) - self.assertEqual(wrapped.compute().dtype, np.float16) - - def test_dtype_change(self): - concrete_array = np.array([True, False]) - lazy_array = as_lazy_data(concrete_array) - wrapped = lazy_elementwise(lazy_array, _test_elementwise_op) - self.assertTrue(is_lazy_data(wrapped)) - self.assertEqual(wrapped.dtype, np.int_) - self.assertEqual(wrapped.compute().dtype, wrapped.dtype) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py b/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py deleted file mode 100644 index e7f3adad76..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_map_complete_blocks.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._lazy data.map_complete_blocks`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import unittest - -import dask.array as da -import numpy as np - -from iris._lazy_data import is_lazy_data, map_complete_blocks - - -def create_mock_cube(array): - cube = unittest.mock.Mock() - cube_data = unittest.mock.PropertyMock(return_value=array) - type(cube).data = cube_data - cube.dtype = array.dtype - cube.has_lazy_data = unittest.mock.Mock(return_value=is_lazy_data(array)) - cube.lazy_data = unittest.mock.Mock(return_value=array) - cube.shape = array.shape - return cube, cube_data - - -class Test_map_complete_blocks(tests.IrisTest): - def setUp(self): - self.array = np.arange(8).reshape(2, 4) - self.func = lambda chunk: chunk + 1 - self.func_result = self.array + 1 - - def test_non_lazy_input(self): - # Check that a non-lazy input doesn't trip up the functionality. - cube, cube_data = create_mock_cube(self.array) - result = map_complete_blocks( - cube, self.func, dims=(1,), out_sizes=(4,) - ) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, self.func_result) - # check correct data was accessed - cube.lazy_data.assert_not_called() - cube_data.assert_called_once() - - def test_lazy_input(self): - lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) - cube, cube_data = create_mock_cube(lazy_array) - result = map_complete_blocks( - cube, self.func, dims=(1,), out_sizes=(4,) - ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), self.func_result) - # check correct data was accessed - cube.lazy_data.assert_called_once() - cube_data.assert_not_called() - - def test_rechunk(self): - lazy_array = da.asarray(self.array, chunks=((1, 1), (2, 2))) - cube, _ = create_mock_cube(lazy_array) - result = map_complete_blocks( - cube, self.func, dims=(1,), out_sizes=(4,) - ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), self.func_result) - - def test_different_out_shape(self): - lazy_array = da.asarray(self.array, chunks=((1, 1), (4,))) 
- cube, _ = create_mock_cube(lazy_array) - - def func(_): - return np.arange(2).reshape(1, 2) - - func_result = [[0, 1], [0, 1]] - result = map_complete_blocks(cube, func, dims=(1,), out_sizes=(2,)) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), func_result) - - def test_multidimensional_input(self): - array = np.arange(2 * 3 * 4).reshape(2, 3, 4) - lazy_array = da.asarray(array, chunks=((1, 1), (1, 2), (4,))) - cube, _ = create_mock_cube(lazy_array) - result = map_complete_blocks( - cube, self.func, dims=(1, 2), out_sizes=(3, 4) - ) - self.assertTrue(is_lazy_data(result)) - self.assertArrayEqual(result.compute(), array + 1) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py b/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py deleted file mode 100644 index 9fe79a0d4c..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_multidim_lazy_stack.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._lazy data.multidim_lazy_stack`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import dask.array as da -import numpy as np - -from iris._lazy_data import as_concrete_data, as_lazy_data, multidim_lazy_stack - - -class Test_multidim_lazy_stack(tests.IrisTest): - def _check(self, stack_shape): - vals = np.arange(np.prod(stack_shape)).reshape(stack_shape) - stack = np.empty(stack_shape, "object") - # Define the shape of each element in the stack. 
- stack_element_shape = (4, 5) - expected = np.empty(stack_shape + stack_element_shape, dtype=int) - for index, val in np.ndenumerate(vals): - stack[index] = as_lazy_data(val * np.ones(stack_element_shape)) - - expected[index] = val - result = multidim_lazy_stack(stack) - self.assertEqual(result.shape, stack_shape + stack_element_shape) - self.assertIsInstance(result, da.core.Array) - result = as_concrete_data(result) - self.assertArrayAllClose(result, expected) - - def test_0d_lazy_stack(self): - shape = () - self._check(shape) - - def test_1d_lazy_stack(self): - shape = (2,) - self._check(shape) - - def test_2d_lazy_stack(self): - shape = (3, 2) - self._check(shape) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/lazy_data/test_non_lazy.py b/lib/iris/tests/unit/lazy_data/test_non_lazy.py deleted file mode 100644 index cc4ed33ea3..0000000000 --- a/lib/iris/tests/unit/lazy_data/test_non_lazy.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris._lazy data.non_lazy`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris._lazy_data import as_lazy_data, is_lazy_data, non_lazy - - -class Test_non_lazy(tests.IrisTest): - def setUp(self): - self.array = np.arange(8).reshape(2, 4) - self.lazy_array = as_lazy_data(self.array) - self.func = non_lazy(lambda array: array.sum(axis=0)) - self.func_result = [4, 6, 8, 10] - - def test_lazy_input(self): - result = self.func(self.lazy_array) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, self.func_result) - - def test_non_lazy_input(self): - # Check that a non-lazy input doesn't trip up the functionality. 
- result = self.func(self.array) - self.assertFalse(is_lazy_data(result)) - self.assertArrayEqual(result, self.func_result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/merge/__init__.py b/lib/iris/tests/unit/merge/__init__.py deleted file mode 100644 index c3ead61576..0000000000 --- a/lib/iris/tests/unit/merge/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris._merge` module.""" diff --git a/lib/iris/tests/unit/merge/test_ProtoCube.py b/lib/iris/tests/unit/merge/test_ProtoCube.py deleted file mode 100644 index 31b1efb3fd..0000000000 --- a/lib/iris/tests/unit/merge/test_ProtoCube.py +++ /dev/null @@ -1,560 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris._merge.ProtoCube` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from abc import ABCMeta, abstractmethod -from unittest import mock - -import numpy as np -import numpy.ma as ma - -import iris -from iris._merge import ProtoCube -from iris.aux_factory import HybridHeightFactory, HybridPressureFactory -from iris.coords import AuxCoord, DimCoord -from iris.exceptions import MergeError - - -def example_cube(): - return iris.cube.Cube( - np.array([1, 2, 3], dtype="i4"), - standard_name="air_temperature", - long_name="screen_air_temp", - var_name="airtemp", - units="K", - attributes={"mint": "thin"}, - ) - - -class Mixin_register(metaclass=ABCMeta): - @property - def cube1(self): - return example_cube() - - @property - @abstractmethod - def cube2(self): - pass - - @property - @abstractmethod - def fragments(self): - pass - - def test_default(self): - # Test what happens when we call: - # ProtoCube.register(cube) - proto_cube = ProtoCube(self.cube1) - result = proto_cube.register(self.cube2) - self.assertEqual(result, not self.fragments) - - def test_no_error(self): - # Test what happens when we call: - # ProtoCube.register(cube, error_on_mismatch=False) - proto_cube = ProtoCube(self.cube1) - result = proto_cube.register(self.cube2, error_on_mismatch=False) - self.assertEqual(result, not self.fragments) - - def test_error(self): - # Test what happens when we call: - # ProtoCube.register(cube, error_on_mismatch=True) - proto_cube = ProtoCube(self.cube1) - if self.fragments: - with self.assertRaises(iris.exceptions.MergeError) as cm: - proto_cube.register(self.cube2, error_on_mismatch=True) - error_message = str(cm.exception) - for substr in self.fragments: - self.assertIn(substr, error_message) - else: - result = proto_cube.register(self.cube2, error_on_mismatch=True) - self.assertTrue(result) - - -@tests.iristest_timing_decorator -class Test_register__match(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return [] - - @property - def cube2(self): - return 
example_cube() - - -@tests.iristest_timing_decorator -class Test_register__standard_name(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube.standard_name", "air_temperature", "air_density"] - - @property - def cube2(self): - cube = example_cube() - cube.standard_name = "air_density" - return cube - - -@tests.iristest_timing_decorator -class Test_register__long_name(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube.long_name", "screen_air_temp", "Belling"] - - @property - def cube2(self): - cube = example_cube() - cube.long_name = "Belling" - return cube - - -@tests.iristest_timing_decorator -class Test_register__var_name(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube.var_name", "'airtemp'", "'airtemp2'"] - - @property - def cube2(self): - cube = example_cube() - cube.var_name = "airtemp2" - return cube - - -@tests.iristest_timing_decorator -class Test_register__units(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube.units", "'K'", "'C'"] - - @property - def cube2(self): - cube = example_cube() - cube.units = "C" - return cube - - -@tests.iristest_timing_decorator -class Test_register__attributes_unequal(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube.attributes", "'mint'"] - - @property - def cube2(self): - cube = example_cube() - cube.attributes["mint"] = "waffer-thin" - return cube - - -@tests.iristest_timing_decorator -class Test_register__attributes_unequal_array( - Mixin_register, tests.IrisTest_nometa -): - @property - def fragments(self): - return ["cube.attributes", "'mint'"] - - @property - def cube1(self): - cube = example_cube() - cube.attributes["mint"] = np.arange(3) - return cube - - @property - def cube2(self): - cube = example_cube() - cube.attributes["mint"] = np.arange(3) + 1 - return cube - - -@tests.iristest_timing_decorator -class 
Test_register__attributes_superset( - Mixin_register, tests.IrisTest_nometa -): - @property - def fragments(self): - return ["cube.attributes", "'stuffed'"] - - @property - def cube2(self): - cube = example_cube() - cube.attributes["stuffed"] = "yes" - return cube - - -@tests.iristest_timing_decorator -class Test_register__attributes_multi_diff( - Mixin_register, tests.IrisTest_nometa -): - @property - def fragments(self): - return ["cube.attributes", "'sam'", "'mint'"] - - @property - def cube1(self): - cube = example_cube() - cube.attributes["ralph"] = 1 - cube.attributes["sam"] = 2 - cube.attributes["tom"] = 3 - return cube - - @property - def cube2(self): - cube = example_cube() - cube.attributes["ralph"] = 1 - cube.attributes["sam"] = "mug" - cube.attributes["tom"] = 3 - cube.attributes["mint"] = "humbug" - return cube - - -@tests.iristest_timing_decorator -class Test_register__cell_method(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube.cell_methods"] - - @property - def cube2(self): - cube = example_cube() - cube.add_cell_method(iris.coords.CellMethod("monty", ("python",))) - return cube - - -@tests.iristest_timing_decorator -class Test_register__data_shape(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube.shape", "(2,)", "(3,)"] - - @property - def cube2(self): - cube = example_cube() - cube = cube[1:] - return cube - - -@tests.iristest_timing_decorator -class Test_register__data_dtype(Mixin_register, tests.IrisTest_nometa): - @property - def fragments(self): - return ["cube data dtype", "int32", "int8"] - - @property - def cube2(self): - cube = example_cube() - cube.data = cube.data.astype(np.int8) - return cube - - -class _MergeTest: - # A mixin test class for common test methods implementation. 
- - # used by check routine: inheritors must implement it - _mergetest_type = NotImplementedError - - def check_merge_fails_with_message(self): - proto_cube = iris._merge.ProtoCube(self.cube1) - with self.assertRaises(MergeError) as arc: - proto_cube.register(self.cube2, error_on_mismatch=True) - return str(arc.exception) - - def check_fail(self, *substrs): - if isinstance(substrs, str): - substrs = [substrs] - msg = self.check_merge_fails_with_message() - for substr in substrs: - self.assertIn(substr, msg) - - -class Test_register__CubeSig(_MergeTest, tests.IrisTest): - # Test potential registration failures. - - _mergetest_type = "cube" - - def setUp(self): - self.cube1 = iris.cube.Cube( - [1, 2, 3], - standard_name="air_temperature", - units="K", - attributes={"mint": "thin"}, - ) - self.cube2 = self.cube1.copy() - - def test_noise(self): - # Test a massive set of all defn diffs to make sure it's not noise. - self.cube1.var_name = "Arthur" - cube2 = self.cube1[1:] - cube2.data = cube2.data.astype(np.int8) - cube2.data = ma.array(cube2.data) - cube2.standard_name = "air_pressure" - cube2.var_name = "Nudge" - cube2.attributes["stuffed"] = "yes" - cube2.attributes["mint"] = "waffer-thin" - cube2.add_cell_method(iris.coords.CellMethod("monty", ("python",))) - - # Check the actual message, so we've got a readable reference text. 
- self.cube2 = cube2 - msg = self.check_merge_fails_with_message() - self.assertString(msg, self.result_path(ext="txt")) - - -class Test_register__CoordSig_general(_MergeTest, tests.IrisTest): - - _mergetest_type = "coord" - - def setUp(self): - self.cube1 = iris.cube.Cube(np.zeros((3, 3, 3))) - self.cube2 = self.cube1.copy() - - def test_scalar_defns_one_extra(self): - self.cube2.add_aux_coord(DimCoord([1], standard_name="latitude")) - self.check_fail("aux_coords (scalar)", "latitude") - - def test_scalar_defns_both_extra(self): - self.cube2.add_aux_coord(DimCoord([1], standard_name="latitude")) - self.cube1.add_aux_coord(DimCoord([1], standard_name="longitude")) - self.check_fail("aux_coords (scalar)", "latitude", "longitude") - - def test_vector_dim_coords_and_dims_one_extra(self): - self.cube2.add_dim_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) - self.check_fail("dim_coords", "latitude") - - def test_vector_dim_coords_and_dims_both_extra(self): - self.cube2.add_dim_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], standard_name="longitude"), 0 - ) - self.check_fail("dim_coords", "latitude", "longitude") - - def test_vector_aux_coords_and_dims_one_extra(self): - self.cube2.add_aux_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) - self.check_fail("aux_coords (non-scalar)", "latitude") - - def test_vector_aux_coords_and_dims_both_extra(self): - self.cube2.add_aux_coord( - DimCoord([1, 2, 3], standard_name="latitude"), 0 - ) - self.cube1.add_aux_coord( - DimCoord([1, 2, 3], standard_name="longitude"), 0 - ) - self.check_fail("aux_coords (non-scalar)", "latitude", "longitude") - - def test_factory_defns_one_extra(self): - self.cube2.add_aux_factory(mock.MagicMock(spec=HybridHeightFactory)) - self.check_fail("cube.aux_factories", "differ") - - def test_factory_defns_both_extra(self): - self.cube2.add_aux_factory(mock.MagicMock(spec=HybridHeightFactory)) - 
self.cube1.add_aux_factory(mock.MagicMock(spec=HybridPressureFactory)) - self.check_fail("cube.aux_factories", "differ") - - def test_factory_defns_one_missing_term(self): - self.cube1.add_aux_factory(mock.MagicMock(spec=HybridPressureFactory)) - no_delta_factory = mock.MagicMock(spec=HybridPressureFactory) - no_delta_factory.delta = None - self.cube2.add_aux_factory(no_delta_factory) - - self.check_fail("cube.aux_factories", "differ") - - def test_noise(self): - cube2 = self.cube2 - - # scalar - cube2.add_aux_coord(DimCoord([1], long_name="liff")) - cube2.add_aux_coord(DimCoord([1], long_name="life")) - cube2.add_aux_coord(DimCoord([1], long_name="like")) - - self.cube1.add_aux_coord(DimCoord([1], var_name="ming")) - self.cube1.add_aux_coord(DimCoord([1], var_name="mong")) - self.cube1.add_aux_coord(DimCoord([1], var_name="moog")) - - # aux - cube2.add_dim_coord(DimCoord([1, 2, 3], standard_name="latitude"), 0) - cube2.add_dim_coord(DimCoord([1, 2, 3], standard_name="longitude"), 1) - cube2.add_dim_coord(DimCoord([1, 2, 3], standard_name="altitude"), 2) - - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], long_name="equinimity"), 0 - ) - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], long_name="equinomity"), 1 - ) - self.cube1.add_dim_coord( - DimCoord([1, 2, 3], long_name="equinumity"), 2 - ) - - # dim - cube2.add_aux_coord(DimCoord([1, 2, 3], var_name="one"), 0) - cube2.add_aux_coord(DimCoord([1, 2, 3], var_name="two"), 1) - cube2.add_aux_coord(DimCoord([1, 2, 3], var_name="three"), 2) - - self.cube1.add_aux_coord(DimCoord([1, 2, 3], long_name="ay"), 0) - self.cube1.add_aux_coord(DimCoord([1, 2, 3], long_name="bee"), 1) - self.cube1.add_aux_coord(DimCoord([1, 2, 3], long_name="cee"), 2) - - # factory - cube2.add_aux_factory(mock.MagicMock(spec=HybridHeightFactory)) - self.cube1.add_aux_factory(mock.MagicMock(spec=HybridPressureFactory)) - - # Check the actual message, so we've got a readable reference text. 
- self.cube2 = cube2 - msg = self.check_merge_fails_with_message() - self.assertString(msg, self.result_path(ext="txt")) - - -class _MergeTest_coordprops(_MergeTest): - # A mixin test class for common coordinate properties tests. - - # This must be implemented by inheritors. - _mergetest_type = NotImplementedError - - def test_nochange(self): - # This should simply succeed. - proto_cube = iris._merge.ProtoCube(self.cube1) - proto_cube.register(self.cube2, error_on_mismatch=True) - - def _props_fail(self, *terms): - self.check_fail( - self._mergetest_type, self.coord_to_change.name(), *terms - ) - - def test_standard_name(self): - self.coord_to_change.standard_name = "soil_temperature" - self._props_fail("air_temperature", "soil_temperature") - - def test_long_name(self): - self.coord_to_change.long_name = "alternate_name" - self._props_fail("air_temperature") - - def test_var_name(self): - self.coord_to_change.var_name = "alternate_name" - self._props_fail("air_temperature") - - def test_units(self): - self.coord_to_change.units = "m" - self._props_fail("air_temperature") - - def test_attrs_unequal(self): - self.coord_to_change.attributes["att_a"] = 99 - self._props_fail("air_temperature") - - def test_attrs_set(self): - self.coord_to_change.attributes["att_extra"] = 101 - self._props_fail("air_temperature") - - def test_coord_system(self): - self.coord_to_change.coord_system = mock.Mock() - self._props_fail("air_temperature") - - -class Test_register__CoordSig_scalar(_MergeTest_coordprops, tests.IrisTest): - - _mergetest_type = "aux_coords (scalar)" - - def setUp(self): - self.cube1 = iris.cube.Cube(np.zeros((3, 3, 3))) - self.cube1.add_aux_coord( - DimCoord( - [1], - standard_name="air_temperature", - long_name="eg_scalar", - var_name="t1", - units="K", - attributes={"att_a": 1, "att_b": 2}, - coord_system=None, - ) - ) - self.coord_to_change = self.cube1.coord("air_temperature") - self.cube2 = self.cube1.copy() - - -class 
_MergeTest_coordprops_vect(_MergeTest_coordprops): - # A derived mixin test class. - # Adds extra props test for aux+dim coords (test points, bounds + dims) - _mergetest_type = NotImplementedError - _coord_typename = NotImplementedError - - def test_points(self): - self.coord_to_change.points = self.coord_to_change.points + 1.0 - self.check_fail(self._mergetest_type, "air_temperature") - - def test_bounds(self): - self.coord_to_change.bounds = self.coord_to_change.bounds + 1.0 - self.check_fail(self._mergetest_type, "air_temperature") - - def test_dims(self): - self.cube2.remove_coord(self.coord_to_change) - cube2_add_method = getattr(self.cube2, "add_" + self._coord_typename) - cube2_add_method(self.coord_to_change, (1,)) - self.check_fail(self._mergetest_type, "mapping") - - -class Test_register__CoordSig_dim(_MergeTest_coordprops_vect, tests.IrisTest): - - _mergetest_type = "dim_coords" - _coord_typename = "dim_coord" - - def setUp(self): - self.cube1 = iris.cube.Cube(np.zeros((3, 3))) - self.cube1.add_dim_coord( - DimCoord( - [15, 25, 35], - bounds=[[10, 20], [20, 30], [30, 40]], - standard_name="air_temperature", - long_name="eg_scalar", - var_name="t1", - units="K", - attributes={"att_a": 1, "att_b": 2}, - coord_system=None, - ), - (0,), - ) - self.coord_to_change = self.cube1.coord("air_temperature") - self.cube2 = self.cube1.copy() - - def test_circular(self): - # Extra failure mode that only applies to dim coords - self.coord_to_change.circular = True - self.check_fail(self._mergetest_type, "air_temperature") - - -class Test_register__CoordSig_aux(_MergeTest_coordprops_vect, tests.IrisTest): - - _mergetest_type = "aux_coords (non-scalar)" - _coord_typename = "aux_coord" - - def setUp(self): - self.cube1 = iris.cube.Cube(np.zeros((3, 3))) - self.cube1.add_aux_coord( - AuxCoord( - [65, 45, 85], - bounds=[[60, 70], [40, 50], [80, 90]], - standard_name="air_temperature", - long_name="eg_scalar", - var_name="t1", - units="K", - attributes={"att_a": 1, "att_b": 
2}, - coord_system=None, - ), - (0,), - ) - self.coord_to_change = self.cube1.coord("air_temperature") - self.cube2 = self.cube1.copy() - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/__init__.py b/lib/iris/tests/unit/plot/__init__.py deleted file mode 100644 index f589a29e0d..0000000000 --- a/lib/iris/tests/unit/plot/__init__.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.plot` module.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.coords import AuxCoord -from iris.plot import _broadcast_2d as broadcast -from iris.tests.stock import lat_lon_cube, simple_2d - - -@tests.skip_plot -class TestGraphicStringCoord(tests.GraphicsTest): - def setUp(self): - super().setUp() - self.cube = simple_2d(with_bounds=True) - self.cube.add_aux_coord( - AuxCoord(list("abcd"), long_name="str_coord"), 1 - ) - self.lat_lon_cube = lat_lon_cube() - - def tick_loc_and_label(self, axis_name, axes=None): - # Intentional lazy import so that subclasses can have an opportunity - # to change the backend. - import matplotlib.pyplot as plt - - # Draw the plot to 'fix' the ticks. 
- if axes: - axes.figure.canvas.draw() - else: - axes = plt.gca() - plt.draw() - axis = getattr(axes, axis_name) - - locations = axis.get_majorticklocs() - labels = [tick.get_text() for tick in axis.get_ticklabels()] - return list(zip(locations, labels)) - - def assertBoundsTickLabels(self, axis, axes=None): - actual = self.tick_loc_and_label(axis, axes) - expected = [ - (-1.0, ""), - (0.0, "a"), - (1.0, "b"), - (2.0, "c"), - (3.0, "d"), - (4.0, ""), - ] - self.assertEqual(expected, actual) - - def assertPointsTickLabels(self, axis, axes=None): - actual = self.tick_loc_and_label(axis, axes) - expected = [(0.0, "a"), (1.0, "b"), (2.0, "c"), (3.0, "d")] - self.assertEqual(expected, actual) - - -@tests.skip_plot -class MixinCoords: - """ - Mixin class of common plotting tests providing 2-dimensional - permutations of coordinates and anonymous dimensions. - - """ - - def _check(self, u, v, data=None): - self.assertEqual(self.mpl_patch.call_count, 1) - if data is not None: - (actual_u, actual_v, actual_data), _ = self.mpl_patch.call_args - self.assertArrayEqual(actual_data, data) - else: - (actual_u, actual_v), _ = self.mpl_patch.call_args - self.assertArrayEqual(actual_u, u) - self.assertArrayEqual(actual_v, v) - - def test_foo_bar(self): - self.draw_func(self.cube, coords=("foo", "bar")) - u, v = broadcast(self.foo, self.bar) - self._check(u, v, self.data) - - def test_bar_foo(self): - self.draw_func(self.cube, coords=("bar", "foo")) - u, v = broadcast(self.bar, self.foo) - self._check(u, v, self.dataT) - - def test_foo_0(self): - self.draw_func(self.cube, coords=("foo", 0)) - u, v = broadcast(self.foo, self.bar_index) - self._check(u, v, self.data) - - def test_1_bar(self): - self.draw_func(self.cube, coords=(1, "bar")) - u, v = broadcast(self.foo_index, self.bar) - self._check(u, v, self.data) - - def test_1_0(self): - self.draw_func(self.cube, coords=(1, 0)) - u, v = broadcast(self.foo_index, self.bar_index) - self._check(u, v, self.data) - - def test_0_foo(self): 
- self.draw_func(self.cube, coords=(0, "foo")) - u, v = broadcast(self.bar_index, self.foo) - self._check(u, v, self.dataT) - - def test_bar_1(self): - self.draw_func(self.cube, coords=("bar", 1)) - u, v = broadcast(self.bar, self.foo_index) - self._check(u, v, self.dataT) - - def test_0_1(self): - self.draw_func(self.cube, coords=(0, 1)) - u, v = broadcast(self.bar_index, self.foo_index) - self._check(u, v, self.dataT) diff --git a/lib/iris/tests/unit/plot/_blockplot_common.py b/lib/iris/tests/unit/plot/_blockplot_common.py deleted file mode 100644 index 455b416164..0000000000 --- a/lib/iris/tests/unit/plot/_blockplot_common.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Common test code for `iris.plot.pcolor` and `iris.plot.pcolormesh`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords - - -class MixinStringCoordPlot: - # Mixin for common string-coord tests on pcolor/pcolormesh. - # To use, make a class that inherits from this *and* - # :class:`iris.tests.unit.plot.TestGraphicStringCoord`, - # and defines "self.blockplot_func()", to return the `iris.plot` function. 
- def test_yaxis_labels(self): - self.blockplot_func()(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - self.blockplot_func()(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") - - def test_xaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_xlim(0, 3) - self.blockplot_func()(self.cube, coords=("str_coord", "bar"), axes=ax) - plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) - - def test_yaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_ylim(0, 3) - self.blockplot_func()(self.cube, axes=ax, coords=("bar", "str_coord")) - plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) - - def test_geoaxes_exception(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - self.assertRaises( - TypeError, self.blockplot_func(), self.lat_lon_cube, axes=ax - ) - plt.close(fig) - - -class Mixin2dCoordsPlot(MixinCoords): - # Mixin for common coordinate tests on pcolor/pcolormesh. - # To use, make a class that inherits from this *and* - # :class:`iris.tests.IrisTest`, - # and defines "self.blockplot_func()", to return the `iris.plot` function. - def blockplot_setup(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=True) - coord = self.cube.coord("foo") - self.foo = coord.contiguous_bounds() - self.foo_index = np.arange(coord.points.size + 1) - coord = self.cube.coord("bar") - self.bar = coord.contiguous_bounds() - self.bar_index = np.arange(coord.points.size + 1) - self.data = self.cube.data - self.dataT = self.data.T - self.draw_func = self.blockplot_func() - patch_target_name = "matplotlib.pyplot." 
+ self.draw_func.__name__ - self.mpl_patch = self.patch(patch_target_name) - - -class Mixin2dCoordsContigTol: - # Mixin for contiguity tolerance argument to pcolor/pcolormesh. - # To use, make a class that inherits from this *and* - # :class:`iris.tests.IrisTest`, - # and defines "self.blockplot_func()", to return the `iris.plot` function, - # and defines "self.additional_kwargs" for expected extra call args. - def test_contig_tol(self): - # Patch the inner call to ensure contiguity_tolerance is passed. - cube_argument = mock.sentinel.passed_arg - expected_result = mock.sentinel.returned_value - blockplot_patch = self.patch( - "iris.plot._draw_2d_from_bounds", - mock.Mock(return_value=expected_result), - ) - # Make the call - draw_func = self.blockplot_func() - other_kwargs = self.additional_kwargs - result = draw_func(cube_argument, contiguity_tolerance=0.0123) - drawfunc_name = draw_func.__name__ - # Check details of the call that was made. - self.assertEqual( - blockplot_patch.call_args_list, - [ - mock.call( - drawfunc_name, - cube_argument, - contiguity_tolerance=0.0123, - **other_kwargs, - ) - ], - ) - self.assertEqual(result, expected_result) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py b/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py deleted file mode 100644 index 4dfc6d7f68..0000000000 --- a/lib/iris/tests/unit/plot/test__check_bounds_contiguity_and_mask.py +++ /dev/null @@ -1,101 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot._check_bounds_contiguity_and_mask` -function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np -import numpy.ma as ma - -from iris.coords import DimCoord -from iris.plot import _check_bounds_contiguity_and_mask -from iris.tests.stock import ( - make_bounds_discontiguous_at_point, - sample_2d_latlons, -) - - -@tests.skip_plot -class Test_check_bounds_contiguity_and_mask(tests.IrisTest): - def test_1d_not_checked(self): - # Test a 1D coordinate, which is not checked as atol is not set. - coord = DimCoord([1, 3, 5], bounds=[[0, 2], [2, 4], [5, 6]]) - data = np.array([278, 300, 282]) - # Make sure contiguity checking doesn't throw an error - _check_bounds_contiguity_and_mask(coord, data) - - def test_1d_contiguous(self): - # Test that a 1D coordinate which is contiguous does not fail. - coord = DimCoord([1, 3, 5], bounds=[[0, 2], [2, 4], [4, 6]]) - data = np.array([278, 300, 282]) - _check_bounds_contiguity_and_mask(coord, data, atol=1e-3) - - def test_1d_discontigous_masked(self): - # Test a 1D coordinate which is discontiguous but masked at - # discontiguities. - coord = DimCoord([1, 3, 5], bounds=[[0, 2], [2, 4], [5, 6]]) - data = ma.array(np.array([278, 300, 282]), mask=[0, 1, 0]) - _check_bounds_contiguity_and_mask(coord, data, atol=1e-3) - - def test_1d_discontigous_unmasked(self): - # Test a 1D coordinate which is discontiguous and unmasked at - # discontiguities. - coord = DimCoord([1, 3, 5], bounds=[[0, 2], [2, 4], [5, 6]]) - data = ma.array(np.array([278, 300, 282]), mask=[1, 0, 0]) - msg = ( - "coordinate are not contiguous and data is not masked where " - "the discontiguity occurs" - ) - with self.assertRaisesRegex(ValueError, msg): - _check_bounds_contiguity_and_mask(coord, data, atol=1e-3) - - def test_2d_contiguous(self): - # Test that a 2D coordinate which is contiguous does not throw - # an error. 
- cube = sample_2d_latlons() - _check_bounds_contiguity_and_mask(cube.coord("longitude"), cube.data) - - def test_2d_contiguous_atol(self): - # Check the atol is passed correctly. - cube = sample_2d_latlons() - with mock.patch( - "iris.coords.Coord._discontiguity_in_bounds" - ) as discontiguity_check: - # Discontiguity returns two objects that are unpacked in - # `_check_bounds_contiguity_and_mask`. - discontiguity_check.return_value = [True, None] - _check_bounds_contiguity_and_mask( - cube.coord("longitude"), cube.data, atol=1e-3 - ) - discontiguity_check.assert_called_with(atol=1e-3) - - def test_2d_discontigous_masked(self): - # Test that a 2D coordinate which is discontiguous but masked at - # discontiguities doesn't error. - cube = sample_2d_latlons() - make_bounds_discontiguous_at_point(cube, 3, 4) - _check_bounds_contiguity_and_mask(cube.coord("longitude"), cube.data) - - def test_2d_discontigous_unmasked(self): - # Test a 2D coordinate which is discontiguous and unmasked at - # discontiguities. - cube = sample_2d_latlons() - make_bounds_discontiguous_at_point(cube, 3, 4) - msg = "coordinate are not contiguous" - cube.data[3, 4] = ma.nomask - with self.assertRaisesRegex(ValueError, msg): - _check_bounds_contiguity_and_mask( - cube.coord("longitude"), cube.data - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py b/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py deleted file mode 100644 index 633dea85c4..0000000000 --- a/lib/iris/tests/unit/plot/test__check_geostationary_coords_and_convert.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the `iris.plot._check_geostationary_coords_and_convert -function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest.mock import Mock - -from cartopy.crs import Geostationary, NearsidePerspective -import numpy as np - -from iris.plot import _check_geostationary_coords_and_convert - - -class Test__check_geostationary_coords_and_convert(tests.IrisTest): - def setUp(self): - geostationary_altitude = 35785831.0 - # proj4_params is the one attribute of the Geostationary class that - # is needed for the function. - self.proj4_params = {"h": geostationary_altitude} - - # Simulate the maximum-dimension array that could be processed. - a = np.linspace(0, 2, 6) - b = np.linspace(2, 3, 5) - self.x_original, self.y_original = np.meshgrid(a, b) - - # Expected arrays if conversion takes place. - self.x_converted, self.y_converted = ( - i * geostationary_altitude - for i in (self.x_original, self.y_original) - ) - - def _test(self, geostationary=True): - # Re-usable test for when Geostationary is present OR absent. - if geostationary: - # A Geostationary projection WILL be processed. - projection_spec = Geostationary - target_tuple = (self.x_converted, self.y_converted) - else: - # A non-Geostationary projection WILL NOT be processed. - projection_spec = NearsidePerspective - target_tuple = (self.x_original, self.y_original) - - projection = Mock(spec=projection_spec) - projection.proj4_params = self.proj4_params - # Projection is looked for within a dictionary called kwargs. 
- kwargs = {"transform": projection} - - x, y = _check_geostationary_coords_and_convert( - self.x_original, self.y_original, kwargs - ) - self.assertArrayEqual((x, y), target_tuple) - - def test_geostationary_present(self): - self._test(geostationary=True) - - def test_geostationary_absent(self): - self._test(geostationary=False) diff --git a/lib/iris/tests/unit/plot/test__fixup_dates.py b/lib/iris/tests/unit/plot/test__fixup_dates.py deleted file mode 100644 index 157780dcae..0000000000 --- a/lib/iris/tests/unit/plot/test__fixup_dates.py +++ /dev/null @@ -1,84 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot._fixup_dates` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import datetime - -from cf_units import Unit -import cftime - -from iris.coords import AuxCoord -from iris.plot import _fixup_dates - - -class Test(tests.IrisTest): - def test_gregorian_calendar(self): - unit = Unit("hours since 2000-04-13 00:00:00", calendar="gregorian") - coord = AuxCoord([1, 3, 6], "time", units=unit) - result = _fixup_dates(coord, coord.points) - expected = [ - datetime.datetime(2000, 4, 13, 1), - datetime.datetime(2000, 4, 13, 3), - datetime.datetime(2000, 4, 13, 6), - ] - self.assertArrayEqual(result, expected) - - def test_gregorian_calendar_sub_second(self): - unit = Unit("seconds since 2000-04-13 00:00:00", calendar="gregorian") - coord = AuxCoord([1, 1.25, 1.5], "time", units=unit) - result = _fixup_dates(coord, coord.points) - expected = [ - datetime.datetime(2000, 4, 13, 0, 0, 1), - datetime.datetime(2000, 4, 13, 0, 0, 1), - datetime.datetime(2000, 4, 13, 0, 0, 2), - ] - self.assertArrayEqual(result, expected) - - @tests.skip_nc_time_axis - def test_360_day_calendar(self): 
- calendar = "360_day" - unit = Unit("days since 2000-02-25 00:00:00", calendar=calendar) - coord = AuxCoord([3, 4, 5], "time", units=unit) - result = _fixup_dates(coord, coord.points) - expected_datetimes = [ - cftime.datetime(2000, 2, 28, calendar=calendar), - cftime.datetime(2000, 2, 29, calendar=calendar), - cftime.datetime(2000, 2, 30, calendar=calendar), - ] - self.assertArrayEqual( - [cdt.datetime for cdt in result], expected_datetimes - ) - - @tests.skip_nc_time_axis - def test_365_day_calendar(self): - calendar = "365_day" - unit = Unit("minutes since 2000-02-25 00:00:00", calendar=calendar) - coord = AuxCoord([30, 60, 150], "time", units=unit) - result = _fixup_dates(coord, coord.points) - expected_datetimes = [ - cftime.datetime(2000, 2, 25, 0, 30, calendar=calendar), - cftime.datetime(2000, 2, 25, 1, 0, calendar=calendar), - cftime.datetime(2000, 2, 25, 2, 30, calendar=calendar), - ] - self.assertArrayEqual( - [cdt.datetime for cdt in result], expected_datetimes - ) - - @tests.skip_nc_time_axis - def test_360_day_calendar_attribute(self): - calendar = "360_day" - unit = Unit("days since 2000-02-01 00:00:00", calendar=calendar) - coord = AuxCoord([0, 3, 6], "time", units=unit) - result = _fixup_dates(coord, coord.points) - self.assertEqual(result[0].calendar, calendar) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn.py b/lib/iris/tests/unit/plot/test__get_plot_defn.py deleted file mode 100644 index c69173dc70..0000000000 --- a/lib/iris/tests/unit/plot/test__get_plot_defn.py +++ /dev/null @@ -1,45 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot._get_plot_defn` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import iris.coords -from iris.tests.stock import simple_2d, simple_2d_w_multidim_coords - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_plot -class Test_get_plot_defn(tests.IrisTest): - def test_axis_order_xy(self): - cube_xy = simple_2d() - defn = iplt._get_plot_defn(cube_xy, iris.coords.POINT_MODE) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) - - def test_axis_order_yx(self): - cube_yx = simple_2d() - cube_yx.transpose() - defn = iplt._get_plot_defn(cube_yx, iris.coords.POINT_MODE) - self.assertEqual( - [coord.name() for coord in defn.coords], ["foo", "bar"] - ) - - def test_2d_coords(self): - cube = simple_2d_w_multidim_coords() - defn = iplt._get_plot_defn(cube, iris.coords.BOUND_MODE) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py b/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py deleted file mode 100644 index 631f9bd24e..0000000000 --- a/lib/iris/tests/unit/plot/test__get_plot_defn_custom_coords_picked.py +++ /dev/null @@ -1,90 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot._get_plot_defn_custom_coords_picked` -function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from iris.coords import BOUND_MODE, POINT_MODE -from iris.tests.stock import ( - hybrid_height, - simple_2d, - simple_2d_w_multidim_coords, -) - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_plot -class Test_get_plot_defn_custom_coords_picked(tests.IrisTest): - def test_1d_coords(self): - cube = simple_2d() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, ("foo", "bar"), POINT_MODE - ) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) - self.assertFalse(defn.transpose) - - def test_1d_coords_swapped(self): - cube = simple_2d() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, ("bar", "foo"), POINT_MODE - ) - self.assertEqual( - [coord.name() for coord in defn.coords], ["foo", "bar"] - ) - self.assertTrue(defn.transpose) - - def test_1d_coords_as_integers(self): - cube = simple_2d() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, (1, 0), POINT_MODE - ) - self.assertEqual([coord for coord in defn.coords], [0, 1]) - self.assertFalse(defn.transpose) - - def test_1d_coords_as_integers_swapped(self): - cube = simple_2d() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, (0, 1), POINT_MODE - ) - self.assertEqual([coord for coord in defn.coords], [1, 0]) - self.assertTrue(defn.transpose) - - def test_2d_coords(self): - cube = simple_2d_w_multidim_coords() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, ("foo", "bar"), BOUND_MODE - ) - self.assertEqual( - [coord.name() for coord in defn.coords], ["bar", "foo"] - ) - self.assertFalse(defn.transpose) - - def test_2d_coords_as_integers(self): - cube = simple_2d_w_multidim_coords() - defn = iplt._get_plot_defn_custom_coords_picked( - cube, (0, 1), BOUND_MODE - ) - self.assertEqual([coord for coord in defn.coords], [1, 0]) - self.assertTrue(defn.transpose) - - def test_span_check(self): - cube = hybrid_height() - emsg = "don't span the 2 data dimensions" - with 
self.assertRaisesRegex(ValueError, emsg): - iplt._get_plot_defn_custom_coords_picked( - cube, ("sigma", "level_height"), POINT_MODE - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_contour.py b/lib/iris/tests/unit/plot/test_contour.py deleted file mode 100644 index 823b3270d0..0000000000 --- a/lib/iris/tests/unit/plot/test_contour.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot.contour` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - iplt.contour(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") - - def test_xaxis_labels(self): - iplt.contour(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") - - def test_yaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.contour(self.cube, axes=ax, coords=("bar", "str_coord")) - plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) - - def test_xaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.contour(self.cube, axes=ax, coords=("str_coord", "bar")) - plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) - - def test_geoaxes_exception(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.contour, 
self.lat_lon_cube, axes=ax) - plt.close(fig) - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=False) - self.foo = self.cube.coord("foo").points - self.foo_index = np.arange(self.foo.size) - self.bar = self.cube.coord("bar").points - self.bar_index = np.arange(self.bar.size) - self.data = self.cube.data - self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.contour") - self.draw_func = iplt.contour - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_contourf.py b/lib/iris/tests/unit/plot/test_contourf.py deleted file mode 100644 index 0247fb5a91..0000000000 --- a/lib/iris/tests/unit/plot/test_contourf.py +++ /dev/null @@ -1,115 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot.contourf` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -from unittest import mock - -import matplotlib.pyplot as plt -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - iplt.contourf(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") - - def test_xaxis_labels(self): - iplt.contourf(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") - - def test_yaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.contourf(self.cube, axes=ax, coords=("bar", "str_coord")) - plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) - - def test_xaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.contourf(self.cube, axes=ax, coords=("str_coord", "bar")) - plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) - - def test_geoaxes_exception(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.contourf, self.lat_lon_cube, axes=ax) - plt.close(fig) - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=False) - self.foo = self.cube.coord("foo").points - self.foo_index = np.arange(self.foo.size) - self.bar = self.cube.coord("bar").points - self.bar_index = np.arange(self.bar.size) - self.data = self.cube.data - self.dataT = self.data.T - mocker = mock.Mock(alpha=0, antialiased=False) - self.mpl_patch = self.patch( - "matplotlib.pyplot.contourf", return_value=mocker - ) - self.draw_func = iplt.contourf - - -@tests.skip_plot -class TestAntialias(tests.IrisTest): - def 
setUp(self): - self.fig = plt.figure() - - def test_skip_contour(self): - # Contours should not be added if data is all below second level. See #4086. - cube = simple_2d() - - levels = [5, 15, 20, 200] - colors = ["b", "r", "y"] - - with mock.patch("matplotlib.pyplot.contour") as mocked_contour: - iplt.contourf(cube, levels=levels, colors=colors, antialiased=True) - - mocked_contour.assert_not_called() - - def test_apply_contour_nans(self): - # Presence of nans should not prevent contours being added. - cube = simple_2d() - cube.data = cube.data.astype(np.float_) - cube.data[0, 0] = np.nan - - levels = [2, 4, 6, 8] - colors = ["b", "r", "y"] - - with mock.patch("matplotlib.pyplot.contour") as mocked_contour: - iplt.contourf(cube, levels=levels, colors=colors, antialiased=True) - - mocked_contour.assert_called_once() - - def tearDown(self): - plt.close(self.fig) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_outline.py b/lib/iris/tests/unit/plot/test_outline.py deleted file mode 100644 index de59287362..0000000000 --- a/lib/iris/tests/unit/plot/test_outline.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot.outline` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - iplt.outline(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - iplt.outline(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") - - def test_xaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_xlim(0, 3) - iplt.outline(self.cube, coords=("str_coord", "bar"), axes=ax) - plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) - - def test_yaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_ylim(0, 3) - iplt.outline(self.cube, axes=ax, coords=("bar", "str_coord")) - plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) - - def test_geoaxes_exception(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.outline, self.lat_lon_cube, axes=ax) - plt.close(fig) - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=True) - coord = self.cube.coord("foo") - self.foo = coord.contiguous_bounds() - self.foo_index = np.arange(coord.points.size + 1) - coord = self.cube.coord("bar") - self.bar = coord.contiguous_bounds() - self.bar_index = np.arange(coord.points.size + 1) - self.data = self.cube.data - self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.pcolormesh") - self.draw_func = iplt.outline - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_pcolor.py 
b/lib/iris/tests/unit/plot/test_pcolor.py deleted file mode 100644 index 1cde9e8822..0000000000 --- a/lib/iris/tests/unit/plot/test_pcolor.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot.pcolor` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.tests.unit.plot import TestGraphicStringCoord -from iris.tests.unit.plot._blockplot_common import ( - Mixin2dCoordsContigTol, - Mixin2dCoordsPlot, - MixinStringCoordPlot, -) - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - PLOT_FUNCTION_TO_TEST = iplt.pcolor - - -@tests.skip_plot -class TestStringCoordPlot(MixinStringCoordPlot, TestGraphicStringCoord): - def blockplot_func(self): - return PLOT_FUNCTION_TO_TEST - - -@tests.skip_plot -class Test2dCoords(tests.IrisTest, Mixin2dCoordsPlot): - def setUp(self): - self.blockplot_setup() - - def blockplot_func(self): - return PLOT_FUNCTION_TO_TEST - - -@tests.skip_plot -class Test2dContigTol(tests.IrisTest, Mixin2dCoordsContigTol): - # Extra call kwargs expected. - additional_kwargs = dict(antialiased=True, snap=False) - - def blockplot_func(self): - return PLOT_FUNCTION_TO_TEST - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_pcolormesh.py b/lib/iris/tests/unit/plot/test_pcolormesh.py deleted file mode 100644 index f4e84e5765..0000000000 --- a/lib/iris/tests/unit/plot/test_pcolormesh.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the `iris.plot.pcolormesh` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from iris.tests.unit.plot import TestGraphicStringCoord -from iris.tests.unit.plot._blockplot_common import ( - Mixin2dCoordsContigTol, - Mixin2dCoordsPlot, - MixinStringCoordPlot, -) - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - PLOT_FUNCTION_TO_TEST = iplt.pcolormesh - - -@tests.skip_plot -class TestStringCoordPlot(MixinStringCoordPlot, TestGraphicStringCoord): - def blockplot_func(self): - return PLOT_FUNCTION_TO_TEST - - -@tests.skip_plot -class Test2dCoords(tests.IrisTest, Mixin2dCoordsPlot): - def setUp(self): - self.blockplot_setup() - - def blockplot_func(self): - return PLOT_FUNCTION_TO_TEST - - -@tests.skip_plot -class Test2dContigTol(tests.IrisTest, Mixin2dCoordsContigTol): - # Extra call kwargs expected -- unlike 'pcolor', there are none. - additional_kwargs = {} - - def blockplot_func(self): - return PLOT_FUNCTION_TO_TEST - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_plot.py b/lib/iris/tests/unit/plot/test_plot.py deleted file mode 100644 index edbef3934a..0000000000 --- a/lib/iris/tests/unit/plot/test_plot.py +++ /dev/null @@ -1,272 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot.plot` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -import iris.coord_systems as ics -import iris.coords as coords -from iris.tests.unit.plot import TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import cartopy.crs as ccrs - import cartopy.mpl.geoaxes - from matplotlib.path import Path - import matplotlib.pyplot as plt - - import iris.plot as iplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() - self.cube = self.cube[0, :] - self.lat_lon_cube = self.lat_lon_cube[0, :] - - def test_yaxis_labels(self): - iplt.plot(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - iplt.plot(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") - - def test_yaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.plot(self.cube, self.cube.coord("str_coord"), axes=ax) - plt.close(fig) - self.assertBoundsTickLabels("yaxis", ax) - - def test_xaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.plot(self.cube.coord("str_coord"), self.cube, axes=ax) - plt.close(fig) - self.assertBoundsTickLabels("xaxis", ax) - - def test_plot_longitude(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.plot( - self.lat_lon_cube.coord("longitude"), self.lat_lon_cube, axes=ax - ) - plt.close(fig) - - -@tests.skip_plot -class TestTrajectoryWrap(tests.IrisTest): - """ - Test that a line plot of geographic coordinates wraps around the end of the - coordinates rather than plotting accross the map. 
- - """ - - def setUp(self): - plt.figure() - self.geog_cs = ics.GeogCS(6371229.0) - self.plate_carree = self.geog_cs.as_cartopy_projection() - - def lon_lat_coords(self, lons, lats, cs=None): - if cs is None: - cs = self.geog_cs - return ( - coords.AuxCoord( - lons, "longitude", units="degrees", coord_system=cs - ), - coords.AuxCoord( - lats, "latitude", units="degrees", coord_system=cs - ), - ) - - def assertPathsEqual(self, expected, actual): - """ - Assert that the given paths are equal once STOP vertices have been - removed - - """ - expected = expected.cleaned() - actual = actual.cleaned() - # Remove Path.STOP vertices - everts = expected.vertices[np.where(expected.codes != Path.STOP)] - averts = actual.vertices[np.where(actual.codes != Path.STOP)] - self.assertArrayAlmostEqual(everts, averts) - self.assertArrayEqual(expected.codes, actual.codes) - - def check_paths(self, expected_path, expected_path_crs, lines, axes): - """ - Check that the paths in `lines` match the given expected paths when - plotted on the given geoaxes - - """ - - self.assertEqual( - 1, len(lines), "Expected a single line, got {}".format(len(lines)) - ) - (line,) = lines - inter_proj_transform = cartopy.mpl.geoaxes.InterProjectionTransform( - expected_path_crs, axes.projection - ) - ax_transform = inter_proj_transform + axes.transData - - expected = ax_transform.transform_path(expected_path) - actual = line.get_transform().transform_path(line.get_path()) - - self.assertPathsEqual(expected, actual) - - def test_simple(self): - lon, lat = self.lon_lat_coords([359, 1], [0, 0]) - expected_path = Path([[-1, 0], [1, 0]], [Path.MOVETO, Path.LINETO]) - - lines = iplt.plot(lon, lat) - - self.check_paths(expected_path, self.plate_carree, lines, plt.gca()) - - def test_reverse(self): - lon, lat = self.lon_lat_coords([1, 359], [0, 0]) - expected_path = Path([[1, 0], [-1, 0]], [Path.MOVETO, Path.LINETO]) - - lines = iplt.plot(lon, lat) - - self.check_paths(expected_path, self.plate_carree, lines, 
plt.gca()) - - def test_multi(self): - lon, lat = self.lon_lat_coords([1, 359, 2, 358], [0, 0, 0, 0]) - expected_path = Path( - [[1, 0], [-1, 0], [2, 0], [-2, 0]], - [Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO], - ) - - lines = iplt.plot(lon, lat) - - self.check_paths(expected_path, self.plate_carree, lines, plt.gca()) - - def test_many_wraps(self): - lon, lat = self.lon_lat_coords( - [350, 10, 180, 350, 10, 180, 10, 350], [0, 0, 0, 0, 0, 0, 0, 0] - ) - expected_path = Path( - [ - [350, 0], - [370, 0], - [540, 0], - [710, 0], - [730, 0], - [900, 0], - [730, 0], - [710, 0], - ], - [ - Path.MOVETO, - Path.LINETO, - Path.LINETO, - Path.LINETO, - Path.LINETO, - Path.LINETO, - Path.LINETO, - Path.LINETO, - ], - ) - - lines = iplt.plot(lon, lat) - - self.check_paths(expected_path, self.plate_carree, lines, plt.gca()) - - def test_180(self): - lon, lat = self.lon_lat_coords([179, -179], [0, 0]) - expected_path = Path([[179, 0], [181, 0]], [Path.MOVETO, Path.LINETO]) - - lines = iplt.plot(lon, lat) - - self.check_paths(expected_path, self.plate_carree, lines, plt.gca()) - - def test_shifted_projection(self): - lon, lat = self.lon_lat_coords([359, 1], [0, 0]) - expected_path = Path([[-1, 0], [1, 0]], [Path.MOVETO, Path.LINETO]) - - shifted_plate_carree = ccrs.PlateCarree(180) - - plt.axes(projection=shifted_plate_carree) - lines = iplt.plot(lon, lat) - - self.check_paths(expected_path, self.plate_carree, lines, plt.gca()) - - def test_shifted_projection_180(self): - lon, lat = self.lon_lat_coords([179, -179], [0, 0]) - expected_path = Path([[179, 0], [181, 0]], [Path.MOVETO, Path.LINETO]) - - shifted_plate_carree = ccrs.PlateCarree(180) - - plt.axes(projection=shifted_plate_carree) - lines = iplt.plot(lon, lat) - - self.check_paths(expected_path, self.plate_carree, lines, plt.gca()) - - def test_long(self): - lon, lat = self.lon_lat_coords([271, 89], [0, 0]) - expected_path = Path([[-89, 0], [89, 0]], [Path.MOVETO, Path.LINETO]) - - lines = iplt.plot(lon, lat) - - 
self.check_paths(expected_path, self.plate_carree, lines, plt.gca()) - - def _test_rotated( - self, - grid_north_pole_latitude=90, - grid_north_pole_longitude=0, - north_pole_grid_longitude=0, - ): - cs = ics.RotatedGeogCS( - grid_north_pole_latitude, - grid_north_pole_longitude, - north_pole_grid_longitude, - ) - glon = coords.AuxCoord( - [359, 1], "grid_longitude", units="degrees", coord_system=cs - ) - glat = coords.AuxCoord( - [0, 0], "grid_latitude", units="degrees", coord_system=cs - ) - expected_path = Path([[-1, 0], [1, 0]], [Path.MOVETO, Path.LINETO]) - - plt.figure() - lines = iplt.plot(glon, glat) - # Matplotlib won't immediately set up the correct transform to allow us - # to compare paths. Calling set_global(), which calls set_xlim() and - # set_ylim(), will trigger Matplotlib to set up the transform. - ax = plt.gca() - ax.set_global() - - crs = cs.as_cartopy_crs() - self.check_paths(expected_path, crs, lines, ax) - - def test_rotated_90(self): - self._test_rotated(north_pole_grid_longitude=90) - - def test_rotated_180(self): - self._test_rotated(north_pole_grid_longitude=180) - - def test_rotated(self): - self._test_rotated( - grid_north_pole_latitude=-30, - grid_north_pole_longitude=120, - north_pole_grid_longitude=45, - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_points.py b/lib/iris/tests/unit/plot/test_points.py deleted file mode 100644 index e1a23eff83..0000000000 --- a/lib/iris/tests/unit/plot/test_points.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot.points` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - iplt.points(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - iplt.points(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") - - def test_xaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_xlim(0, 3) - iplt.points(self.cube, coords=("str_coord", "bar"), axes=ax) - plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) - - def test_yaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_ylim(0, 3) - iplt.points(self.cube, coords=("bar", "str_coord"), axes=ax) - plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) - - def test_geoaxes_exception(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - self.assertRaises(TypeError, iplt.points, self.lat_lon_cube, axes=ax) - plt.close(fig) - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=False) - self.foo = self.cube.coord("foo").points - self.foo_index = np.arange(self.foo.size) - self.bar = self.cube.coord("bar").points - self.bar_index = np.arange(self.bar.size) - self.data = None - self.dataT = None - self.mpl_patch = self.patch("matplotlib.pyplot.scatter") - self.draw_func = iplt.points - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/plot/test_scatter.py b/lib/iris/tests/unit/plot/test_scatter.py deleted file mode 100644 index c5cd9cb2f2..0000000000 
--- a/lib/iris/tests/unit/plot/test_scatter.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.plot.scatter` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip -from iris.tests.unit.plot import TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.plot as iplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() - self.cube = self.cube[0, :] - self.lat_lon_cube = self.lat_lon_cube[0, :] - - def test_xaxis_labels(self): - iplt.scatter(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") - - def test_yaxis_labels(self): - iplt.scatter(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_xlim(0, 3) - iplt.scatter(self.cube.coord("str_coord"), self.cube, axes=ax) - plt.close(fig) - self.assertPointsTickLabels("xaxis", ax) - - def test_yaxis_labels_with_axes(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - ax.set_ylim(0, 3) - iplt.scatter(self.cube, self.cube.coord("str_coord"), axes=ax) - plt.close(fig) - self.assertPointsTickLabels("yaxis", ax) - - def test_scatter_longitude(self): - import matplotlib.pyplot as plt - - fig = plt.figure() - ax = fig.add_subplot(111) - iplt.scatter( - self.lat_lon_cube, self.lat_lon_cube.coord("longitude"), axes=ax - ) - plt.close(fig) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/__init__.py b/lib/iris/tests/unit/quickplot/__init__.py deleted file mode 100644 index 
471ef0f6a5..0000000000 --- a/lib/iris/tests/unit/quickplot/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.quickplot` module.""" diff --git a/lib/iris/tests/unit/quickplot/test_contour.py b/lib/iris/tests/unit/quickplot/test_contour.py deleted file mode 100644 index 8e3db7c3e0..0000000000 --- a/lib/iris/tests/unit/quickplot/test_contour.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.quickplot.contour` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - qplt.contour(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") - - def test_xaxis_labels(self): - qplt.contour(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=False) - self.foo = self.cube.coord("foo").points - self.foo_index = np.arange(self.foo.size) - self.bar = self.cube.coord("bar").points - self.bar_index = np.arange(self.bar.size) - self.data = self.cube.data - self.dataT = self.data.T - self.mpl_patch = 
self.patch("matplotlib.pyplot.contour") - self.draw_func = qplt.contour - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_contourf.py b/lib/iris/tests/unit/quickplot/test_contourf.py deleted file mode 100644 index 2624ebd08e..0000000000 --- a/lib/iris/tests/unit/quickplot/test_contourf.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.quickplot.contourf` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -from unittest import mock - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - qplt.contourf(self.cube, coords=("bar", "str_coord")) - self.assertPointsTickLabels("yaxis") - - def test_xaxis_labels(self): - qplt.contourf(self.cube, coords=("str_coord", "bar")) - self.assertPointsTickLabels("xaxis") - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=False) - self.foo = self.cube.coord("foo").points - self.foo_index = np.arange(self.foo.size) - self.bar = self.cube.coord("bar").points - self.bar_index = np.arange(self.bar.size) - self.data = self.cube.data - self.dataT = self.data.T - mocker = mock.Mock(alpha=0, antialiased=False) - self.mpl_patch = self.patch( - "matplotlib.pyplot.contourf", return_value=mocker - ) - # Also need to mock the colorbar. 
- self.patch("matplotlib.pyplot.colorbar") - self.draw_func = qplt.contourf - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_outline.py b/lib/iris/tests/unit/quickplot/test_outline.py deleted file mode 100644 index 70d96372fa..0000000000 --- a/lib/iris/tests/unit/quickplot/test_outline.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.quickplot.outline` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - qplt.outline(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - qplt.outline(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=True) - coord = self.cube.coord("foo") - self.foo = coord.contiguous_bounds() - self.foo_index = np.arange(coord.points.size + 1) - coord = self.cube.coord("bar") - self.bar = coord.contiguous_bounds() - self.bar_index = np.arange(coord.points.size + 1) - self.data = self.cube.data - self.dataT = self.data.T - self.mpl_patch = self.patch("matplotlib.pyplot.pcolormesh") - self.draw_func = qplt.outline - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_pcolor.py 
b/lib/iris/tests/unit/quickplot/test_pcolor.py deleted file mode 100644 index 2e559d6308..0000000000 --- a/lib/iris/tests/unit/quickplot/test_pcolor.py +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.quickplot.pcolor` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - qplt.pcolor(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - qplt.pcolor(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=True) - coord = self.cube.coord("foo") - self.foo = coord.contiguous_bounds() - self.foo_index = np.arange(coord.points.size + 1) - coord = self.cube.coord("bar") - self.bar = coord.contiguous_bounds() - self.bar_index = np.arange(coord.points.size + 1) - self.data = self.cube.data - self.dataT = self.data.T - self.mpl_patch = self.patch( - "matplotlib.pyplot.pcolor", return_value=None - ) - self.draw_func = qplt.pcolor - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_pcolormesh.py b/lib/iris/tests/unit/quickplot/test_pcolormesh.py deleted file mode 100644 index 32ae3ed716..0000000000 --- a/lib/iris/tests/unit/quickplot/test_pcolormesh.py +++ /dev/null @@ 
-1,52 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.quickplot.pcolormesh` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - qplt.pcolormesh(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - qplt.pcolormesh(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=True) - coord = self.cube.coord("foo") - self.foo = coord.contiguous_bounds() - self.foo_index = np.arange(coord.points.size + 1) - coord = self.cube.coord("bar") - self.bar = coord.contiguous_bounds() - self.bar_index = np.arange(coord.points.size + 1) - self.data = self.cube.data - self.dataT = self.data.T - self.mpl_patch = self.patch( - "matplotlib.pyplot.pcolormesh", return_value=None - ) - self.draw_func = qplt.pcolormesh - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_plot.py b/lib/iris/tests/unit/quickplot/test_plot.py deleted file mode 100644 index 0a36a3fa4e..0000000000 --- a/lib/iris/tests/unit/quickplot/test_plot.py +++ /dev/null @@ -1,58 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.quickplot.plot` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip -from iris.tests.stock import simple_1d -from iris.tests.unit.plot import TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() - self.cube = self.cube[0, :] - - def test_yaxis_labels(self): - qplt.plot(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - qplt.plot(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") - - -class TestAxisLabels(tests.GraphicsTest): - def test_xy_cube(self): - c = simple_1d() - qplt.plot(c) - ax = qplt.plt.gca() - x = ax.xaxis.get_label().get_text() - self.assertEqual(x, "Foo") - y = ax.yaxis.get_label().get_text() - self.assertEqual(y, "Thingness") - - def test_yx_cube(self): - c = simple_1d() - c.transpose() - # Making the cube a vertical coordinate should change the default - # orientation of the plot. - c.coord("foo").attributes["positive"] = "up" - qplt.plot(c) - ax = qplt.plt.gca() - x = ax.xaxis.get_label().get_text() - self.assertEqual(x, "Thingness") - y = ax.yaxis.get_label().get_text() - self.assertEqual(y, "Foo") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_points.py b/lib/iris/tests/unit/quickplot/test_points.py deleted file mode 100644 index 3810cdd343..0000000000 --- a/lib/iris/tests/unit/quickplot/test_points.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the `iris.quickplot.points` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import simple_2d -from iris.tests.unit.plot import MixinCoords, TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def test_yaxis_labels(self): - qplt.points(self.cube, coords=("bar", "str_coord")) - self.assertBoundsTickLabels("yaxis") - - def test_xaxis_labels(self): - qplt.points(self.cube, coords=("str_coord", "bar")) - self.assertBoundsTickLabels("xaxis") - - -@tests.skip_plot -class TestCoords(tests.IrisTest, MixinCoords): - def setUp(self): - # We have a 2d cube with dimensionality (bar: 3; foo: 4) - self.cube = simple_2d(with_bounds=False) - self.foo = self.cube.coord("foo").points - self.foo_index = np.arange(self.foo.size) - self.bar = self.cube.coord("bar").points - self.bar_index = np.arange(self.bar.size) - self.data = None - self.dataT = None - self.mpl_patch = self.patch("matplotlib.pyplot.scatter") - self.draw_func = qplt.points - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/quickplot/test_scatter.py b/lib/iris/tests/unit/quickplot/test_scatter.py deleted file mode 100644 index c1cf853970..0000000000 --- a/lib/iris/tests/unit/quickplot/test_scatter.py +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.quickplot.scatter` function.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip -from iris.tests.unit.plot import TestGraphicStringCoord - -if tests.MPL_AVAILABLE: - import iris.quickplot as qplt - - -@tests.skip_plot -class TestStringCoordPlot(TestGraphicStringCoord): - def setUp(self): - super().setUp() - self.cube = self.cube[0, :] - - def test_xaxis_labels(self): - qplt.scatter(self.cube.coord("str_coord"), self.cube) - self.assertBoundsTickLabels("xaxis") - - def test_yaxis_labels(self): - qplt.scatter(self.cube, self.cube.coord("str_coord")) - self.assertBoundsTickLabels("yaxis") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/representation/__init__.py b/lib/iris/tests/unit/representation/__init__.py deleted file mode 100644 index e943ad149b..0000000000 --- a/lib/iris/tests/unit/representation/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris._representation` module.""" diff --git a/lib/iris/tests/unit/representation/cube_printout/__init__.py b/lib/iris/tests/unit/representation/cube_printout/__init__.py deleted file mode 100644 index 50ab3f8e45..0000000000 --- a/lib/iris/tests/unit/representation/cube_printout/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris._representation.cube_printout` module.""" diff --git a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py b/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py deleted file mode 100644 index 40a932b9e0..0000000000 --- a/lib/iris/tests/unit/representation/cube_printout/test_CubePrintout.py +++ /dev/null @@ -1,536 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris._representation.cube_printout.CubePrintout`.""" -import iris.tests as tests # isort:skip - -import numpy as np - -from iris._representation.cube_printout import CubePrinter -from iris._representation.cube_summary import CubeSummary -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - CellMethod, - DimCoord, -) -from iris.cube import Cube -from iris.tests.stock.mesh import sample_mesh_cube - - -class TestCubePrintout___str__(tests.IrisTest): - def test_str(self): - # Just check that its str representation is the 'to_string' result. - cube = Cube(0) - printer = CubePrinter(CubeSummary(cube)) - result = str(printer) - self.assertEqual(result, printer.to_string()) - - -def cube_replines(cube, **kwargs): - return CubePrinter(cube).to_string(**kwargs).split("\n") - - -class TestCubePrintout__to_string(tests.IrisTest): - def test_empty(self): - cube = Cube([0]) - rep = cube_replines(cube) - expect = ["unknown / (unknown) (-- : 1)"] - self.assertEqual(expect, rep) - - def test_shortform__default(self): - cube = Cube([0]) - expect = ["unknown / (unknown) (-- : 1)"] - # In this case, default one-line is the same. 
- rep = cube_replines(cube, oneline=True) - self.assertEqual(expect, rep) - - def test_shortform__compressed(self): - cube = Cube([0]) - rep = cube_replines(cube, oneline=True, name_padding=0) - expect = ["unknown / (unknown) (-- : 1)"] - self.assertEqual(rep, expect) - - def _sample_wide_cube(self): - cube = Cube([0, 1]) - cube.add_aux_coord( - AuxCoord( - [0, 1], - long_name="long long long long long long long long name", - ), - 0, - ) - return cube - - def test_wide_cube(self): - # For comparison with the shortform and padding-controlled cases. - cube = self._sample_wide_cube() - rep = cube_replines(cube) - expect_full = [ - "unknown / (unknown) (-- : 2)", - " Auxiliary coordinates:", - " long long long long long long long long name x", - ] - self.assertEqual(expect_full, rep) - - def test_shortform__wide__default(self): - cube = self._sample_wide_cube() - rep = cube_replines(cube, oneline=True) - # *default* one-line is shorter than full header, but not minimal. - expect = ["unknown / (unknown) (-- : 2)"] - self.assertEqual(rep, expect) - - def test_shortform__wide__compressed(self): - cube = self._sample_wide_cube() - rep = cube_replines(cube, oneline=True, name_padding=0) - expect = ["unknown / (unknown) (-- : 2)"] - self.assertEqual(rep, expect) - - def test_shortform__wide__intermediate(self): - cube = self._sample_wide_cube() - rep = cube_replines(cube, oneline=True, name_padding=25) - expect = ["unknown / (unknown) (-- : 2)"] - self.assertEqual(expect, rep) - - def test_scalar_cube_summaries(self): - cube = Cube(0) - expect = ["unknown / (unknown) (scalar cube)"] - rep = cube_replines(cube) - self.assertEqual(expect, rep) - # Shortform is the same. 
- rep = cube_replines(cube, oneline=True) - self.assertEqual(expect, rep) - - def test_name_padding(self): - cube = Cube([1, 2], long_name="cube_accel", units="ms-2") - rep = cube_replines(cube) - self.assertEqual(rep, ["cube_accel / (ms-2) (-- : 2)"]) - rep = cube_replines(cube, name_padding=0) - self.assertEqual(rep, ["cube_accel / (ms-2) (-- : 2)"]) - rep = cube_replines(cube, name_padding=25) - self.assertEqual(rep, ["cube_accel / (ms-2) (-- : 2)"]) - - def test_columns_long_coordname(self): - cube = Cube([0], long_name="short", units=1) - coord = AuxCoord( - [0], long_name="very_very_very_very_very_long_coord_name" - ) - cube.add_aux_coord(coord, 0) - rep = cube_replines(cube) - expected = [ - "short / (1) (-- : 1)", - " Auxiliary coordinates:", - " very_very_very_very_very_long_coord_name x", - ] - self.assertEqual(expected, rep) - rep = cube_replines(cube, oneline=True) - # Note: the default short-form is short-ER, but not minimal. - short_expected = ["short / (1) (-- : 1)"] - self.assertEqual(short_expected, rep) - - def test_columns_long_attribute(self): - cube = Cube([0], long_name="short", units=1) - cube.attributes[ - "very_very_very_very_very_long_name" - ] = "longish string extends beyond dim columns" - rep = cube_replines(cube) - expected = [ - "short / (1) (-- : 1)", - " Attributes:", - ( - " very_very_very_very_very_long_name " - "'longish string extends beyond dim columns'" - ), - ] - self.assertEqual(rep, expected) - - def test_coord_distinguishing_attributes(self): - # Printout of differing attributes to differentiate same-named coords. - # include : vector + scalar - cube = Cube([0, 1], long_name="name", units=1) - # Add a pair of vector coords with same name but different attributes. - cube.add_aux_coord( - AuxCoord([0, 1], long_name="co1", attributes=dict(a=1)), 0 - ) - cube.add_aux_coord( - AuxCoord([0, 1], long_name="co1", attributes=dict(a=2)), 0 - ) - # Likewise for scalar coords with same name but different attributes. 
- cube.add_aux_coord( - AuxCoord([0], long_name="co2", attributes=dict(a=10, b=12)) - ) - cube.add_aux_coord( - AuxCoord([1], long_name="co2", attributes=dict(a=10, b=11)) - ) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 2)", - " Auxiliary coordinates:", - " co1 x", - " a=1", - " co1 x", - " a=2", - " Scalar coordinates:", - " co2 0", - " b=12", - " co2 1", - " b=11", - ] - self.assertEqual(rep, expected) - - def test_coord_extra_attributes__array(self): - cube = Cube(0, long_name="name", units=1) - # Add a pair of vector coords with same name but different attributes. - array1 = np.arange(0, 3) - array2 = np.arange(10, 13) - cube.add_aux_coord( - AuxCoord([1.2], long_name="co1", attributes=dict(a=1, arr=array1)) - ) - cube.add_aux_coord( - AuxCoord([3.4], long_name="co1", attributes=dict(a=1, arr=array2)) - ) - - rep = cube_replines(cube) - expected = [ - "name / (1) (scalar cube)", - " Scalar coordinates:", - " co1 1.2", - " arr=array([0, 1, 2])", - " co1 3.4", - " arr=array([10, 11, 12])", - ] - self.assertEqual(rep, expected) - - def test_coord_extra_attributes__array__long(self): - # Also test with a long array representation. - # NOTE: this also pushes the dimension map right-wards. - array = 10 + np.arange(24.0).reshape((2, 3, 4)) - cube = Cube(0, long_name="name", units=1) - cube.add_aux_coord(AuxCoord([1], long_name="co")) - cube.add_aux_coord( - AuxCoord([2], long_name="co", attributes=dict(a=array + 1.0)) - ) - - rep = cube_replines(cube) - expected = [ - ( - "name / (1) " - " (scalar cube)" - ), - " Scalar coordinates:", - ( - " co " - " 1" - ), - ( - " co " - " 2" - ), - ( - " a=array([[[11., 12., 13., 14.], [15., 16., 17.," - " 18.], [19., 20., 21., 22.]],..." 
- ), - ] - self.assertEqual(rep, expected) - - def test_coord_extra_attributes__string(self): - cube = Cube(0, long_name="name", units=1) - cube.add_aux_coord(AuxCoord([1], long_name="co")) - cube.add_aux_coord( - AuxCoord( - [2], long_name="co", attributes=dict(note="string content") - ) - ) - rep = cube_replines(cube) - expected = [ - "name / (1) (scalar cube)", - " Scalar coordinates:", - " co 1", - " co 2", - " note='string content'", - ] - self.assertEqual(rep, expected) - - def test_coord_extra_attributes__string_escaped(self): - cube = Cube(0, long_name="name", units=1) - cube.add_aux_coord(AuxCoord([1], long_name="co")) - cube.add_aux_coord( - AuxCoord( - [2], - long_name="co", - attributes=dict(note="line 1\nline 2\tends."), - ) - ) - rep = cube_replines(cube) - expected = [ - "name / (1) (scalar cube)", - " Scalar coordinates:", - " co 1", - " co 2", - " note='line 1\\nline 2\\tends.'", - ] - self.assertEqual(rep, expected) - - def test_coord_extra_attributes__string_overlong(self): - cube = Cube(0, long_name="name", units=1) - cube.add_aux_coord(AuxCoord([1], long_name="co")) - long_string = ( - "this is very very very very very very very " - "very very very very very very very long." - ) - cube.add_aux_coord( - AuxCoord([2], long_name="co", attributes=dict(note=long_string)) - ) - rep = cube_replines(cube) - expected = [ - ( - "name / (1) " - " (scalar cube)" - ), - " Scalar coordinates:", - ( - " co " - " 1" - ), - ( - " co " - " 2" - ), - ( - " note='this is very very very very " - "very very very very very very very very..." 
- ), - ] - self.assertEqual(rep, expected) - - def test_section_vector_dimcoords(self): - cube = Cube(np.zeros((2, 3)), long_name="name", units=1) - cube.add_dim_coord(DimCoord([0, 1], long_name="y"), 0) - cube.add_dim_coord(DimCoord([0, 1, 2], long_name="x"), 1) - - rep = cube_replines(cube) - expected = [ - "name / (1) (y: 2; x: 3)", - " Dimension coordinates:", - " y x -", - " x - x", - ] - self.assertEqual(rep, expected) - - def test_section_vector_auxcoords(self): - cube = Cube(np.zeros((2, 3)), long_name="name", units=1) - cube.add_aux_coord(DimCoord([0, 1], long_name="y"), 0) - cube.add_aux_coord(DimCoord([0, 1, 2], long_name="x"), 1) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 2; -- : 3)", - " Auxiliary coordinates:", - " y x -", - " x - x", - ] - self.assertEqual(rep, expected) - - def test_section_vector_ancils(self): - cube = Cube(np.zeros((2, 3)), long_name="name", units=1) - cube.add_ancillary_variable( - AncillaryVariable([0, 1], long_name="av1"), 0 - ) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 2; -- : 3)", - " Ancillary variables:", - " av1 x -", - ] - self.assertEqual(rep, expected) - - def test_section_vector_cell_measures(self): - cube = Cube(np.zeros((2, 3)), long_name="name", units=1) - cube.add_cell_measure(CellMeasure([0, 1, 2], long_name="cm"), 1) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 2; -- : 3)", - " Cell measures:", - " cm - x", - ] - self.assertEqual(rep, expected) - - def test_section_scalar_coords(self): - # incl points + bounds - # TODO: ought to incorporate coord-based summary - # - which would allow for special printout of time values - cube = Cube([0], long_name="name", units=1) - cube.add_aux_coord(DimCoord([0.0], long_name="unbounded")) - cube.add_aux_coord(DimCoord([0], bounds=[[0, 7]], long_name="bounded")) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 1)", - " Scalar coordinates:", - " bounded 0, bound=(0, 7)", - " unbounded 0.0", - ] - 
self.assertEqual(rep, expected) - - def test_section_scalar_coords__string(self): - # incl a newline-escaped one - # incl a long (clipped) one - # CHECK THAT CLIPPED+ESCAPED WORKS (don't lose final quote) - cube = Cube([0], long_name="name", units=1) - cube.add_aux_coord(AuxCoord(["string-value"], long_name="text")) - long_string = ( - "A string value which is very very very very very very " - "very very very very very very very very long." - ) - cube.add_aux_coord( - AuxCoord([long_string], long_name="very_long_string") - ) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 1)", - " Scalar coordinates:", - " text string-value", - ( - " very_long_string A string value which is " - "very very very very very very very very very very..." - ), - ] - self.assertEqual(rep, expected) - - def test_section_scalar_cell_measures(self): - cube = Cube(np.zeros((2, 3)), long_name="name", units=1) - cube.add_cell_measure(CellMeasure([0], long_name="cm")) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 2; -- : 3)", - " Scalar cell measures:", - " cm", - ] - self.assertEqual(rep, expected) - - def test_section_scalar_ancillaries(self): - # There *is* no section for this. But there probably ought to be. 
- cube = Cube(np.zeros((2, 3)), long_name="name", units=1) - cube.add_ancillary_variable(AncillaryVariable([0], long_name="av")) - - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 2; -- : 3)", - " Ancillary variables:", - " av - -", - ] - self.assertEqual(rep, expected) - - def test_section_cube_attributes(self): - cube = Cube([0], long_name="name", units=1) - cube.attributes["number"] = 1.2 - cube.attributes["list"] = [3] - cube.attributes["string"] = "four five in a string" - cube.attributes["z_tupular"] = (6, (7, 8)) - rep = cube_replines(cube) - # NOTE: 'list' before 'number', as it uses "sorted(attrs.items())" - expected = [ - "name / (1) (-- : 1)", - " Attributes:", - " list [3]", - " number 1.2", - " string 'four five in a string'", - " z_tupular (6, (7, 8))", - ] - self.assertEqual(rep, expected) - - def test_section_cube_attributes__string_extras(self): - cube = Cube([0], long_name="name", units=1) - # Overlong strings are truncated (with iris.util.clip_string). - long_string = ( - "this is very very very very very very very " - "very very very very very very very long." - ) - # Strings with embedded newlines or quotes are printed in quoted form. - cube.attributes["escaped"] = "escaped\tstring" - cube.attributes["long"] = long_string - cube.attributes["long_multi"] = "multi\nline, " + long_string - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 1)", - " Attributes:", - " escaped 'escaped\\tstring'", - ( - " long 'this is very very very " - "very very very very very very very very very very..." - ), - ( - " long_multi 'multi\\nline, " - "this is very very very very very very very very very very..." - ), - ] - self.assertEqual(rep, expected) - - def test_section_cube_attributes__array(self): - # Including a long one, which gets a truncated representation. 
- cube = Cube([0], long_name="name", units=1) - small_array = np.array([1.2, 3.4]) - large_array = np.arange(36).reshape((18, 2)) - cube.attributes["array"] = small_array - cube.attributes["bigarray"] = large_array - rep = cube_replines(cube) - expected = [ - "name / (1) (-- : 1)", - " Attributes:", - " array array([1.2, 3.4])", - ( - " bigarray array([[ 0, 1], [ 2, 3], " - "[ 4, 5], [ 6, 7], [ 8, 9], [10, 11], [12, 13],..." - ), - ] - self.assertEqual(rep, expected) - - def test_section_cell_methods(self): - cube = Cube([0], long_name="name", units=1) - cube.add_cell_method(CellMethod("stdev", "area")) - cube.add_cell_method( - CellMethod( - method="mean", - coords=["y", "time"], - intervals=["10m", "3min"], - comments=["vertical", "=duration"], - ) - ) - rep = cube_replines(cube) - # Note: not alphabetical -- provided order is significant - expected = [ - "name / (1) (-- : 1)", - " Cell methods:", - " stdev area", - " mean y (10m, vertical), time (3min, =duration)", - ] - self.assertEqual(rep, expected) - - def test_unstructured_cube(self): - # Check a sample mesh-cube against the expected result. - cube = sample_mesh_cube() - rep = cube_replines(cube) - expected = [ - "mesh_phenom / (unknown) (level: 2; i_mesh_face: 3)", - " Dimension coordinates:", - " level x -", - " i_mesh_face - x", - " Mesh coordinates:", - " latitude - x", - " longitude - x", - " Auxiliary coordinates:", - " mesh_face_aux - x", - ] - self.assertEqual(rep, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/representation/cube_printout/test_Table.py b/lib/iris/tests/unit/representation/cube_printout/test_Table.py deleted file mode 100644 index 2ff6738998..0000000000 --- a/lib/iris/tests/unit/representation/cube_printout/test_Table.py +++ /dev/null @@ -1,159 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris._representation.cube_printout.Table`.""" -from iris._representation.cube_printout import Table -import iris.tests as tests - - -class TestTable(tests.IrisTest): - # Note: this is just barely an independent definition, not *strictly* part - # of CubePrinter, but effectively more-or-less so. - def setUp(self): - table = Table() - table.add_row(["one", "b", "three"], aligns=["left", "right", "left"]) - table.add_row(["a", "two", "c"], aligns=["right", "left", "right"]) - self.simple_table = table - - def test_empty(self): - table = Table() - self.assertIsNone(table.n_columns) - self.assertEqual(len(table.rows), 0) - self.assertIsNone(table.col_widths) - # Check other methods : should be ok but do nothing. - table.set_min_column_widths() # Ok but does nothing. - self.assertIsNone(table.col_widths) - self.assertEqual(table.formatted_as_strings(), []) - self.assertEqual(str(table), "") - - def test_basic_content(self): - # Mirror the above 'empty' tests on a small basic table. - table = self.simple_table - self.assertEqual(table.n_columns, 3) - self.assertEqual(len(table.rows), 2) - self.assertIsNone(table.col_widths) - table.set_min_column_widths() # Ok but does nothing. - self.assertEqual(table.col_widths, [3, 3, 5]) - self.assertEqual( - table.formatted_as_strings(), ["one b three", " a two c"] - ) - self.assertEqual(str(table), "one b three\n a two c") - - def test_copy(self): - table = self.simple_table - # Add some detail information - table.rows[1].i_col_unlimited = 77 # Doesn't actually affect anything - table.col_widths = [10, 15, 12] - # Make the copy - table2 = table.copy() - self.assertIsNot(table2, table) - self.assertNotEqual(table2, table) # Note: equality is not implemented - # Check the parts match the original. 
- self.assertEqual(len(table2.rows), len(table.rows)) - for row2, row in zip(table2.rows, table.rows): - self.assertEqual(row2.cols, row.cols) - self.assertEqual(row2.aligns, row.aligns) - self.assertEqual(row2.i_col_unlimited, row.i_col_unlimited) - - def test_add_row(self): - table = Table() - self.assertEqual(table.n_columns, None) - # Add onw row. - table.add_row(["one", "two", "three"], aligns=["left", "left", "left"]) - self.assertEqual(len(table.rows), 1) - self.assertEqual(table.n_columns, 3) - self.assertIsNone(table.rows[0].i_col_unlimited) - # Second row ok. - table.add_row( - ["x", "y", "z"], - aligns=["right", "right", "right"], - i_col_unlimited=199, - ) - self.assertEqual(len(table.rows), 2) - self.assertEqual(table.rows[-1].i_col_unlimited, 199) - - # Fails with bad number of columns - regex = "columns.*!=.*existing" - with self.assertRaisesRegex(ValueError, regex): - table.add_row(["1", "2"], ["left", "right"]) - - # Fails with bad number of aligns - regex = "aligns.*!=.*col" - with self.assertRaisesRegex(ValueError, regex): - table.add_row(["1", "2", "3"], ["left", "left", "left", "left"]) - - def test_formatted_as_strings(self): - # Test simple self-print is same as - table = Table() - aligns = ["left", "right", "left"] - table.add_row(["1", "266", "32"], aligns) - table.add_row(["123", "2", "3"], aligns) - - # Check that printing calculates default column widths, and result.. - self.assertEqual(table.col_widths, None) - result = table.formatted_as_strings() - self.assertEqual(result, ["1 266 32", "123 2 3"]) - self.assertEqual(table.col_widths, [3, 3, 2]) - - def test_fail_bad_alignments(self): - # Invalid 'aligns' content : only detected when printed - table = Table() - table.add_row(["1", "2", "3"], ["left", "right", "BAD"]) - regex = 'Unknown alignment "BAD"' - with self.assertRaisesRegex(ValueError, regex): - str(table) - - def test_table_set_width(self): - # Check that changes do *not* affect pre-existing widths. 
- table = Table() - aligns = ["left", "right", "left"] - table.col_widths = [3, 3, 2] - table.add_row(["333", "333", "22"], aligns) - table.add_row(["a", "b", "c"], aligns) - table.add_row(["12345", "12345", "12345"], aligns) - result = table.formatted_as_strings() - self.assertEqual(table.col_widths, [3, 3, 2]) - self.assertEqual( - result, - [ - "333 333 22", - "a b c", - "12345 12345 12345", # These are exceeding the given widths. - ], - ) - - def test_unlimited_column(self): - table = Table() - aligns = ["left", "right", "left"] - table.add_row(["a", "beee", "c"], aligns) - table.add_row( - ["abcd", "any-longer-stuff", "this"], aligns, i_col_unlimited=1 - ) - table.add_row(["12", "x", "yy"], aligns) - result = table.formatted_as_strings() - self.assertEqual( - result, - [ - "a beee c", - "abcd any-longer-stuff this", - # NOTE: the widths-calc is ignoring cols 1-2, but - # entry#0 *is* extending the width of col#0 - "12 x yy", - ], - ) - - def test_str(self): - # Check that str returns the formatted_as_strings() output. - table = Table() - aligns = ["left", "left", "left"] - table.add_row(["one", "two", "three"], aligns=aligns) - table.add_row(["1", "2", "3"], aligns=aligns) - expected = "\n".join(table.formatted_as_strings()) - result = str(table) - self.assertEqual(result, expected) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/representation/cube_summary/__init__.py b/lib/iris/tests/unit/representation/cube_summary/__init__.py deleted file mode 100644 index c20a621ba2..0000000000 --- a/lib/iris/tests/unit/representation/cube_summary/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris._representation.cube_summary` module.""" diff --git a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py b/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py deleted file mode 100644 index 3e411c020d..0000000000 --- a/lib/iris/tests/unit/representation/cube_summary/test_CubeSummary.py +++ /dev/null @@ -1,325 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :class:`iris._representation.cube_summary.CubeSummary`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris._representation.cube_summary import CubeSummary -from iris.coords import ( - AncillaryVariable, - AuxCoord, - CellMeasure, - CellMethod, - DimCoord, -) -from iris.cube import Cube -from iris.tests.stock.mesh import sample_mesh_cube - - -def example_cube(): - cube = Cube( - np.arange(6).reshape([3, 2]), - standard_name="air_temperature", - long_name="screen_air_temp", - var_name="airtemp", - units="K", - ) - lat = DimCoord([0, 1, 2], standard_name="latitude", units="degrees") - cube.add_dim_coord(lat, 0) - return cube - - -class Test_CubeSummary(tests.IrisTest): - def setUp(self): - self.cube = example_cube() - - def test_header(self): - rep = CubeSummary(self.cube) - header_left = rep.header.nameunit - header_right = rep.header.dimension_header.contents - - self.assertEqual(header_left, "air_temperature / (K)") - self.assertEqual(header_right, ["latitude: 3", "-- : 2"]) - - def test_blank_cube(self): - cube = Cube([1, 2]) - rep = CubeSummary(cube) - - self.assertEqual(rep.header.nameunit, "unknown / (unknown)") - self.assertEqual(rep.header.dimension_header.contents, ["-- : 2"]) - - expected_vector_sections = [ - "Dimension coordinates:", - 
"Mesh coordinates:", - "Auxiliary coordinates:", - "Derived coordinates:", - "Cell measures:", - "Ancillary variables:", - ] - self.assertEqual( - list(rep.vector_sections.keys()), expected_vector_sections - ) - for title in expected_vector_sections: - vector_section = rep.vector_sections[title] - self.assertEqual(vector_section.contents, []) - self.assertTrue(vector_section.is_empty()) - - expected_scalar_sections = [ - "Scalar coordinates:", - "Scalar cell measures:", - "Cell methods:", - "Attributes:", - ] - - self.assertEqual( - list(rep.scalar_sections.keys()), expected_scalar_sections - ) - for title in expected_scalar_sections: - scalar_section = rep.scalar_sections[title] - self.assertEqual(scalar_section.contents, []) - self.assertTrue(scalar_section.is_empty()) - - def test_vector_coord(self): - rep = CubeSummary(self.cube) - dim_section = rep.vector_sections["Dimension coordinates:"] - - self.assertEqual(len(dim_section.contents), 1) - self.assertFalse(dim_section.is_empty()) - - dim_summary = dim_section.contents[0] - - name = dim_summary.name - dim_chars = dim_summary.dim_chars - extra = dim_summary.extra - - self.assertEqual(name, "latitude") - self.assertEqual(dim_chars, ["x", "-"]) - self.assertEqual(extra, "") - - def test_scalar_coord(self): - cube = self.cube - scalar_coord_no_bounds = AuxCoord([10], long_name="bar", units="K") - scalar_coord_with_bounds = AuxCoord( - [10], long_name="foo", units="K", bounds=[(5, 15)] - ) - scalar_coord_simple_text = AuxCoord( - ["this and that"], - long_name="foo", - attributes={"key": 42, "key2": "value-str"}, - ) - scalar_coord_awkward_text = AuxCoord( - ["a is\nb\n and c"], long_name="foo_2" - ) - cube.add_aux_coord(scalar_coord_no_bounds) - cube.add_aux_coord(scalar_coord_with_bounds) - cube.add_aux_coord(scalar_coord_simple_text) - cube.add_aux_coord(scalar_coord_awkward_text) - rep = CubeSummary(cube) - - scalar_section = rep.scalar_sections["Scalar coordinates:"] - - 
self.assertEqual(len(scalar_section.contents), 4) - - no_bounds_summary = scalar_section.contents[0] - bounds_summary = scalar_section.contents[1] - text_summary_simple = scalar_section.contents[2] - text_summary_awkward = scalar_section.contents[3] - - self.assertEqual(no_bounds_summary.name, "bar") - self.assertEqual(no_bounds_summary.content, "10 K") - self.assertEqual(no_bounds_summary.extra, "") - - self.assertEqual(bounds_summary.name, "foo") - self.assertEqual(bounds_summary.content, "10 K, bound=(5, 15) K") - self.assertEqual(bounds_summary.extra, "") - - self.assertEqual(text_summary_simple.name, "foo") - self.assertEqual(text_summary_simple.content, "this and that") - self.assertEqual(text_summary_simple.lines, ["this and that"]) - self.assertEqual(text_summary_simple.extra, "key=42, key2='value-str'") - - self.assertEqual(text_summary_awkward.name, "foo_2") - self.assertEqual(text_summary_awkward.content, r"'a is\nb\n and c'") - self.assertEqual(text_summary_awkward.lines, ["a is", "b", " and c"]) - self.assertEqual(text_summary_awkward.extra, "") - - def test_cell_measure(self): - cube = self.cube - cell_measure = CellMeasure([1, 2, 3], long_name="foo") - cube.add_cell_measure(cell_measure, 0) - rep = CubeSummary(cube) - - cm_section = rep.vector_sections["Cell measures:"] - self.assertEqual(len(cm_section.contents), 1) - - cm_summary = cm_section.contents[0] - self.assertEqual(cm_summary.name, "foo") - self.assertEqual(cm_summary.dim_chars, ["x", "-"]) - - def test_ancillary_variable(self): - cube = self.cube - cell_measure = AncillaryVariable([1, 2, 3], long_name="foo") - cube.add_ancillary_variable(cell_measure, 0) - rep = CubeSummary(cube) - - av_section = rep.vector_sections["Ancillary variables:"] - self.assertEqual(len(av_section.contents), 1) - - av_summary = av_section.contents[0] - self.assertEqual(av_summary.name, "foo") - self.assertEqual(av_summary.dim_chars, ["x", "-"]) - - def test_attributes(self): - cube = self.cube - cube.attributes = 
{"a": 1, "b": "two", "c": " this \n that\tand."} - rep = CubeSummary(cube) - - attribute_section = rep.scalar_sections["Attributes:"] - attribute_contents = attribute_section.contents - expected_contents = [ - "a: 1", - "b: 'two'", - "c: ' this \\n that\\tand.'", - ] - # Note: a string with \n or \t in it gets "repr-d". - # Other strings don't (though in coord 'extra' lines, they do.) - - self.assertEqual(attribute_contents, expected_contents) - - def test_cell_methods(self): - cube = self.cube - x = AuxCoord(1, long_name="x") - y = AuxCoord(1, long_name="y") - cell_method_xy = CellMethod("mean", [x, y]) - cell_method_x = CellMethod("mean", x) - cube.add_cell_method(cell_method_xy) - cube.add_cell_method(cell_method_x) - - rep = CubeSummary(cube) - cell_method_section = rep.scalar_sections["Cell methods:"] - expected_contents = ["mean: x, y", "mean: x"] - self.assertEqual(cell_method_section.contents, expected_contents) - - def test_scalar_cube(self): - cube = self.cube - while cube.ndim > 0: - cube = cube[0] - rep = CubeSummary(cube) - self.assertEqual(rep.header.nameunit, "air_temperature / (K)") - self.assertTrue(rep.header.dimension_header.scalar) - self.assertEqual(rep.header.dimension_header.dim_names, []) - self.assertEqual(rep.header.dimension_header.shape, []) - self.assertEqual(rep.header.dimension_header.contents, ["scalar cube"]) - self.assertEqual(len(rep.vector_sections), 6) - self.assertTrue( - all(sect.is_empty() for sect in rep.vector_sections.values()) - ) - self.assertEqual(len(rep.scalar_sections), 4) - self.assertEqual( - len(rep.scalar_sections["Scalar coordinates:"].contents), 1 - ) - self.assertTrue( - rep.scalar_sections["Scalar cell measures:"].is_empty() - ) - self.assertTrue(rep.scalar_sections["Attributes:"].is_empty()) - self.assertTrue(rep.scalar_sections["Cell methods:"].is_empty()) - - def test_coord_attributes(self): - cube = self.cube - co1 = cube.coord("latitude") - co1.attributes.update(dict(a=1, b=2)) - co2 = co1.copy() - 
co2.attributes.update(dict(a=7, z=77, text="ok", text2="multi\nline")) - cube.add_aux_coord(co2, cube.coord_dims(co1)) - rep = CubeSummary(cube) - co1_summ = rep.vector_sections["Dimension coordinates:"].contents[0] - co2_summ = rep.vector_sections["Auxiliary coordinates:"].contents[0] - # Notes: 'b' is same so does not appear; sorted order; quoted strings. - self.assertEqual(co1_summ.extra, "a=1") - self.assertEqual( - co2_summ.extra, "a=7, text='ok', text2='multi\\nline', z=77" - ) - - def test_array_attributes(self): - cube = self.cube - co1 = cube.coord("latitude") - co1.attributes.update(dict(a=1, array=np.array([1.2, 3]))) - co2 = co1.copy() - co2.attributes.update(dict(b=2, array=np.array([3.2, 1]))) - cube.add_aux_coord(co2, cube.coord_dims(co1)) - rep = CubeSummary(cube) - co1_summ = rep.vector_sections["Dimension coordinates:"].contents[0] - co2_summ = rep.vector_sections["Auxiliary coordinates:"].contents[0] - self.assertEqual(co1_summ.extra, "array=array([1.2, 3. ])") - self.assertEqual(co2_summ.extra, "array=array([3.2, 1. ]), b=2") - - def test_attributes_subtle_differences(self): - cube = Cube([0]) - - # Add a pair that differ only in having a list instead of an array. - co1a = DimCoord( - [0], - long_name="co1_list_or_array", - attributes=dict(x=1, arr1=np.array(2), arr2=np.array([1, 2])), - ) - co1b = co1a.copy() - co1b.attributes.update(dict(arr2=[1, 2])) - for co in (co1a, co1b): - cube.add_aux_coord(co) - - # Add a pair that differ only in an attribute array dtype. - co2a = AuxCoord( - [0], - long_name="co2_dtype", - attributes=dict(x=1, arr1=np.array(2), arr2=np.array([3, 4])), - ) - co2b = co2a.copy() - co2b.attributes.update(dict(arr2=np.array([3.0, 4.0]))) - assert co2b != co2a - for co in (co2a, co2b): - cube.add_aux_coord(co) - - # Add a pair that differ only in an attribute array shape. 
- co3a = DimCoord( - [0], - long_name="co3_shape", - attributes=dict(x=1, arr1=np.array([5, 6]), arr2=np.array([3, 4])), - ) - co3b = co3a.copy() - co3b.attributes.update(dict(arr1=np.array([[5], [6]]))) - for co in (co3a, co3b): - cube.add_aux_coord(co) - - rep = CubeSummary(cube) - co_summs = rep.scalar_sections["Scalar coordinates:"].contents - co1a_summ, co1b_summ = co_summs[0:2] - self.assertEqual(co1a_summ.extra, "arr2=array([1, 2])") - self.assertEqual(co1b_summ.extra, "arr2=[1, 2]") - co2a_summ, co2b_summ = co_summs[2:4] - self.assertEqual(co2a_summ.extra, "arr2=array([3, 4])") - self.assertEqual(co2b_summ.extra, "arr2=array([3., 4.])") - co3a_summ, co3b_summ = co_summs[4:6] - self.assertEqual(co3a_summ.extra, "arr1=array([5, 6])") - self.assertEqual(co3b_summ.extra, "arr1=array([[5], [6]])") - - def test_unstructured_cube(self): - cube = sample_mesh_cube() - rep = CubeSummary(cube) - # Just check that coordinates appear in the expected sections - dim_section = rep.vector_sections["Dimension coordinates:"] - mesh_section = rep.vector_sections["Mesh coordinates:"] - aux_section = rep.vector_sections["Auxiliary coordinates:"] - self.assertEqual(len(dim_section.contents), 2) - self.assertEqual(len(mesh_section.contents), 2) - self.assertEqual(len(aux_section.contents), 1) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/test_Future.py b/lib/iris/tests/unit/test_Future.py deleted file mode 100644 index dddc752b6f..0000000000 --- a/lib/iris/tests/unit/test_Future.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.Future` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import warnings - -from iris import Future - - -def patched_future(value=False, deprecated=False, error=False): - class LocalFuture(Future): - # Modified Future class, with controlled deprecation options. - # - # NOTE: it is necessary to subclass this in order to modify the - # 'deprecated_options' property, because we don't want to modify the - # class variable of the actual Future class ! - deprecated_options = {} - if deprecated: - if error: - deprecated_options["example_future_flag"] = "error" - else: - deprecated_options["example_future_flag"] = "warning" - - future = LocalFuture() - future.__dict__["example_future_flag"] = value - return future - - -class Test___setattr__(tests.IrisTest): - def test_valid_setting(self): - future = patched_future() - new_value = not future.example_future_flag - with warnings.catch_warnings(): - warnings.simplefilter("error") # Check no warning emitted ! - future.example_future_flag = new_value - self.assertEqual(future.example_future_flag, new_value) - - def test_deprecated_warning(self): - future = patched_future(deprecated=True, error=False) - msg = "'Future' property 'example_future_flag' is deprecated" - with self.assertWarnsRegexp(msg): - future.example_future_flag = False - - def test_deprecated_error(self): - future = patched_future(deprecated=True, error=True) - exp_emsg = ( - "'Future' property 'example_future_flag' has been deprecated" - ) - with self.assertRaisesRegex(AttributeError, exp_emsg): - future.example_future_flag = False - - def test_invalid_attribute(self): - future = Future() - with self.assertRaises(AttributeError): - future.numberwang = 7 - - -class Test_context(tests.IrisTest): - def test_generic_no_args(self): - # While Future has no properties, it is necessary to patch Future in - # order for these tests to work. This test is not a precise emulation - # of the test it is replacing, but ought to cover most of the same - # behaviour while Future is empty. 
- with warnings.catch_warnings(): - warnings.simplefilter("ignore") - future = patched_future(value=False) - self.assertFalse(future.example_future_flag) - with future.context(): - self.assertFalse(future.example_future_flag) - future.example_future_flag = True - self.assertTrue(future.example_future_flag) - self.assertFalse(future.example_future_flag) - - def test_generic_with_arg(self): - # While Future has no properties, it is necessary to patch Future in - # order for these tests to work. This test is not a precise emulation - # of the test it is replacing, but ought to cover most of the same - # behaviour while Future is empty. - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - future = patched_future(value=False) - self.assertFalse(future.example_future_flag) - self.assertFalse(future.example_future_flag) - with future.context(example_future_flag=True): - self.assertTrue(future.example_future_flag) - self.assertFalse(future.example_future_flag) - - def test_invalid_arg(self): - future = Future() - with self.assertRaises(AttributeError): - with future.context(this_does_not_exist=True): - # Don't need to do anything here... the context manager - # will (assuming it's working!) have already raised the - # exception we're looking for. - pass - - def test_generic_exception(self): - # Check that an interrupted context block restores the initial state. - class LocalTestException(Exception): - pass - - # While Future has no properties, it is necessary to patch Future in - # order for these tests to work. This test is not a precise emulation - # of the test it is replacing, but ought to cover most of the same - # behaviour while Future is empty. 
- with warnings.catch_warnings(): - warnings.simplefilter("ignore") - future = patched_future(value=False) - try: - with future.context(example_future_flag=True): - raise LocalTestException() - except LocalTestException: - pass - self.assertEqual(future.example_future_flag, False) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/test_sample_data_path.py b/lib/iris/tests/unit/test_sample_data_path.py deleted file mode 100644 index ebf3b8108c..0000000000 --- a/lib/iris/tests/unit/test_sample_data_path.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for :func:`iris.sample_data_path` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import os -import os.path -import shutil -import tempfile -from unittest import mock - -from iris import sample_data_path - - -def _temp_file(sample_dir): - # Return the full path to a new genuine file within our - # temporary directory. 
- sample_handle, sample_path = tempfile.mkstemp(dir=sample_dir) - os.close(sample_handle) - return sample_path - - -@tests.skip_sample_data -class TestIrisSampleData_path(tests.IrisTest): - def setUp(self): - self.sample_dir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.sample_dir) - - def test_path(self): - with mock.patch("iris_sample_data.path", self.sample_dir): - import iris_sample_data - - self.assertEqual(iris_sample_data.path, self.sample_dir) - - def test_call(self): - sample_file = _temp_file(self.sample_dir) - with mock.patch("iris_sample_data.path", self.sample_dir): - result = sample_data_path(os.path.basename(sample_file)) - self.assertEqual(result, sample_file) - - def test_file_not_found(self): - with mock.patch("iris_sample_data.path", self.sample_dir): - with self.assertRaisesRegex( - ValueError, "Sample data .* not found" - ): - sample_data_path("foo") - - def test_file_absolute(self): - with mock.patch("iris_sample_data.path", self.sample_dir): - with self.assertRaisesRegex(ValueError, "Absolute path"): - sample_data_path(os.path.abspath("foo")) - - def test_glob_ok(self): - sample_path = _temp_file(self.sample_dir) - sample_glob = "?" 
+ os.path.basename(sample_path)[1:] - with mock.patch("iris_sample_data.path", self.sample_dir): - result = sample_data_path(sample_glob) - self.assertEqual( - result, os.path.join(self.sample_dir, sample_glob) - ) - - def test_glob_not_found(self): - with mock.patch("iris_sample_data.path", self.sample_dir): - with self.assertRaisesRegex( - ValueError, "Sample data .* not found" - ): - sample_data_path("foo.*") - - def test_glob_absolute(self): - with mock.patch("iris_sample_data.path", self.sample_dir): - with self.assertRaisesRegex(ValueError, "Absolute path"): - sample_data_path(os.path.abspath("foo.*")) - - -class TestIrisSampleDataMissing(tests.IrisTest): - def test_no_iris_sample_data(self): - self.patch("iris.iris_sample_data", None) - with self.assertRaisesRegex(ImportError, "Please install"): - sample_data_path("") - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/tests/__init__.py b/lib/iris/tests/unit/tests/__init__.py deleted file mode 100644 index b8d27d34d3..0000000000 --- a/lib/iris/tests/unit/tests/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.tests` package.""" diff --git a/lib/iris/tests/unit/tests/stock/__init__.py b/lib/iris/tests/unit/tests/stock/__init__.py deleted file mode 100644 index f91390c2b3..0000000000 --- a/lib/iris/tests/unit/tests/stock/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.tests.stock` module.""" diff --git a/lib/iris/tests/unit/tests/stock/test_netcdf.py b/lib/iris/tests/unit/tests/stock/test_netcdf.py deleted file mode 100644 index 54d7b895cc..0000000000 --- a/lib/iris/tests/unit/tests/stock/test_netcdf.py +++ /dev/null @@ -1,142 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.tests.stock.netcdf` module.""" - -import shutil -import tempfile - -from iris import load_cube -from iris.experimental.ugrid.load import PARSE_UGRID_ON_LOAD -from iris.experimental.ugrid.mesh import Mesh, MeshCoord - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests -from iris.tests.stock import netcdf - - -class XIOSFileMixin(tests.IrisTest): - @classmethod - def setUpClass(cls): - # Create a temp directory for transient test files. - cls.temp_dir = tempfile.mkdtemp() - - @classmethod - def tearDownClass(cls): - # Destroy the temp directory. - shutil.rmtree(cls.temp_dir) - - def create_synthetic_file(self, **create_kwargs): - # Should be overridden to invoke one of the create_file_ functions. - # E.g. - # return netcdf.create_file__xios_2d_face_half_levels( - # temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs - # ) - raise NotImplementedError - - def create_synthetic_test_cube(self, **create_kwargs): - file_path = self.create_synthetic_file(**create_kwargs) - with PARSE_UGRID_ON_LOAD.context(): - cube = load_cube(file_path) - return cube - - def check_cube(self, cube, shape, location, level): - # Basic checks on the primary data cube. - self.assertEqual(cube.var_name, "thing") - self.assertEqual(cube.long_name, "thingness") - self.assertEqual(cube.shape, shape) - - # Also a few checks on the attached mesh-related information. 
- last_dim = cube.ndim - 1 - self.assertIsInstance(cube.mesh, Mesh) - self.assertEqual(cube.mesh_dim(), last_dim) - self.assertEqual(cube.location, location) - for coord_name in ("longitude", "latitude"): - coord = cube.coord(coord_name) - self.assertIsInstance(coord, MeshCoord) - self.assertEqual(coord.shape, (shape[last_dim],)) - self.assertTrue(cube.mesh.var_name.endswith(f"{level}_levels")) - - -class Test_create_file__xios_2d_face_half_levels(XIOSFileMixin): - def create_synthetic_file(self, **create_kwargs): - return netcdf.create_file__xios_2d_face_half_levels( - temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs - ) - - def test_basic_load(self): - cube = self.create_synthetic_test_cube() - self.check_cube(cube, shape=(1, 866), location="face", level="half") - - def test_scale_mesh(self): - cube = self.create_synthetic_test_cube(n_faces=10) - self.check_cube(cube, shape=(1, 10), location="face", level="half") - - def test_scale_time(self): - cube = self.create_synthetic_test_cube(n_times=3) - self.check_cube(cube, shape=(3, 866), location="face", level="half") - - -class Test_create_file__xios_3d_face_half_levels(XIOSFileMixin): - def create_synthetic_file(self, **create_kwargs): - return netcdf.create_file__xios_3d_face_half_levels( - temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs - ) - - def test_basic_load(self): - cube = self.create_synthetic_test_cube() - self.check_cube( - cube, shape=(1, 38, 866), location="face", level="half" - ) - - def test_scale_mesh(self): - cube = self.create_synthetic_test_cube(n_faces=10) - self.check_cube(cube, shape=(1, 38, 10), location="face", level="half") - - def test_scale_time(self): - cube = self.create_synthetic_test_cube(n_times=3) - self.check_cube( - cube, shape=(3, 38, 866), location="face", level="half" - ) - - def test_scale_levels(self): - cube = self.create_synthetic_test_cube(n_levels=10) - self.check_cube( - cube, shape=(1, 10, 866), location="face", level="half" - ) - - 
-class Test_create_file__xios_3d_face_full_levels(XIOSFileMixin): - def create_synthetic_file(self, **create_kwargs): - return netcdf.create_file__xios_3d_face_full_levels( - temp_file_dir=self.temp_dir, dataset_name="mesh", **create_kwargs - ) - - def test_basic_load(self): - cube = self.create_synthetic_test_cube() - self.check_cube( - cube, shape=(1, 39, 866), location="face", level="full" - ) - - def test_scale_mesh(self): - cube = self.create_synthetic_test_cube(n_faces=10) - self.check_cube(cube, shape=(1, 39, 10), location="face", level="full") - - def test_scale_time(self): - cube = self.create_synthetic_test_cube(n_times=3) - self.check_cube( - cube, shape=(3, 39, 866), location="face", level="full" - ) - - def test_scale_levels(self): - cube = self.create_synthetic_test_cube(n_levels=10) - self.check_cube( - cube, shape=(1, 10, 866), location="face", level="full" - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/tests/test_IrisTest.py b/lib/iris/tests/unit/tests/test_IrisTest.py deleted file mode 100644 index 5725b59d40..0000000000 --- a/lib/iris/tests/unit/tests/test_IrisTest.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.tests.IrisTest` class.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from abc import ABCMeta, abstractmethod - -import numpy as np - - -class _MaskedArrayEquality(metaclass=ABCMeta): - def setUp(self): - self.arr1 = np.ma.array([1, 2, 3, 4], mask=[False, True, True, False]) - self.arr2 = np.ma.array([1, 3, 2, 4], mask=[False, True, True, False]) - - @property - @abstractmethod - def _func(self): - pass - - def test_strict_comparison(self): - # Comparing both mask and data array completely. - with self.assertRaises(AssertionError): - self._func(self.arr1, self.arr2, strict=True) - - def test_non_strict_comparison(self): - # Checking masked array equality and all unmasked array data values. - self._func(self.arr1, self.arr2, strict=False) - - def test_default_strict_arg_comparison(self): - self._func(self.arr1, self.arr2) - - def test_nomask(self): - # Test that an assertion is raised when comparing a masked array - # containing masked and unmasked values with a masked array with - # 'nomask'. - arr1 = np.ma.array([1, 2, 3, 4]) - with self.assertRaises(AssertionError): - self._func(arr1, self.arr2, strict=False) - - def test_nomask_unmasked(self): - # Ensure that a masked array with 'nomask' can compare with an entirely - # unmasked array. 
- arr1 = np.ma.array([1, 2, 3, 4]) - arr2 = np.ma.array([1, 2, 3, 4], mask=False) - self._func(arr1, arr2, strict=False) - - def test_different_mask_strict(self): - # Differing masks, equal data - arr2 = self.arr1.copy() - arr2[0] = np.ma.masked - with self.assertRaises(AssertionError): - self._func(self.arr1, arr2, strict=True) - - def test_different_mask_nonstrict(self): - # Differing masks, equal data - arr2 = self.arr1.copy() - arr2[0] = np.ma.masked - with self.assertRaises(AssertionError): - self._func(self.arr1, arr2, strict=False) - - -@tests.iristest_timing_decorator -class Test_assertMaskedArrayEqual(_MaskedArrayEquality, tests.IrisTest_nometa): - @property - def _func(self): - return self.assertMaskedArrayEqual - - -class Test_assertMaskedArrayEqual__Nonmaasked(tests.IrisTest): - def test_nonmasked_same(self): - # Masked test can be used on non-masked arrays. - arr1 = np.array([1, 2]) - self.assertMaskedArrayEqual(arr1, arr1) - - def test_masked_nonmasked_same(self): - # Masked test can be used between masked + non-masked arrays, and will - # consider them equal, when mask=None. - arr1 = np.ma.masked_array([1, 2]) - arr2 = np.array([1, 2]) - self.assertMaskedArrayEqual(arr1, arr2) - - def test_masked_nonmasked_different(self): - arr1 = np.ma.masked_array([1, 2]) - arr2 = np.array([1, 3]) - with self.assertRaisesRegex(AssertionError, "Arrays are not equal"): - self.assertMaskedArrayEqual(arr1, arr2) - - def test_nonmasked_masked_same(self): - # Masked test can be used between masked + non-masked arrays, and will - # consider them equal, when mask=None. - arr1 = np.array([1, 2]) - arr2 = np.ma.masked_array([1, 2]) - self.assertMaskedArrayEqual(arr1, arr2) - - def test_masked_nonmasked_same_falsemask(self): - # Masked test can be used between masked + non-masked arrays, and will - # consider them equal, when mask=False. 
- arr1 = np.ma.masked_array([1, 2], mask=False) - arr2 = np.array([1, 2]) - self.assertMaskedArrayEqual(arr1, arr2) - - def test_masked_nonmasked_same_emptymask(self): - # Masked test can be used between masked + non-masked arrays, and will - # consider them equal, when mask=zeros. - arr1 = np.ma.masked_array([1, 2], mask=[False, False]) - arr2 = np.array([1, 2]) - self.assertMaskedArrayEqual(arr1, arr2) - - -@tests.iristest_timing_decorator -class Test_assertMaskedArrayAlmostEqual( - _MaskedArrayEquality, tests.IrisTest_nometa -): - @property - def _func(self): - return self.assertMaskedArrayAlmostEqual - - def test_decimal(self): - arr1, arr2 = np.ma.array([100.0]), np.ma.array([100.003]) - self.assertMaskedArrayAlmostEqual(arr1, arr2, decimal=2) - with self.assertRaises(AssertionError): - self.assertMaskedArrayAlmostEqual(arr1, arr2, decimal=3) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/time/__init__.py b/lib/iris/tests/unit/time/__init__.py deleted file mode 100644 index 3483b92e62..0000000000 --- a/lib/iris/tests/unit/time/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the :mod:`iris.time` module.""" diff --git a/lib/iris/tests/unit/time/test_PartialDateTime.py b/lib/iris/tests/unit/time/test_PartialDateTime.py deleted file mode 100644 index cfffafea2c..0000000000 --- a/lib/iris/tests/unit/time/test_PartialDateTime.py +++ /dev/null @@ -1,278 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.time.PartialDateTime` class.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import datetime -import operator -from unittest import mock - -import cftime - -from iris.time import PartialDateTime - - -class Test___init__(tests.IrisTest): - def test_positional(self): - # Test that we can define PartialDateTimes with positional arguments. - pd = PartialDateTime(1066, None, 10) - self.assertEqual(pd.year, 1066) - self.assertEqual(pd.month, None) - self.assertEqual(pd.day, 10) - - def test_keyword_args(self): - # Test that we can define PartialDateTimes with keyword arguments. - pd = PartialDateTime(microsecond=10) - self.assertEqual(pd.year, None) - self.assertEqual(pd.microsecond, 10) - - -class Test___repr__(tests.IrisTest): - def test_full(self): - pd = PartialDateTime(*list(range(7))) - result = repr(pd) - self.assertEqual( - result, - "PartialDateTime(year=0, month=1, day=2," - " hour=3, minute=4, second=5," - " microsecond=6)", - ) - - def test_partial(self): - pd = PartialDateTime(month=2, day=30) - result = repr(pd) - self.assertEqual(result, "PartialDateTime(month=2, day=30)") - - def test_empty(self): - pd = PartialDateTime() - result = repr(pd) - self.assertEqual(result, "PartialDateTime()") - - -class Test_timetuple(tests.IrisTest): - def test_exists(self): - # Check that the PartialDateTime class implements a timetuple (needed - # because of http://bugs.python.org/issue8005). 
- pd = PartialDateTime(*list(range(7))) - self.assertTrue(hasattr(pd, "timetuple")) - - -class _Test_operator: - def test_invalid_type(self): - pdt = PartialDateTime() - with self.assertRaises(TypeError): - self.op(pdt, 1) - - def _test(self, pdt, other, name): - expected = self.expected_value[name] - if isinstance(expected, type): - with self.assertRaises(expected): - result = self.op(pdt, other) - else: - result = self.op(pdt, other) - self.assertIs(result, expected) - - def _test_dt(self, pdt, name): - other = mock.Mock( - name="datetime", - spec=datetime.datetime, - year=2013, - month=3, - day=20, - second=2, - ) - self._test(pdt, other, name) - - def test_no_difference(self): - self._test_dt( - PartialDateTime(year=2013, month=3, day=20, second=2), - "no_difference", - ) - - def test_null(self): - self._test_dt(PartialDateTime(), "null") - - def test_item1_lo(self): - self._test_dt( - PartialDateTime(year=2011, month=3, second=2), "item1_lo" - ) - - def test_item1_hi(self): - self._test_dt(PartialDateTime(year=2015, month=3, day=24), "item1_hi") - - def test_item2_lo(self): - self._test_dt( - PartialDateTime(year=2013, month=1, second=2), "item2_lo" - ) - - def test_item2_hi(self): - self._test_dt(PartialDateTime(year=2013, month=5, day=24), "item2_hi") - - def test_item3_lo(self): - self._test_dt( - PartialDateTime(year=2013, month=3, second=1), "item3_lo" - ) - - def test_item3_hi(self): - self._test_dt( - PartialDateTime(year=2013, month=3, second=42), "item3_hi" - ) - - def test_mix_hi_lo(self): - self._test_dt(PartialDateTime(year=2015, month=1, day=24), "mix_hi_lo") - - def test_mix_lo_hi(self): - self._test_dt(PartialDateTime(year=2011, month=5, day=24), "mix_lo_hi") - - def _test_pdt(self, other, name): - pdt = PartialDateTime(year=2013, day=24) - self._test(pdt, other, name) - - def test_pdt_same(self): - self._test_pdt(PartialDateTime(year=2013, day=24), "pdt_same") - - def test_pdt_diff(self): - self._test_pdt(PartialDateTime(year=2013, day=25), 
"pdt_diff") - - def test_pdt_diff_fewer_fields(self): - self._test_pdt(PartialDateTime(year=2013), "pdt_diff_fewer") - - def test_pdt_diff_more_fields(self): - self._test_pdt( - PartialDateTime(year=2013, day=24, hour=12), "pdt_diff_more" - ) - - def test_pdt_diff_no_fields(self): - pdt1 = PartialDateTime() - pdt2 = PartialDateTime(month=3, day=24) - self._test(pdt1, pdt2, "pdt_empty") - - -def negate_expectations(expectations): - def negate(expected): - if not isinstance(expected, type): - expected = not expected - return expected - - return {name: negate(value) for name, value in expectations.items()} - - -EQ_EXPECTATIONS = { - "no_difference": True, - "item1_lo": False, - "item1_hi": False, - "item2_lo": False, - "item2_hi": False, - "item3_lo": False, - "item3_hi": False, - "mix_hi_lo": False, - "mix_lo_hi": False, - "null": True, - "pdt_same": True, - "pdt_diff": False, - "pdt_diff_fewer": False, - "pdt_diff_more": False, - "pdt_empty": False, -} - -GT_EXPECTATIONS = { - "no_difference": False, - "item1_lo": False, - "item1_hi": True, - "item2_lo": False, - "item2_hi": True, - "item3_lo": False, - "item3_hi": True, - "mix_hi_lo": True, - "mix_lo_hi": False, - "null": False, - "pdt_same": TypeError, - "pdt_diff": TypeError, - "pdt_diff_fewer": TypeError, - "pdt_diff_more": TypeError, - "pdt_empty": TypeError, -} - -LT_EXPECTATIONS = { - "no_difference": False, - "item1_lo": True, - "item1_hi": False, - "item2_lo": True, - "item2_hi": False, - "item3_lo": True, - "item3_hi": False, - "mix_hi_lo": False, - "mix_lo_hi": True, - "null": False, - "pdt_same": TypeError, - "pdt_diff": TypeError, - "pdt_diff_fewer": TypeError, - "pdt_diff_more": TypeError, - "pdt_empty": TypeError, -} - - -class Test___eq__(tests.IrisTest, _Test_operator): - def setUp(self): - self.op = operator.eq - self.expected_value = EQ_EXPECTATIONS - - def test_cftime_equal(self): - pdt = PartialDateTime(month=3, second=2) - other = cftime.datetime(year=2013, month=3, day=20, second=2) - 
self.assertTrue(pdt == other) - - def test_cftime_not_equal(self): - pdt = PartialDateTime(month=3, second=2) - other = cftime.datetime(year=2013, month=4, day=20, second=2) - self.assertFalse(pdt == other) - - -class Test___ne__(tests.IrisTest, _Test_operator): - def setUp(self): - self.op = operator.ne - self.expected_value = negate_expectations(EQ_EXPECTATIONS) - - -class Test___gt__(tests.IrisTest, _Test_operator): - def setUp(self): - self.op = operator.gt - self.expected_value = GT_EXPECTATIONS - - def test_cftime_greater(self): - pdt = PartialDateTime(month=3, microsecond=2) - other = cftime.datetime(year=2013, month=2, day=20, second=3) - self.assertTrue(pdt > other) - - def test_cftime_not_greater(self): - pdt = PartialDateTime(month=3, microsecond=2) - other = cftime.datetime(year=2013, month=3, day=20, second=3) - self.assertFalse(pdt > other) - - -class Test___le__(tests.IrisTest, _Test_operator): - def setUp(self): - self.op = operator.le - self.expected_value = negate_expectations(GT_EXPECTATIONS) - - -class Test___lt__(tests.IrisTest, _Test_operator): - def setUp(self): - self.op = operator.lt - self.expected_value = LT_EXPECTATIONS - - -class Test___ge__(tests.IrisTest, _Test_operator): - def setUp(self): - self.op = operator.ge - self.expected_value = negate_expectations(LT_EXPECTATIONS) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/__init__.py b/lib/iris/tests/unit/util/__init__.py deleted file mode 100644 index 9aed566a19..0000000000 --- a/lib/iris/tests/unit/util/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.util` module.""" diff --git a/lib/iris/tests/unit/util/test__coord_regular.py b/lib/iris/tests/unit/util/test__coord_regular.py deleted file mode 100644 index a5e9aca9ed..0000000000 --- a/lib/iris/tests/unit/util/test__coord_regular.py +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test elements of :mod:`iris.util` that deal with checking coord regularity. -Specifically, this module tests the following functions: - - * :func:`iris.util.is_regular`, - * :func:`iris.util.regular_step`, and - * :func:`iris.util.points_step`. - -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.coords import AuxCoord, DimCoord -from iris.exceptions import CoordinateMultiDimError, CoordinateNotRegularError -from iris.util import is_regular, points_step, regular_step - - -class Test_is_regular(tests.IrisTest): - def test_coord_with_regular_step(self): - coord = DimCoord(np.arange(5)) - result = is_regular(coord) - self.assertTrue(result) - - def test_coord_with_irregular_step(self): - # Check that a `CoordinateNotRegularError` is captured. - coord = AuxCoord(np.array([2, 5, 1, 4])) - result = is_regular(coord) - self.assertFalse(result) - - def test_scalar_coord(self): - # Check that a `ValueError` is captured. - coord = DimCoord(5) - result = is_regular(coord) - self.assertFalse(result) - - def test_coord_with_string_points(self): - # Check that a `TypeError` is captured. 
- coord = AuxCoord(["a", "b", "c"]) - result = is_regular(coord) - self.assertFalse(result) - - -class Test_regular_step(tests.IrisTest): - def test_basic(self): - dtype = np.float64 - points = np.arange(5, dtype=dtype) - coord = DimCoord(points) - expected = np.mean(np.diff(points)) - result = regular_step(coord) - self.assertEqual(expected, result) - self.assertEqual(result.dtype, dtype) - - def test_2d_coord(self): - coord = AuxCoord(np.arange(8).reshape(2, 4)) - exp_emsg = "Expected 1D coord" - with self.assertRaisesRegex(CoordinateMultiDimError, exp_emsg): - regular_step(coord) - - def test_scalar_coord(self): - coord = DimCoord(5) - exp_emsg = "non-scalar coord" - with self.assertRaisesRegex(ValueError, exp_emsg): - regular_step(coord) - - def test_coord_with_irregular_step(self): - name = "latitude" - coord = AuxCoord(np.array([2, 5, 1, 4]), standard_name=name) - exp_emsg = "{} is not regular".format(name) - with self.assertRaisesRegex(CoordinateNotRegularError, exp_emsg): - regular_step(coord) - - -class Test_points_step(tests.IrisTest): - def test_regular_points(self): - regular_points = np.arange(5) - exp_avdiff = np.mean(np.diff(regular_points)) - result_avdiff, result = points_step(regular_points) - self.assertEqual(exp_avdiff, result_avdiff) - self.assertTrue(result) - - def test_irregular_points(self): - irregular_points = np.array([2, 5, 1, 4]) - exp_avdiff = np.mean(np.diff(irregular_points)) - result_avdiff, result = points_step(irregular_points) - self.assertEqual(exp_avdiff, result_avdiff) - self.assertFalse(result) - - def test_single_point(self): - lone_point = np.array([4]) - result_avdiff, result = points_step(lone_point) - self.assertTrue(np.isnan(result_avdiff)) - self.assertTrue(result) - - def test_no_points(self): - no_points = np.array([]) - result_avdiff, result = points_step(no_points) - self.assertTrue(np.isnan(result_avdiff)) - self.assertTrue(result) - - -if __name__ == "__main__": - tests.main() diff --git 
a/lib/iris/tests/unit/util/test__is_circular.py b/lib/iris/tests/unit/util/test__is_circular.py deleted file mode 100644 index e67eb38294..0000000000 --- a/lib/iris/tests/unit/util/test__is_circular.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util._is_circular`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.util import _is_circular - - -class Test(tests.IrisTest): - def test_simple(self): - data = np.arange(12) * 30 - self.assertTrue(_is_circular(data, 360)) - - def test_negative_diff(self): - data = (np.arange(96) * -3.749998) + 3.56249908e02 - self.assertTrue(_is_circular(data, 360)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test__slice_data_with_keys.py b/lib/iris/tests/unit/util/test__slice_data_with_keys.py deleted file mode 100644 index 061a2f5b37..0000000000 --- a/lib/iris/tests/unit/util/test__slice_data_with_keys.py +++ /dev/null @@ -1,436 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.util._slice_data_with_keys`. - -Note: much of the functionality really belongs to the other routines, -:func:`iris.util._build_full_slice_given_keys`, and -:func:`column_slices_generator`. -However, it is relatively simple to test multiple aspects of all three here -in combination. 
- -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np - -from iris._lazy_data import as_concrete_data, as_lazy_data -from iris.util import _slice_data_with_keys - - -class DummyArray: - # A dummy array-like that records the keys of indexing calls. - def __init__(self, shape, _indexing_record_list=None): - self.shape = shape - self.ndim = len(shape) - if _indexing_record_list is None: - _indexing_record_list = [] - self._getitem_call_keys = _indexing_record_list - - def __getitem__(self, keys): - # Add the indexing keys to the call list. - self._getitem_call_keys.append(keys) - # Return a new object with the correct derived shape, and record its - # indexing operations in the same key list as this. - shape_array = np.zeros(self.shape) - shape_array = shape_array.__getitem__(keys) - new_shape = shape_array.shape - return DummyArray( - new_shape, _indexing_record_list=self._getitem_call_keys - ) - - -class Indexer: - # An object to make __getitem__ arglists from indexing operations. - def __getitem__(self, keys): - return keys - - -# An Indexer object for generating indexing keys in a nice visible way. -Index = Indexer() - - -class MixinIndexingTest: - def check(self, shape, keys, expect_call_keys=None, expect_map=None): - data = DummyArray(shape) - dim_map, _ = _slice_data_with_keys(data, keys) - if expect_call_keys is not None: - calls_got = data._getitem_call_keys - # Check that the indexing keys applied were the expected ones. - equal = len(calls_got) == len(expect_call_keys) - for act_call, expect_call in zip(calls_got, expect_call_keys): - equal &= len(act_call) == len(expect_call) - # A problem here is that in each call, some keys may be - # *arrays*, and arrays can't be compared in the "normal" - # way. 
So we must use np.all for comparison :-( - for act_key, expect_key in zip(act_call, expect_call): - equal &= np.asanyarray(act_key).dtype == np.asanyarray( - expect_key - ).dtype and np.all(act_key == expect_key) - errmsg = "Different key lists:\n{!s}\n!=\n{!s}\n" - - def showkeys(keys_list): - msg = "[\n " - msg += "\n ".join(str(x) for x in keys_list) - msg += "\n]" - return msg - - self.assertTrue( - equal, - errmsg.format(showkeys(calls_got), showkeys(expect_call_keys)), - ) - if expect_map is not None: - self.assertEqual(dim_map, expect_map) - - -class Test_indexing(MixinIndexingTest, tests.IrisTest): - # Check the indexing operations performed for various requested keys. - - def test_0d_nokeys(self): - # Performs *no* underlying indexing operation. - self.check((), Index[()], []) - - def test_1d_int(self): - self.check((4,), Index[2], [(2,)]) - - def test_1d_all(self): - self.check((3,), Index[:], [(slice(None),)]) - - def test_1d_tuple(self): - # The call makes tuples into 1-D arrays, and a trailing Ellipsis is - # added (for the 1-D case only). - self.check( - (3,), Index[((2, 0, 1),)], [(np.array([2, 0, 1]), Ellipsis)] - ) - - def test_fail_1d_2keys(self): - msg = "More slices .* than dimensions" - with self.assertRaisesRegex(IndexError, msg): - self.check((3,), Index[1, 2]) - - def test_fail_empty_slice(self): - msg = "Cannot index with zero length slice" - with self.assertRaisesRegex(IndexError, msg): - self.check((3,), Index[1:1]) - - def test_2d_tuple(self): - # Like the above, but there is an extra no-op at the start and no - # trailing Ellipsis is generated. - self.check( - (3, 2), - Index[((2, 0, 1),)], - [(slice(None), slice(None)), (np.array([2, 0, 1]), slice(None))], - ) - - def test_2d_two_tuples(self): - # Could be treated as fancy indexing, but must not be ! - # Two separate 2-D indexing operations. 
- self.check( - (3, 2), - Index[(2, 0, 1, 1), (0, 1, 0, 1)], - [ - (np.array([2, 0, 1, 1]), slice(None)), - (slice(None), np.array([0, 1, 0, 1])), - ], - ) - - def test_2d_tuple_and_value(self): - # The two keys are applied in separate operations, and in the reverse - # order (?) : The second op is then slicing a 1-D array, not 2-D. - self.check( - (3, 5), - Index[(2, 0, 1), 3], - [(slice(None), 3), (np.array([2, 0, 1]), Ellipsis)], - ) - - def test_2d_single_int(self): - self.check((3, 4), Index[2], [(2, slice(None))]) - - def test_2d_multiple_int(self): - self.check((3, 4), Index[2, 1:3], [(2, slice(1, 3))]) - - def test_3d_1int(self): - self.check((3, 4, 5), Index[2], [(2, slice(None), slice(None))]) - - def test_3d_2int(self): - self.check((3, 4, 5), Index[2, 3], [(2, 3, slice(None))]) - - def test_3d_tuple_and_value(self): - # The two keys are applied in separate operations, and in the reverse - # order (?) : The second op is slicing a 2-D array, not 3-D. - self.check( - (3, 5, 7), - Index[(2, 0, 1), 4], - [ - (slice(None), 4, slice(None)), - (np.array([2, 0, 1]), slice(None)), - ], - ) - - def test_3d_ellipsis_last(self): - self.check((3, 4, 5), Index[2, ...], [(2, slice(None), slice(None))]) - - def test_3d_ellipsis_first_1int(self): - self.check((3, 4, 5), Index[..., 2], [(slice(None), slice(None), 2)]) - - def test_3d_ellipsis_first_2int(self): - self.check((3, 4, 5), Index[..., 2, 3], [(slice(None), 2, 3)]) - - def test_3d_multiple_tuples(self): - # Where there are TWO or more tuple keys, this could be misinterpreted - # as 'fancy' indexing : It should resolve into multiple calls. - self.check( - (3, 4, 5), - Index[(1, 2, 1), :, (2, 2, 3)], - [ - (slice(None), slice(None), slice(None)), - (np.array([1, 2, 1]), slice(None), slice(None)), - (slice(None), slice(None), np.array([2, 2, 3])), - ], - ) - # NOTE: there seem to be an extra initial [:, :, :]. - # That's just what it does at present. 
- - -class Test_dimensions_mapping(MixinIndexingTest, tests.IrisTest): - # Check the dimensions map returned for various requested keys. - - def test_1d_nochange(self): - self.check((3,), Index[1:2], expect_map={None: None, 0: 0}) - - def test_1d_1int_losedim0(self): - self.check((3,), Index[1], expect_map={None: None, 0: None}) - - def test_1d_tuple_nochange(self): - # A selection index leaves the dimension intact. - self.check((3,), Index[((1, 0, 1, 2),)], expect_map={None: None, 0: 0}) - - def test_1d_1tuple_nochange(self): - # A selection index with only one value in it *still* leaves the - # dimension intact. - self.check((3,), Index[((2,),)], expect_map={None: None, 0: 0}) - - def test_1d_slice_nochange(self): - # A slice leaves the dimension intact. - self.check((3,), Index[1:7], expect_map={None: None, 0: 0}) - - def test_2d_nochange(self): - self.check((3, 4), Index[:, :], expect_map={None: None, 0: 0, 1: 1}) - - def test_2d_losedim0(self): - self.check((3, 4), Index[1, :], expect_map={None: None, 0: None, 1: 0}) - - def test_2d_losedim1(self): - self.check( - (3, 4), Index[1:4, 2], expect_map={None: None, 0: 0, 1: None} - ) - - def test_2d_loseboth(self): - # Two indices give scalar result. - self.check( - (3, 4), Index[1, 2], expect_map={None: None, 0: None, 1: None} - ) - - def test_3d_losedim1(self): - # Cutting out the middle dim. - self.check( - (3, 4, 2), - Index[:, 2], - expect_map={None: None, 0: 0, 1: None, 2: 1}, - ) - - -class TestResults(tests.IrisTest): - # Integration-style test, exercising (mostly) the same cases as above, - # but checking actual results, for both real and lazy array inputs. 
- - def check(self, real_data, keys, expect_result, expect_map): - real_data = np.array(real_data) - lazy_data = as_lazy_data(real_data, real_data.shape) - real_dim_map, real_result = _slice_data_with_keys(real_data, keys) - lazy_dim_map, lazy_result = _slice_data_with_keys(lazy_data, keys) - lazy_result = as_concrete_data(lazy_result) - self.assertArrayEqual(real_result, expect_result) - self.assertArrayEqual(lazy_result, expect_result) - self.assertEqual(real_dim_map, expect_map) - self.assertEqual(lazy_dim_map, expect_map) - - def test_1d_int(self): - self.check([1, 2, 3, 4], Index[2], [3], {None: None, 0: None}) - - def test_1d_all(self): - self.check([1, 2, 3], Index[:], [1, 2, 3], {None: None, 0: 0}) - - def test_1d_tuple(self): - self.check( - [1, 2, 3], Index[((2, 0, 1, 0),)], [3, 1, 2, 1], {None: None, 0: 0} - ) - - def test_fail_1d_2keys(self): - msg = "More slices .* than dimensions" - with self.assertRaisesRegex(IndexError, msg): - self.check([1, 2, 3], Index[1, 2], None, None) - - def test_fail_empty_slice(self): - msg = "Cannot index with zero length slice" - with self.assertRaisesRegex(IndexError, msg): - self.check([1, 2, 3], Index[1:1], None, None) - - def test_2d_tuple(self): - self.check( - [[11, 12], [21, 22], [31, 32]], - Index[((2, 0, 1),)], - [[31, 32], [11, 12], [21, 22]], - {None: None, 0: 0, 1: 1}, - ) - - def test_2d_two_tuples(self): - # Could be treated as fancy indexing, but must not be ! - # Two separate 2-D indexing operations. - self.check( - [[11, 12, 13], [21, 22, 23], [31, 32, 33]], - Index[(2, 0), (0, 1, 0, 1)], - [[31, 32, 31, 32], [11, 12, 11, 12]], - {None: None, 0: 0, 1: 1}, - ) - - def test_2d_tuple_and_value(self): - # The two keys are applied in separate operations, and in the reverse - # order (?) : The second op is then slicing a 1-D array, not 2-D. 
- self.check( - [[11, 12, 13, 14], [21, 22, 23, 24], [31, 32, 33, 34]], - Index[(2, 0, 1), 3], - [34, 14, 24], - {None: None, 0: 0, 1: None}, - ) - - def test_2d_single_int(self): - self.check( - [[11, 12, 13], [21, 22, 23], [31, 32, 33]], - Index[1], - [21, 22, 23], - {None: None, 0: None, 1: 0}, - ) - - def test_2d_int_slice(self): - self.check( - [[11, 12, 13], [21, 22, 23], [31, 32, 33]], - Index[2, 1:3], - [32, 33], - {None: None, 0: None, 1: 0}, - ) - - def test_3d_1int(self): - self.check( - [ - [[111, 112, 113], [121, 122, 123]], - [[211, 212, 213], [221, 222, 223]], - [[311, 312, 313], [321, 322, 323]], - ], - Index[1], - [[211, 212, 213], [221, 222, 223]], - {None: None, 0: None, 1: 0, 2: 1}, - ) - - def test_3d_2int(self): - self.check( - [ - [[111, 112, 113], [121, 122, 123], [131, 132, 133]], - [[211, 212, 213], [221, 222, 223], [231, 232, 233]], - ], - Index[1, 2], - [231, 232, 233], - {None: None, 0: None, 1: None, 2: 0}, - ) - - def test_3d_tuple_and_value(self): - # The two keys are applied in separate operations, and in the reverse - # order (?) : The second op is slicing a 2-D array, not 3-D. 
- self.check( - [ - [[111, 112, 113, 114], [121, 122, 123, 124]], - [[211, 212, 213, 214], [221, 222, 223, 224]], - [[311, 312, 313, 314], [321, 322, 323, 324]], - ], - Index[(2, 0, 1), 1], - [[321, 322, 323, 324], [121, 122, 123, 124], [221, 222, 223, 224]], - {None: None, 0: 0, 1: None, 2: 1}, - ) - - def test_3d_ellipsis_last(self): - self.check( - [ - [[111, 112, 113], [121, 122, 123]], - [[211, 212, 213], [221, 222, 223]], - [[311, 312, 313], [321, 322, 323]], - ], - Index[2, ...], - [[311, 312, 313], [321, 322, 323]], - {None: None, 0: None, 1: 0, 2: 1}, - ) - - def test_3d_ellipsis_first_1int(self): - self.check( - [ - [[111, 112, 113, 114], [121, 122, 123, 124]], - [[211, 212, 213, 214], [221, 222, 223, 224]], - [[311, 312, 313, 314], [321, 322, 323, 324]], - ], - Index[..., 2], - [[113, 123], [213, 223], [313, 323]], - {None: None, 0: 0, 1: 1, 2: None}, - ) - - def test_3d_ellipsis_mid_1int(self): - self.check( - [ - [[111, 112, 113], [121, 122, 123]], - [[211, 212, 213], [221, 222, 223]], - [[311, 312, 313], [321, 322, 323]], - ], - Index[..., 1, ...], - [[121, 122, 123], [221, 222, 223], [321, 322, 323]], - {None: None, 0: 0, 1: None, 2: 1}, - ) - - def test_3d_ellipsis_first_2int(self): - self.check( - [ - [[111, 112, 113], [121, 122, 123]], - [[211, 212, 213], [221, 222, 223]], - [[311, 312, 313], [321, 322, 323]], - ], - Index[..., 1, 2], - [123, 223, 323], - {None: None, 0: 0, 1: None, 2: None}, - ) - - def test_3d_multiple_tuples(self): - # Where there are TWO or more tuple keys, this could be misinterpreted - # as 'fancy' indexing : It should resolve into multiple calls. 
- self.check( - [ - [[111, 112, 113, 114], [121, 122, 123, 124]], - [[211, 212, 213, 214], [221, 222, 223, 224]], - [[311, 312, 313, 314], [321, 322, 323, 324]], - ], - Index[(1, 2, 1), :, (2, 2, 3)], - [ - [[213, 213, 214], [223, 223, 224]], - [[313, 313, 314], [323, 323, 324]], - [[213, 213, 214], [223, 223, 224]], - ], - {None: None, 0: 0, 1: 1, 2: 2}, - ) - # NOTE: there seem to be an extra initial [:, :, :]. - # That's just what it does at present. - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_array_equal.py b/lib/iris/tests/unit/util/test_array_equal.py deleted file mode 100644 index 77631907a1..0000000000 --- a/lib/iris/tests/unit/util/test_array_equal.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.array_equal`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.util import array_equal - - -class Test(tests.IrisTest): - def test_0d(self): - array_a = np.array(23) - array_b = np.array(23) - array_c = np.array(7) - self.assertTrue(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_a, array_c)) - - def test_0d_and_scalar(self): - array_a = np.array(23) - self.assertTrue(array_equal(array_a, 23)) - self.assertFalse(array_equal(array_a, 45)) - - def test_1d_and_sequences(self): - for sequence_type in (list, tuple): - seq_a = sequence_type([1, 2, 3]) - array_a = np.array(seq_a) - self.assertTrue(array_equal(array_a, seq_a)) - self.assertFalse(array_equal(array_a, seq_a[:-1])) - array_a[1] = 45 - self.assertFalse(array_equal(array_a, seq_a)) - - def test_nd(self): - array_a = np.array(np.arange(24).reshape(2, 3, 4)) - array_b = 
np.array(np.arange(24).reshape(2, 3, 4)) - array_c = np.array(np.arange(24).reshape(2, 3, 4)) - array_c[0, 1, 2] = 100 - self.assertTrue(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_a, array_c)) - - def test_masked_is_ignored(self): - array_a = ma.masked_array([1, 2, 3], mask=[1, 0, 1]) - array_b = ma.masked_array([2, 2, 2], mask=[1, 0, 1]) - self.assertFalse(array_equal(array_a, array_b)) - - def test_fully_masked_arrays(self): - array_a = ma.masked_array(np.arange(24).reshape(2, 3, 4), mask=True) - array_b = ma.masked_array(np.arange(24).reshape(2, 3, 4), mask=True) - self.assertTrue(array_equal(array_a, array_b)) - - def test_fully_masked_0d_arrays(self): - array_a = ma.masked_array(3, mask=True) - array_b = ma.masked_array(3, mask=True) - self.assertTrue(array_equal(array_a, array_b)) - - def test_fully_masked_string_arrays(self): - array_a = ma.masked_array(["a", "b", "c"], mask=True) - array_b = ma.masked_array(["a", "b", "c"], mask=[1, 1, 1]) - self.assertTrue(array_equal(array_a, array_b)) - - def test_partially_masked_string_arrays(self): - array_a = ma.masked_array(["a", "b", "c"], mask=[1, 0, 1]) - array_b = ma.masked_array(["a", "b", "c"], mask=[1, 0, 1]) - self.assertTrue(array_equal(array_a, array_b)) - - def test_string_arrays_equal(self): - array_a = np.array(["abc", "def", "efg"]) - array_b = np.array(["abc", "def", "efg"]) - self.assertTrue(array_equal(array_a, array_b)) - - def test_string_arrays_different_contents(self): - array_a = np.array(["abc", "def", "efg"]) - array_b = np.array(["abc", "de", "efg"]) - self.assertFalse(array_equal(array_a, array_b)) - - def test_string_arrays_subset(self): - array_a = np.array(["abc", "def", "efg"]) - array_b = np.array(["abc", "def"]) - self.assertFalse(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_b, array_a)) - - def test_string_arrays_unequal_dimensionality(self): - array_a = np.array("abc") - array_b = np.array(["abc"]) - array_c = np.array([["abc"]]) - 
self.assertFalse(array_equal(array_a, array_b)) - self.assertFalse(array_equal(array_b, array_a)) - self.assertFalse(array_equal(array_a, array_c)) - self.assertFalse(array_equal(array_b, array_c)) - - def test_string_arrays_0d_and_scalar(self): - array_a = np.array("foobar") - self.assertTrue(array_equal(array_a, "foobar")) - self.assertFalse(array_equal(array_a, "foo")) - self.assertFalse(array_equal(array_a, "foobar.")) - - def test_nan_equality_nan_ne_nan(self): - array = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) - self.assertFalse(array_equal(array, array)) - - def test_nan_equality_nan_naneq_nan(self): - array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) - array_b = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) - self.assertTrue(array_equal(array_a, array_b, withnans=True)) - - def test_nan_equality_nan_nanne_a(self): - array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) - array_b = np.array([1.0, np.nan, 2.0, 0.0, 3.0]) - self.assertFalse(array_equal(array_a, array_b, withnans=True)) - - def test_nan_equality_a_nanne_b(self): - array_a = np.array([1.0, np.nan, 2.0, np.nan, 3.0]) - array_b = np.array([1.0, np.nan, 2.0, np.nan, 4.0]) - self.assertFalse(array_equal(array_a, array_b, withnans=True)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_broadcast_to_shape.py b/lib/iris/tests/unit/util/test_broadcast_to_shape.py deleted file mode 100644 index 36f00fa53f..0000000000 --- a/lib/iris/tests/unit/util/test_broadcast_to_shape.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Test function :func:`iris.util.broadcast_to_shape`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.util import broadcast_to_shape - - -class Test_broadcast_to_shape(tests.IrisTest): - def test_same_shape(self): - # broadcast to current shape should result in no change - a = np.random.random([2, 3]) - b = broadcast_to_shape(a, a.shape, (0, 1)) - self.assertArrayEqual(b, a) - - def test_added_dimensions(self): - # adding two dimensions, on at the front and one in the middle of - # the existing dimensions - a = np.random.random([2, 3]) - b = broadcast_to_shape(a, (5, 2, 4, 3), (1, 3)) - for i in range(5): - for j in range(4): - self.assertArrayEqual(b[i, :, j, :], a) - - def test_added_dimensions_transpose(self): - # adding dimensions and having the dimensions of the input - # transposed - a = np.random.random([2, 3]) - b = broadcast_to_shape(a, (5, 3, 4, 2), (3, 1)) - for i in range(5): - for j in range(4): - self.assertArrayEqual(b[i, :, j, :].T, a) - - def test_masked(self): - # masked arrays are also accepted - a = np.random.random([2, 3]) - m = ma.array(a, mask=[[0, 1, 0], [0, 1, 1]]) - b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) - for i in range(5): - for j in range(4): - self.assertMaskedArrayEqual(b[i, :, j, :].T, m) - - def test_masked_degenerate(self): - # masked arrays can have degenerate masks too - a = np.random.random([2, 3]) - m = ma.array(a) - b = broadcast_to_shape(m, (5, 3, 4, 2), (3, 1)) - for i in range(5): - for j in range(4): - self.assertMaskedArrayEqual(b[i, :, j, :].T, m) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_column_slices_generator.py b/lib/iris/tests/unit/util/test_column_slices_generator.py deleted file mode 100644 index 899c6b98ba..0000000000 --- a/lib/iris/tests/unit/util/test_column_slices_generator.py +++ /dev/null @@ -1,36 
+0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.column_slices_generator`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.util import column_slices_generator - - -class Test_int_types(tests.IrisTest): - def _test(self, key): - full_slice = (key,) - ndims = 1 - mapping, iterable = column_slices_generator(full_slice, ndims) - self.assertEqual(mapping, {0: None, None: None}) - self.assertEqual(list(iterable), [(0,)]) - - def test_int(self): - self._test(0) - - def test_int_32(self): - self._test(np.int32(0)) - - def test_int_64(self): - self._test(np.int64(0)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py b/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py deleted file mode 100644 index ec8f9904f1..0000000000 --- a/lib/iris/tests/unit/util/test_demote_dim_coord_to_aux_coord.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.demote_dim_coord_to_aux_coord`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import unittest - -import iris -import iris.tests.stock as stock -from iris.util import demote_dim_coord_to_aux_coord - - -class Test(tests.IrisTest): - def test_argument_is_basestring(self): - cube_a = stock.simple_3d() - cube_b = cube_a.copy() - demote_dim_coord_to_aux_coord(cube_b, cube_b.coord("wibble")) - self.assertEqual( - cube_b.dim_coords, - (cube_a.coord("latitude"), cube_a.coord("longitude")), - ) - - @tests.skip_data - def test_argument_is_coord_instance(self): - cube_a = stock.realistic_4d() - cube_b = cube_a.copy() - coord = cube_b.coord("model_level_number").copy() - demote_dim_coord_to_aux_coord(cube_b, coord) - self.assertEqual( - cube_b.dim_coords, - ( - cube_a.coord("time"), - cube_a.coord("grid_latitude"), - cube_a.coord("grid_longitude"), - ), - ) - - def test_old_dim_coord_is_now_aux_coord(self): - cube_a = stock.hybrid_height() - cube_b = cube_a.copy() - demote_dim_coord_to_aux_coord(cube_b, "level_height") - self.assertTrue(cube_a.coord("level_height") in cube_b.aux_coords) - - def test_coord_of_that_name_does_not_exist(self): - cube_a = stock.simple_2d_w_multidim_and_scalars() - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - demote_dim_coord_to_aux_coord(cube_a, "wibble") - - def test_coord_does_not_exist(self): - cube_a = stock.simple_2d_w_multidim_and_scalars() - cube_b = cube_a.copy() - coord = cube_b.coord("dim1").copy() - coord.rename("new") - demote_dim_coord_to_aux_coord(cube_b, coord) - self.assertEqual(cube_a, cube_b) - - def test_argument_is_wrong_type(self): - cube_a = stock.simple_1d() - with self.assertRaises(TypeError): - demote_dim_coord_to_aux_coord(cube_a, 0.0) - - def test_trying_to_demote_a_scalar_coord(self): - cube_a = stock.simple_2d_w_multidim_and_scalars() - cube_b = cube_a.copy() - demote_dim_coord_to_aux_coord(cube_b, "an_other") - self.assertEqual(cube_a, cube_b) - - -if __name__ == "__main__": - unittest.main() diff --git 
a/lib/iris/tests/unit/util/test_describe_diff.py b/lib/iris/tests/unit/util/test_describe_diff.py deleted file mode 100644 index 0bb13cab94..0000000000 --- a/lib/iris/tests/unit/util/test_describe_diff.py +++ /dev/null @@ -1,60 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.describe_diff`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -from io import StringIO - -import numpy as np - -import iris.cube -from iris.util import describe_diff - - -class Test(iris.tests.IrisTest): - def setUp(self): - self.cube_a = iris.cube.Cube([]) - self.cube_b = self.cube_a.copy() - - def _compare_result(self, cube_a, cube_b): - result_sio = StringIO() - describe_diff(cube_a, cube_b, output_file=result_sio) - return result_sio.getvalue() - - def test_noncommon_array_attributes(self): - # test non-common array attribute - self.cube_a.attributes["test_array"] = np.array([1, 2, 3]) - return_str = self._compare_result(self.cube_a, self.cube_b) - self.assertString(return_str, ["compatible_cubes.str.txt"]) - - def test_same_array_attributes(self): - # test matching array attribute - self.cube_a.attributes["test_array"] = np.array([1, 2, 3]) - self.cube_b.attributes["test_array"] = np.array([1, 2, 3]) - return_str = self._compare_result(self.cube_a, self.cube_b) - self.assertString(return_str, ["compatible_cubes.str.txt"]) - - def test_different_array_attributes(self): - # test non-matching array attribute - self.cube_a.attributes["test_array"] = np.array([1, 2, 3]) - self.cube_b.attributes["test_array"] = np.array([1, 7, 3]) - return_str = self._compare_result(self.cube_a, self.cube_b) - self.assertString( - return_str, - [ - "unit", - "util", - "describe_diff", - "incompatible_array_attrs.str.txt", 
- ], - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_equalise_attributes.py b/lib/iris/tests/unit/util/test_equalise_attributes.py deleted file mode 100644 index 13aa1e2af4..0000000000 --- a/lib/iris/tests/unit/util/test_equalise_attributes.py +++ /dev/null @@ -1,157 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the :func:`iris.util.equalise_attributes` function. - -""" - -# import iris tests first so that some things can be initialised -# before importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.cube import Cube -import iris.tests.stock -from iris.util import equalise_attributes - - -class TestEqualiseAttributes(tests.IrisTest): - def setUp(self): - empty = Cube([]) - - self.cube_no_attrs = empty.copy() - - self.cube_a1 = empty.copy() - self.cube_a1.attributes.update({"a": 1}) - - self.cube_a2 = empty.copy() - self.cube_a2.attributes.update({"a": 2}) - - self.cube_a1b5 = empty.copy() - self.cube_a1b5.attributes.update({"a": 1, "b": 5}) - - self.cube_a1b6 = empty.copy() - self.cube_a1b6.attributes.update({"a": 1, "b": 6}) - - self.cube_a2b6 = empty.copy() - self.cube_a2b6.attributes.update({"a": 2, "b": 6}) - - self.cube_b5 = empty.copy() - self.cube_b5.attributes.update({"b": 5}) - - # Array attribute values - v1 = np.array([11, 12, 13]) - v2 = np.array([11, 9999, 13]) - self.v1 = v1 - self.v2 = v2 - - self.cube_a1b5v1 = empty.copy() - self.cube_a1b5v1.attributes.update({"a": 1, "b": 5, "v": v1}) - - self.cube_a1b6v1 = empty.copy() - self.cube_a1b6v1.attributes.update({"a": 1, "b": 6, "v": v1}) - - self.cube_a1b6v2 = empty.copy() - self.cube_a1b6v2.attributes.update({"a": 1, "b": 6, "v": v2}) - - def _test(self, cubes, expect_attributes, expect_removed): - """Test.""" - working_cubes = 
[cube.copy() for cube in cubes] - original_working_list = [cube for cube in working_cubes] - # Exercise basic operation - actual_removed = equalise_attributes(working_cubes) - # Check they are the same cubes - self.assertEqual(working_cubes, original_working_list) - # Check resulting attributes all match the expected set - for cube in working_cubes: - self.assertEqual(cube.attributes, expect_attributes) - # Check removed attributes all match as expected - self.assertEqual(len(actual_removed), len(expect_removed)) - for actual, expect in zip(actual_removed, expect_removed): - self.assertEqual(actual, expect) - # Check everything else remains the same - for new_cube, old_cube in zip(working_cubes, cubes): - cube_before_noatts = old_cube.copy() - cube_before_noatts.attributes.clear() - cube_after_noatts = new_cube.copy() - cube_after_noatts.attributes.clear() - self.assertEqual(cube_after_noatts, cube_before_noatts) - - def test_no_attrs(self): - cubes = [self.cube_no_attrs] - self._test(cubes, {}, [{}]) - - def test_single(self): - cubes = [self.cube_a1] - self._test(cubes, {"a": 1}, [{}]) - - def test_identical(self): - cubes = [self.cube_a1, self.cube_a1.copy()] - self._test(cubes, {"a": 1}, [{}, {}]) - - def test_one_extra(self): - cubes = [self.cube_a1, self.cube_a1b5.copy()] - self._test(cubes, {"a": 1}, [{}, {"b": 5}]) - - def test_one_different(self): - cubes = [self.cube_a1b5, self.cube_a1b6] - self._test(cubes, {"a": 1}, [{"b": 5}, {"b": 6}]) - - def test_common_no_diffs(self): - cubes = [self.cube_a1b5, self.cube_a1b5.copy()] - self._test(cubes, {"a": 1, "b": 5}, [{}, {}]) - - def test_common_all_diffs(self): - cubes = [self.cube_a1b5, self.cube_a2b6] - self._test(cubes, {}, [{"a": 1, "b": 5}, {"a": 2, "b": 6}]) - - def test_none_common(self): - cubes = [self.cube_a1, self.cube_b5] - self._test(cubes, {}, [{"a": 1}, {"b": 5}]) - - def test_array_extra(self): - cubes = [self.cube_a1b6, self.cube_a1b6v1] - self._test(cubes, {"a": 1, "b": 6}, [{}, {"v": 
self.v1}]) - - def test_array_different(self): - cubes = [self.cube_a1b5v1, self.cube_a1b6v2] - self._test( - cubes, {"a": 1}, [{"b": 5, "v": self.v1}, {"b": 6, "v": self.v2}] - ) - - def test_array_same(self): - cubes = [self.cube_a1b5v1, self.cube_a1b6v1] - self._test(cubes, {"a": 1, "v": self.v1}, [{"b": 5}, {"b": 6}]) - - @tests.skip_data - def test_complex_nonecommon(self): - # Example with cell methods and factories, but no common attributes. - cubes = [ - iris.tests.stock.global_pp(), - iris.tests.stock.hybrid_height(), - ] - removed = cubes[0].attributes.copy() - self._test(cubes, {}, [removed, {}]) - - @tests.skip_data - def test_complex_somecommon(self): - # Example with cell methods and factories, plus some common attributes. - cubes = [iris.tests.stock.global_pp(), iris.tests.stock.simple_pp()] - self._test( - cubes, - { - "STASH": iris.fileformats.pp.STASH( - model=1, section=16, item=203 - ), - "source": "Data from Met Office Unified Model", - }, - [{}, {}], - ) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_file_is_newer_than.py b/lib/iris/tests/unit/util/test_file_is_newer_than.py deleted file mode 100644 index cff878a294..0000000000 --- a/lib/iris/tests/unit/util/test_file_is_newer_than.py +++ /dev/null @@ -1,130 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Test function :func:`iris.util.test_file_is_newer`. 
- -""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import os -import os.path -import shutil -import tempfile - -from iris.util import file_is_newer_than - - -class TestFileIsNewer(tests.IrisTest): - """Test the :func:`iris.util.file_is_newer_than` function.""" - - def _name2path(self, filename): - """Add the temporary dirpath to a filename to make a full path.""" - return os.path.join(self.temp_dir, filename) - - def setUp(self): - # make a temporary directory with testfiles of known timestamp order. - self.temp_dir = tempfile.mkdtemp("_testfiles_tempdir") - # define the names of some files to create - create_file_names = [ - "older_source_1", - "older_source_2", - "example_result", - "newer_source_1", - "newer_source_2", - ] - # create testfiles + ensure distinct 'mtime's in the required order. - for i_file, file_name in enumerate(create_file_names): - file_path = self._name2path(file_name) - with open(file_path, "w") as test_file: - test_file.write("..content..") - # Ensure 'mtime's are adequately separated and after create times. - mtime = os.stat(file_path).st_mtime - mtime += 5.0 + 10.0 * i_file - os.utime(file_path, (mtime, mtime)) - - def tearDown(self): - # destroy whole contents of temporary directory - shutil.rmtree(self.temp_dir) - - def _test(self, boolean_result, result_name, source_names): - """Test expected result of executing with given args.""" - # Make args into full paths - result_path = self._name2path(result_name) - if isinstance(source_names, str): - source_paths = self._name2path(source_names) - else: - source_paths = [self._name2path(name) for name in source_names] - # Check result is as expected. 
- self.assertEqual( - boolean_result, file_is_newer_than(result_path, source_paths) - ) - - def test_no_sources(self): - self._test(True, "example_result", []) - - def test_string_ok(self): - self._test(True, "example_result", "older_source_1") - - def test_string_fail(self): - self._test(False, "example_result", "newer_source_1") - - def test_self_result(self): - # This fails, because same-timestamp is *not* acceptable. - self._test(False, "example_result", "example_result") - - def test_single_ok(self): - self._test(True, "example_result", ["older_source_2"]) - - def test_single_fail(self): - self._test(False, "example_result", ["newer_source_2"]) - - def test_multiple_ok(self): - self._test( - True, "example_result", ["older_source_1", "older_source_2"] - ) - - def test_multiple_fail(self): - self._test( - False, - "example_result", - ["older_source_1", "older_source_2", "newer_source_1"], - ) - - def test_wild_ok(self): - self._test(True, "example_result", ["older_sour*_*"]) - - def test_wild_fail(self): - self._test(False, "example_result", ["older_sour*", "newer_sour*"]) - - def test_error_missing_result(self): - with self.assertRaises(OSError) as error_trap: - self._test(False, "non_exist", ["older_sour*"]) - error = error_trap.exception - self.assertEqual(error.strerror, "No such file or directory") - self.assertEqual(error.filename, self._name2path("non_exist")) - - def test_error_missing_source(self): - with self.assertRaises(IOError) as error_trap: - self._test(False, "example_result", ["older_sour*", "non_exist"]) - self.assertIn( - "One or more of the files specified did not exist", - str(error_trap.exception), - ) - - def test_error_missing_wild(self): - with self.assertRaises(IOError) as error_trap: - self._test(False, "example_result", ["older_sour*", "unknown_*"]) - self.assertIn( - "One or more of the files specified did not exist", - str(error_trap.exception), - ) - - -if __name__ == "__main__": - tests.main() diff --git 
a/lib/iris/tests/unit/util/test_find_discontiguities.py b/lib/iris/tests/unit/util/test_find_discontiguities.py deleted file mode 100644 index e939416e7d..0000000000 --- a/lib/iris/tests/unit/util/test_find_discontiguities.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.find_discontiguities""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np - -from iris.tests.stock import ( - make_bounds_discontiguous_at_point, - sample_2d_latlons, - simple_3d, -) -from iris.util import find_discontiguities - - -def full2d_global(): - return sample_2d_latlons(transformed=True) - - -@tests.skip_data -class Test(tests.IrisTest): - def setUp(self): - # Set up a 2d lat-lon cube with 2d coordinates that have been - # transformed so they are not in a regular lat-lon grid. - # Then generate a discontiguity at a single lat-lon point. - self.testcube_discontig = full2d_global() - make_bounds_discontiguous_at_point(self.testcube_discontig, 3, 3) - # Repeat that for a discontiguity in the grid 'Y' direction. 
- self.testcube_discontig_along_y = full2d_global() - make_bounds_discontiguous_at_point( - self.testcube_discontig_along_y, 2, 4, in_y=True - ) - - def test_find_discontiguities(self): - # Check that the mask we generate when making the discontiguity - # matches that generated by find_discontiguities - cube = self.testcube_discontig - expected = cube.data.mask - returned = find_discontiguities(cube) - self.assertTrue(np.all(expected == returned)) - - def test_find_discontiguities_in_y(self): - # Check that the mask we generate when making the discontiguity - # matches that generated by find_discontiguities - cube = self.testcube_discontig_along_y - expected = cube.data.mask - returned = find_discontiguities(cube) - self.assertTrue(np.all(expected == returned)) - - def test_find_discontiguities_1d_coord(self): - # Check that an error is raised when we try and use - # find_discontiguities on 1D coordinates: - cube = simple_3d() - with self.assertRaises(NotImplementedError): - find_discontiguities(cube) - - def test_find_discontiguities_with_atol(self): - cube = self.testcube_discontig - # Choose a very large absolute tolerance which will result in fine - # discontiguities being disregarded - atol = 100 - # Construct an array the size of the points array filled with 'False' - # to represent a mask showing no discontiguities - expected = np.zeros(cube.shape, dtype=bool) - returned = find_discontiguities(cube, abs_tol=atol) - self.assertTrue(np.all(expected == returned)) - - def test_find_discontiguities_with_rtol(self): - cube = self.testcube_discontig - # Choose a very large relative tolerance which will result in fine - # discontiguities being disregarded - rtol = 1000 - # Construct an array the size of the points array filled with 'False' - # to represent a mask showing no discontiguities - expected = np.zeros(cube.shape, dtype=bool) - returned = find_discontiguities(cube, rel_tol=rtol) - self.assertTrue(np.all(expected == returned)) - - -if __name__ == "__main__": 
- tests.main() diff --git a/lib/iris/tests/unit/util/test_mask_cube.py b/lib/iris/tests/unit/util/test_mask_cube.py deleted file mode 100644 index 2d5aaa21f1..0000000000 --- a/lib/iris/tests/unit/util/test_mask_cube.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.mask_cube""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.tests.stock import ( - make_bounds_discontiguous_at_point, - sample_2d_latlons, -) -from iris.util import mask_cube - - -def full2d_global(): - return sample_2d_latlons(transformed=True) - - -@tests.skip_data -class Test(tests.IrisTest): - def setUp(self): - # Set up a 2d cube with a masked discontiguity to test masking - # of 2-dimensional cubes - self.cube_2d = full2d_global() - make_bounds_discontiguous_at_point(self.cube_2d, 3, 3) - - def test_mask_cube_2d(self): - # This tests the masking of a 2d data array - cube = self.cube_2d - discontiguity_array = ma.getmaskarray(cube.data).copy() - expected = cube.copy() - - # Remove mask so that we can pass an unmasked data set to - # mask_discontiguities, and check that it masks the correct point by - # comparing with masked data - cube.data.mask = ma.nomask - returned = mask_cube(cube, discontiguity_array) - self.assertTrue(np.all(expected.data.mask == returned.data.mask)) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_new_axis.py b/lib/iris/tests/unit/util/test_new_axis.py deleted file mode 100644 index 74b59cc7ec..0000000000 --- a/lib/iris/tests/unit/util/test_new_axis.py +++ /dev/null @@ -1,177 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under 
the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.new_axis`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip -import copy -import unittest - -import numpy as np - -import iris -from iris._lazy_data import as_lazy_data -import iris.tests.stock as stock -from iris.util import new_axis - - -class Test(tests.IrisTest): - def setUp(self): - self.data = np.array([[1, 2], [1, 2]]) - self.cube = iris.cube.Cube(self.data) - lat = iris.coords.DimCoord([1, 2], standard_name="latitude") - lon = iris.coords.DimCoord([1, 2], standard_name="longitude") - - time = iris.coords.DimCoord([1], standard_name="time") - wibble = iris.coords.AuxCoord([1], long_name="wibble") - - self.cube.add_dim_coord(lat, 0) - self.cube.add_dim_coord(lon, 1) - self.cube.add_aux_coord(time, None) - self.cube.add_aux_coord(wibble, None) - - self.coords = {"lat": lat, "lon": lon, "time": time, "wibble": wibble} - - def _assert_cube_notis(self, cube_a, cube_b): - for coord_a, coord_b in zip(cube_a.coords(), cube_b.coords()): - self.assertIsNot(coord_a, coord_b) - - self.assertIsNot(cube_a.metadata, cube_b.metadata) - - for factory_a, factory_b in zip( - cube_a.aux_factories, cube_b.aux_factories - ): - self.assertIsNot(factory_a, factory_b) - - def test_no_coord(self): - # Providing no coordinate to promote. - res = new_axis(self.cube) - com = iris.cube.Cube(self.data[None]) - com.add_dim_coord(self.coords["lat"].copy(), 1) - com.add_dim_coord(self.coords["lon"].copy(), 2) - com.add_aux_coord(self.coords["time"].copy(), None) - com.add_aux_coord(self.coords["wibble"].copy(), None) - - self.assertEqual(res, com) - self._assert_cube_notis(res, self.cube) - - def test_scalar_dimcoord(self): - # Providing a scalar coordinate to promote. 
- res = new_axis(self.cube, "time") - com = iris.cube.Cube(self.data[None]) - com.add_dim_coord(self.coords["lat"].copy(), 1) - com.add_dim_coord(self.coords["lon"].copy(), 2) - com.add_aux_coord(self.coords["time"].copy(), 0) - com.add_aux_coord(self.coords["wibble"].copy(), None) - - self.assertEqual(res, com) - self._assert_cube_notis(res, self.cube) - - def test_scalar_auxcoord(self): - # Providing a scalar coordinate to promote. - res = new_axis(self.cube, "wibble") - com = iris.cube.Cube(self.data[None]) - com.add_dim_coord(self.coords["lat"].copy(), 1) - com.add_dim_coord(self.coords["lon"].copy(), 2) - com.add_aux_coord(self.coords["time"].copy(), None) - com.add_aux_coord(self.coords["wibble"].copy(), 0) - - self.assertEqual(res, com) - self._assert_cube_notis(res, self.cube) - - def test_maint_factory(self): - # Ensure that aux factory persists. - data = np.arange(12, dtype="i8").reshape((3, 4)) - - orography = iris.coords.AuxCoord( - [10, 25, 50, 5], standard_name="surface_altitude", units="m" - ) - - model_level = iris.coords.AuxCoord( - [2, 1, 0], standard_name="model_level_number" - ) - - level_height = iris.coords.DimCoord( - [100, 50, 10], - long_name="level_height", - units="m", - attributes={"positive": "up"}, - bounds=[[150, 75], [75, 20], [20, 0]], - ) - - sigma = iris.coords.AuxCoord( - [0.8, 0.9, 0.95], - long_name="sigma", - bounds=[[0.7, 0.85], [0.85, 0.97], [0.97, 1.0]], - ) - - hybrid_height = iris.aux_factory.HybridHeightFactory( - level_height, sigma, orography - ) - - cube = iris.cube.Cube( - data, - standard_name="air_temperature", - units="K", - dim_coords_and_dims=[(level_height, 0)], - aux_coords_and_dims=[(orography, 1), (model_level, 0), (sigma, 0)], - aux_factories=[hybrid_height], - ) - - com = iris.cube.Cube( - data[None], - standard_name="air_temperature", - units="K", - dim_coords_and_dims=[(copy.copy(level_height), 1)], - aux_coords_and_dims=[ - (copy.copy(orography), 2), - (copy.copy(model_level), 1), - (copy.copy(sigma), 
1), - ], - aux_factories=[copy.copy(hybrid_height)], - ) - res = new_axis(cube) - - self.assertEqual(res, com) - self._assert_cube_notis(res, cube) - - # Check that factory dependencies are actual coords within the cube. - # Addresses a former bug : https://github.com/SciTools/iris/pull/3263 - (factory,) = list(res.aux_factories) - deps = factory.dependencies - for dep_name, dep_coord in deps.items(): - coord_name = dep_coord.name() - msg = ( - "Factory dependency {!r} is a coord named {!r}, " - "but it is *not* the coord of that name in the new cube." - ) - self.assertIs( - dep_coord, - res.coord(coord_name), - msg.format(dep_name, coord_name), - ) - - def test_lazy_data(self): - cube = iris.cube.Cube(as_lazy_data(self.data)) - cube.add_aux_coord(iris.coords.DimCoord([1], standard_name="time")) - res = new_axis(cube, "time") - self.assertTrue(cube.has_lazy_data()) - self.assertTrue(res.has_lazy_data()) - self.assertEqual(res.shape, (1,) + cube.shape) - - def test_masked_unit_array(self): - cube = stock.simple_3d_mask() - test_cube = cube[0, 0, 0] - test_cube = new_axis(test_cube, "longitude") - test_cube = new_axis(test_cube, "latitude") - data_shape = test_cube.data.shape - mask_shape = test_cube.data.mask.shape - self.assertEqual(data_shape, mask_shape) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py b/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py deleted file mode 100644 index 0e1e56fee5..0000000000 --- a/lib/iris/tests/unit/util/test_promote_aux_coord_to_dim_coord.py +++ /dev/null @@ -1,105 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.promote_aux_coord_to_dim_coord`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import unittest - -import iris -import iris.tests.stock as stock -from iris.util import promote_aux_coord_to_dim_coord - - -class Test(tests.IrisTest): - def test_dimension_already_has_dimcoord(self): - cube_a = stock.hybrid_height() - cube_b = cube_a.copy() - promote_aux_coord_to_dim_coord(cube_b, "model_level_number") - self.assertEqual( - cube_b.dim_coords, (cube_a.coord("model_level_number"),) - ) - - def test_old_dim_coord_is_now_aux_coord(self): - cube_a = stock.hybrid_height() - cube_b = cube_a.copy() - promote_aux_coord_to_dim_coord(cube_b, "model_level_number") - self.assertTrue(cube_a.coord("level_height") in cube_b.aux_coords) - - @tests.skip_data - def test_argument_is_coord_instance(self): - cube_a = stock.realistic_4d() - cube_b = cube_a.copy() - promote_aux_coord_to_dim_coord(cube_b, cube_b.coord("level_height")) - self.assertEqual( - cube_b.dim_coords, - ( - cube_a.coord("time"), - cube_a.coord("level_height"), - cube_a.coord("grid_latitude"), - cube_a.coord("grid_longitude"), - ), - ) - - @tests.skip_data - def test_dimension_is_anonymous(self): - cube_a = stock.realistic_4d() - cube_b = cube_a.copy() - cube_b.remove_coord("model_level_number") - promote_aux_coord_to_dim_coord(cube_b, "level_height") - self.assertEqual( - cube_b.dim_coords, - ( - cube_a.coord("time"), - cube_a.coord("level_height"), - cube_a.coord("grid_latitude"), - cube_a.coord("grid_longitude"), - ), - ) - - def test_already_a_dim_coord(self): - cube_a = stock.simple_2d_w_multidim_and_scalars() - cube_b = cube_a.copy() - promote_aux_coord_to_dim_coord(cube_b, "dim1") - self.assertEqual(cube_a, cube_b) - - def test_coord_of_that_name_does_not_exist(self): - cube_a = stock.simple_2d_w_multidim_and_scalars() - with self.assertRaises(iris.exceptions.CoordinateNotFoundError): - promote_aux_coord_to_dim_coord(cube_a, "wibble") - - def test_coord_does_not_exist(self): - cube_a = stock.simple_2d_w_multidim_and_scalars() - coord = 
cube_a.coord("dim1").copy() - coord.rename("new") - with self.assertRaises(ValueError): - promote_aux_coord_to_dim_coord(cube_a, coord) - - def test_argument_is_wrong_type(self): - cube_a = stock.simple_1d() - with self.assertRaises(TypeError): - promote_aux_coord_to_dim_coord(cube_a, 0.0) - - def test_trying_to_promote_a_multidim_coord(self): - cube_a = stock.simple_2d_w_multidim_coords() - with self.assertRaises(ValueError): - promote_aux_coord_to_dim_coord(cube_a, "bar") - - def test_trying_to_promote_a_scalar_coord(self): - cube_a = stock.simple_2d_w_multidim_and_scalars() - with self.assertRaises(ValueError): - promote_aux_coord_to_dim_coord(cube_a, "an_other") - - def test_trying_to_promote_a_nonmonotonic_coord(self): - cube_a = stock.hybrid_height() - with self.assertRaises(ValueError): - promote_aux_coord_to_dim_coord(cube_a, "surface_altitude") - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_reverse.py b/lib/iris/tests/unit/util/test_reverse.py deleted file mode 100644 index 1efc73700b..0000000000 --- a/lib/iris/tests/unit/util/test_reverse.py +++ /dev/null @@ -1,204 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.reverse`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. 
-import iris.tests as tests # isort:skip - -import unittest - -import numpy as np - -import iris -from iris.util import reverse - - -class Test_array(tests.IrisTest): - def test_simple_array(self): - a = np.arange(12).reshape(3, 4) - self.assertArrayEqual(a[::-1], reverse(a, 0)) - self.assertArrayEqual(a[::-1, ::-1], reverse(a, [0, 1])) - self.assertArrayEqual(a[:, ::-1], reverse(a, 1)) - self.assertArrayEqual(a[:, ::-1], reverse(a, [1])) - - msg = "Reverse was expecting a single axis or a 1d array *" - with self.assertRaisesRegex(ValueError, msg): - reverse(a, []) - - msg = "An axis value out of range for the number of dimensions *" - with self.assertRaisesRegex(ValueError, msg): - reverse(a, -1) - with self.assertRaisesRegex(ValueError, msg): - reverse(a, 10) - with self.assertRaisesRegex(ValueError, msg): - reverse(a, [-1]) - with self.assertRaisesRegex(ValueError, msg): - reverse(a, [0, -1]) - - msg = "To reverse an array, provide an int *" - with self.assertRaisesRegex(TypeError, msg): - reverse(a, "latitude") - - def test_single_array(self): - a = np.arange(36).reshape(3, 4, 3) - self.assertArrayEqual(a[::-1], reverse(a, 0)) - self.assertArrayEqual(a[::-1, ::-1], reverse(a, [0, 1])) - self.assertArrayEqual(a[:, ::-1, ::-1], reverse(a, [1, 2])) - self.assertArrayEqual(a[..., ::-1], reverse(a, 2)) - - msg = "Reverse was expecting a single axis or a 1d array *" - with self.assertRaisesRegex(ValueError, msg): - reverse(a, []) - - msg = "An axis value out of range for the number of dimensions *" - with self.assertRaisesRegex(ValueError, msg): - reverse(a, -1) - with self.assertRaisesRegex(ValueError, msg): - reverse(a, 10) - with self.assertRaisesRegex(ValueError, msg): - reverse(a, [-1]) - with self.assertRaisesRegex(ValueError, msg): - reverse(a, [0, -1]) - - with self.assertRaisesRegex( - TypeError, "To reverse an array, provide an int *" - ): - reverse(a, "latitude") - - -class Test_cube(tests.IrisTest): - def setUp(self): - # On this cube pair, the 
coordinates to perform operations on have - # matching long names but the points array on one cube is reversed - # with respect to that on the other. - data = np.arange(12).reshape(3, 4) - self.a1 = iris.coords.DimCoord([1, 2, 3], long_name="a") - self.b1 = iris.coords.DimCoord([1, 2, 3, 4], long_name="b") - a2 = iris.coords.DimCoord([3, 2, 1], long_name="a") - b2 = iris.coords.DimCoord([4, 3, 2, 1], long_name="b") - self.span = iris.coords.AuxCoord( - np.arange(12).reshape(3, 4), long_name="spanning" - ) - - self.cube1 = iris.cube.Cube( - data, - dim_coords_and_dims=[(self.a1, 0), (self.b1, 1)], - aux_coords_and_dims=[(self.span, (0, 1))], - ) - - self.cube2 = iris.cube.Cube( - data, dim_coords_and_dims=[(a2, 0), (b2, 1)] - ) - - def test_cube_dim(self): - cube1_reverse0 = reverse(self.cube1, 0) - cube1_reverse1 = reverse(self.cube1, 1) - cube1_reverse_both = reverse(self.cube1, (0, 1)) - - self.assertArrayEqual(self.cube1.data[::-1], cube1_reverse0.data) - self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse0.coord("a").points - ) - self.assertArrayEqual( - self.cube1.coord("b").points, cube1_reverse0.coord("b").points - ) - - self.assertArrayEqual(self.cube1.data[:, ::-1], cube1_reverse1.data) - self.assertArrayEqual( - self.cube1.coord("a").points, cube1_reverse1.coord("a").points - ) - self.assertArrayEqual( - self.cube2.coord("b").points, cube1_reverse1.coord("b").points - ) - - self.assertArrayEqual( - self.cube1.data[::-1, ::-1], cube1_reverse_both.data - ) - self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse_both.coord("a").points - ) - self.assertArrayEqual( - self.cube2.coord("b").points, cube1_reverse_both.coord("b").points - ) - - def test_cube_coord(self): - cube1_reverse0 = reverse(self.cube1, self.a1) - cube1_reverse1 = reverse(self.cube1, "b") - cube1_reverse_both = reverse(self.cube1, (self.a1, self.b1)) - cube1_reverse_spanning = reverse(self.cube1, "spanning") - - self.assertArrayEqual(self.cube1.data[::-1], 
cube1_reverse0.data) - self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse0.coord("a").points - ) - self.assertArrayEqual( - self.cube1.coord("b").points, cube1_reverse0.coord("b").points - ) - - self.assertArrayEqual(self.cube1.data[:, ::-1], cube1_reverse1.data) - self.assertArrayEqual( - self.cube1.coord("a").points, cube1_reverse1.coord("a").points - ) - self.assertArrayEqual( - self.cube2.coord("b").points, cube1_reverse1.coord("b").points - ) - - self.assertArrayEqual( - self.cube1.data[::-1, ::-1], cube1_reverse_both.data - ) - self.assertArrayEqual( - self.cube2.coord("a").points, cube1_reverse_both.coord("a").points - ) - self.assertArrayEqual( - self.cube2.coord("b").points, cube1_reverse_both.coord("b").points - ) - - self.assertArrayEqual( - self.cube1.data[::-1, ::-1], cube1_reverse_spanning.data - ) - self.assertArrayEqual( - self.cube2.coord("a").points, - cube1_reverse_spanning.coord("a").points, - ) - self.assertArrayEqual( - self.cube2.coord("b").points, - cube1_reverse_spanning.coord("b").points, - ) - self.assertArrayEqual( - self.span.points[::-1, ::-1], - cube1_reverse_spanning.coord("spanning").points, - ) - - msg = ( - "Expected to find exactly 1 'latitude' coordinate, but found none." - ) - with self.assertRaisesRegex( - iris.exceptions.CoordinateNotFoundError, msg - ): - reverse(self.cube1, "latitude") - - msg = "Reverse was expecting a single axis or a 1d array *" - with self.assertRaisesRegex(ValueError, msg): - reverse(self.cube1, []) - - msg = ( - "coords_or_dims must be int, str, coordinate or sequence of " - "these. Got cube." - ) - with self.assertRaisesRegex(TypeError, msg): - reverse(self.cube1, self.cube1) - - msg = ( - "coords_or_dims must be int, str, coordinate or sequence of " - "these." 
- ) - with self.assertRaisesRegex(TypeError, msg): - reverse(self.cube1, 3.0) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_rolling_window.py b/lib/iris/tests/unit/util/test_rolling_window.py deleted file mode 100644 index 3644da9c9c..0000000000 --- a/lib/iris/tests/unit/util/test_rolling_window.py +++ /dev/null @@ -1,119 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.rolling_window`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import numpy as np -import numpy.ma as ma - -from iris.util import rolling_window - - -class Test_rolling_window(tests.IrisTest): - def test_1d(self): - # 1-d array input - a = np.array([0, 1, 2, 3, 4], dtype=np.int32) - expected_result = np.array( - [[0, 1], [1, 2], [2, 3], [3, 4]], dtype=np.int32 - ) - result = rolling_window(a, window=2) - self.assertArrayEqual(result, expected_result) - - def test_2d(self): - # 2-d array input - a = np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int32) - expected_result = np.array( - [ - [[0, 1, 2], [1, 2, 3], [2, 3, 4]], - [[5, 6, 7], [6, 7, 8], [7, 8, 9]], - ], - dtype=np.int32, - ) - result = rolling_window(a, window=3, axis=1) - self.assertArrayEqual(result, expected_result) - - def test_1d_masked(self): - # 1-d masked array input - a = ma.array([0, 1, 2, 3, 4], mask=[0, 0, 1, 0, 0], dtype=np.int32) - expected_result = ma.array( - [[0, 1], [1, 2], [2, 3], [3, 4]], - mask=[[0, 0], [0, 1], [1, 0], [0, 0]], - dtype=np.int32, - ) - result = rolling_window(a, window=2) - self.assertMaskedArrayEqual(result, expected_result) - - def test_2d_masked(self): - # 2-d masked array input - a = ma.array( - [[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], - mask=[[0, 0, 1, 0, 0], 
[1, 0, 1, 0, 0]], - dtype=np.int32, - ) - expected_result = ma.array( - [ - [[0, 1, 2], [1, 2, 3], [2, 3, 4]], - [[5, 6, 7], [6, 7, 8], [7, 8, 9]], - ], - mask=[ - [[0, 0, 1], [0, 1, 0], [1, 0, 0]], - [[1, 0, 1], [0, 1, 0], [1, 0, 0]], - ], - dtype=np.int32, - ) - result = rolling_window(a, window=3, axis=1) - self.assertMaskedArrayEqual(result, expected_result) - - def test_degenerate_mask(self): - a = ma.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int32) - expected_result = ma.array( - [ - [[0, 1, 2], [1, 2, 3], [2, 3, 4]], - [[5, 6, 7], [6, 7, 8], [7, 8, 9]], - ], - mask=[ - [[0, 0, 0], [0, 0, 0], [0, 0, 0]], - [[0, 0, 0], [0, 0, 0], [0, 0, 0]], - ], - dtype=np.int32, - ) - result = rolling_window(a, window=3, axis=1) - self.assertMaskedArrayEqual(result, expected_result) - - def test_step(self): - # step should control how far apart consecutive windows are - a = np.array([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]], dtype=np.int32) - expected_result = np.array( - [[[0, 1, 2], [2, 3, 4]], [[5, 6, 7], [7, 8, 9]]], dtype=np.int32 - ) - result = rolling_window(a, window=3, step=2, axis=1) - self.assertArrayEqual(result, expected_result) - - def test_window_too_short(self): - # raise an error if the window length is less than 1 - a = np.empty([5]) - with self.assertRaises(ValueError): - rolling_window(a, window=0) - - def test_window_too_long(self): - # raise an error if the window length is longer than the - # corresponding array dimension - a = np.empty([7, 5]) - with self.assertRaises(ValueError): - rolling_window(a, window=6, axis=1) - - def test_invalid_step(self): - # raise an error if the step between windows is less than 1 - a = np.empty([5]) - with self.assertRaises(ValueError): - rolling_window(a, step=0) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/util/test_squeeze.py b/lib/iris/tests/unit/util/test_squeeze.py deleted file mode 100644 index b5f0a91b99..0000000000 --- a/lib/iris/tests/unit/util/test_squeeze.py +++ /dev/null 
@@ -1,51 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Test function :func:`iris.util.squeeze`.""" - -# Import iris.tests first so that some things can be initialised before -# importing anything else. -import iris.tests as tests # isort:skip - -import unittest - -import iris -import iris.tests.stock as stock - - -class Test(tests.IrisTest): - def setUp(self): - self.cube = stock.simple_2d_w_multidim_and_scalars() - - def test_no_change(self): - self.assertEqual(self.cube, iris.util.squeeze(self.cube)) - - def test_squeeze_one_dim(self): - cube_3d = iris.util.new_axis(self.cube, scalar_coord="an_other") - cube_2d = iris.util.squeeze(cube_3d) - - self.assertEqual(self.cube, cube_2d) - - def test_squeeze_two_dims(self): - cube_3d = iris.util.new_axis(self.cube, scalar_coord="an_other") - cube_4d = iris.util.new_axis(cube_3d, scalar_coord="air_temperature") - - self.assertEqual(self.cube, iris.util.squeeze(cube_4d)) - - def test_squeeze_one_anonymous_dim(self): - cube_3d = iris.util.new_axis(self.cube) - cube_2d = iris.util.squeeze(cube_3d) - - self.assertEqual(self.cube, cube_2d) - - def test_squeeze_to_scalar_cube(self): - cube_scalar = self.cube[0, 0] - cube_1d = iris.util.new_axis(cube_scalar) - - self.assertEqual(cube_scalar, iris.util.squeeze(cube_1d)) - - -if __name__ == "__main__": - unittest.main() diff --git a/lib/iris/tests/unit/util/test_unify_time_units.py b/lib/iris/tests/unit/util/test_unify_time_units.py deleted file mode 100644 index 16dc7054f3..0000000000 --- a/lib/iris/tests/unit/util/test_unify_time_units.py +++ /dev/null @@ -1,117 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Test function :func:`iris.util.array_equal`.""" - -# import iris tests first so that some things can be initialised before -# importing anything else -import iris.tests as tests # isort:skip - -import copy - -import cf_units -import numpy as np - -import iris -import iris.tests.stock as stock -from iris.util import unify_time_units - - -class Test(tests.IrisTest): - def simple_1d_time_cubes(self, calendar="gregorian"): - coord_points = [1, 2, 3, 4, 5] - data_points = [273, 275, 278, 277, 274] - reftimes = [ - "hours since 1970-01-01 00:00:00", - "hours since 1970-01-02 00:00:00", - ] - list_of_cubes = [] - for reftime in reftimes: - cube = iris.cube.Cube( - np.array(data_points, dtype=np.float32), - standard_name="air_temperature", - units="K", - ) - unit = cf_units.Unit(reftime, calendar=calendar) - coord = iris.coords.DimCoord( - points=np.array(coord_points, dtype=np.float32), - standard_name="time", - units=unit, - ) - cube.add_dim_coord(coord, 0) - list_of_cubes.append(cube) - return list_of_cubes - - def _common(self, expected, result, coord_name="time"): - # This tests time-like coords only. - for cube in result: - try: - epoch = cube.coord(coord_name).units.origin - except iris.exceptions.CoordinateNotFoundError: - pass - else: - self.assertEqual(expected, epoch) - - def test_cubelist_with_time_coords(self): - # Tests an :class:`iris.cube.CubeList` containing cubes with time - # coords against a time string and a time coord. - cubelist = iris.cube.CubeList(self.simple_1d_time_cubes()) - expected = "hours since 1970-01-01 00:00:00" - unify_time_units(cubelist) - self._common(expected, cubelist) - - def test_list_of_cubes_with_time_coords(self): - # Tests an iterable containing cubes with time coords against a time - # string and a time coord. 
- list_of_cubes = self.simple_1d_time_cubes() - expected = "hours since 1970-01-01 00:00:00" - unify_time_units(list_of_cubes) - self._common(expected, list_of_cubes) - - @tests.skip_data - def test_no_time_coord_in_cubes(self): - path0 = tests.get_data_path(("PP", "aPPglob1", "global.pp")) - path1 = tests.get_data_path(("PP", "aPPglob1", "global_t_forecast.pp")) - cube0 = iris.load_cube(path0) - cube1 = iris.load_cube(path1) - cubes = iris.cube.CubeList([cube0, cube1]) - result = copy.copy(cubes) - unify_time_units(result) - self.assertEqual(cubes, result) - - def test_time_coord_only_in_some_cubes(self): - list_of_cubes = self.simple_1d_time_cubes() - cube = stock.simple_2d() - list_of_cubes.append(cube) - expected = "hours since 1970-01-01 00:00:00" - unify_time_units(list_of_cubes) - self._common(expected, list_of_cubes) - - def test_multiple_time_coords_in_cube(self): - cube0, cube1 = self.simple_1d_time_cubes() - units = cf_units.Unit( - "days since 1980-05-02 00:00:00", calendar="gregorian" - ) - aux_coord = iris.coords.AuxCoord( - 72, standard_name="forecast_reference_time", units=units - ) - cube1.add_aux_coord(aux_coord) - cubelist = iris.cube.CubeList([cube0, cube1]) - expected = "hours since 1970-01-01 00:00:00" - unify_time_units(cubelist) - self._common(expected, cubelist) - self._common(expected, cubelist, coord_name="forecast_reference_time") - - def test_multiple_calendars(self): - cube0, cube1 = self.simple_1d_time_cubes() - cube2, cube3 = self.simple_1d_time_cubes(calendar="360_day") - cubelist = iris.cube.CubeList([cube0, cube1, cube2, cube3]) - expected = "hours since 1970-01-01 00:00:00" - unify_time_units(cubelist) - self._common(expected, cubelist) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/util.py b/lib/iris/util.py index 53cd78724e..9e0db9e66e 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -24,6 +24,8 @@ from iris._deprecation import warn_deprecated from iris._lazy_data import as_concrete_data, 
is_lazy_data +from iris.common import SERVICES +from iris.common.lenient import _lenient_client import iris.exceptions @@ -733,7 +735,6 @@ def _build_full_slice_given_keys(keys, ndim): for i, key in enumerate(keys): if key is Ellipsis: - # replace any subsequent Ellipsis objects in keys with # slice(None, None) as per Numpy keys = keys[:i] + tuple( @@ -1094,7 +1095,7 @@ def format_array(arr): return result -def new_axis(src_cube, scalar_coord=None): +def new_axis(src_cube, scalar_coord=None, expand_extras=()): """ Create a new axis as the leading dimension of the cube, promoting a scalar coordinate if specified. @@ -1109,9 +1110,16 @@ def new_axis(src_cube, scalar_coord=None): * scalar_coord (:class:`iris.coord.Coord` or 'string') Scalar coordinate to promote to a dimension coordinate. + * expand_extras (iterable) + Auxiliary coordinates, ancillary variables and cell measures which will + be expanded so that they map to the new dimension as well as the + existing dimensions. + Returns: A new :class:`iris.cube.Cube` instance with one extra leading dimension - (length 1). + (length 1). Chosen auxiliary coordinates, cell measures and ancillary + variables will also be given an additional dimension, associated with + the leading dimension of the cube. For example:: @@ -1120,40 +1128,83 @@ def new_axis(src_cube, scalar_coord=None): >>> ncube = iris.util.new_axis(cube, 'time') >>> ncube.shape (1, 360, 360) - """ - from iris.coords import DimCoord - from iris.cube import Cube + + def _reshape_data_array(data_manager): + # Indexing numpy arrays requires loading deferred data here returning a + # copy of the data with a new leading dimension. + # If the data of the source cube (or values of the dimensional metadata + # object) is a Masked Constant, it is changed here to a Masked Array to + # allow the mask to gain an extra dimension with the data. 
+ if data_manager.has_lazy_data(): + new_data = data_manager.lazy_data()[None] + else: + if isinstance(data_manager.data, ma.core.MaskedConstant): + new_data = ma.array([np.nan], mask=[True]) + else: + new_data = data_manager.data[None] + return new_data + + def _handle_dimensional_metadata( + cube, dm_item, cube_add_method, expand_extras + ): + cube_dims = dm_item.cube_dims(cube) + if dm_item in expand_extras: + if cube_dims == (): + new_dm_item, new_dims = dm_item.copy(), 0 + else: + new_dims = np.concatenate([(0,), np.array(cube_dims) + 1]) + new_values = _reshape_data_array(dm_item._values_dm) + kwargs = dm_item.metadata._asdict() + new_dm_item = dm_item.__class__(new_values, **kwargs) + try: + if dm_item.has_bounds(): + new_dm_item.bounds = _reshape_data_array( + dm_item._bounds_dm + ) + except AttributeError: + pass + else: + new_dims = np.array(cube_dims) + 1 + new_dm_item = dm_item.copy() + + cube_add_method(new_dm_item, new_dims) if scalar_coord is not None: scalar_coord = src_cube.coord(scalar_coord) + if not scalar_coord.shape == (1,): + emsg = scalar_coord.name() + "is not a scalar coordinate." + raise ValueError(emsg) - # Indexing numpy arrays requires loading deferred data here returning a - # copy of the data with a new leading dimension. - # If the source cube is a Masked Constant, it is changed here to a Masked - # Array to allow the mask to gain an extra dimension with the data. 
- if src_cube.has_lazy_data(): - new_cube = Cube(src_cube.lazy_data()[None]) - else: - if isinstance(src_cube.data, ma.core.MaskedConstant): - new_data = ma.array([np.nan], mask=[True]) - else: - new_data = src_cube.data[None] - new_cube = Cube(new_data) + expand_extras = [ + src_cube._dimensional_metadata(item) for item in expand_extras + ] + new_cube = iris.cube.Cube(_reshape_data_array(src_cube._data_manager)) new_cube.metadata = src_cube.metadata + for coord in src_cube.dim_coords: + coord_dims = np.array(src_cube.coord_dims(coord)) + 1 + new_cube.add_dim_coord(coord.copy(), coord_dims) + for coord in src_cube.aux_coords: if scalar_coord and scalar_coord == coord: - dim_coord = DimCoord.from_coord(coord) + dim_coord = iris.coords.DimCoord.from_coord(coord) new_cube.add_dim_coord(dim_coord, 0) else: - dims = np.array(src_cube.coord_dims(coord)) + 1 - new_cube.add_aux_coord(coord.copy(), dims) + _handle_dimensional_metadata( + src_cube, coord, new_cube.add_aux_coord, expand_extras + ) - for coord in src_cube.dim_coords: - coord_dims = np.array(src_cube.coord_dims(coord)) + 1 - new_cube.add_dim_coord(coord.copy(), coord_dims) + for cm in src_cube.cell_measures(): + _handle_dimensional_metadata( + src_cube, cm, new_cube.add_cell_measure, expand_extras + ) + + for av in src_cube.ancillary_variables(): + _handle_dimensional_metadata( + src_cube, av, new_cube.add_ancillary_variable, expand_extras + ) nonderived_coords = src_cube.dim_coords + src_cube.aux_coords coord_mapping = { @@ -1281,6 +1332,32 @@ def regular_step(coord): return avdiff.astype(coord.points.dtype) +def regular_points(zeroth, step, count): + """Make an array of regular points. + + Create an array of `count` points from `zeroth` + `step`, adding `step` each + time. In float32 if this gives a sufficiently regular array (tested with + points_step) and float64 if not. + + Parameters + ---------- + zeroth : number + The value *prior* to the first point value. 
+ + step : number + The numeric difference between successive point values. + + count : number + The number of point values. + + """ + points = (zeroth + step) + step * np.arange(count, dtype=np.float32) + _, regular = iris.util.points_step(points) + if not regular: + points = (zeroth + step) + step * np.arange(count, dtype=np.float64) + return points + + def points_step(points): """Determine whether `points` has a regular step. @@ -1728,29 +1805,124 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): return bad_points_boolean -def mask_cube(cube, points_to_mask): +def _mask_array(array, points_to_mask, in_place=False): """ - Masks any cells in the data array which correspond to cells marked `True` - in the `points_to_mask` array. + Apply masking to array where points_to_mask is True/non-zero. Designed to + work with iris.analysis.maths._binary_op_common so array and points_to_mask + will be broadcastable to each other. array and points_to_mask may be numpy + or dask types (or one of each). - Args: + If array is lazy then in_place is ignored: _math_op_common will use the + returned value regardless of in_place, so we do not need to implement it + here. If in_place is True then array must be a + :class:`numpy.ma.MaskedArray` or :class:`dask.array.Array` + (must be a dask array if points_to_mask is lazy). - * cube (`iris.cube.Cube`): - A 2-dimensional instance of :class:`iris.cube.Cube`. + """ + # Decide which array library to use. + if is_lazy_data(points_to_mask) or is_lazy_data(array): + al = da + if not is_lazy_data(array) and in_place: + # Non-lazy array and lazy mask should not come up for in_place + # case, due to _binary_op_common handling added at #3790. + raise TypeError( + "Cannot apply lazy mask in-place to a non-lazy array." + ) + in_place = False - * points_to_mask (`numpy.ndarray` of bool): - A 2d boolean array of Truth values representing points to mask in the - x and y arrays of the cube. 
+ elif in_place and not isinstance(array, ma.MaskedArray): + raise TypeError("Cannot apply a mask in-place to a plain numpy array.") + else: + al = np - Returns: + points_to_mask = points_to_mask.astype(bool) + + # Treat any masked points on our mask as False. + points_to_mask = al.ma.filled(points_to_mask, False) + + # Get broadcasted views of the arrays. Note that broadcast_arrays does not + # preserve masks, so we need to explicitly handle any exising mask on array. + array_mask = al.ma.getmaskarray(array) - * result (`iris.cube.Cube`): - A cube whose data array is masked at points specified by input array. + array_data, array_mask, points_to_mask = al.broadcast_arrays( + array, array_mask, points_to_mask + ) + + new_mask = al.logical_or(array_mask, points_to_mask) + if in_place: + array.mask = new_mask + result = array # Resolve uses returned value even if working in place. + else: + # Return a new, independent array. + result = al.ma.masked_array(array_data.copy(), mask=new_mask) + + return result + + +@_lenient_client(services=SERVICES) +def mask_cube(cube, points_to_mask, in_place=False, dim=None): """ - cube.data = ma.masked_array(cube.data) - cube.data[points_to_mask] = ma.masked - return cube + Masks any cells in the cube's data array which correspond to cells marked + ``True`` (or non zero) in ``points_to_mask``. ``points_to_mask`` may be + specified as a :class:`numpy.ndarray`, :class:`iris.coords.Coord` or + :class:`iris.cube.Cube`, following the same broadcasting approach as cube + arithmetic (see :ref:`cube maths`). + + Parameters + ---------- + + cube : iris.cube.Cube + Cube containing data that requires masking. + + points_to_mask : numpy.ndarray, iris.coords.Coord or iris.cube.Cube + Specifies booleans (or ones and zeros) indicating which points will be masked. + + in_place : bool, default=False + If `True`, masking is applied to the input cube. Otherwise a copy is masked + and returned. 
+ + dim : int, optional + If `points_to_mask` is a coord which does not exist on the cube, specify the + dimension to which it should be mapped. + + Returns + ------- + + iris.cube.Cube + A cube whose data array is masked at points specified by ``points_to_mask``. + + Notes + ----- + + If either ``cube`` or ``points_to_mask`` is lazy, the result will be lazy. + + """ + if in_place and not cube.has_lazy_data(): + # Ensure cube data is masked type so we can work on it in-place. + cube.data = ma.asanyarray(cube.data) + mask_function = functools.partial(_mask_array, in_place=True) + else: + mask_function = _mask_array + + input_metadata = cube.metadata + result = iris.analysis.maths._binary_op_common( + mask_function, + "mask", + cube, + points_to_mask, + cube.units, + in_place=in_place, + dim=dim, + sanitise_metadata=False, + ) + + # Resolve combines the metadata from the two operands, but we want to + # preserve the metadata from the (first) input cube. + result.metadata = input_metadata + + if not in_place: + return result def equalise_attributes(cubes): @@ -1806,7 +1978,7 @@ def is_masked(array): Parameters ---------- - array : :class:`numpy.Array` or `dask.array.Array` + array : :class:`numpy.Array` or :class:`dask.array.Array` The array to be checked for masks. Returns diff --git a/noxfile.py b/noxfile.py index 8b23948677..c7b0a0e05b 100755 --- a/noxfile.py +++ b/noxfile.py @@ -5,9 +5,13 @@ """ +from datetime import datetime import hashlib import os from pathlib import Path +import re +from tempfile import NamedTemporaryFile +from typing import Literal import nox from nox.logger import logger @@ -31,9 +35,7 @@ # https://github.com/numpy/numpy/pull/19478 # https://github.com/matplotlib/matplotlib/pull/22099 #: Common session environment variables. 
-ENV = dict( - NPY_DISABLE_CPU_FEATURES="AVX512F,AVX512CD,AVX512VL,AVX512BW,AVX512DQ,AVX512_SKX" -) +ENV = dict(NPY_DISABLE_CPU_FEATURES="AVX512F,AVX512CD,AVX512_SKX") def session_lockfile(session: nox.sessions.Session) -> Path: @@ -169,46 +171,13 @@ def prepare_venv(session: nox.sessions.Session) -> None: ) -@nox.session -def precommit(session: nox.sessions.Session): - """ - Perform pre-commit hooks of iris codebase. - - Parameters - ---------- - session: object - A `nox.sessions.Session` object. - - """ - import yaml - - # Pip install the session requirements. - session.install("pre-commit") - - # Load the pre-commit configuration YAML file. - with open(".pre-commit-config.yaml", "r") as fi: - config = yaml.load(fi, Loader=yaml.FullLoader) - - # List of pre-commit hook ids that we don't want to run. - excluded = ["no-commit-to-branch"] - - # Enumerate the ids of pre-commit hooks we do want to run. - ids = [ - hook["id"] - for entry in config["repos"] - for hook in entry["hooks"] - if hook["id"] not in excluded - ] - - # Execute the pre-commit hooks. - [session.run("pre-commit", "run", "--all-files", id) for id in ids] - - @nox.session(python=PY_VER, venv_backend="conda") def tests(session: nox.sessions.Session): """ Perform iris system, integration and unit tests. + Coverage testing is enabled if the "--coverage" or "-c" flag is used. 
+ Parameters ---------- session: object @@ -218,13 +187,15 @@ def tests(session: nox.sessions.Session): prepare_venv(session) session.install("--no-deps", "--editable", ".") session.env.update(ENV) - session.run( + run_args = [ "python", "-m", "iris.tests.runner", "--default-tests", - "--system-tests", - ) + ] + if "-c" in session.posargs or "--coverage" in session.posargs: + run_args.append("--coverage") + session.run(*run_args) @nox.session(python=_PY_VERSION_DOCSBUILD, venv_backend="conda") @@ -253,7 +224,22 @@ def doctest(session: nox.sessions.Session): "doctest", external=True, ) - session.cd("..") + + +@nox.session(python=_PY_VERSION_DOCSBUILD, venv_backend="conda") +def gallery(session: nox.sessions.Session): + """ + Perform iris gallery doc-tests. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.install("--no-deps", "--editable", ".") + session.env.update(ENV) session.run( "python", "-m", @@ -289,48 +275,264 @@ def linkcheck(session: nox.sessions.Session): ) -@nox.session(python=PY_VER[-1], venv_backend="conda") +@nox.session(python=PY_VER, venv_backend="conda") +def wheel(session: nox.sessions.Session): + """ + Perform iris local wheel install and import test. + + Parameters + ---------- + session: object + A `nox.sessions.Session` object. + + """ + prepare_venv(session) + session.cd("dist") + fname = list(Path(".").glob("scitools_iris-*.whl")) + if len(fname) == 0: + raise ValueError("Cannot find wheel to install.") + if len(fname) > 1: + emsg = ( + f"Expected to find 1 wheel to install, found {len(fname)} instead." 
+ ) + raise ValueError(emsg) + session.install(fname[0].name) + session.run( + "python", + "-c", + "import iris; print(f'{iris.__version__=}')", + external=True, + ) + + +@nox.session @nox.parametrize( - ["ci_mode"], - [True, False], - ids=["ci compare", "full"], + "run_type", + ["overnight", "branch", "cperf", "sperf", "custom"], + ids=["overnight", "branch", "cperf", "sperf", "custom"], ) -def benchmarks(session: nox.sessions.Session, ci_mode: bool): +def benchmarks( + session: nox.sessions.Session, + run_type: Literal["overnight", "branch", "cperf", "sperf", "custom"], +): """ - Perform esmf-regrid performance benchmarks (using Airspeed Velocity). + Perform Iris performance benchmarks (using Airspeed Velocity). + + All run types require a single Nox positional argument (e.g. + ``nox --session="foo" -- my_pos_arg``) - detailed in the parameters + section - and can optionally accept a series of further arguments that will + be added to session's ASV command. Parameters ---------- session: object A `nox.sessions.Session` object. - ci_mode: bool - Run a cut-down selection of benchmarks, comparing the current commit to - the last commit for performance regressions. - - Notes - ----- - ASV is set up to use ``nox --session=tests --install-only`` to prepare - the benchmarking environment. This session environment must use a Python - version that is also available for ``--session=tests``. + run_type: {"overnight", "branch", "cperf", "sperf", "custom"} + * ``overnight``: benchmarks all commits between the input **first + commit** to ``HEAD``, comparing each to its parent for performance + shifts. If a commit causes shifts, the output is saved to a file: + ``.asv/performance-shifts/``. Designed for checking the + previous 24 hours' commits, typically in a scheduled script. + * ``branch``: Performs the same operations as ``overnight``, but always + on two commits only - ``HEAD``, and ``HEAD``'s merge-base with the + input **base branch**. 
Output from this run is never saved to a file. + Designed for testing if the active branch's changes cause performance + shifts - anticipating what would be caught by ``overnight`` once + merged. + **For maximum accuracy, avoid using the machine that is running this + session. Run time could be >1 hour for the full benchmark suite.** + * ``cperf``: Run the on-demand CPerf suite of benchmarks (part of the + UK Met Office NG-VAT project) for the ``HEAD`` of ``upstream/main`` + only, and publish the results to the input **publish directory**, + within a unique subdirectory for this run. + * ``sperf``: As with CPerf, but for the SPerf suite. + * ``custom``: run ASV with the input **ASV sub-command**, without any + preset arguments - must all be supplied by the user. So just like + running ASV manually, with the convenience of re-using the session's + scripted setup steps. + + Examples + -------- + * ``nox --session="benchmarks(overnight)" -- a1b23d4`` + * ``nox --session="benchmarks(branch)" -- upstream/main`` + * ``nox --session="benchmarks(branch)" -- upstream/mesh-data-model`` + * ``nox --session="benchmarks(branch)" -- upstream/main --bench=regridding`` + * ``nox --session="benchmarks(cperf)" -- my_publish_dir + * ``nox --session="benchmarks(custom)" -- continuous a1b23d4 HEAD --quick`` """ + # The threshold beyond which shifts are 'notable'. See `asv compare`` docs + # for more. + COMPARE_FACTOR = 1.2 + session.install("asv", "nox") + + data_gen_var = "DATA_GEN_PYTHON" + if data_gen_var in os.environ: + print("Using existing data generation environment.") + else: + print("Setting up the data generation environment...") + # Get Nox to build an environment for the `tests` session, but don't + # run the session. Will re-use a cached environment if appropriate. + session.run_always( + "nox", + "--session=tests", + "--install-only", + f"--python={_PY_VERSION_LATEST}", + ) + # Find the environment built above, set it to be the data generation + # environment. 
+ data_gen_python = next( + Path(".nox").rglob(f"tests*/bin/python{_PY_VERSION_LATEST}") + ).resolve() + session.env[data_gen_var] = data_gen_python + + mule_dir = data_gen_python.parents[1] / "resources" / "mule" + if not mule_dir.is_dir(): + print("Installing Mule into data generation environment...") + session.run_always( + "git", + "clone", + "https://github.com/metomi/mule.git", + str(mule_dir), + external=True, + ) + session.run_always( + str(data_gen_python), + "-m", + "pip", + "install", + str(mule_dir / "mule"), + external=True, + ) + + print("Running ASV...") session.cd("benchmarks") # Skip over setup questions for a new machine. session.run("asv", "machine", "--yes") - def asv_exec(*sub_args: str) -> None: - run_args = ["asv", *sub_args] - session.run(*run_args) - - if ci_mode: - # If on a PR: compare to the base (target) branch. - # Else: compare to previous commit. - previous_commit = os.environ.get("PR_BASE_SHA", "HEAD^1") - try: - asv_exec("continuous", "--factor=1.2", previous_commit, "HEAD") - finally: - asv_exec("compare", previous_commit, "HEAD") + # All run types require one Nox posarg. + run_type_arg = { + "overnight": "first commit", + "branch": "base branch", + "cperf": "publish directory", + "sperf": "publish directory", + "custom": "ASV sub-command", + } + if run_type not in run_type_arg.keys(): + message = f"Unsupported run-type: {run_type}" + raise NotImplementedError(message) + if not session.posargs: + message = ( + f"Missing mandatory first Nox session posarg: " + f"{run_type_arg[run_type]}" + ) + raise ValueError(message) + first_arg = session.posargs[0] + # Optional extra arguments to be passed down to ASV. 
+ asv_args = session.posargs[1:] + + def asv_compare(*commits): + """Run through a list of commits comparing each one to the next.""" + commits = [commit[:8] for commit in commits] + shifts_dir = Path(".asv") / "performance-shifts" + for i in range(len(commits) - 1): + before = commits[i] + after = commits[i + 1] + asv_command_ = f"asv compare {before} {after} --factor={COMPARE_FACTOR} --split" + session.run(*asv_command_.split(" ")) + + if run_type == "overnight": + # Record performance shifts. + # Run the command again but limited to only showing performance + # shifts. + shifts = session.run( + *asv_command_.split(" "), "--only-changed", silent=True + ) + if shifts: + # Write the shifts report to a file. + # Dir is used by .github/workflows/benchmarks.yml, + # but not cached - intended to be discarded after run. + shifts_dir.mkdir(exist_ok=True, parents=True) + shifts_path = (shifts_dir / after).with_suffix(".txt") + with shifts_path.open("w") as shifts_file: + shifts_file.write(shifts) + + # Common ASV arguments for all run_types except `custom`. + asv_harness = ( + "asv run {posargs} --attribute rounds=4 --interleave-rounds --strict " + "--show-stderr" + ) + + if run_type == "overnight": + first_commit = first_arg + commit_range = f"{first_commit}^^.." + asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + # git rev-list --first-parent is the command ASV uses. 
+ git_command = f"git rev-list --first-parent {commit_range}" + commit_string = session.run( + *git_command.split(" "), silent=True, external=True + ) + commit_list = commit_string.rstrip().split("\n") + asv_compare(*reversed(commit_list)) + + elif run_type == "branch": + base_branch = first_arg + git_command = f"git merge-base HEAD {base_branch}" + merge_base = session.run( + *git_command.split(" "), silent=True, external=True + )[:8] + + with NamedTemporaryFile("w") as hashfile: + hashfile.writelines([merge_base, "\n", "HEAD"]) + hashfile.flush() + commit_range = f"HASHFILE:{hashfile.name}" + asv_command = asv_harness.format(posargs=commit_range) + session.run(*asv_command.split(" "), *asv_args) + + asv_compare(merge_base, "HEAD") + + elif run_type in ("cperf", "sperf"): + publish_dir = Path(first_arg) + if not publish_dir.is_dir(): + message = ( + f"Input 'publish directory' is not a directory: {publish_dir}" + ) + raise NotADirectoryError(message) + publish_subdir = ( + publish_dir + / f"{run_type}_{datetime.now().strftime('%Y%m%d_%H%M%S')}" + ) + publish_subdir.mkdir() + + # Activate on demand benchmarks (C/SPerf are deactivated for 'standard' runs). + session.env["ON_DEMAND_BENCHMARKS"] = "True" + commit_range = "upstream/main^!" + + asv_command = ( + asv_harness.format(posargs=commit_range) + f" --bench={run_type}" + ) + # C/SPerf benchmarks are much bigger than the CI ones: + # Don't fail the whole run if memory blows on 1 benchmark. + asv_command = asv_command.replace(" --strict", "") + # Only do a single round. + asv_command = re.sub(r"rounds=\d", "rounds=1", asv_command) + session.run(*asv_command.split(" "), *asv_args) + + asv_command = f"asv publish {commit_range} --html-dir={publish_subdir}" + session.run(*asv_command.split(" ")) + + # Print completion message. + location = Path().cwd() / ".asv" + print( + f'New ASV results for "{run_type}".\n' + f'See "{publish_subdir}",' + f'\n or JSON files under "{location / "results"}".' 
+ ) + else: - # f5ceb808 = first commit supporting nox --install-only . - asv_exec("run", "f5ceb808..HEAD") + asv_subcommand = first_arg + assert run_type == "custom" + session.run("asv", asv_subcommand, *asv_args) diff --git a/pyproject.toml b/pyproject.toml index 26e6ae727a..b44187191b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,17 @@ [build-system] # Defined by PEP 518 requires = [ - "setuptools>=40.8.0", + "setuptools>=64", + "setuptools_scm[toml]>=7.0", "wheel", ] # Defined by PEP 517 build-backend = "setuptools.build_meta" +[tool.setuptools_scm] +write_to = "lib/iris/_version.py" +local_scheme = "dirty-tag" +version_scheme = "release-branch-semver" [tool.black] line-length = 79 @@ -37,3 +42,23 @@ extend_skip = [ ] skip_gitignore = "True" verbose = "False" + +[tool.pytest.ini_options] +addopts = "-ra" +testpaths = "lib/iris" + +[tool.coverage.run] +branch = true +source = [ + "lib/iris", +] +omit = [ + "lib/iris/tests/*", + "lib/iris/etc/*", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "if __name__ == .__main__.:" +] diff --git a/requirements/ci/iris.yml b/requirements/ci/iris.yml index a76932b56e..1e473d36d5 120000 --- a/requirements/ci/iris.yml +++ b/requirements/ci/iris.yml @@ -1 +1 @@ -py38.yml \ No newline at end of file +py310.yml \ No newline at end of file diff --git a/requirements/ci/nox.lock/py310-linux-64.lock b/requirements/ci/nox.lock/py310-linux-64.lock new file mode 100644 index 0000000000..0d97158f45 --- /dev/null +++ b/requirements/ci/nox.lock/py310-linux-64.lock @@ -0,0 +1,272 @@ +# Generated by conda-lock. 
+# platform: linux-64 +# input_hash: f8af5f4aafcb766f463a1a897d3dab9e04f05f1494bced5931d78175ca0c66df +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 +https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.10-3_cp310.conda#4eb33d14d794b0f4be116443ffed3853 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda#51fc4fcfb19f5d95ffc8c339db5068e8 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d +https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 
+https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567 +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55 +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f +https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407 
+https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.17-h0b41bf4_0.conda#5cc781fd91968b11a8a7fdbee0982676 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 +https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5 
+https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.2-hcb278e6_0.conda#08efb1e1813f1a151b7a945b972a049b +https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.8-h0b41bf4_0.conda#e043403cd18faf815bf7705ab6c1e092 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 +https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.22-h11f4161_0.conda#504fa9e712b99494a9cf4630e3ca7d78 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f 
+https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-ha901b37_0.conda#6a39818710235826181e104aada40c75 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 
+https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad +https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-hadd5161_0.conda#70cbb0c2033665f2a7339bf0ec51a67f +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h6adf6a1_2.conda#2e648a34072eb39d7c4fc2a9981c5f0c +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h79f4944_0.conda#3f67368c9b0e77a693acad193310baf1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hd7da12d_0.conda#b05d7ea8b76f1172d5fe4f30e03277ea +https://conda.anaconda.org/conda-forge/linux-64/nss-3.88-he45b914_0.conda#d7a81dfb99ad8fbb88872fb7ec646e6c +https://conda.anaconda.org/conda-forge/linux-64/python-3.10.9-he550d4f_0_cpython.conda#3cb3e91b3fe66baa68a12c85f39b9b40 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py310hff52083_1003.tar.bz2#8324f8fff866055d4b32eb25e091fe31 +https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b +https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 +https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py310hff52083_3.tar.bz2#785160da087cf1d70e989afbb761f01c 
+https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a +https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda#44f6828b8f7cc3433d68d1d1c0e9add2 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py310hbf28c38_1.tar.bz2#ad5647e517ba68e2868ef2e6e6ff7723 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h3e3d535_1.conda#a3a0f7a6f0885f5e1e0ec691566afb77 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.88.1-hdc1c0ab_0.conda#81eaeb3b35163c8e90e57532bc93754d 
+https://conda.anaconda.org/conda-forge/linux-64/libpq-15.2-hb675445_0.conda#4654b17eccaba55b8581d6b9c77f53cc +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py310h1fa729e_0.conda#a1f0db6709778b77b5903541eeac4032 +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py310h37cc914_0.tar.bz2#98d598d9178d7f3091212c61c0be693c +https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py310h8deb116_0.conda#b7085457309e206174b8e234d90a7605 +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda#1ff2e3ca41f0ce16afec7190db28288b +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py310h5764c6d_0.tar.bz2#c3c55664e9becc48e6a652e2b641961f +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 
+https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py310h1fa729e_0.conda#8d155ac95b1dfe585bcb6bec6a91c73b +https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda#f59d49a7b464901cf714b9e7984d01a2 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py310h5764c6d_5.tar.bz2#9e68d2ff6d98737c855b65f48dd3c597 +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.3.2-pyhd8ed1ab_0.conda#543af74c4042aee5702a033e03a216d0 +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 
+https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py310h5764c6d_1.tar.bz2#be4a201ac582c11d89ed7d15b3157cc3 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py310h5764c6d_0.tar.bz2#e972c5a1f472561cf4a91962cb01f4b4 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zipp-3.14.0-pyhd8ed1ab_0.conda#01ea04980fa39d7b6dbdd6c67016d177 +https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.2-pyha770c72_0.conda#88b59f6989f0ed5ab3433af0b82555e1 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py310h255011f_3.conda#800596144bb613cd7ac58b80900ce835 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py310hde88566_1.tar.bz2#94ce7a76b0c912279f6958e0b6b21d2b +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py310hdf3cbec_0.conda#7bf9d8c765b6b04882c719509652c6d6 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.1.0-py310h1fa729e_0.conda#da7c45dbe780f5e162011a3af44e5009 +https://conda.anaconda.org/conda-forge/linux-64/curl-7.88.1-hdc1c0ab_0.conda#1968e4fef727858ac04746560e820928 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py310h5764c6d_1.tar.bz2#12ebe92a8a578bc903bd844744f4d040 +https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_had23c3d_1.conda#36c65ed73b7c92589bd9562ef8a6023d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py310hde88566_1008.tar.bz2#f9dd8a7a2fcc23eb2cd95cd817c949e7 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c +https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py310h023d228_1.conda#bbea829b541aa15df5c65bd40b8c1981 +https://conda.anaconda.org/conda-forge/noarch/pip-23.0.1-pyhd8ed1ab_0.conda#8025ca83b8ba5430b640b83917c2a6f7 +https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.1-h8ffa02c_2.conda#c264aea0e16bba26afa0a0940e954492 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-ha8d29e2_1.conda#dbfc2a8d63a43a11acf4c704e1ef9d0c +https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.1-pyhd8ed1ab_0.conda#f0be05afc9c9ab45e273c088e00c258b 
+https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py310hde88566_3.tar.bz2#0b686f306a76fba9a61e7019f854321f +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py310h0a54255_0.conda#b9e952fe3f7528ab603d2776175ba8d2 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py310h8b84c32_0.conda#965113c401c7dc9b7a4cd5f9af57e185 +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.7-py310heca2aa9_0.conda#142c074701cf90c88667b461678aee81 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026 +https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py310h5764c6d_1005.tar.bz2#87669c3468dff637bbd0363bc0f895cf +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py310hde88566_2.tar.bz2#7433944046deda7775c5b1f7e0b6fe18 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.1-py310h34c0648_0.conda#763b301155631438b09e6f2072d3ffaa +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.2.0-pyhd8ed1ab_0.conda#156fb994a4e07091c4fad2c148589eb2 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.0-h25f0c4b_0.conda#d764367398de61c0d5531dd912e6cc96 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.0-py310he60537e_0.conda#83a21bbd1c6fbeb339ba914fb5e5c02d +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.3-py310h9b08913_0.conda#467244b0dbb7da40927ac6ee0e9491de +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.0.0-pyhd8ed1ab_0.conda#c34694044915d7f291ef257029f2e2af 
+https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py310h15e2413_1.conda#5be35366687def87437d210fd673100c +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py310heca2aa9_3.conda#3b1946b676534472ce65181dda0b9554 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.0.0-pyhd8ed1ab_0.tar.bz2#c9e3f8bfdb9bfc34aa1836a6ed4b25d7 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.2.0-pyhd8ed1ab_0.conda#70ab87b96126f35d1e68de2ad9fb6423 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py310hbf28c38_3.tar.bz2#703ff1ac7d1b27fb5944b8052b5d1edb +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_0.conda#81c20b15d2281a1ea48eac5b4eee8cfa +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.18-pyhd8ed1ab_0.conda#e07a5691c27e65d8d3d9278c578c7771 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_h1e13492_2.conda#d4ed7704f0fa589e4d7656780fa87557 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.2-nompi_py310h55e1e36_100.tar.bz2#4dd7aa28fb7d9a6de061c9579a30e7dd +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.13-hd33c08f_0.conda#e3b13445b8ee9d6a3d53a714f89ccd76 +https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.19.0-pyhd8ed1ab_0.conda#afaa9bf6992f67a82d75fad47a93ec84 
+https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_104.conda#ed3526a8b7f37a7ee04ab0de2a0ac314 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-3.0.4-py310hff52083_0.conda#099815f9de141008e85f4ede8c55991c +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda#01f33ad2e0aaf6b5ba4add50dad5ad29 +https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py310h515c5ea_102.conda#bf8276009073388b7159736877eccd79 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py310hab646b1_3.conda#d049da3204bf5ecb54a852b622f2d7d2 +https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda#11d178fc55199482ee48d6812ea83983 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.0-py310hff52083_0.conda#215e2a4504900bef6d68f520c12ef800 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429 +https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py310h8deb116_2.conda#a12933d43fc0e55c2e5e00f56196108c +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 +https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py310hcb7e713_0.conda#bd14eaad9bbf54b78e48ecb8b644fcf6 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a + diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock index 368554bb25..3e3349cb4b 100644 --- a/requirements/ci/nox.lock/py38-linux-64.lock +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -1,57 +1,64 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 0b8e98b045b5545a96321ab961f5e97fe2da8aa929328cc8df2d4d5f33ed8159 +# input_hash: fb647c05bdf2998763af9a184ece4f66796aff1cff2ae207f504c94e6062acaf @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.10.8-ha878542_0.tar.bz2#575611b8a84f45960e87722eeb51fa26 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.36.1-hea4e1c9_2.tar.bz2#bd4f2e711b39af170e7ff15163fe87ee 
-https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-11.2.0-h5c6108e_12.tar.bz2#f547bf125ab234cec9c89491b262fc2f -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-11.2.0-he4da1e4_12.tar.bz2#7ff3b832ba5e6918c0d026976359d065 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.28-ha770c72_0.tar.bz2#56594fdd5a80774a80d546fbbccf2c03 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-3_cp38.conda#2f3f7af062b42d664117662612022204 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-11.2.0-h69a702a_12.tar.bz2#33c165be455015cc74e8d857182f3f58 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-11.2.0-h1d223b6_12.tar.bz2#763c5ec8116d984b4a33342236d7da36 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d +https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-11.2.0-h1d223b6_12.tar.bz2#d34efbb8d7d6312c816b4bb647b818b1 
-https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.3-h9c3ff4c_0.tar.bz2#bd783d12b65023e333bb7016de41570b +https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.10.2-h9c3ff4c_0.tar.bz2#fe9a66a351bfa7a84c3108304c7bcba5 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55 +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-69.1-h9c3ff4c_0.tar.bz2#e0773c9556d588b062a4e1424a6a02fa -https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf 
-https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h7f98852_0.tar.bz2#5c214edc675a7fb7cbb34b1d854e5141 -https://conda.anaconda.org/conda-forge/linux-64/lerc-3.0-h9c3ff4c_0.tar.bz2#7fcefde484980d23f0ec24c11e314d2e -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h7f98852_6.tar.bz2#b0f44f63f7d771d7670747a1dd5d5ac1 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.8-h7f98852_0.tar.bz2#91d22aefa665265e8e31988b15145c8a +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f +https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407 +https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.17-h0b41bf4_0.conda#5cc781fd91968b11a8a7fdbee0982676 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 
-https://conda.anaconda.org/conda-forge/linux-64/libllvm13-13.0.0-hf817b99_0.tar.bz2#b10bb2ebebfffa8800fa80ad3285719e +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.18-pthreads_h8fe5266_0.tar.bz2#41532e4448c0cce086d6570f95e4e12e +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h9c3ff4c_1008.tar.bz2#16e143a1ed4b4fd169536373957f6fee +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 +https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.2-h7f98852_1.tar.bz2#46cf26ecc8775a0aab300ea1821aaa3c -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.11-h36c2ea0_1013.tar.bz2#dcddf696ff5dfcab567100d691678e18 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_1.tar.bz2#fbe97e8fa6f275d7c76a09e795adc3e6 -https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.3-h846660c_100.tar.bz2#1bb747e2de717cb9a6501d72539d6556 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h9c3ff4c_0.tar.bz2#fb31bcb7af058244479ca635d20f0f4a 
-https://conda.anaconda.org/conda-forge/linux-64/nspr-4.32-h9c3ff4c_1.tar.bz2#29ded371806431b0499aaee146abfc3e -https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1l-h7f98852_0.tar.bz2#de7b38a1542dbe6f41653a8ae71adc53 -https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.2-hcb278e6_0.conda#08efb1e1813f1a151b7a945b972a049b +https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.8-h0b41bf4_0.conda#e043403cd18faf815bf7705ab6c1e092 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a @@ -61,164 +68,204 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.t https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 
https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h73d1719_1008.tar.bz2#af49250eca8e139378f8ff0ae9e57251 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-13_linux64_openblas.tar.bz2#8a4038563ed92dfa622bd72c0d8f31d3 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h7f98852_6.tar.bz2#c7c03a2592cac92246a13a0732bd1573 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h7f98852_6.tar.bz2#28bfe0a70154e6881da7bae97517c948 -https://conda.anaconda.org/conda-forge/linux-64/libclang-13.0.0-default_hc23dcda_0.tar.bz2#7b140452b5bc91e46410b84807307249 +https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.22-h11f4161_0.conda#504fa9e712b99494a9cf4630e3ca7d78 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657 https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 
-https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h9b69904_4.tar.bz2#390026683aef81db27ff1b8570ca1336 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-hc3e0081_3.tar.bz2#a47110f41fcbf88fcdf8549d7f69a6d8 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-ha901b37_0.conda#6a39818710235826181e104aada40c75 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa 
+https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h36c2ea0_1013.tar.bz2#cf7190238072a41e9579e4476a6a60b8 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-ha95c52a_0.tar.bz2#5222b231b1ef49a7f60d40b363469b70 -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h7f98852_6.tar.bz2#9e94bf16f14c78a36561d5019f490d22 -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-13_linux64_openblas.tar.bz2#b17676dbd6688396c3a3076259fb7907 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.70.2-h174f98d_1.tar.bz2#d03a54631298fd1ab732ff65f6ed3a07 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-13_linux64_openblas.tar.bz2#018b80e8f21d8560ae4961567e3e00c9 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.46.0-h812cca2_0.tar.bz2#507fa47e9075f889af8e8b72925379be -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-ha56f1ee_2.tar.bz2#6ab4eaa11ff01801cffca0a27489dc04 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-h6f004c6_2.tar.bz2#34fda41ca84e67232888c9a885903055 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h885dcf4_1.tar.bz2#d1355eaa48f465782f228275a0a69771 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_1.tar.bz2#175a746a43d42c053b91aa765fbc197d -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.28-hfa10184_0.tar.bz2#aac17542e50a474e2e632878dc696d50 
-https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.37.0-h9cd32fc_0.tar.bz2#eb66fc098824d25518a79e83d12a81d6 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.11-h27826a3_1.tar.bz2#84e76fb280e735fec1efd2d21fd9cb27 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad +https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-hadd5161_0.conda#70cbb0c2033665f2a7339bf0ec51a67f +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h6adf6a1_2.conda#2e648a34072eb39d7c4fc2a9981c5f0c +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h79f4944_0.conda#3f67368c9b0e77a693acad193310baf1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hd7da12d_0.conda#b05d7ea8b76f1172d5fe4f30e03277ea 
+https://conda.anaconda.org/conda-forge/linux-64/nss-3.88-he45b914_0.conda#d7a81dfb99ad8fbb88872fb7ec646e6c +https://conda.anaconda.org/conda-forge/linux-64/python-3.8.16-he550d4f_1_cpython.conda#9de84cccfbc5f8350a3667bb6ef6fc30 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h7f98852_6.tar.bz2#612385c4a83edb0619fe911d9da317f4 -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.5-h9f60fe5_3.tar.bz2#511aa83cdfcc0132380db5daf2f15f27 -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.2-hcc1bbae_3.tar.bz2#e29650992ae593bc05fc93722483e5c3 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.2-h3452ae3_0.tar.bz2#c363665b4aabe56aae4f8981cff5b153 
-https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/nss-3.74-hb5efdd6_0.tar.bz2#136876ca50177058594f6c2944e95c40 -https://conda.anaconda.org/conda-forge/linux-64/python-3.8.12-hb7a2778_2_cpython.tar.bz2#148ea076514259c7f562fbfba956a693 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a +https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b +https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 +https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.10-pyhd8ed1ab_0.tar.bz2#ea77236c8031cfa821720b21b4cb0ceb -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.0.0-pyhd8ed1ab_0.tar.bz2#3a8fc8b627d5fb6af827e126a10a86c6 
-https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.4-pyhd8ed1ab_0.tar.bz2#7b50d840543d9cdae100e91582c33035 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.4.2-pyhd8ed1ab_1.tar.bz2#d3f5797d3f9625c64860c93fc4359e64 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.94-ha180cfb_0.tar.bz2#c534c5248da4913002473919d76d0161 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.1.0-pyhd8ed1ab_0.tar.bz2#188e095f4dc38887bb48b065734b9e8d -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.5-hf529b03_3.tar.bz2#524a9f1718bac53a6cf4906bcc51d044 -https://conda.anaconda.org/conda-forge/noarch/idna-3.3-pyhd8ed1ab_0.tar.bz2#40b50b8b030f5f2f22085c062ed013dd -https://conda.anaconda.org/conda-forge/noarch/imagesize-1.3.0-pyhd8ed1ab_0.tar.bz2#be807e7606fff9436e5e700f6bffb7c6 +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py38h578d9bd_3.tar.bz2#34e1f12e3ed15aff218644e9d865b722 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a 
+https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda#44f6828b8f7cc3433d68d1d1c0e9add2 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.81.0-h2574ce0_0.tar.bz2#1f8655741d0269ca6756f131522da1e8 -https://conda.anaconda.org/conda-forge/linux-64/libpq-14.1-hd57d9b9_1.tar.bz2#a7024916bfdf33a014a0cc803580c9a1 -https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py38h43d8883_1.tar.bz2#41ca56d5cac7bfc7eb4fcdbee878eb84 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h3e3d535_1.conda#a3a0f7a6f0885f5e1e0ec691566afb77 
+https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.88.0-hdc1c0ab_0.conda#c44acb3847ff118c068b662aff858afd +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.2-hb675445_0.conda#4654b17eccaba55b8581d6b9c77f53cc +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py38h1de0b5d_0.conda#6d97b5d6f06933ab653f1862ddf6e33e +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py38h97ac3a3_0.tar.bz2#0c469687a517052c0d581fc6e1a4189d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 -https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f -https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-2.3.0-pyhd8ed1ab_0.tar.bz2#7bc119135be2a43e1701432399d8c28a +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py38h10c12cc_0.conda#05592c85b9f6931dc2df1e80c0d56294 +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda#1ff2e3ca41f0ce16afec7190db28288b +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 
+https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py38h0a891b7_0.tar.bz2#fe2ef279417faa1af0adf178de2032f7 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.7-pyhd8ed1ab_0.tar.bz2#727e2216d9c47455d8ddc060eb2caad9 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860 -https://conda.anaconda.org/conda-forge/noarch/pytz-2021.3-pyhd8ed1ab_0.tar.bz2#7e4f811bff46a5a6a7e0094921389395 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py38h1de0b5d_0.conda#7db73572d4f7e10a759bad609a228ad0 +https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda#f59d49a7b464901cf714b9e7984d01a2 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h0a891b7_5.tar.bz2#0856c59f9ddb710c640dc0428d66b1b7 +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.3.2-pyhd8ed1ab_0.conda#543af74c4042aee5702a033e03a216d0 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae 
+https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.2-pyhd8ed1ab_0.tar.bz2#f348d1590550371edfac5ed3c1d44f7e -https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2#1ca02aaf78d9c70d9a81a3bed5752022 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.7.0-pyhd8ed1ab_0.tar.bz2#947f7f41958eabc0f6e886557512bb76 -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1003.tar.bz2#db8b471d9a764f561a129f94ea215c0a -https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha00ac49_1009.tar.bz2#d1dff57b8731c245d3247b46d002e1c9 -https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.10.8-py38h578d9bd_1.tar.bz2#52a6cee65a5d10ed1c3f0af24fb48dd3 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.0-py38h3931269_0.tar.bz2#9c491a90ae11d08ca97326a0ed876f3a 
-https://conda.anaconda.org/conda-forge/linux-64/curl-7.81.0-h2574ce0_0.tar.bz2#3a95d393b490f82aa406f1892fad84d9 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py38h578d9bd_3.tar.bz2#a7866449fb9e5e4008a02df276549d34 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.1-mpi_mpich_h9c45103_3.tar.bz2#4f1a733e563d27b98010b62888e149c9 -https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.10.1-py38h578d9bd_0.tar.bz2#26da12e39b1b93e82fb865e967d0cbe0 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.2-py38h1fd1430_1.tar.bz2#085365abfe53d5d13bb68b1dda0b439e -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h3cfcdeb_1.tar.bz2#37d7568c595f0cfcd0c493f5ca0344ab -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_1.tar.bz2#1ef7b5f4826ca48a15e2cd98a5c3436d -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.3-py38he865349_0.tar.bz2#b1b3d6847a68251a1465206ab466b475 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.22.1-py38h6ae9a64_0.tar.bz2#9ec24c7acb2252816f1f6b6687317432 -https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2#71f1ab2de48613876becddd496371c85 -https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 -https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py38h0a891b7_1.tar.bz2#358beb228a53b5e1031862de3525d1d3 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a 
+https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py38h0a891b7_0.tar.bz2#44421904760e9f5ae2035193e04360f0 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zipp-3.14.0-pyhd8ed1ab_0.conda#01ea04980fa39d7b6dbdd6c67016d177 +https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.2-pyha770c72_0.conda#88b59f6989f0ed5ab3433af0b82555e1 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py38h4a40e3a_3.conda#3ac112151c6b6cfe457e976de41af0c5 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py38h26c90d9_1.tar.bz2#dcc025a7bb54374979c500c2e161fac9 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py38hfbd4bf9_0.conda#638537863b298151635c05c762a997ab +https://conda.anaconda.org/conda-forge/linux-64/curl-7.88.0-hdc1c0ab_0.conda#5d9ac94ee84305ada32c3d287d0ec602 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py38h0a891b7_1.tar.bz2#62c89ddefed9c5835e228a32b357a28d +https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0 
+https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.10.2-pyhd8ed1ab_0.conda#de76905f801c22fc43e624058574eab3 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_had23c3d_1.conda#36c65ed73b7c92589bd9562ef8a6023d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h26c90d9_1008.tar.bz2#6bc8cd29312f4fc77156b78124e165cd +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c +https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py38hde6dc18_1.conda#3de5619d3f556f966189e5251a266125 +https://conda.anaconda.org/conda-forge/noarch/pip-23.0.1-pyhd8ed1ab_0.conda#8025ca83b8ba5430b640b83917c2a6f7 https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/proj-8.2.1-h277dcde_0.tar.bz2#f2ceb1be6565c35e2db0ac948754751d -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py38h709712a_8.tar.bz2#11b72f5b1cc15427c89232321172a0bc -https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py38h578d9bd_4.tar.bz2#9c4bbee6f682f2fc7d7803df3996e77e +https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.1-h8ffa02c_2.conda#c264aea0e16bba26afa0a0940e954492 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-ha8d29e2_1.conda#dbfc2a8d63a43a11acf4c704e1ef9d0c +https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1 
+https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.1-pyhd8ed1ab_0.conda#f0be05afc9c9ab45e273c088e00c258b https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_1.tar.bz2#977d03222271270ea8fe35388bf13752 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py38h497a2fe_3.tar.bz2#131de7d638aa59fb8afbce59f1a8aa98 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-ha98a1a1_5.tar.bz2#9b27fa0b1044a2119fb1b290617fe06f -https://conda.anaconda.org/conda-forge/linux-64/setuptools-60.5.0-py38h578d9bd_0.tar.bz2#9807c89f3ce846015dbad3c1d04348a5 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_2.tar.bz2#63b3b55c98b4239134e0be080f448944 -https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-14.0.0-py38h497a2fe_0.tar.bz2#8da7787169411910df2a62dc8ef533e0 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.13.0-py38h578d9bd_0.tar.bz2#561081f4a30990533541979c9ee84732 -https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1003.tar.bz2#9189b42c42b9c87b2b2068cbe31901a8 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.5.2-py38h6c62de6_0.tar.bz2#73892e60ccea826c7f7a2215e48d22cf -https://conda.anaconda.org/conda-forge/linux-64/cryptography-36.0.1-py38h3e25421_0.tar.bz2#acc14d0d71dbf74f6a15f2456951b6cf -https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.1.1-pyhd8ed1ab_0.tar.bz2#7968db84df10b74d9792d66d7da216df -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.29.0-py38h497a2fe_0.tar.bz2#3d96473ac57b7260a3fc3bdb13d2db79 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-3.2.0-hb4a5f5f_0.tar.bz2#d03d53e6bcb97e6a97a1659fb38aa76e -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.3-pyhd8ed1ab_0.tar.bz2#036d872c653780cb26e797e2e2f61b4c 
-https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_h319fa22_1.tar.bz2#7583fbaea3648f692c0c019254bc196c -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h6c62de6_1006.tar.bz2#829b1209dfadd431a11048d6eeaf5bef -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.4.0-py38h43a58ef_0.tar.bz2#23427f52c81076594a95c006ebf7552e -https://conda.anaconda.org/conda-forge/noarch/pip-21.3.1-pyhd8ed1ab_0.tar.bz2#e4fe2a9af78ff11f1aced7e62128c6a8 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.11.2-pyhd8ed1ab_0.tar.bz2#caef60540e2239e27bf62569a5015e3b -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.3.0-py38h5383654_1.tar.bz2#5b600e019fa7c33be73bdb626236936b -https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h0ffb2e6_8.tar.bz2#acfc7625a212c27f7decdca86fdb2aba -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h6c62de6_1.tar.bz2#a350e3f4ca899e95122f66806e048858 -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.2.0-py38h6c62de6_1.tar.bz2#2953d3fc0113fc6ffb955a5b72811fb0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.3-py38h56a6a73_0.tar.bz2#2d318049369bb52d2687b0ac2be82751 -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.8.0-py38h596eeab_5.tar.bz2#ec3b783081e14a9dc0eb5ce609649728 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38h26c90d9_3.tar.bz2#6e7902b0e96f42fa1b73daa5f65dd669 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py38h7e4f40d_0.conda#17f682c947f9cabd348e7276f00c6d85 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py38hd07e089_0.conda#84c9262ab4057ed9f80888fcfc4bf60a +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.7-py38h8dc9893_0.conda#ea242937718f3dacf253355e1d634535 
+https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026 +https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h0a891b7_1005.tar.bz2#e99e08812dfff30fdd17b3f8838e2759 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py38h26c90d9_2.tar.bz2#0ea017e84efe45badce6c32f274dbf8e +https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.1-py38h3d167d9_0.conda#375c00c98c36b0e79aaaf2149e51f27d +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.2.0-pyhd8ed1ab_0.conda#156fb994a4e07091c4fad2c148589eb2 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.0-h25f0c4b_0.conda#d764367398de61c0d5531dd912e6cc96 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.10.2-pyhd8ed1ab_0.conda#ebf8b116aac3fe86270bfe5f61fe2b80 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.3-py38hdc8b05c_0.conda#5073966d63a54434d2a2fc41d325b072 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.0.0-pyhd8ed1ab_0.conda#c34694044915d7f291ef257029f2e2af +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py38h58d5fe2_1.conda#5286eaec7e93586e4ae05e7d658cd3e2 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py38h8dc9893_3.conda#7bb0328b4a0f857aeb432426b9a5f908 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.2.0-pyhd8ed1ab_0.conda#70ab87b96126f35d1e68de2ad9fb6423 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 
-https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h1fd1430_1.tar.bz2#c494f75082f9c052944fda1b22c83336 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.1-py38h6c62de6_2.tar.bz2#350322b046c129e5802b79358a1343f7 -https://conda.anaconda.org/conda-forge/noarch/identify-2.4.6-pyhd8ed1ab_0.tar.bz2#d4030c75256440b8375b2f32c4ed35cd -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.1-pyhd8ed1ab_0.tar.bz2#01cc8698b6e1a124dc4f585516c27643 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.5.1-py38hf4fb855_0.tar.bz2#47cf0cab2ae368e1062e75cfbc4277af -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.4-mpi_mpich_h1364a43_0.tar.bz2#b6ba4f487ef9fd5d353ff277df06d133 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.8-nompi_py38h2823cc8_101.tar.bz2#1dfe1cdee4532c72f893955259eb3de9 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.3-h9967ed3_0.tar.bz2#37f1c68380bc5dfe0f5bb2655e207a73 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-21.0.0-pyhd8ed1ab_0.tar.bz2#8c49efecb7dca466e18b06015e8c88ce -https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_8.tar.bz2#78a2a6cb4ef31f997c1bee8223a9e579 -https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_8.tar.bz2#857894ea9c5e53c962c3a0932efa71ea -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.20.2-py38ha217159_3.tar.bz2#d7461e191f7a0522e4709612786bdf4e -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.2.0-mpi_mpich_h4975321_100.tar.bz2#56f5c650937b1667ad0a557a0dff3bc4 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py38h43d8883_3.tar.bz2#82b3797d08a43a101b645becbb938e65 +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_0.conda#81c20b15d2281a1ea48eac5b4eee8cfa +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.18-pyhd8ed1ab_0.conda#e07a5691c27e65d8d3d9278c578c7771 
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.0-py38hd6c3c57_0.conda#dd63f6486ba95c036b6bfe0b5c53d875 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_h1e13492_2.conda#d4ed7704f0fa589e4d7656780fa87557 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.2-nompi_py38h2250339_100.tar.bz2#dd97e93b1f64f1cc58879d53c23ec93f +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046 +https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.19.0-pyhd8ed1ab_0.conda#afaa9bf6992f67a82d75fad47a93ec84 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_104.conda#ed3526a8b7f37a7ee04ab0de2a0ac314 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.52.5-h0a9e6e8_2.tar.bz2#aa768fdaad03509a97df37f81163346b -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.0-pyhd8ed1ab_0.tar.bz2#9113b4e4fa2fa4a7f129c71a6f319475 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.17.0-py38h578d9bd_0.tar.bz2#839ac9dba9a6126c9532781a9ea4506b -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py38h578d9bd_8.tar.bz2#88368a5889f31dff922a2d57bbfc3f5b -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.8-pyhd8ed1ab_1.tar.bz2#53f1387c68c21cecb386e2cde51b3f7c -https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.2.0-mpi_mpich_py38h9147699_101.tar.bz2#5a9de1dec507b6614150a77d1aabf257 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.50.0-h8e749b2_2.tar.bz2#8c20fd968c8b6af73444b1199d5fb0cb 
-https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.5.1-py38h578d9bd_0.tar.bz2#0d78be9cf1c400ba8e3077cf060492f1 -https://conda.anaconda.org/conda-forge/noarch/requests-2.27.1-pyhd8ed1ab_0.tar.bz2#7c1c427246b057b8fa97200ecdb2ed62 -https://conda.anaconda.org/conda-forge/noarch/sphinx-4.4.0-pyh6c4a22f_1.tar.bz2#a9025d14c2a609e0d895ad3e75b5369c -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.4.0-pyhd8ed1ab_0.tar.bz2#80fd2cc25ad45911b4e42d5b91593e2f -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.10.1-pyhd8ed1ab_0.tar.bz2#4918585fe5e5341740f7e63c61743efb +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-3.0.4-py38h578d9bd_0.conda#ae802cf221c9549ce9924e1a3718342d +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda#01f33ad2e0aaf6b5ba4add50dad5ad29 +https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py38h4407c66_102.conda#9a5c841acef11d7e4f0bf98cbc6308b3 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py38ha0d8c90_3.conda#e965dc172d67920d058ac2b3a0e27565 +https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda#11d178fc55199482ee48d6812ea83983 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.0-py38h578d9bd_0.conda#7fb6ab52eb5de5023445561d86dbd602 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py38h10c12cc_2.conda#d6a3defdc4ab4acd69c04c8ef73d9b57 +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a -https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-1.0.0-pyhd8ed1ab_0.tar.bz2#9f633f2f2869184e31acfeae95b24345 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_1.tar.bz2#63d2f874f990fdcab47c822b608d6ade +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py38h3d2c718_0.conda#55ba6e3a49c4293302262286a49607d8 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a + diff --git a/requirements/ci/nox.lock/py39-linux-64.lock b/requirements/ci/nox.lock/py39-linux-64.lock new file mode 100644 index 0000000000..c58911fb63 --- /dev/null +++ b/requirements/ci/nox.lock/py39-linux-64.lock @@ -0,0 +1,272 @@ +# Generated by conda-lock. 
+# platform: linux-64 +# input_hash: 23dff964b0b7254aa6b68bd471a7276f62e9eaa86280f550ef4f34a2022201e0 +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2022.12.7-ha878542_0.conda#ff9f73d45c4a07d6f424495288a26080 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-12.2.0-h337968e_19.tar.bz2#164b4b1acaedc47ee7e658ae6b308ca3 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-12.2.0-h46fd767_19.tar.bz2#1030b1f38c129f2634eae026f704fe60 +https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda#0dd193187d54e585cac7eab942a8847e +https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda#51fc4fcfb19f5d95ffc8c339db5068e8 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-12.2.0-h69a702a_19.tar.bz2#cd7a806282c16e1f2d39a7e80d3a3e0d +https://conda.anaconda.org/conda-forge/linux-64/libgomp-12.2.0-h65d4601_19.tar.bz2#cedcee7c064c01c403f962c9e8d3c373 
+https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-12.2.0-h65d4601_19.tar.bz2#e4c94f80aef025c17ab0828cd85ef535 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.8-h166bdaf_0.tar.bz2#be733e69048951df1e4b4b7bb8c7666f +https://conda.anaconda.org/conda-forge/linux-64/attr-2.5.1-h166bdaf_1.tar.bz2#d9c69a24ad678ffce24c6543a0176b00 +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.18.1-h7f98852_0.tar.bz2#f26ef8098fab1f719c91eb760d63381a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-h27087fc_0.tar.bz2#c4fbad8d4bddeb3c085f18cbf97fbfad +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf0379b8_106.conda#d7407e695358f068a2a7f8295cde0567 +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.1-h27087fc_0.tar.bz2#917b9a50001fffdd89b321b5dba31e55 +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-orc-0.4.33-h166bdaf_0.tar.bz2#879c93426c9d0b84a9de4513fbce5f4f +https://conda.anaconda.org/conda-forge/linux-64/icu-70.1-h27087fc_0.tar.bz2#87473a15119779e021c314249d4b4aed +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9e-h0b41bf4_3.conda#c7a069243e1fbe9a556ed2ec030e6407 
+https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 +https://conda.anaconda.org/conda-forge/linux-64/lame-3.100-h166bdaf_1003.tar.bz2#a8832b479f93521a9e7b5b743803be51 +https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libdb-6.2.32-h9c3ff4c_0.tar.bz2#3f3258d8f841fbac63b36b75bdac1afd +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.17-h0b41bf4_0.conda#5cc781fd91968b11a8a7fdbee0982676 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.21-pthreads_h78a6416_3.tar.bz2#8c5963a49b6035c40646a763293fbb35 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 +https://conda.anaconda.org/conda-forge/linux-64/libudev1-252-h166bdaf_0.tar.bz2#174243089ec111479298a5b7099b64b5 
+https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.4-h166bdaf_0.tar.bz2#ac2ccf7323d21f2994e4d1f5da664f37 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 +https://conda.anaconda.org/conda-forge/linux-64/mpg123-1.31.2-hcb278e6_0.conda#08efb1e1813f1a151b7a945b972a049b +https://conda.anaconda.org/conda-forge/linux-64/mpich-4.0.3-h846660c_100.tar.bz2#50d66bb751cfa71ee2a48b2d3eb90ac1 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.3-h27087fc_1.tar.bz2#4acfc691e64342b9dae57cf2adc63238 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.0.8-h0b41bf4_0.conda#e043403cd18faf815bf7705ab6c1e092 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 +https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae +https://conda.anaconda.org/conda-forge/linux-64/jack-1.9.22-h11f4161_0.conda#504fa9e712b99494a9cf4630e3ca7d78 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-16_linux64_openblas.tar.bz2#d9b7a8639171f6c6fa0a983edabcfe2b +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 +https://conda.anaconda.org/conda-forge/linux-64/libcap-2.66-ha37c62d_0.tar.bz2#2d7665abd0997f1a6d4b7596bc27b657 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-h28343ad_4.tar.bz2#4a049fc560e00e43151dc51368915fdd +https://conda.anaconda.org/conda-forge/linux-64/libflac-1.4.2-h27087fc_0.tar.bz2#7daf72d8e2a8e848e11d63ed6d1026e0 +https://conda.anaconda.org/conda-forge/linux-64/libgpg-error-1.46-h620e276_0.conda#27e745f6f2e4b757e95dd7225fbe6bdb +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.51.0-hff17c54_0.conda#dd682f0b6d65e75b2bc868fc8e93d87e +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.40.0-h753d276_0.tar.bz2#2e5f9a37d487e1019fd4d8113adb2f9f 
+https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.10.0-hf14f497_3.tar.bz2#d85acad4b47dff4e3def14a769a97906 +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1004.tar.bz2#b3653fdc58d03face9724f602218a904 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.3-h7463322_0.tar.bz2#3b933ea47ef8f330c4c068af25fcd6a8 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.32-ha901b37_0.conda#6a39818710235826181e104aada40c75 +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1.2-h0f457ee_0.tar.bz2#db2ebbe2943aae81ed051a6a9af8e0fa +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h9772cbc_5.tar.bz2#ee08782aff2ff9b3291c967fa6bc7336 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 
+https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-16_linux64_openblas.tar.bz2#20bae26d0a1db73f758fc3754cab4719 +https://conda.anaconda.org/conda-forge/linux-64/libgcrypt-1.10.1-h166bdaf_0.tar.bz2#f967fc95089cd247ceed56eda31de3a9 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.74.1-h606061b_1.tar.bz2#ed5349aa96776e00b34eccecf4a948fe +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-16_linux64_openblas.tar.bz2#955d993f41f9354bf753d29864ea20ad +https://conda.anaconda.org/conda-forge/linux-64/libllvm15-15.0.7-hadd5161_0.conda#70cbb0c2033665f2a7339bf0ec51a67f +https://conda.anaconda.org/conda-forge/linux-64/libsndfile-1.2.0-hb75c966_0.conda#c648d19cd9c8625898d5d370414de7c7 +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-h6adf6a1_2.conda#2e648a34072eb39d7c4fc2a9981c5f0c +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.5.0-h79f4944_0.conda#3f67368c9b0e77a693acad193310baf1 +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.32-hd7da12d_0.conda#b05d7ea8b76f1172d5fe4f30e03277ea +https://conda.anaconda.org/conda-forge/linux-64/nss-3.88-he45b914_0.conda#d7a81dfb99ad8fbb88872fb7ec646e6c +https://conda.anaconda.org/conda-forge/linux-64/python-3.9.16-h2782a2a_0_cpython.conda#95c9b7c96a7fd7342e0c9d0a917b8f78 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.40.0-h4ff8645_0.tar.bz2#bb11803129cbbb53ed56f9506ff74145 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-0.4.0-h166bdaf_0.tar.bz2#384e7fcb3cd162ba3e4aed4b687df566 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-keysyms-0.4.0-h166bdaf_0.tar.bz2#637054603bb7594302e3bf83f0a99879 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-renderutil-0.3.9-h166bdaf_0.tar.bz2#732e22f1741bccea861f5668cf7342a7 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-wm-0.4.1-h166bdaf_0.tar.bz2#0a8e20a8aef954390b9481a527421a8c 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py39hf3d152e_1003.tar.bz2#5e8330e806e50bd6137ebd125f4bc1bb +https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b +https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda#8b76db7818a4e401ed4486c4c1635cd9 +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 +https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda#fb9addc3db06e56abe03e0e9f21a63e6 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2#c1d5b294fbf9a795dec349a6f4d8be8e +https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h5008d03_3.tar.bz2#ecfff944ba3960ecb334b9a2663d708d +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.17.1-py39hf3d152e_3.tar.bz2#3caf51fb6a259d377f05d6913193b11c 
+https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.0-pyhd8ed1ab_0.conda#a385c3e8968b4cf8fbc426ace915fd1a +https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.9.0-pyhd8ed1ab_0.conda#1addc115923d646ca19ed90edc413506 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda#44f6828b8f7cc3433d68d1d1c0e9add2 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h05c8ddd_0.conda#1a109126a43003d65b39c1cad656bc9b +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.74.1-h6239696_1.tar.bz2#5f442e6bc9d89ba236eb25a25c5c2815 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.4.0-pyhd8ed1ab_0.tar.bz2#18ee9c07cf945a33f92caf1ee3d23ad9 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py39hf939315_1.tar.bz2#41679a052a8ce841c74df1ebc802e411 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.14-hfd0df8a_1.conda#c2566c2ea5f153ddd6bf4acaf7547d97 +https://conda.anaconda.org/conda-forge/linux-64/libclang13-15.0.7-default_h3e3d535_1.conda#a3a0f7a6f0885f5e1e0ec691566afb77 +https://conda.anaconda.org/conda-forge/linux-64/libcups-2.3.3-h36d4200_3.conda#c9f4416a34bc91e0eb029f912c68f81f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.88.0-hdc1c0ab_0.conda#c44acb3847ff118c068b662aff858afd 
+https://conda.anaconda.org/conda-forge/linux-64/libpq-15.2-hb675445_0.conda#4654b17eccaba55b8581d6b9c77f53cc +https://conda.anaconda.org/conda-forge/linux-64/libsystemd0-252-h2a991cd_0.tar.bz2#3c5ae9f61f663b3d5e1bf7f7da0c85f5 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.4-h1daa5a0_1.conda#77003f63d1763c1e6569a02c1742c9f4 +https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.2-py39h72bdee0_0.conda#35514f5320206df9f4661c138c02e1c1 +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.1.4-py39h32b9844_0.tar.bz2#b035b507f55bb6a967d86d4b7e059437 +https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py39h7360e5f_0.conda#757070dc7cc33003254888808cd34f1e +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea +https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda#1ff2e3ca41f0ce16afec7190db28288b +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 +https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2#7205635cd71531943440fbfe3b6b5727 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.4-py39hb9d737c_0.tar.bz2#12184951da572828fb986b06ffb63eed +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 +https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 
+https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py39h72bdee0_0.conda#18927f971926b7271600368de71de557 +https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda#f59d49a7b464901cf714b9e7984d01a2 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py39hb9d737c_5.tar.bz2#ef9db3c38ae7275f6b14491cfe61a248 +https://conda.anaconda.org/conda-forge/noarch/setuptools-67.3.2-pyhd8ed1ab_0.conda#543af74c4042aee5702a033e03a216d0 +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 
+https://conda.anaconda.org/conda-forge/linux-64/tornado-6.2-py39hb9d737c_1.tar.bz2#8a7d309b08cff6386fe384aa10dd3748 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2#2d93b130d148d7fc77e583677792fc6a +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.0.0-py39hb9d737c_0.tar.bz2#230d65004135bf312504a1bbcb0c7a08 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2#c829cfb8cb826acb9de0ac1a2df0a940 +https://conda.anaconda.org/conda-forge/linux-64/xcb-util-image-0.4.0-h166bdaf_0.tar.bz2#c9b568bd804cb2903c6be6f5f68182e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/zipp-3.14.0-pyhd8ed1ab_0.conda#01ea04980fa39d7b6dbdd6c67016d177 +https://conda.anaconda.org/conda-forge/noarch/babel-2.11.0-pyhd8ed1ab_0.tar.bz2#2ea70fde8d581ba9425a761609eed6ba +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.2-pyha770c72_0.conda#88b59f6989f0ed5ab3433af0b82555e1 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-ha61ee94_1014.tar.bz2#d1a88f3ed5b52e1024b80d4bcd26a7a0 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py39he91dace_3.conda#20080319ef73fbad74dcd6d62f2a3ffe +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py39h2ae25f5_1.tar.bz2#c943fb9a2818ecc5be1e0ecc8b7738f1 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py39h4b4f3f3_0.conda#c5387f3fb1f5b8b71e1c865fc55f4951 +https://conda.anaconda.org/conda-forge/linux-64/curl-7.88.0-hdc1c0ab_0.conda#5d9ac94ee84305ada32c3d287d0ec602 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.38.0-py39hb9d737c_1.tar.bz2#3f2d104f2fefdd5e8a205dd3aacbf1d7 
+https://conda.anaconda.org/conda-forge/linux-64/glib-2.74.1-h6239696_1.tar.bz2#f3220a9e9d3abcbfca43419a219df7e4 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.12.2-mpi_mpich_h5d83325_1.conda#811c4d55cf17b42336ffa314239717b0 +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.0.0-pyha770c72_0.conda#691644becbcdca9f73243450b1c63e62 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.10.2-pyhd8ed1ab_0.conda#de76905f801c22fc43e624058574eab3 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 +https://conda.anaconda.org/conda-forge/linux-64/libclang-15.0.7-default_had23c3d_1.conda#36c65ed73b7c92589bd9562ef8a6023d +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-h5aea950_4.conda#82ef57611ace65b59db35a9687264572 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py39h2ae25f5_1008.tar.bz2#d90acb3804f16c63eb6726652e4e25b3 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.7.0-pyhd8ed1ab_0.tar.bz2#fbe1182f650c04513046d6894046cd6c +https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2#af8c82d121e63082926062d61d9abb54 +https://conda.anaconda.org/conda-forge/linux-64/pillow-9.4.0-py39h2320bf1_1.conda#d2f79132b9c8e416058a4cd84ef27b3d +https://conda.anaconda.org/conda-forge/noarch/pip-23.0.1-pyhd8ed1ab_0.conda#8025ca83b8ba5430b640b83917c2a6f7 +https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.1.1-h8ffa02c_2.conda#c264aea0e16bba26afa0a0940e954492 +https://conda.anaconda.org/conda-forge/linux-64/pulseaudio-16.1-ha8d29e2_1.conda#dbfc2a8d63a43a11acf4c704e1ef9d0c +https://conda.anaconda.org/conda-forge/noarch/pygments-2.14.0-pyhd8ed1ab_0.conda#c78cd16b11cd6a295484bd6c8f24bea1 +https://conda.anaconda.org/conda-forge/noarch/pytest-7.2.1-pyhd8ed1ab_0.conda#f0be05afc9c9ab45e273c088e00c258b 
+https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py39h2ae25f5_3.tar.bz2#bcc7de3bb458a198b598ac1f75bf37e3 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py39h389d5f1_0.conda#9eeb2b2549f836ca196c6cbd22344122 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39hc9151fd_0.conda#d26cc40830285883abaa766a7f7798bf +https://conda.anaconda.org/conda-forge/linux-64/sip-6.7.7-py39h227be39_0.conda#7d9a35091552af3655151f164ddd64a3 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.4.0-hd8ed1ab_0.tar.bz2#be969210b61b897775a0de63cd9e9026 +https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py39hb9d737c_1005.tar.bz2#a639fdd9428d8b25f8326a3838d54045 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.1.1-py39h2ae25f5_2.tar.bz2#b3b4aab96d1c4ed394d6f4b9146699d4 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-39.0.1-py39h079d5ae_0.conda#3245013812dfbff6a22e57533ac6f69d +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.2.0-pyhd8ed1ab_0.conda#156fb994a4e07091c4fad2c148589eb2 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.22.0-h25f0c4b_0.conda#d764367398de61c0d5531dd912e6cc96 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-6.0.0-h8e241bc_0.conda#448fe40d2fed88ccf4d9ded37cbb2b38 +https://conda.anaconda.org/conda-forge/noarch/importlib-resources-5.10.2-pyhd8ed1ab_0.conda#ebf8b116aac3fe86270bfe5f61fe2b80 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.1-mpi_mpich_hcd871d9_6.tar.bz2#6cdc429ed22edb566ac4308f3da6916d +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.5.3-py39h2ad29b5_0.conda#3ea96adbbc2a66fa45178102a9cfbecc +https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.0.0-pyhd8ed1ab_0.conda#c34694044915d7f291ef257029f2e2af 
+https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.4.1-py39hf14cbfd_1.conda#67766c515601b3ee1514072d6fd060bb +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-12.11.0-py39h227be39_3.conda#9e381db00691e26bcf670c3586397be1 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.2.0-pyhd8ed1ab_0.conda#70ab87b96126f35d1e68de2ad9fb6423 +https://conda.anaconda.org/conda-forge/noarch/setuptools-scm-7.1.0-pyhd8ed1ab_0.conda#6613dbb3b25cc648a107f33ca9f80fc1 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py39hf939315_3.tar.bz2#0f11bcdf9669a5ae0f39efd8c830209a +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.22.0-h4243ec0_0.conda#81c20b15d2281a1ea48eac5b4eee8cfa +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.18-pyhd8ed1ab_0.conda#e07a5691c27e65d8d3d9278c578c7771 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.0-py39he190548_0.conda#62d6ddd9e534f4d325d12470cc4961ab +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.0-mpi_mpich_h1e13492_2.conda#d4ed7704f0fa589e4d7656780fa87557 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.2-nompi_py39hfaa66c4_100.tar.bz2#b5f2db23900499e96f88e39199ffc7b8 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.12-hd33c08f_1.conda#667dc93c913f0156e1237032e3a22046 +https://conda.anaconda.org/conda-forge/linux-64/parallelio-2.5.10-mpi_mpich_h862c5c2_100.conda#56e43c5226670aa0943fae9a2628a934 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda#d41957700e83bbb925928764cb7f8878 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.19.0-pyhd8ed1ab_0.conda#afaa9bf6992f67a82d75fad47a93ec84 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.0-mpi_mpich_hc592774_104.conda#ed3526a8b7f37a7ee04ab0de2a0ac314 
+https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.54.4-h7abd40a_0.tar.bz2#921e53675ed5ea352f022b79abab076a +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-3.0.4-py39hf3d152e_0.conda#8a98273ee904735747a8f6706b187f3e +https://conda.anaconda.org/conda-forge/linux-64/qt-main-5.15.8-h5d23da1_6.conda#59c73debd9405771690ddbbad6c57b69 +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda#01f33ad2e0aaf6b5ba4add50dad5ad29 +https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.4.0-mpi_mpich_py39h3088dd8_102.conda#a022e48c8b12bc56083bcce841978519 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-7.1.0-h2e5815a_0.conda#e7ecda996c443142a0e9c379f3b28e48 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.15.7-py39h5c7b992_3.conda#19e30314fe824605750da905febb8ee6 +https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda#11d178fc55199482ee48d6812ea83983 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.7.0-py39hf3d152e_0.conda#0967228e228ebeded6a36a6f4d5509ed +https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2#6429e1d1091c51f626b5dcfdd38bf429 +https://conda.anaconda.org/conda-forge/noarch/sphinx-4.5.0-pyh6c4a22f_0.tar.bz2#46b38d88c4270ff9ba78a89c83c66345 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.12.0-pyhd8ed1ab_0.tar.bz2#fe4a16a5ffc6ff74d4a479a44f6bf6a2 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.0-py39h7360e5f_2.conda#fbee2ab3fe7729f2ff5c5699d58e40b9 +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.5.0-pyhd8ed1ab_0.tar.bz2#4c969cdd5191306c269490f7ff236d9c 
+https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.11.1-pyhd8ed1ab_0.tar.bz2#729254314a5d178eefca50acbc2687b8 +https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.6.0-pyhd8ed1ab_0.tar.bz2#6eec6480601f5d15babf9c3b3987f34a +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py39h6e7ad6e_0.conda#7cb72bd5b1e7c5a23a062db90889356b +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a + diff --git a/requirements/ci/py310.yml b/requirements/ci/py310.yml new file mode 100644 index 0000000000..ae66d66a77 --- /dev/null +++ b/requirements/ci/py310.yml @@ -0,0 +1,56 @@ +name: iris-dev + +channels: + - conda-forge + +dependencies: + - python =3.10 + +# Setup dependencies. + - setuptools >=64 + - setuptools-scm >=7 + +# Core dependencies. + - cartopy >=0.21 + - cf-units >=3.1 + - cftime >=1.5 + - dask-core >=2.26 + - matplotlib >=3.5 + - netcdf4 + - numpy >=1.19 + - python-xxhash + - pyproj + - scipy + - shapely !=1.8.3 + +# Optional dependencies. + - esmpy >=7.0 + - graphviz + - iris-sample-data >=2.4.0 + - mo_pack + - nc-time-axis >=1.4 + - pandas + - pip + - python-stratify + +# Test dependencies. + - filelock + - imagehash >=4.0 + - pre-commit + - psutil + - pytest + - pytest-cov + - pytest-xdist + - requests + +# Documentation dependencies. + - sphinx + - sphinxcontrib-napoleon + - sphinx-copybutton + - sphinx-gallery >=0.11.0 + - sphinx-panels + - pydata-sphinx-theme + +# Temporary minimum pins. +# See https://github.com/SciTools/iris/pull/5051 + - graphviz >=6.0.0 diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml index d3d7f9d0c2..e18be0efe4 100644 --- a/requirements/ci/py38.yml +++ b/requirements/ci/py38.yml @@ -7,25 +7,28 @@ dependencies: - python =3.8 # Setup dependencies. - - setuptools >=40.8.0 + - setuptools >=64 + - setuptools-scm >=7 # Core dependencies. 
- - cartopy >=0.20 - - cf-units >=3 + - cartopy >=0.21 + - cf-units >=3.1 - cftime >=1.5 - - dask-core >=2 - - matplotlib + - dask-core >=2.26 + - matplotlib >=3.5 - netcdf4 - numpy >=1.19 - python-xxhash + - pyproj - scipy + - shapely !=1.8.3 # Optional dependencies. - esmpy >=7.0 - graphviz - iris-sample-data >=2.4.0 - mo_pack - - nc-time-axis >=1.3 + - nc-time-axis >=1.4 - pandas - pip - python-stratify @@ -33,15 +36,20 @@ dependencies: # Test dependencies. - filelock - imagehash >=4.0 - - nose - - pillow <7 - pre-commit + - psutil + - pytest + - pytest-xdist - requests # Documentation dependencies. - sphinx - sphinxcontrib-napoleon - sphinx-copybutton - - sphinx-gallery + - sphinx-gallery >=0.11.0 - sphinx-panels - - sphinx_rtd_theme + - pydata-sphinx-theme + +# Temporary minimum pins. +# See https://github.com/SciTools/iris/pull/5051 + - graphviz >=6.0.0 diff --git a/requirements/ci/py39.yml b/requirements/ci/py39.yml new file mode 100644 index 0000000000..50dcd77cd9 --- /dev/null +++ b/requirements/ci/py39.yml @@ -0,0 +1,55 @@ +name: iris-dev + +channels: + - conda-forge + +dependencies: + - python =3.9 + +# Setup dependencies. + - setuptools >=64 + - setuptools-scm >=7 + +# Core dependencies. + - cartopy >=0.21 + - cf-units >=3.1 + - cftime >=1.5 + - dask-core >=2.26 + - matplotlib >=3.5 + - netcdf4 + - numpy >=1.19 + - python-xxhash + - pyproj + - scipy + - shapely !=1.8.3 + +# Optional dependencies. + - esmpy >=7.0 + - graphviz + - iris-sample-data >=2.4.0 + - mo_pack + - nc-time-axis >=1.4 + - pandas + - pip + - python-stratify + +# Test dependencies. + - filelock + - imagehash >=4.0 + - pre-commit + - psutil + - pytest + - pytest-xdist + - requests + +# Documentation dependencies. + - sphinx + - sphinxcontrib-napoleon + - sphinx-copybutton + - sphinx-gallery >=0.11.0 + - sphinx-panels + - pydata-sphinx-theme + +# Temporary minimum pins. 
+# See https://github.com/SciTools/iris/pull/5051 + - graphviz >=6.0.0 diff --git a/setup.cfg b/setup.cfg index 1d3fb8b7c9..b40ace671e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,8 @@ [metadata] author = SciTools Developers -author_email = scitools-iris-dev@googlegroups.com +author_email = scitools.pub@gmail.com classifiers = - Development Status :: 5 Production/Stable + Development Status :: 5 - Production/Stable Intended Audience :: Science/Research License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+) Operating System :: MacOS @@ -11,8 +11,9 @@ classifiers = Operating System :: Unix Programming Language :: Python Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 Programming Language :: Python :: Implementation :: CPython Topic :: Scientific/Engineering Topic :: Scientific/Engineering :: Atmospheric Science @@ -31,7 +32,7 @@ keywords = ugrid visualisation license = LGPL-3.0-or-later -license_file = COPYING.LESSER +license_files = COPYING.LESSER long_description = file: README.md long_description_content_type = text/markdown name = scitools-iris @@ -46,16 +47,17 @@ version = attr: iris.__version__ [options] include_package_data = True install_requires = - cartopy>=0.20 - cf-units>=3 + cartopy>=0.21 + cf-units>=3.1 cftime>=1.5.0 - dask[array]>=2 - matplotlib + dask[array]>=2.26 + matplotlib>=3.5 netcdf4 numpy>=1.19 scipy + shapely!=1.8.3 xxhash -packages = find: +packages = find_namespace: package_dir = =lib python_requires = @@ -67,22 +69,22 @@ where = lib [options.extras_require] docs = - sphinx + sphinx<5 sphinx-copybutton - sphinx-gallery + sphinx-gallery>=0.11.0 sphinx_rtd_theme sphinxcontrib-napoleon sphinx-panels test = filelock imagehash>=4.0 - nose - pillow<7 pre-commit requests + pytest + pytest-xdist all = mo_pack - nc-time-axis>=1.3 + nc-time-axis>=1.4 pandas stratify 
%(docs)s diff --git a/setup.py b/setup.py index f48f3fe25a..061b35c262 100644 --- a/setup.py +++ b/setup.py @@ -1,42 +1,16 @@ -from contextlib import contextmanager import os -from shutil import copyfile import sys from setuptools import Command, setup from setuptools.command.build_py import build_py -from setuptools.command.develop import develop as develop_cmd - - -@contextmanager -def temporary_path(directory): - """ - Context manager that adds and subsequently removes the given directory - to sys.path - - """ - sys.path.insert(0, directory) - try: - yield - finally: - del sys.path[0] - - -# Add full path so Python doesn't load any __init__.py in the intervening -# directories, thereby saving setup.py from additional dependencies. -with temporary_path("lib/iris/tests/runner"): - from _runner import TestRunner # noqa: - - -class SetupTestRunner(TestRunner, Command): - pass +from setuptools.command.develop import develop class BaseCommand(Command): - """A valid no-op command for setuptools & distutils.""" + """A minimal no-op setuptools command.""" - description = "A no-op command." - user_options = [] + description: str = "A no-op command." 
+ user_options: list = [] def initialize_options(self): pass @@ -48,75 +22,65 @@ def run(self): pass -class CleanSource(BaseCommand): - description = "clean orphaned pyc/pyo files from the source directory" - - def run(self): - for root_path, dir_names, file_names in os.walk("lib"): - for file_name in file_names: - if file_name.endswith("pyc") or file_name.endswith("pyo"): - compiled_path = os.path.join(root_path, file_name) - source_path = compiled_path[:-1] - if not os.path.exists(source_path): - print("Cleaning", compiled_path) - os.remove(compiled_path) - - -def copy_copyright(cmd, directory): - # Copy the COPYRIGHT information into the package root - iris_build_dir = os.path.join(directory, "iris") - for fname in ["COPYING", "COPYING.LESSER"]: - copyfile(fname, os.path.join(iris_build_dir, fname)) - +def custom_command(cmd, help=""): + """ + Factory function to generate a custom command that adds additional + behaviour to build the CF standard names module. -def build_std_names(cmd, directory): - # Call out to tools/generate_std_names.py to build std_names module. + """ - script_path = os.path.join("tools", "generate_std_names.py") - xml_path = os.path.join("etc", "cf-standard-name-table.xml") - module_path = os.path.join(directory, "iris", "std_names.py") - args = (sys.executable, script_path, xml_path, module_path) - cmd.spawn(args) + class CustomCommand(cmd): + description = help or cmd.description + def _build_std_names(self, directory): + # Call out to tools/generate_std_names.py to build std_names module. -def custom_cmd(command_to_override, functions, help_doc=""): - """ - Allows command specialisation to include calls to the given functions. 
+ script_path = os.path.join("tools", "generate_std_names.py") + xml_path = os.path.join("etc", "cf-standard-name-table.xml") + module_path = os.path.join(directory, "iris", "std_names.py") + args = (sys.executable, script_path, xml_path, module_path) + self.spawn(args) - """ + def finalize_options(self): + # Execute the parent "cmd" class method. + cmd.finalize_options(self) - class ExtendedCommand(command_to_override): - description = help_doc or command_to_override.description + if ( + not hasattr(self, "editable_mode") + or self.editable_mode is None + ): + # Default to editable i.e., applicable to "std_names" and + # and "develop" commands. + self.editable_mode = True def run(self): - # Run the original command first to make sure all the target - # directories are in place. - command_to_override.run(self) + # Execute the parent "cmd" class method. + cmd.run(self) + + # Determine the target root directory + if self.editable_mode: + # Pick the source dir instead (currently in the sub-dir "lib"). + target = "lib" + msg = "in-place" + else: + # Not editable - must be building. + target = self.build_lib + msg = "as-build" - # build_lib is defined if we are building the package. Otherwise - # we want to to the work in-place. - dest = getattr(self, "build_lib", None) - if dest is None: - print(" [Running in-place]") - # Pick the source dir instead (currently in the sub-dir "lib") - dest = "lib" + print(f"\n[Running {msg}]") - for func in functions: - func(self, dest) + # Build the CF standard names. 
+ self._build_std_names(target) - return ExtendedCommand + return CustomCommand custom_commands = { - "test": SetupTestRunner, - "develop": custom_cmd(develop_cmd, [build_std_names]), - "build_py": custom_cmd(build_py, [build_std_names, copy_copyright]), - "std_names": custom_cmd( - BaseCommand, - [build_std_names], - help_doc="generate CF standard name module", + "develop": custom_command(develop), + "build_py": custom_command(build_py), + "std_names": custom_command( + BaseCommand, help="generate CF standard names" ), - "clean_source": CleanSource, } diff --git a/tools/update_lockfiles.py b/tools/update_lockfiles.py index 9d5705c7a7..dc898784ae 100755 --- a/tools/update_lockfiles.py +++ b/tools/update_lockfiles.py @@ -17,6 +17,15 @@ from pathlib import Path import subprocess import sys +from warnings import warn + + +message = ( + "Iris' large requirements may require Mamba to successfully solve. If you " + "don't want to install Mamba, consider using the workflow_dispatch on " + "Iris' GitHub action." +) +warn(message) try: @@ -29,9 +38,9 @@ "Iris Lockfile Generator", ) -parser.add_argument('files', nargs='+', +parser.add_argument('files', nargs='+', help="List of environment.yml files to lock") -parser.add_argument('--output-dir', '-o', default='.', +parser.add_argument('--output-dir', '-o', default='.', help="Directory to save output lock files") args = parser.parse_args() @@ -43,7 +52,7 @@ ftype = fname.split('.')[-1] if ftype.lower() in ('yaml', 'yml'): fname = '.'.join(fname.split('.')[:-1]) - + # conda-lock --filename-template expects a string with a "...{platform}..." # placeholder in it, so we have to build the .lock filname without # using .format @@ -53,7 +62,8 @@ 'lock', '--filename-template', ofile_template, '--file', infile, + '-k', 'explicit', '--platform', 'linux-64' ]) print(f"lockfile saved to {ofile_template}".format(platform='linux-64'), - file=sys.stderr) \ No newline at end of file + file=sys.stderr)